xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/fold-const.c (revision 946379e7b37692fc43f68eb0d1c10daa0a7f3b6c)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /*@@ This file should be rewritten to use an arbitrary precision
21   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23   @@ The routines that translate from the ap rep should
24   @@ warn if precision et al. is lost.
25   @@ This would also make life easier when this technology is used
26   @@ for cross-compilers.  */
27 
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29 
30    fold takes a tree as argument and returns a simplified tree.
31 
32    size_binop takes a tree code for an arithmetic operation
33    and two operands that are trees, and produces a tree for the
34    result, assuming the type comes from `sizetype'.
35 
36    size_int takes an integer value, and creates a tree constant
37    with type from `sizetype'.
38 
39    Note: Since the folders get called on non-gimple code as well as
40    gimple code, we need to handle GIMPLE tuples as well as their
41    corresponding tree equivalents.  */
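
/* Usage sketch (added; not part of the original source): a typical use
   of the size_binop/size_int entry points is building a constant-folded
   byte count from an element count:

     tree nelts = size_int (8);
     tree bytes = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type), nelts);

   ELT_TYPE is a placeholder for some complete type supplied by the
   caller; TYPE_SIZE_UNIT yields its size in bytes as a sizetype tree.  */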
42 
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "tree-flow.h"
62 
63 /* Nonzero if we are folding constants inside an initializer; zero
64    otherwise.  */
65 int folding_initializer = 0;
66 
67 /* The following constants represent a bit-based encoding of GCC's
68    comparison operators.  This encoding simplifies transformations
69    on relational comparison operators, such as AND and OR.  */
70 enum comparison_code {
71   COMPCODE_FALSE = 0,
72   COMPCODE_LT = 1,
73   COMPCODE_EQ = 2,
74   COMPCODE_LE = 3,
75   COMPCODE_GT = 4,
76   COMPCODE_LTGT = 5,
77   COMPCODE_GE = 6,
78   COMPCODE_ORD = 7,
79   COMPCODE_UNORD = 8,
80   COMPCODE_UNLT = 9,
81   COMPCODE_UNEQ = 10,
82   COMPCODE_UNLE = 11,
83   COMPCODE_UNGT = 12,
84   COMPCODE_NE = 13,
85   COMPCODE_UNGE = 14,
86   COMPCODE_TRUE = 15
87 };
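
/* Added note (sketch): the encoding assigns bit 0 to LT, bit 1 to EQ,
   bit 2 to GT and bit 3 to UNORD, so combining comparisons is plain
   bitwise arithmetic, e.g.

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                 (3 == 1|2)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   and "a < b || a == b" therefore folds to the single code COMPCODE_LE.  */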
88 
89 static bool negate_mathfn_p (enum built_in_function);
90 static bool negate_expr_p (tree);
91 static tree negate_expr (tree);
92 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
93 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
94 static tree const_binop (enum tree_code, tree, tree);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
101 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (location_t, tree, tree,
103 				HOST_WIDE_INT, HOST_WIDE_INT, int);
104 static tree optimize_bit_field_compare (location_t, enum tree_code,
105 					tree, tree, tree);
106 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
107 				    HOST_WIDE_INT *,
108 				    enum machine_mode *, int *, int *,
109 				    tree *, tree *);
110 static int all_ones_mask_p (const_tree, int);
111 static tree sign_bit_p (tree, const_tree);
112 static int simple_operand_p (const_tree);
113 static bool simple_operand_p_2 (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree optimize_minmax_comparison (location_t, enum tree_code,
121 					tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
124 static tree fold_binary_op_with_conditional_arg (location_t,
125 						 enum tree_code, tree,
126 						 tree, tree,
127 						 tree, tree, int);
128 static tree fold_mathfn_compare (location_t,
129 				 enum built_in_function, enum tree_code,
130 				 tree, tree, tree);
131 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (const_tree, const_tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
138 
139 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
140    Otherwise, return LOC.  */
141 
142 static location_t
143 expr_location_or (tree t, location_t loc)
144 {
145   location_t tloc = EXPR_LOCATION (t);
146   return tloc == UNKNOWN_LOCATION ? loc : tloc;
147 }
148 
149 /* Similar to protected_set_expr_location, but never modify X in place;
150    if the location can and needs to be set, unshare X first.  */
151 
152 static inline tree
153 protected_set_expr_location_unshare (tree x, location_t loc)
154 {
155   if (CAN_HAVE_LOCATION_P (x)
156       && EXPR_LOCATION (x) != loc
157       && !(TREE_CODE (x) == SAVE_EXPR
158 	   || TREE_CODE (x) == TARGET_EXPR
159 	   || TREE_CODE (x) == BIND_EXPR))
160     {
161       x = copy_node (x);
162       SET_EXPR_LOCATION (x, loc);
163     }
164   return x;
165 }
166 
167 /* If ARG2 divides ARG1 with zero remainder, carry out the division
168    specified by CODE and return the quotient.
169    Otherwise return NULL_TREE.  */
170 
171 tree
172 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
173 {
174   double_int quo, rem;
175   int uns;
176 
177   /* The signedness of the division is taken from operand two, which
178      does the correct thing for POINTER_PLUS_EXPR, where we want
179      a signed division.  */
180   uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
181 
182   quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
183 					  uns, code, &rem);
184 
185   if (rem.is_zero ())
186     return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
187 
188   return NULL_TREE;
189 }
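
/* Usage sketch (added): with INTEGER_CST operands,

     tree t12 = build_int_cst (integer_type_node, 12);
     tree t4 = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, t12, t4);

   yields the constant 3; dividing by 5 instead would yield NULL_TREE
   because the remainder is nonzero.  */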
190 
191 /* This is nonzero if we should defer warnings about undefined
192    overflow.  This facility exists because these warnings are a
193    special case.  The code to estimate loop iterations does not want
194    to issue any warnings, since it works with expressions which do not
195    occur in user code.  Various bits of cleanup code call fold(), but
196    only use the result if it has certain characteristics (e.g., is a
197    constant); that code only wants to issue a warning if the result is
198    used.  */
199 
200 static int fold_deferring_overflow_warnings;
201 
202 /* If a warning about undefined overflow is deferred, this is the
203    warning.  Note that this may cause us to turn two warnings into
204    one, but that is fine since it is sufficient to only give one
205    warning per expression.  */
206 
207 static const char* fold_deferred_overflow_warning;
208 
209 /* If a warning about undefined overflow is deferred, this is the
210    level at which the warning should be emitted.  */
211 
212 static enum warn_strict_overflow_code fold_deferred_overflow_code;
213 
214 /* Start deferring overflow warnings.  We could use a stack here to
215    permit nested calls, but at present it is not necessary.  */
216 
217 void
218 fold_defer_overflow_warnings (void)
219 {
220   ++fold_deferring_overflow_warnings;
221 }
222 
223 /* Stop deferring overflow warnings.  If there is a pending warning,
224    and ISSUE is true, then issue the warning if appropriate.  STMT is
225    the statement with which the warning should be associated (used for
226    location information); STMT may be NULL.  CODE is the level of the
227    warning--a warn_strict_overflow_code value.  This function will use
228    the smaller of CODE and the deferred code when deciding whether to
229    issue the warning.  CODE may be zero, meaning always use the
230    deferred code.  */
231 
232 void
233 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
234 {
235   const char *warnmsg;
236   location_t locus;
237 
238   gcc_assert (fold_deferring_overflow_warnings > 0);
239   --fold_deferring_overflow_warnings;
240   if (fold_deferring_overflow_warnings > 0)
241     {
242       if (fold_deferred_overflow_warning != NULL
243 	  && code != 0
244 	  && code < (int) fold_deferred_overflow_code)
245 	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
246       return;
247     }
248 
249   warnmsg = fold_deferred_overflow_warning;
250   fold_deferred_overflow_warning = NULL;
251 
252   if (!issue || warnmsg == NULL)
253     return;
254 
255   if (gimple_no_warning_p (stmt))
256     return;
257 
258   /* Use the smallest code level when deciding to issue the
259      warning.  */
260   if (code == 0 || code > (int) fold_deferred_overflow_code)
261     code = fold_deferred_overflow_code;
262 
263   if (!issue_strict_overflow_warning (code))
264     return;
265 
266   if (stmt == NULL)
267     locus = input_location;
268   else
269     locus = gimple_location (stmt);
270   warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
271 }
272 
273 /* Stop deferring overflow warnings, ignoring any deferred
274    warnings.  */
275 
276 void
277 fold_undefer_and_ignore_overflow_warnings (void)
278 {
279   fold_undefer_overflow_warnings (false, NULL, 0);
280 }
281 
282 /* Whether we are deferring overflow warnings.  */
283 
284 bool
285 fold_deferring_overflow_warnings_p (void)
286 {
287   return fold_deferring_overflow_warnings > 0;
288 }
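
/* Usage sketch (added): callers bracket folding whose result may be
   thrown away, so no overflow warning escapes unless the result is
   actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   EXPR, USED_P and STMT stand for the caller's data; passing 0 for
   CODE means the deferred warning level is used as-is.  */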
289 
290 /* This is called when we fold something based on the fact that signed
291    overflow is undefined.  */
292 
293 static void
294 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
295 {
296   if (fold_deferring_overflow_warnings > 0)
297     {
298       if (fold_deferred_overflow_warning == NULL
299 	  || wc < fold_deferred_overflow_code)
300 	{
301 	  fold_deferred_overflow_warning = gmsgid;
302 	  fold_deferred_overflow_code = wc;
303 	}
304     }
305   else if (issue_strict_overflow_warning (wc))
306     warning (OPT_Wstrict_overflow, gmsgid);
307 }
308 
309 /* Return true if the built-in mathematical function specified by CODE
310    is odd, i.e. -f(x) == f(-x).  */
311 
312 static bool
313 negate_mathfn_p (enum built_in_function code)
314 {
315   switch (code)
316     {
317     CASE_FLT_FN (BUILT_IN_ASIN):
318     CASE_FLT_FN (BUILT_IN_ASINH):
319     CASE_FLT_FN (BUILT_IN_ATAN):
320     CASE_FLT_FN (BUILT_IN_ATANH):
321     CASE_FLT_FN (BUILT_IN_CASIN):
322     CASE_FLT_FN (BUILT_IN_CASINH):
323     CASE_FLT_FN (BUILT_IN_CATAN):
324     CASE_FLT_FN (BUILT_IN_CATANH):
325     CASE_FLT_FN (BUILT_IN_CBRT):
326     CASE_FLT_FN (BUILT_IN_CPROJ):
327     CASE_FLT_FN (BUILT_IN_CSIN):
328     CASE_FLT_FN (BUILT_IN_CSINH):
329     CASE_FLT_FN (BUILT_IN_CTAN):
330     CASE_FLT_FN (BUILT_IN_CTANH):
331     CASE_FLT_FN (BUILT_IN_ERF):
332     CASE_FLT_FN (BUILT_IN_LLROUND):
333     CASE_FLT_FN (BUILT_IN_LROUND):
334     CASE_FLT_FN (BUILT_IN_ROUND):
335     CASE_FLT_FN (BUILT_IN_SIN):
336     CASE_FLT_FN (BUILT_IN_SINH):
337     CASE_FLT_FN (BUILT_IN_TAN):
338     CASE_FLT_FN (BUILT_IN_TANH):
339     CASE_FLT_FN (BUILT_IN_TRUNC):
340       return true;
341 
342     CASE_FLT_FN (BUILT_IN_LLRINT):
343     CASE_FLT_FN (BUILT_IN_LRINT):
344     CASE_FLT_FN (BUILT_IN_NEARBYINT):
345     CASE_FLT_FN (BUILT_IN_RINT):
346       return !flag_rounding_math;
347 
348     default:
349       break;
350     }
351   return false;
352 }
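
/* Added example: negate_mathfn_p (BUILT_IN_SIN) is true, so the folders
   may rewrite -sin (x) as sin (-x).  The rint/nearbyint group is odd
   only under -fno-rounding-math: with upward rounding, for instance,
   rint (-0.5) is -0.0 while -rint (0.5) is -1.0.  */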
353 
354 /* Check whether we may negate an integer constant T without causing
355    overflow.  */
356 
357 bool
358 may_negate_without_overflow_p (const_tree t)
359 {
360   unsigned HOST_WIDE_INT val;
361   unsigned int prec;
362   tree type;
363 
364   gcc_assert (TREE_CODE (t) == INTEGER_CST);
365 
366   type = TREE_TYPE (t);
367   if (TYPE_UNSIGNED (type))
368     return false;
369 
370   prec = TYPE_PRECISION (type);
371   if (prec > HOST_BITS_PER_WIDE_INT)
372     {
373       if (TREE_INT_CST_LOW (t) != 0)
374 	return true;
375       prec -= HOST_BITS_PER_WIDE_INT;
376       val = TREE_INT_CST_HIGH (t);
377     }
378   else
379     val = TREE_INT_CST_LOW (t);
380   if (prec < HOST_BITS_PER_WIDE_INT)
381     val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
382   return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
383 }
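
/* Added example: for a 32-bit signed type the only value rejected here
   is the minimum, since -(-2147483648) == 2147483648 is not
   representable; every other INTEGER_CST can be negated safely.  */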
384 
385 /* Determine whether an expression T can be cheaply negated using
386    the function negate_expr without introducing undefined overflow.  */
387 
388 static bool
389 negate_expr_p (tree t)
390 {
391   tree type;
392 
393   if (t == 0)
394     return false;
395 
396   type = TREE_TYPE (t);
397 
398   STRIP_SIGN_NOPS (t);
399   switch (TREE_CODE (t))
400     {
401     case INTEGER_CST:
402       if (TYPE_OVERFLOW_WRAPS (type))
403 	return true;
404 
405       /* Check that -CST will not overflow type.  */
406       return may_negate_without_overflow_p (t);
407     case BIT_NOT_EXPR:
408       return (INTEGRAL_TYPE_P (type)
409 	      && TYPE_OVERFLOW_WRAPS (type));
410 
411     case FIXED_CST:
412     case NEGATE_EXPR:
413       return true;
414 
415     case REAL_CST:
416       /* We want to canonicalize to positive real constants.  Pretend
417          that only negative ones can be easily negated.  */
418       return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
419 
420     case COMPLEX_CST:
421       return negate_expr_p (TREE_REALPART (t))
422 	     && negate_expr_p (TREE_IMAGPART (t));
423 
424     case COMPLEX_EXPR:
425       return negate_expr_p (TREE_OPERAND (t, 0))
426 	     && negate_expr_p (TREE_OPERAND (t, 1));
427 
428     case CONJ_EXPR:
429       return negate_expr_p (TREE_OPERAND (t, 0));
430 
431     case PLUS_EXPR:
432       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
433 	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
434 	return false;
435       /* -(A + B) -> (-B) - A.  */
436       if (negate_expr_p (TREE_OPERAND (t, 1))
437 	  && reorder_operands_p (TREE_OPERAND (t, 0),
438 				 TREE_OPERAND (t, 1)))
439 	return true;
440       /* -(A + B) -> (-A) - B.  */
441       return negate_expr_p (TREE_OPERAND (t, 0));
442 
443     case MINUS_EXPR:
444       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
445       return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
446 	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
447 	     && reorder_operands_p (TREE_OPERAND (t, 0),
448 				    TREE_OPERAND (t, 1));
449 
450     case MULT_EXPR:
451       if (TYPE_UNSIGNED (TREE_TYPE (t)))
452         break;
453 
454       /* Fall through.  */
455 
456     case RDIV_EXPR:
457       if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
458 	return negate_expr_p (TREE_OPERAND (t, 1))
459 	       || negate_expr_p (TREE_OPERAND (t, 0));
460       break;
461 
462     case TRUNC_DIV_EXPR:
463     case ROUND_DIV_EXPR:
464     case EXACT_DIV_EXPR:
465       /* In general we can't negate A / B, because if A is INT_MIN and
466 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
467 	 and actually traps on some architectures.  But if overflow is
468 	 undefined, we can negate, because - (INT_MIN / 1) is an
469 	 overflow.  */
470       if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
471 	{
472 	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
473 	    break;
474 	  /* If overflow is undefined then we have to be careful because
475 	     we ask whether it's OK to associate the negate with the
476 	     division, which is not OK, for example, for
477 	     -((a - b) / c), where (-(a - b)) / c may invoke undefined
478 	     overflow because of negating INT_MIN.  So do not use
479 	     negate_expr_p here but open-code the two important cases.  */
480 	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
481 	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
482 		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
483 	    return true;
484 	}
485       else if (negate_expr_p (TREE_OPERAND (t, 0)))
486 	return true;
487       return negate_expr_p (TREE_OPERAND (t, 1));
488 
489     case NOP_EXPR:
490       /* Negate -((double)float) as (double)(-float).  */
491       if (TREE_CODE (type) == REAL_TYPE)
492 	{
493 	  tree tem = strip_float_extensions (t);
494 	  if (tem != t)
495 	    return negate_expr_p (tem);
496 	}
497       break;
498 
499     case CALL_EXPR:
500       /* Negate -f(x) as f(-x).  */
501       if (negate_mathfn_p (builtin_mathfn_code (t)))
502 	return negate_expr_p (CALL_EXPR_ARG (t, 0));
503       break;
504 
505     case RSHIFT_EXPR:
506       /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
507       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
508 	{
509 	  tree op1 = TREE_OPERAND (t, 1);
510 	  if (TREE_INT_CST_HIGH (op1) == 0
511 	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 		 == TREE_INT_CST_LOW (op1))
513 	    return true;
514 	}
515       break;
516 
517     default:
518       break;
519     }
520   return false;
521 }
522 
523 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
524    simplification is possible.
525    If negate_expr_p would return true for T, NULL_TREE will never be
526    returned.  */
527 
528 static tree
529 fold_negate_expr (location_t loc, tree t)
530 {
531   tree type = TREE_TYPE (t);
532   tree tem;
533 
534   switch (TREE_CODE (t))
535     {
536     /* Convert - (~A) to A + 1.  */
537     case BIT_NOT_EXPR:
538       if (INTEGRAL_TYPE_P (type))
539         return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540                             build_int_cst (type, 1));
541       break;
542 
543     case INTEGER_CST:
544       tem = fold_negate_const (t, type);
545       if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 	  || !TYPE_OVERFLOW_TRAPS (type))
547 	return tem;
548       break;
549 
550     case REAL_CST:
551       tem = fold_negate_const (t, type);
552       /* Two's complement FP formats, such as c4x, may overflow.  */
553       if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
554 	return tem;
555       break;
556 
557     case FIXED_CST:
558       tem = fold_negate_const (t, type);
559       return tem;
560 
561     case COMPLEX_CST:
562       {
563 	tree rpart = negate_expr (TREE_REALPART (t));
564 	tree ipart = negate_expr (TREE_IMAGPART (t));
565 
566 	if ((TREE_CODE (rpart) == REAL_CST
567 	     && TREE_CODE (ipart) == REAL_CST)
568 	    || (TREE_CODE (rpart) == INTEGER_CST
569 		&& TREE_CODE (ipart) == INTEGER_CST))
570 	  return build_complex (type, rpart, ipart);
571       }
572       break;
573 
574     case COMPLEX_EXPR:
575       if (negate_expr_p (t))
576 	return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
579       break;
580 
581     case CONJ_EXPR:
582       if (negate_expr_p (t))
583 	return fold_build1_loc (loc, CONJ_EXPR, type,
584 			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
585       break;
586 
587     case NEGATE_EXPR:
588       return TREE_OPERAND (t, 0);
589 
590     case PLUS_EXPR:
591       if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
593 	{
594 	  /* -(A + B) -> (-B) - A.  */
595 	  if (negate_expr_p (TREE_OPERAND (t, 1))
596 	      && reorder_operands_p (TREE_OPERAND (t, 0),
597 				     TREE_OPERAND (t, 1)))
598 	    {
599 	      tem = negate_expr (TREE_OPERAND (t, 1));
600 	      return fold_build2_loc (loc, MINUS_EXPR, type,
601 				  tem, TREE_OPERAND (t, 0));
602 	    }
603 
604 	  /* -(A + B) -> (-A) - B.  */
605 	  if (negate_expr_p (TREE_OPERAND (t, 0)))
606 	    {
607 	      tem = negate_expr (TREE_OPERAND (t, 0));
608 	      return fold_build2_loc (loc, MINUS_EXPR, type,
609 				  tem, TREE_OPERAND (t, 1));
610 	    }
611 	}
612       break;
613 
614     case MINUS_EXPR:
615       /* - (A - B) -> B - A  */
616       if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
618 	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
619 	return fold_build2_loc (loc, MINUS_EXPR, type,
620 			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
621       break;
622 
623     case MULT_EXPR:
624       if (TYPE_UNSIGNED (type))
625         break;
626 
627       /* Fall through.  */
628 
629     case RDIV_EXPR:
630       if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
631 	{
632 	  tem = TREE_OPERAND (t, 1);
633 	  if (negate_expr_p (tem))
634 	    return fold_build2_loc (loc, TREE_CODE (t), type,
635 				TREE_OPERAND (t, 0), negate_expr (tem));
636 	  tem = TREE_OPERAND (t, 0);
637 	  if (negate_expr_p (tem))
638 	    return fold_build2_loc (loc, TREE_CODE (t), type,
639 				negate_expr (tem), TREE_OPERAND (t, 1));
640 	}
641       break;
642 
643     case TRUNC_DIV_EXPR:
644     case ROUND_DIV_EXPR:
645     case EXACT_DIV_EXPR:
646       /* In general we can't negate A / B, because if A is INT_MIN and
647 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
648 	 and actually traps on some architectures.  But if overflow is
649 	 undefined, we can negate, because - (INT_MIN / 1) is an
650 	 overflow.  */
651       if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
652         {
653 	  const char * const warnmsg = G_("assuming signed overflow does not "
654 					  "occur when negating a division");
655           tem = TREE_OPERAND (t, 1);
656           if (negate_expr_p (tem))
657 	    {
658 	      if (INTEGRAL_TYPE_P (type)
659 		  && (TREE_CODE (tem) != INTEGER_CST
660 		      || integer_onep (tem)))
661 		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
662 	      return fold_build2_loc (loc, TREE_CODE (t), type,
663 				  TREE_OPERAND (t, 0), negate_expr (tem));
664 	    }
665 	  /* If overflow is undefined then we have to be careful because
666 	     we ask whether it's OK to associate the negate with the
667 	     division, which is not OK, for example, for
668 	     -((a - b) / c), where (-(a - b)) / c may invoke undefined
669 	     overflow because of negating INT_MIN.  So do not use
670 	     negate_expr_p here but open-code the two important cases.  */
671           tem = TREE_OPERAND (t, 0);
672 	  if ((INTEGRAL_TYPE_P (type)
673 	       && (TREE_CODE (tem) == NEGATE_EXPR
674 		   || (TREE_CODE (tem) == INTEGER_CST
675 		       && may_negate_without_overflow_p (tem))))
676 	      || !INTEGRAL_TYPE_P (type))
677 	    return fold_build2_loc (loc, TREE_CODE (t), type,
678 				    negate_expr (tem), TREE_OPERAND (t, 1));
679         }
680       break;
681 
682     case NOP_EXPR:
683       /* Convert -((double)float) into (double)(-float).  */
684       if (TREE_CODE (type) == REAL_TYPE)
685 	{
686 	  tem = strip_float_extensions (t);
687 	  if (tem != t && negate_expr_p (tem))
688 	    return fold_convert_loc (loc, type, negate_expr (tem));
689 	}
690       break;
691 
692     case CALL_EXPR:
693       /* Negate -f(x) as f(-x).  */
694       if (negate_mathfn_p (builtin_mathfn_code (t))
695 	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
696 	{
697 	  tree fndecl, arg;
698 
699 	  fndecl = get_callee_fndecl (t);
700 	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
701 	  return build_call_expr_loc (loc, fndecl, 1, arg);
702 	}
703       break;
704 
705     case RSHIFT_EXPR:
706       /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
707       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
708 	{
709 	  tree op1 = TREE_OPERAND (t, 1);
710 	  if (TREE_INT_CST_HIGH (op1) == 0
711 	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
712 		 == TREE_INT_CST_LOW (op1))
713 	    {
714 	      tree ntype = TYPE_UNSIGNED (type)
715 			   ? signed_type_for (type)
716 			   : unsigned_type_for (type);
717 	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
718 	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
719 	      return fold_convert_loc (loc, type, temp);
720 	    }
721 	}
722       break;
723 
724     default:
725       break;
726     }
727 
728   return NULL_TREE;
729 }
730 
731 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
732    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
733    return NULL_TREE.  */
734 
735 static tree
736 negate_expr (tree t)
737 {
738   tree type, tem;
739   location_t loc;
740 
741   if (t == NULL_TREE)
742     return NULL_TREE;
743 
744   loc = EXPR_LOCATION (t);
745   type = TREE_TYPE (t);
746   STRIP_SIGN_NOPS (t);
747 
748   tem = fold_negate_expr (loc, t);
749   if (!tem)
750     tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
751   return fold_convert_loc (loc, type, tem);
752 }
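
/* Usage sketch (added): negate_expr always produces some tree, falling
   back to an explicit NEGATE_EXPR node when nothing folds:

     tree five = build_int_cst (integer_type_node, 5);
     tree neg = negate_expr (five);    (folds to the INTEGER_CST -5)

   Callers that must avoid the NEGATE_EXPR fallback test negate_expr_p
   first.  */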
753 
754 /* Split a tree IN into constant, literal and variable parts that could be
755    combined with CODE to make IN.  "constant" means an expression with
756    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
757    commutative arithmetic operation.  Store the constant part into *CONP,
758    the literal in *LITP and return the variable part.  If a part isn't
759    present, set it to null.  If the tree does not decompose in this way,
760    return the entire tree as the variable part and the other parts as null.
761 
762    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
763    case, we negate an operand that was subtracted, except when it is a
764    literal, for which we use *MINUS_LITP instead.
765 
766    If NEGATE_P is true, we are negating all of IN, again except a literal
767    for which we use *MINUS_LITP instead.
768 
769    If IN is itself a literal or constant, return it as appropriate.
770 
771    Note that we do not guarantee that any of the three values will be the
772    same type as IN, but they will have the same signedness and mode.  */
773 
774 static tree
775 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
776 	    tree *minus_litp, int negate_p)
777 {
778   tree var = 0;
779 
780   *conp = 0;
781   *litp = 0;
782   *minus_litp = 0;
783 
784   /* Strip any conversions that don't change the machine mode or signedness.  */
785   STRIP_SIGN_NOPS (in);
786 
787   if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
788       || TREE_CODE (in) == FIXED_CST)
789     *litp = in;
790   else if (TREE_CODE (in) == code
791 	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
792 	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
793 	       /* We can associate addition and subtraction together (even
794 		  though the C standard doesn't say so) for integers because
795 		  the value is not affected.  For reals, the value might be
796 		  affected, so we can't.  */
797 	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
798 		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
799     {
800       tree op0 = TREE_OPERAND (in, 0);
801       tree op1 = TREE_OPERAND (in, 1);
802       int neg1_p = TREE_CODE (in) == MINUS_EXPR;
803       int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
804 
805       /* First see if either of the operands is a literal, then a constant.  */
806       if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
807 	  || TREE_CODE (op0) == FIXED_CST)
808 	*litp = op0, op0 = 0;
809       else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
810 	       || TREE_CODE (op1) == FIXED_CST)
811 	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
812 
813       if (op0 != 0 && TREE_CONSTANT (op0))
814 	*conp = op0, op0 = 0;
815       else if (op1 != 0 && TREE_CONSTANT (op1))
816 	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
817 
818       /* If we haven't dealt with either operand, this is not a case we can
819 	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
820       if (op0 != 0 && op1 != 0)
821 	var = in;
822       else if (op0 != 0)
823 	var = op0;
824       else
825 	var = op1, neg_var_p = neg1_p;
826 
827       /* Now do any needed negations.  */
828       if (neg_litp_p)
829 	*minus_litp = *litp, *litp = 0;
830       if (neg_conp_p)
831 	*conp = negate_expr (*conp);
832       if (neg_var_p)
833 	var = negate_expr (var);
834     }
835   else if (TREE_CODE (in) == BIT_NOT_EXPR
836 	   && code == PLUS_EXPR)
837     {
838       /* -X - 1 is folded to ~X; undo that here.  */
839       *minus_litp = build_one_cst (TREE_TYPE (in));
840       var = negate_expr (TREE_OPERAND (in, 0));
841     }
842   else if (TREE_CONSTANT (in))
843     *conp = in;
844   else
845     var = in;
846 
847   if (negate_p)
848     {
849       if (*litp)
850 	*minus_litp = *litp, *litp = 0;
851       else if (*minus_litp)
852 	*litp = *minus_litp, *minus_litp = 0;
853       *conp = negate_expr (*conp);
854       var = negate_expr (var);
855     }
856 
857   return var;
858 }
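
/* Added example: splitting IN = x + 3 with CODE == PLUS_EXPR gives
   *LITP = 3, *CONP = 0 and returns the variable part x; for IN = x - 3
   the literal is subtracted, so *MINUS_LITP = 3 instead.
   associate_trees below recombines the parts.  */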
859 
860 /* Re-associate trees split by the above function.  T1 and T2 are
861    either expressions to associate or null.  Return the new
862    expression, if any.  LOC is the location of the new expression.  If
863    we build an operation, do it in TYPE and with CODE.  */
864 
865 static tree
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
867 {
868   if (t1 == 0)
869     return t2;
870   else if (t2 == 0)
871     return t1;
872 
873   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874      try to fold this since we will have infinite recursion.  But do
875      deal with any NEGATE_EXPRs.  */
876   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
878     {
879       if (code == PLUS_EXPR)
880 	{
881 	  if (TREE_CODE (t1) == NEGATE_EXPR)
882 	    return build2_loc (loc, MINUS_EXPR, type,
883 			       fold_convert_loc (loc, type, t2),
884 			       fold_convert_loc (loc, type,
885 						 TREE_OPERAND (t1, 0)));
886 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
887 	    return build2_loc (loc, MINUS_EXPR, type,
888 			       fold_convert_loc (loc, type, t1),
889 			       fold_convert_loc (loc, type,
890 						 TREE_OPERAND (t2, 0)));
891 	  else if (integer_zerop (t2))
892 	    return fold_convert_loc (loc, type, t1);
893 	}
894       else if (code == MINUS_EXPR)
895 	{
896 	  if (integer_zerop (t2))
897 	    return fold_convert_loc (loc, type, t1);
898 	}
899 
900       return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 			 fold_convert_loc (loc, type, t2));
902     }
903 
904   return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 			  fold_convert_loc (loc, type, t2));
906 }
907 
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909    for use in int_const_binop, size_binop and size_diffop.  */
910 
911 static bool
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
913 {
914   if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
915     return false;
916   if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
917     return false;
918 
919   switch (code)
920     {
921     case LSHIFT_EXPR:
922     case RSHIFT_EXPR:
923     case LROTATE_EXPR:
924     case RROTATE_EXPR:
925       return true;
926 
927     default:
928       break;
929     }
930 
931   return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
934 }
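
/* Added note: shifts and rotates are exempted above because the type of
   the shift count is independent of the type of the value being
   shifted, so demanding matching precision or signedness there would be
   wrong.  */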
935 
936 
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938    to produce a new constant.  Return NULL_TREE if we don't know how
939    to evaluate CODE at compile-time.  */
940 
941 static tree
942 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
943 		   int overflowable)
944 {
945   double_int op1, op2, res, tmp;
946   tree t;
947   tree type = TREE_TYPE (arg1);
948   bool uns = TYPE_UNSIGNED (type);
949   bool overflow = false;
950 
951   op1 = tree_to_double_int (arg1);
952   op2 = tree_to_double_int (arg2);
953 
954   switch (code)
955     {
956     case BIT_IOR_EXPR:
957       res = op1 | op2;
958       break;
959 
960     case BIT_XOR_EXPR:
961       res = op1 ^ op2;
962       break;
963 
964     case BIT_AND_EXPR:
965       res = op1 & op2;
966       break;
967 
968     case RSHIFT_EXPR:
969       res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
970       break;
971 
972     case LSHIFT_EXPR:
973       /* It's unclear from the C standard whether shifts can overflow.
974 	 The following code ignores overflow; perhaps a C standard
975 	 interpretation ruling is needed.  */
976       res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
977       break;
978 
979     case RROTATE_EXPR:
980       res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
981       break;
982 
983     case LROTATE_EXPR:
984       res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
985       break;
986 
987     case PLUS_EXPR:
988       res = op1.add_with_sign (op2, false, &overflow);
989       break;
990 
991     case MINUS_EXPR:
992       res = op1.sub_with_overflow (op2, &overflow);
993       break;
994 
995     case MULT_EXPR:
996       res = op1.mul_with_sign (op2, false, &overflow);
997       break;
998 
999     case MULT_HIGHPART_EXPR:
1000       if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1001 	return NULL_TREE;
1002       else
1003 	{
1004 	  bool dummy_overflow;
1005 	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
1006 	     is performed in twice the precision of the arguments.  */
1007 	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1008 	  res = tmp.rshift (TYPE_PRECISION (type),
1009 			    2 * TYPE_PRECISION (type), !uns);
1010 	}
1011       break;
1012 
1013     case TRUNC_DIV_EXPR:
1014     case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1015     case EXACT_DIV_EXPR:
1016       /* This is a shortcut for a common special case.  */
1017       if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1018 	  && !TREE_OVERFLOW (arg1)
1019 	  && !TREE_OVERFLOW (arg2)
1020 	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1021 	{
1022 	  if (code == CEIL_DIV_EXPR)
1023 	    op1.low += op2.low - 1;
1024 
1025 	  res.low = op1.low / op2.low, res.high = 0;
1026 	  break;
1027 	}
1028 
1029       /* ... fall through ...  */
1030 
1031     case ROUND_DIV_EXPR:
1032       if (op2.is_zero ())
1033 	return NULL_TREE;
1034       if (op2.is_one ())
1035 	{
1036 	  res = op1;
1037 	  break;
1038 	}
1039       if (op1 == op2 && !op1.is_zero ())
1040 	{
1041 	  res = double_int_one;
1042 	  break;
1043 	}
1044       res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1045       break;
1046 
1047     case TRUNC_MOD_EXPR:
1048     case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1049       /* This is a shortcut for a common special case.  */
1050       if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1051 	  && !TREE_OVERFLOW (arg1)
1052 	  && !TREE_OVERFLOW (arg2)
1053 	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1054 	{
1055 	  if (code == CEIL_MOD_EXPR)
1056 	    op1.low += op2.low - 1;
1057 	  res.low = op1.low % op2.low, res.high = 0;
1058 	  break;
1059 	}
1060 
1061       /* ... fall through ...  */
1062 
1063     case ROUND_MOD_EXPR:
1064       if (op2.is_zero ())
1065 	return NULL_TREE;
1066       tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1067       break;
1068 
1069     case MIN_EXPR:
1070       res = op1.min (op2, uns);
1071       break;
1072 
1073     case MAX_EXPR:
1074       res = op1.max (op2, uns);
1075       break;
1076 
1077     default:
1078       return NULL_TREE;
1079     }
1080 
1081   t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1082 			     (!uns && overflow)
1083 			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1084 
1085   return t;
1086 }
1087 
1088 tree
1089 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1090 {
1091   return int_const_binop_1 (code, arg1, arg2, 1);
1092 }
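
/* Usage sketch (added): both operands must be INTEGER_CSTs of the same
   type:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree p = int_const_binop (MULT_EXPR, a, b);    (folds to 42)
     tree d = int_const_binop (TRUNC_DIV_EXPR, a, integer_zero_node);

   The division by zero yields NULL_TREE rather than a constant.  */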
1093 
1094 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1095    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1096    are the same kind of constant and the same machine mode.  Return NULL_TREE
1097    if combining the constants is not allowed in the current operating mode.  */
1098 
1099 static tree
1100 const_binop (enum tree_code code, tree arg1, tree arg2)
1101 {
1102   /* Sanity check for the recursive cases.  */
1103   if (!arg1 || !arg2)
1104     return NULL_TREE;
1105 
1106   STRIP_NOPS (arg1);
1107   STRIP_NOPS (arg2);
1108 
1109   if (TREE_CODE (arg1) == INTEGER_CST)
1110     return int_const_binop (code, arg1, arg2);
1111 
1112   if (TREE_CODE (arg1) == REAL_CST)
1113     {
1114       enum machine_mode mode;
1115       REAL_VALUE_TYPE d1;
1116       REAL_VALUE_TYPE d2;
1117       REAL_VALUE_TYPE value;
1118       REAL_VALUE_TYPE result;
1119       bool inexact;
1120       tree t, type;
1121 
1122       /* The following codes are handled by real_arithmetic.  */
1123       switch (code)
1124 	{
1125 	case PLUS_EXPR:
1126 	case MINUS_EXPR:
1127 	case MULT_EXPR:
1128 	case RDIV_EXPR:
1129 	case MIN_EXPR:
1130 	case MAX_EXPR:
1131 	  break;
1132 
1133 	default:
1134 	  return NULL_TREE;
1135 	}
1136 
1137       d1 = TREE_REAL_CST (arg1);
1138       d2 = TREE_REAL_CST (arg2);
1139 
1140       type = TREE_TYPE (arg1);
1141       mode = TYPE_MODE (type);
1142 
1143       /* Don't perform the operation if we honor signaling NaNs and
1144 	 either operand is a NaN.  */
1145       if (HONOR_SNANS (mode)
1146 	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1147 	return NULL_TREE;
1148 
1149       /* Don't perform the operation if it would raise a division
1150 	 by zero exception.  */
1151       if (code == RDIV_EXPR
1152 	  && REAL_VALUES_EQUAL (d2, dconst0)
1153 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1154 	return NULL_TREE;
1155 
1156       /* If either operand is a NaN, just return it.  Otherwise, set up
1157 	 for floating-point trap; we return an overflow.  */
1158       if (REAL_VALUE_ISNAN (d1))
1159 	return arg1;
1160       else if (REAL_VALUE_ISNAN (d2))
1161 	return arg2;
1162 
1163       inexact = real_arithmetic (&value, code, &d1, &d2);
1164       real_convert (&result, mode, &value);
1165 
1166       /* Don't constant fold this floating point operation if
1167 	 the result has overflowed and flag_trapping_math is set.  */
1168       if (flag_trapping_math
1169 	  && MODE_HAS_INFINITIES (mode)
1170 	  && REAL_VALUE_ISINF (result)
1171 	  && !REAL_VALUE_ISINF (d1)
1172 	  && !REAL_VALUE_ISINF (d2))
1173 	return NULL_TREE;
1174 
1175       /* Don't constant fold this floating point operation if the
1176 	 result may depend upon the run-time rounding mode and
1177 	 flag_rounding_math is set, or if GCC's software emulation
1178 	 is unable to accurately represent the result.  */
1179       if ((flag_rounding_math
1180 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1181 	  && (inexact || !real_identical (&result, &value)))
1182 	return NULL_TREE;
1183 
1184       t = build_real (type, result);
1185 
1186       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1187       return t;
1188     }
1189 
1190   if (TREE_CODE (arg1) == FIXED_CST)
1191     {
1192       FIXED_VALUE_TYPE f1;
1193       FIXED_VALUE_TYPE f2;
1194       FIXED_VALUE_TYPE result;
1195       tree t, type;
1196       int sat_p;
1197       bool overflow_p;
1198 
1199       /* The following codes are handled by fixed_arithmetic.  */
1200       switch (code)
1201         {
1202 	case PLUS_EXPR:
1203 	case MINUS_EXPR:
1204 	case MULT_EXPR:
1205 	case TRUNC_DIV_EXPR:
1206 	  f2 = TREE_FIXED_CST (arg2);
1207 	  break;
1208 
1209 	case LSHIFT_EXPR:
1210 	case RSHIFT_EXPR:
1211 	  f2.data.high = TREE_INT_CST_HIGH (arg2);
1212 	  f2.data.low = TREE_INT_CST_LOW (arg2);
1213 	  f2.mode = SImode;
1214 	  break;
1215 
1216         default:
1217 	  return NULL_TREE;
1218         }
1219 
1220       f1 = TREE_FIXED_CST (arg1);
1221       type = TREE_TYPE (arg1);
1222       sat_p = TYPE_SATURATING (type);
1223       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1224       t = build_fixed (type, result);
1225       /* Propagate overflow flags.  */
1226       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1227 	TREE_OVERFLOW (t) = 1;
1228       return t;
1229     }
1230 
1231   if (TREE_CODE (arg1) == COMPLEX_CST)
1232     {
1233       tree type = TREE_TYPE (arg1);
1234       tree r1 = TREE_REALPART (arg1);
1235       tree i1 = TREE_IMAGPART (arg1);
1236       tree r2 = TREE_REALPART (arg2);
1237       tree i2 = TREE_IMAGPART (arg2);
1238       tree real, imag;
1239 
1240       switch (code)
1241 	{
1242 	case PLUS_EXPR:
1243 	case MINUS_EXPR:
1244 	  real = const_binop (code, r1, r2);
1245 	  imag = const_binop (code, i1, i2);
1246 	  break;
1247 
1248 	case MULT_EXPR:
1249 	  if (COMPLEX_FLOAT_TYPE_P (type))
1250 	    return do_mpc_arg2 (arg1, arg2, type,
1251 				/* do_nonfinite= */ folding_initializer,
1252 				mpc_mul);
1253 
1254 	  real = const_binop (MINUS_EXPR,
1255 			      const_binop (MULT_EXPR, r1, r2),
1256 			      const_binop (MULT_EXPR, i1, i2));
1257 	  imag = const_binop (PLUS_EXPR,
1258 			      const_binop (MULT_EXPR, r1, i2),
1259 			      const_binop (MULT_EXPR, i1, r2));
1260 	  break;
1261 
1262 	case RDIV_EXPR:
1263 	  if (COMPLEX_FLOAT_TYPE_P (type))
1264 	    return do_mpc_arg2 (arg1, arg2, type,
1265                                 /* do_nonfinite= */ folding_initializer,
1266 				mpc_div);
1267 	  /* Fallthru ... */
1268 	case TRUNC_DIV_EXPR:
1269 	case CEIL_DIV_EXPR:
1270 	case FLOOR_DIV_EXPR:
1271 	case ROUND_DIV_EXPR:
1272 	  if (flag_complex_method == 0)
1273 	  {
1274 	    /* Keep this algorithm in sync with
1275 	       tree-complex.c:expand_complex_div_straight().
1276 
1277 	       Expand complex division to scalars, straightforward algorithm.
1278 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1279 	       t = br*br + bi*bi
1280 	    */
1281 	    tree magsquared
1282 	      = const_binop (PLUS_EXPR,
1283 			     const_binop (MULT_EXPR, r2, r2),
1284 			     const_binop (MULT_EXPR, i2, i2));
1285 	    tree t1
1286 	      = const_binop (PLUS_EXPR,
1287 			     const_binop (MULT_EXPR, r1, r2),
1288 			     const_binop (MULT_EXPR, i1, i2));
1289 	    tree t2
1290 	      = const_binop (MINUS_EXPR,
1291 			     const_binop (MULT_EXPR, i1, r2),
1292 			     const_binop (MULT_EXPR, r1, i2));
1293 
1294 	    real = const_binop (code, t1, magsquared);
1295 	    imag = const_binop (code, t2, magsquared);
1296 	  }
1297 	  else
1298 	  {
1299 	    /* Keep this algorithm in sync with
1300                tree-complex.c:expand_complex_div_wide().
1301 
1302 	       Expand complex division to scalars, modified algorithm to minimize
1303 	       overflow with wide input ranges.  */
1304 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1305 					fold_abs_const (r2, TREE_TYPE (type)),
1306 					fold_abs_const (i2, TREE_TYPE (type)));
1307 
1308 	    if (integer_nonzerop (compare))
1309 	      {
1310 		/* In the TRUE branch, we compute
1311 		   ratio = br/bi;
1312 		   div = (br * ratio) + bi;
1313 		   tr = (ar * ratio) + ai;
1314 		   ti = (ai * ratio) - ar;
1315 		   tr = tr / div;
1316 		   ti = ti / div;  */
1317 		tree ratio = const_binop (code, r2, i2);
1318 		tree div = const_binop (PLUS_EXPR, i2,
1319 					const_binop (MULT_EXPR, r2, ratio));
1320 		real = const_binop (MULT_EXPR, r1, ratio);
1321 		real = const_binop (PLUS_EXPR, real, i1);
1322 		real = const_binop (code, real, div);
1323 
1324 		imag = const_binop (MULT_EXPR, i1, ratio);
1325 		imag = const_binop (MINUS_EXPR, imag, r1);
1326 		imag = const_binop (code, imag, div);
1327 	      }
1328 	    else
1329 	      {
1330 		/* In the FALSE branch, we compute
1331 		   ratio = bi/br;
1332 		   div = (bi * ratio) + br;
1333 		   tr = (ai * ratio) + ar;
1334 		   ti = ai - (ar * ratio);
1335 		   tr = tr / div;
1336 		   ti = ti / div;  */
1337 		tree ratio = const_binop (code, i2, r2);
1338 		tree div = const_binop (PLUS_EXPR, r2,
1339                                         const_binop (MULT_EXPR, i2, ratio));
1340 
1341 		real = const_binop (MULT_EXPR, i1, ratio);
1342 		real = const_binop (PLUS_EXPR, real, r1);
1343 		real = const_binop (code, real, div);
1344 
1345 		imag = const_binop (MULT_EXPR, r1, ratio);
1346 		imag = const_binop (MINUS_EXPR, i1, imag);
1347 		imag = const_binop (code, imag, div);
1348 	      }
1349 	  }
1350 	  break;
1351 
1352 	default:
1353 	  return NULL_TREE;
1354 	}
1355 
1356       if (real && imag)
1357 	return build_complex (type, real, imag);
1358     }
1359 
1360   if (TREE_CODE (arg1) == VECTOR_CST
1361       && TREE_CODE (arg2) == VECTOR_CST)
1362     {
1363       tree type = TREE_TYPE (arg1);
1364       int count = TYPE_VECTOR_SUBPARTS (type), i;
1365       tree *elts = XALLOCAVEC (tree, count);
1366 
1367       for (i = 0; i < count; i++)
1368 	{
1369           tree elem1 = VECTOR_CST_ELT (arg1, i);
1370 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1371 
1372           elts[i] = const_binop (code, elem1, elem2);
1373 
1374           /* It is possible that const_binop cannot handle the given
1375           /* It is possible that const_binop cannot handle the given
1376              code, in which case it returns NULL_TREE.  */
1377           if (elts[i] == NULL_TREE)
1378 	}
1379 
1380       return build_vector (type, elts);
1381     }
1382 
1383   /* Shifts allow a scalar offset for a vector.  */
1384   if (TREE_CODE (arg1) == VECTOR_CST
1385       && TREE_CODE (arg2) == INTEGER_CST)
1386     {
1387       tree type = TREE_TYPE (arg1);
1388       int count = TYPE_VECTOR_SUBPARTS (type), i;
1389       tree *elts = XALLOCAVEC (tree, count);
1390 
1391       if (code == VEC_LSHIFT_EXPR
1392 	  || code == VEC_RSHIFT_EXPR)
1393 	{
1394 	  if (!host_integerp (arg2, 1))
1395 	    return NULL_TREE;
1396 
1397 	  unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
1398 	  unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
1399 	  unsigned HOST_WIDE_INT innerc
1400 	    = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
1401 	  if (shiftc >= outerc || (shiftc % innerc) != 0)
1402 	    return NULL_TREE;
1403 	  int offset = shiftc / innerc;
1404 	  /* The direction of VEC_[LR]SHIFT_EXPR is endian-dependent.
1405 	     For reductions, the compiler always emits VEC_RSHIFT_EXPR,
1406 	     which for !BYTES_BIG_ENDIAN picks the first vector element,
1407 	     but for BYTES_BIG_ENDIAN the last element of the vector.  */
1408 	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1409 	    offset = -offset;
1410 	  tree zero = build_zero_cst (TREE_TYPE (type));
1411 	  for (i = 0; i < count; i++)
1412 	    {
1413 	      if (i + offset < 0 || i + offset >= count)
1414 		elts[i] = zero;
1415 	      else
1416 		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1417 	    }
1418 	}
1419       else
1420 	return NULL_TREE;
1421 
1422       return build_vector (type, elts);
1423     }
1424   return NULL_TREE;
1425 }
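
/* Usage sketch (added): const_binop dispatches on the kind of constant;
   for REAL_CSTs, for example,

     tree r = const_binop (PLUS_EXPR,
			   build_real (double_type_node, dconst1),
			   build_real (double_type_node, dconst2));

   folds to the REAL_CST 3.0, while unsupported codes and trapping cases
   (signaling NaN operands, overflow to infinity with -ftrapping-math)
   yield NULL_TREE.  */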
1426 
1427 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1428    indicates which particular sizetype to create.  */
1429 
1430 tree
1431 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1432 {
1433   return build_int_cst (sizetype_tab[(int) kind], number);
1434 }
1435 
1436 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1437    is a tree code.  The type of the result is taken from the operands.
1438    Both must be equivalent integer types, ala int_binop_types_match_p.
1439    If the operands are constant, so is the result.  */
1440 
1441 tree
1442 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1443 {
1444   tree type = TREE_TYPE (arg0);
1445 
1446   if (arg0 == error_mark_node || arg1 == error_mark_node)
1447     return error_mark_node;
1448 
1449   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1450                                        TREE_TYPE (arg1)));
1451 
1452   /* Handle the special case of two integer constants faster.  */
1453   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1454     {
1455       /* And some specific cases even faster than that.  */
1456       if (code == PLUS_EXPR)
1457 	{
1458 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1459 	    return arg1;
1460 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1461 	    return arg0;
1462 	}
1463       else if (code == MINUS_EXPR)
1464 	{
1465 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1466 	    return arg0;
1467 	}
1468       else if (code == MULT_EXPR)
1469 	{
1470 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1471 	    return arg1;
1472 	}
1473 
1474       /* Handle general case of two integer constants.  For sizetype
1475          constant calculations we always want to know about overflow,
1476 	 even in the unsigned case.  */
1477       return int_const_binop_1 (code, arg0, arg1, -1);
1478     }
1479 
1480   return fold_build2_loc (loc, code, type, arg0, arg1);
1481 }
1482 
1483 /* Given two values, either both of sizetype or both of bitsizetype,
1484    compute the difference between the two values.  Return the value
1485    in signed type corresponding to the type of the operands.  */
1486 
1487 tree
1488 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1489 {
1490   tree type = TREE_TYPE (arg0);
1491   tree ctype;
1492 
1493   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1494 				       TREE_TYPE (arg1)));
1495 
1496   /* If the type is already signed, just do the simple thing.  */
1497   if (!TYPE_UNSIGNED (type))
1498     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1499 
1500   if (type == sizetype)
1501     ctype = ssizetype;
1502   else if (type == bitsizetype)
1503     ctype = sbitsizetype;
1504   else
1505     ctype = signed_type_for (type);
1506 
1507   /* If either operand is not a constant, do the conversions to the signed
1508      type and subtract.  The hardware will do the right thing with any
1509      overflow in the subtraction.  */
1510   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1511     return size_binop_loc (loc, MINUS_EXPR,
1512 			   fold_convert_loc (loc, ctype, arg0),
1513 			   fold_convert_loc (loc, ctype, arg1));
1514 
1515   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1516      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1517      overflow) and negate (which can't either).  Special-case a result
1518      of zero while we're here.  */
1519   if (tree_int_cst_equal (arg0, arg1))
1520     return build_int_cst (ctype, 0);
1521   else if (tree_int_cst_lt (arg1, arg0))
1522     return fold_convert_loc (loc, ctype,
1523 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1524   else
1525     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1526 			   fold_convert_loc (loc, ctype,
1527 					     size_binop_loc (loc,
1528 							     MINUS_EXPR,
1529 							     arg1, arg0)));
1530 }
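
/* Added example: size_diffop_loc on the sizetype constants 2 and 5
   returns the ssizetype constant -3; the subtraction is performed in
   the order that cannot overflow and the result converted and negated
   as described above.  */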
1531 
1532 /* A subroutine of fold_convert_const handling conversions of an
1533    INTEGER_CST to another integer type.  */
1534 
1535 static tree
1536 fold_convert_const_int_from_int (tree type, const_tree arg1)
1537 {
1538   tree t;
1539 
1540   /* Given an integer constant, make new constant with new type,
1541      appropriately sign-extended or truncated.  */
1542   t = force_fit_type_double (type, tree_to_double_int (arg1),
1543 			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
1544 			     (TREE_INT_CST_HIGH (arg1) < 0
1545 		 	      && (TYPE_UNSIGNED (type)
1546 				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1547 			     | TREE_OVERFLOW (arg1));
1548 
1549   return t;
1550 }
1551 
1552 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1553    to an integer type.  */
1554 
1555 static tree
1556 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1557 {
1558   int overflow = 0;
1559   tree t;
1560 
1561   /* The following code implements the floating point to integer
1562      conversion rules required by the Java Language Specification,
1563      that IEEE NaNs are mapped to zero and values that overflow
1564      the target precision saturate, i.e. values greater than
1565      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1566      are mapped to INT_MIN.  These semantics are allowed by the
1567      C and C++ standards that simply state that the behavior of
1568      FP-to-integer conversion is unspecified upon overflow.  */
1569 
1570   double_int val;
1571   REAL_VALUE_TYPE r;
1572   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1573 
1574   switch (code)
1575     {
1576     case FIX_TRUNC_EXPR:
1577       real_trunc (&r, VOIDmode, &x);
1578       break;
1579 
1580     default:
1581       gcc_unreachable ();
1582     }
1583 
1584   /* If R is NaN, return zero and show we have an overflow.  */
1585   if (REAL_VALUE_ISNAN (r))
1586     {
1587       overflow = 1;
1588       val = double_int_zero;
1589     }
1590 
1591   /* See if R is less than the lower bound or greater than the
1592      upper bound.  */
1593 
1594   if (! overflow)
1595     {
1596       tree lt = TYPE_MIN_VALUE (type);
1597       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1598       if (REAL_VALUES_LESS (r, l))
1599 	{
1600 	  overflow = 1;
1601 	  val = tree_to_double_int (lt);
1602 	}
1603     }
1604 
1605   if (! overflow)
1606     {
1607       tree ut = TYPE_MAX_VALUE (type);
1608       if (ut)
1609 	{
1610 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1611 	  if (REAL_VALUES_LESS (u, r))
1612 	    {
1613 	      overflow = 1;
1614 	      val = tree_to_double_int (ut);
1615 	    }
1616 	}
1617     }
1618 
1619   if (! overflow)
1620     real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1621 
1622   t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1623   return t;
1624 }
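
/* Added example: converting the REAL_CST 3.7 to int via FIX_TRUNC_EXPR
   yields 3; a NaN yields 0 and an out-of-range value saturates to the
   type's minimum or maximum, with TREE_OVERFLOW set on the result in
   both of those cases, per the Java-style rules described above.  */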
1625 
1626 /* A subroutine of fold_convert_const handling conversions of a
1627    FIXED_CST to an integer type.  */
1628 
1629 static tree
1630 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1631 {
1632   tree t;
1633   double_int temp, temp_trunc;
1634   unsigned int mode;
1635 
1636   /* Right shift FIXED_CST to temp by fbit.  */
1637   temp = TREE_FIXED_CST (arg1).data;
1638   mode = TREE_FIXED_CST (arg1).mode;
1639   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1640     {
1641       temp = temp.rshift (GET_MODE_FBIT (mode),
1642 			  HOST_BITS_PER_DOUBLE_INT,
1643 			  SIGNED_FIXED_POINT_MODE_P (mode));
1644 
1645       /* Left shift temp to temp_trunc by fbit.  */
1646       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1647 				HOST_BITS_PER_DOUBLE_INT,
1648 				SIGNED_FIXED_POINT_MODE_P (mode));
1649     }
1650   else
1651     {
1652       temp = double_int_zero;
1653       temp_trunc = double_int_zero;
1654     }
1655 
1656   /* If FIXED_CST is negative, we need to round the value toward 0:
1657      if the fractional bits are not all zero, add 1 to TEMP.  */
1658   if (SIGNED_FIXED_POINT_MODE_P (mode)
1659       && temp_trunc.is_negative ()
1660       && TREE_FIXED_CST (arg1).data != temp_trunc)
1661     temp += double_int_one;
1662 
1663   /* Given a fixed-point constant, make a new constant with the new type,
1664      appropriately sign-extended or truncated.  */
1665   t = force_fit_type_double (type, temp, -1,
1666 			     (temp.is_negative ()
1667 		 	      && (TYPE_UNSIGNED (type)
1668 				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1669 			     | TREE_OVERFLOW (arg1));
1670 
1671   return t;
1672 }
1673 
1674 /* A subroutine of fold_convert_const handling conversions of a
1675    REAL_CST to another floating-point type.  */
1676 
1677 static tree
1678 fold_convert_const_real_from_real (tree type, const_tree arg1)
1679 {
1680   REAL_VALUE_TYPE value;
1681   tree t;
1682 
1683   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1684   t = build_real (type, value);
1685 
1686   /* If converting an infinity or NAN to a representation that doesn't
1687      have one, set the overflow bit so that we can produce some kind of
1688      error message at the appropriate point if necessary.  It's not the
1689      most user-friendly message, but it's better than nothing.  */
1690   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1691       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1692     TREE_OVERFLOW (t) = 1;
1693   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1694 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
1695     TREE_OVERFLOW (t) = 1;
1696   /* Regular overflow, conversion produced an infinity in a mode that
1697      can't represent them.  */
1698   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1699 	   && REAL_VALUE_ISINF (value)
1700 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1701     TREE_OVERFLOW (t) = 1;
1702   else
1703     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1704   return t;
1705 }
1706 
1707 /* A subroutine of fold_convert_const handling conversions of a
1708    FIXED_CST to a floating-point type.  */
1709 
1710 static tree
1711 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1712 {
1713   REAL_VALUE_TYPE value;
1714   tree t;
1715 
1716   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1717   t = build_real (type, value);
1718 
1719   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1720   return t;
1721 }
1722 
1723 /* A subroutine of fold_convert_const handling conversions of a
1724    FIXED_CST to another fixed-point type.  */
1725 
1726 static tree
1727 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1728 {
1729   FIXED_VALUE_TYPE value;
1730   tree t;
1731   bool overflow_p;
1732 
1733   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1734 			      TYPE_SATURATING (type));
1735   t = build_fixed (type, value);
1736 
1737   /* Propagate overflow flags.  */
1738   if (overflow_p | TREE_OVERFLOW (arg1))
1739     TREE_OVERFLOW (t) = 1;
1740   return t;
1741 }
1742 
1743 /* A subroutine of fold_convert_const handling conversions of an
1744    INTEGER_CST to a fixed-point type.  */
1745 
1746 static tree
1747 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1748 {
1749   FIXED_VALUE_TYPE value;
1750   tree t;
1751   bool overflow_p;
1752 
1753   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1754 				       TREE_INT_CST (arg1),
1755 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
1756 				       TYPE_SATURATING (type));
1757   t = build_fixed (type, value);
1758 
1759   /* Propagate overflow flags.  */
1760   if (overflow_p | TREE_OVERFLOW (arg1))
1761     TREE_OVERFLOW (t) = 1;
1762   return t;
1763 }
1764 
1765 /* A subroutine of fold_convert_const handling conversions of a
1766    REAL_CST to a fixed-point type.  */
1767 
1768 static tree
1769 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1770 {
1771   FIXED_VALUE_TYPE value;
1772   tree t;
1773   bool overflow_p;
1774 
1775   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1776 					&TREE_REAL_CST (arg1),
1777 					TYPE_SATURATING (type));
1778   t = build_fixed (type, value);
1779 
1780   /* Propagate overflow flags.  */
1781   if (overflow_p | TREE_OVERFLOW (arg1))
1782     TREE_OVERFLOW (t) = 1;
1783   return t;
1784 }
1785 
1786 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1787    type TYPE.  If no simplification can be done return NULL_TREE.  */
1788 
1789 static tree
1790 fold_convert_const (enum tree_code code, tree type, tree arg1)
1791 {
1792   if (TREE_TYPE (arg1) == type)
1793     return arg1;
1794 
1795   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1796       || TREE_CODE (type) == OFFSET_TYPE)
1797     {
1798       if (TREE_CODE (arg1) == INTEGER_CST)
1799 	return fold_convert_const_int_from_int (type, arg1);
1800       else if (TREE_CODE (arg1) == REAL_CST)
1801 	return fold_convert_const_int_from_real (code, type, arg1);
1802       else if (TREE_CODE (arg1) == FIXED_CST)
1803 	return fold_convert_const_int_from_fixed (type, arg1);
1804     }
1805   else if (TREE_CODE (type) == REAL_TYPE)
1806     {
1807       if (TREE_CODE (arg1) == INTEGER_CST)
1808 	return build_real_from_int_cst (type, arg1);
1809       else if (TREE_CODE (arg1) == REAL_CST)
1810 	return fold_convert_const_real_from_real (type, arg1);
1811       else if (TREE_CODE (arg1) == FIXED_CST)
1812 	return fold_convert_const_real_from_fixed (type, arg1);
1813     }
1814   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1815     {
1816       if (TREE_CODE (arg1) == FIXED_CST)
1817 	return fold_convert_const_fixed_from_fixed (type, arg1);
1818       else if (TREE_CODE (arg1) == INTEGER_CST)
1819 	return fold_convert_const_fixed_from_int (type, arg1);
1820       else if (TREE_CODE (arg1) == REAL_CST)
1821 	return fold_convert_const_fixed_from_real (type, arg1);
1822     }
1823   return NULL_TREE;
1824 }
1825 
1826 /* Construct a vector of zero elements of vector type TYPE.  */
1827 
1828 static tree
1829 build_zero_vector (tree type)
1830 {
1831   tree t;
1832 
1833   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1834   return build_vector_from_val (type, t);
1835 }
1836 
1837 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1838 
1839 bool
1840 fold_convertible_p (const_tree type, const_tree arg)
1841 {
1842   tree orig = TREE_TYPE (arg);
1843 
1844   if (type == orig)
1845     return true;
1846 
1847   if (TREE_CODE (arg) == ERROR_MARK
1848       || TREE_CODE (type) == ERROR_MARK
1849       || TREE_CODE (orig) == ERROR_MARK)
1850     return false;
1851 
1852   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1853     return true;
1854 
1855   switch (TREE_CODE (type))
1856     {
1857     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1858     case POINTER_TYPE: case REFERENCE_TYPE:
1859     case OFFSET_TYPE:
1860       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1861 	  || TREE_CODE (orig) == OFFSET_TYPE)
1862         return true;
1863       return (TREE_CODE (orig) == VECTOR_TYPE
1864 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1865 
1866     case REAL_TYPE:
1867     case FIXED_POINT_TYPE:
1868     case COMPLEX_TYPE:
1869     case VECTOR_TYPE:
1870     case VOID_TYPE:
1871       return TREE_CODE (type) == TREE_CODE (orig);
1872 
1873     default:
1874       return false;
1875     }
1876 }
1877 
1878 /* Convert expression ARG to type TYPE.  Used by the middle-end for
1879    simple conversions in preference to calling the front-end's convert.  */
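
/* For example, converting the INTEGER_CST 3 to a double type is folded
   here directly to the REAL_CST 3.0 via fold_convert_const, whereas a
   non-constant integral ARG becomes a FLOAT_EXPR (an illustrative
   sketch of the cases below, not an exhaustive list).  */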
1880 
1881 tree
1882 fold_convert_loc (location_t loc, tree type, tree arg)
1883 {
1884   tree orig = TREE_TYPE (arg);
1885   tree tem;
1886 
1887   if (type == orig)
1888     return arg;
1889 
1890   if (TREE_CODE (arg) == ERROR_MARK
1891       || TREE_CODE (type) == ERROR_MARK
1892       || TREE_CODE (orig) == ERROR_MARK)
1893     return error_mark_node;
1894 
1895   switch (TREE_CODE (type))
1896     {
1897     case POINTER_TYPE:
1898     case REFERENCE_TYPE:
1899       /* Handle conversions between pointers to different address spaces.  */
1900       if (POINTER_TYPE_P (orig)
1901 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1902 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1903 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1904       /* fall through */
1905 
1906     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1907     case OFFSET_TYPE:
1908       if (TREE_CODE (arg) == INTEGER_CST)
1909 	{
1910 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1911 	  if (tem != NULL_TREE)
1912 	    return tem;
1913 	}
1914       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1915 	  || TREE_CODE (orig) == OFFSET_TYPE)
1916 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
1917       if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 	return fold_convert_loc (loc, type,
1919 			     fold_build1_loc (loc, REALPART_EXPR,
1920 					  TREE_TYPE (orig), arg));
1921       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923       return fold_build1_loc (loc, NOP_EXPR, type, arg);
1924 
1925     case REAL_TYPE:
1926       if (TREE_CODE (arg) == INTEGER_CST)
1927 	{
1928 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 	  if (tem != NULL_TREE)
1930 	    return tem;
1931 	}
1932       else if (TREE_CODE (arg) == REAL_CST)
1933 	{
1934 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1935 	  if (tem != NULL_TREE)
1936 	    return tem;
1937 	}
1938       else if (TREE_CODE (arg) == FIXED_CST)
1939 	{
1940 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1941 	  if (tem != NULL_TREE)
1942 	    return tem;
1943 	}
1944 
1945       switch (TREE_CODE (orig))
1946 	{
1947 	case INTEGER_TYPE:
1948 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1949 	case POINTER_TYPE: case REFERENCE_TYPE:
1950 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1951 
1952 	case REAL_TYPE:
1953 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
1954 
1955 	case FIXED_POINT_TYPE:
1956 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1957 
1958 	case COMPLEX_TYPE:
1959 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1960 	  return fold_convert_loc (loc, type, tem);
1961 
1962 	default:
1963 	  gcc_unreachable ();
1964 	}
1965 
1966     case FIXED_POINT_TYPE:
1967       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1968 	  || TREE_CODE (arg) == REAL_CST)
1969 	{
1970 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1971 	  if (tem != NULL_TREE)
1972 	    goto fold_convert_exit;
1973 	}
1974 
1975       switch (TREE_CODE (orig))
1976 	{
1977 	case FIXED_POINT_TYPE:
1978 	case INTEGER_TYPE:
1979 	case ENUMERAL_TYPE:
1980 	case BOOLEAN_TYPE:
1981 	case REAL_TYPE:
1982 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1983 
1984 	case COMPLEX_TYPE:
1985 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1986 	  return fold_convert_loc (loc, type, tem);
1987 
1988 	default:
1989 	  gcc_unreachable ();
1990 	}
1991 
1992     case COMPLEX_TYPE:
1993       switch (TREE_CODE (orig))
1994 	{
1995 	case INTEGER_TYPE:
1996 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997 	case POINTER_TYPE: case REFERENCE_TYPE:
1998 	case REAL_TYPE:
1999 	case FIXED_POINT_TYPE:
2000 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2001 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2002 			      fold_convert_loc (loc, TREE_TYPE (type),
2003 					    integer_zero_node));
2004 	case COMPLEX_TYPE:
2005 	  {
2006 	    tree rpart, ipart;
2007 
2008 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2009 	      {
2010 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2011 				      TREE_OPERAND (arg, 0));
2012 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2013 				      TREE_OPERAND (arg, 1));
2014 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2015 	      }
2016 
2017 	    arg = save_expr (arg);
2018 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2019 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2020 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2021 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2022 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2023 	  }
2024 
2025 	default:
2026 	  gcc_unreachable ();
2027 	}
2028 
2029     case VECTOR_TYPE:
2030       if (integer_zerop (arg))
2031 	return build_zero_vector (type);
2032       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2033       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2034 		  || TREE_CODE (orig) == VECTOR_TYPE);
2035       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2036 
2037     case VOID_TYPE:
2038       tem = fold_ignored_result (arg);
2039       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2040 
2041     default:
2042       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2043 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2044       gcc_unreachable ();
2045     }
2046  fold_convert_exit:
2047   protected_set_expr_location_unshare (tem, loc);
2048   return tem;
2049 }
2050 
2051 /* Return false if expr can be assumed not to be an lvalue, true
2052    otherwise.  */
2053 
2054 static bool
2055 maybe_lvalue_p (const_tree x)
2056 {
2057   /* We only need to wrap lvalue tree codes.  */
2058   switch (TREE_CODE (x))
2059   {
2060   case VAR_DECL:
2061   case PARM_DECL:
2062   case RESULT_DECL:
2063   case LABEL_DECL:
2064   case FUNCTION_DECL:
2065   case SSA_NAME:
2066 
2067   case COMPONENT_REF:
2068   case MEM_REF:
2069   case INDIRECT_REF:
2070   case ARRAY_REF:
2071   case ARRAY_RANGE_REF:
2072   case BIT_FIELD_REF:
2073   case OBJ_TYPE_REF:
2074 
2075   case REALPART_EXPR:
2076   case IMAGPART_EXPR:
2077   case PREINCREMENT_EXPR:
2078   case PREDECREMENT_EXPR:
2079   case SAVE_EXPR:
2080   case TRY_CATCH_EXPR:
2081   case WITH_CLEANUP_EXPR:
2082   case COMPOUND_EXPR:
2083   case MODIFY_EXPR:
2084   case TARGET_EXPR:
2085   case COND_EXPR:
2086   case BIND_EXPR:
2087     break;
2088 
2089   default:
2090     /* Assume the worst for front-end tree codes.  */
2091     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2092       break;
2093     return false;
2094   }
2095 
2096   return true;
2097 }
2098 
2099 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2100 
2101 tree
2102 non_lvalue_loc (location_t loc, tree x)
2103 {
2104   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2105      us.  */
2106   if (in_gimple_form)
2107     return x;
2108 
2109   if (! maybe_lvalue_p (x))
2110     return x;
2111   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2112 }
2113 
2114 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2115    Zero means allow extended lvalues.  */
2116 
2117 int pedantic_lvalues;
2118 
2119 /* When pedantic, return an expr equal to X but certainly not valid as a
2120    pedantic lvalue.  Otherwise, return X.  */
2121 
2122 static tree
2123 pedantic_non_lvalue_loc (location_t loc, tree x)
2124 {
2125   if (pedantic_lvalues)
2126     return non_lvalue_loc (loc, x);
2127 
2128   return protected_set_expr_location_unshare (x, loc);
2129 }
2130 
2131 /* Given a tree comparison code, return the code that is the logical inverse.
2132    It is generally not safe to do this for floating-point comparisons, except
2133    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2134    ERROR_MARK in this case.  */
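
/* For example, when NaNs are honored the inverse of LT_EXPR is
   UNGE_EXPR rather than GE_EXPR: !(x < y) must also be true when
   either operand is a NaN.  */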
2135 
2136 enum tree_code
2137 invert_tree_comparison (enum tree_code code, bool honor_nans)
2138 {
2139   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2140       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2141     return ERROR_MARK;
2142 
2143   switch (code)
2144     {
2145     case EQ_EXPR:
2146       return NE_EXPR;
2147     case NE_EXPR:
2148       return EQ_EXPR;
2149     case GT_EXPR:
2150       return honor_nans ? UNLE_EXPR : LE_EXPR;
2151     case GE_EXPR:
2152       return honor_nans ? UNLT_EXPR : LT_EXPR;
2153     case LT_EXPR:
2154       return honor_nans ? UNGE_EXPR : GE_EXPR;
2155     case LE_EXPR:
2156       return honor_nans ? UNGT_EXPR : GT_EXPR;
2157     case LTGT_EXPR:
2158       return UNEQ_EXPR;
2159     case UNEQ_EXPR:
2160       return LTGT_EXPR;
2161     case UNGT_EXPR:
2162       return LE_EXPR;
2163     case UNGE_EXPR:
2164       return LT_EXPR;
2165     case UNLT_EXPR:
2166       return GE_EXPR;
2167     case UNLE_EXPR:
2168       return GT_EXPR;
2169     case ORDERED_EXPR:
2170       return UNORDERED_EXPR;
2171     case UNORDERED_EXPR:
2172       return ORDERED_EXPR;
2173     default:
2174       gcc_unreachable ();
2175     }
2176 }
2177 
2178 /* Similar, but return the comparison that results if the operands are
2179    swapped.  This is safe for floating-point.  */
2180 
2181 enum tree_code
2182 swap_tree_comparison (enum tree_code code)
2183 {
2184   switch (code)
2185     {
2186     case EQ_EXPR:
2187     case NE_EXPR:
2188     case ORDERED_EXPR:
2189     case UNORDERED_EXPR:
2190     case LTGT_EXPR:
2191     case UNEQ_EXPR:
2192       return code;
2193     case GT_EXPR:
2194       return LT_EXPR;
2195     case GE_EXPR:
2196       return LE_EXPR;
2197     case LT_EXPR:
2198       return GT_EXPR;
2199     case LE_EXPR:
2200       return GE_EXPR;
2201     case UNGT_EXPR:
2202       return UNLT_EXPR;
2203     case UNGE_EXPR:
2204       return UNLE_EXPR;
2205     case UNLT_EXPR:
2206       return UNGT_EXPR;
2207     case UNLE_EXPR:
2208       return UNGE_EXPR;
2209     default:
2210       gcc_unreachable ();
2211     }
2212 }
2213 
2214 
2215 /* Convert a comparison tree code from an enum tree_code representation
2216    into a compcode bit-based encoding.  This function is the inverse of
2217    compcode_to_comparison.  */
2218 
2219 static enum comparison_code
2220 comparison_to_compcode (enum tree_code code)
2221 {
2222   switch (code)
2223     {
2224     case LT_EXPR:
2225       return COMPCODE_LT;
2226     case EQ_EXPR:
2227       return COMPCODE_EQ;
2228     case LE_EXPR:
2229       return COMPCODE_LE;
2230     case GT_EXPR:
2231       return COMPCODE_GT;
2232     case NE_EXPR:
2233       return COMPCODE_NE;
2234     case GE_EXPR:
2235       return COMPCODE_GE;
2236     case ORDERED_EXPR:
2237       return COMPCODE_ORD;
2238     case UNORDERED_EXPR:
2239       return COMPCODE_UNORD;
2240     case UNLT_EXPR:
2241       return COMPCODE_UNLT;
2242     case UNEQ_EXPR:
2243       return COMPCODE_UNEQ;
2244     case UNLE_EXPR:
2245       return COMPCODE_UNLE;
2246     case UNGT_EXPR:
2247       return COMPCODE_UNGT;
2248     case LTGT_EXPR:
2249       return COMPCODE_LTGT;
2250     case UNGE_EXPR:
2251       return COMPCODE_UNGE;
2252     default:
2253       gcc_unreachable ();
2254     }
2255 }
2256 
2257 /* Convert a compcode bit-based encoding of a comparison operator back
2258    to GCC's enum tree_code representation.  This function is the
2259    inverse of comparison_to_compcode.  */
2260 
2261 static enum tree_code
2262 compcode_to_comparison (enum comparison_code code)
2263 {
2264   switch (code)
2265     {
2266     case COMPCODE_LT:
2267       return LT_EXPR;
2268     case COMPCODE_EQ:
2269       return EQ_EXPR;
2270     case COMPCODE_LE:
2271       return LE_EXPR;
2272     case COMPCODE_GT:
2273       return GT_EXPR;
2274     case COMPCODE_NE:
2275       return NE_EXPR;
2276     case COMPCODE_GE:
2277       return GE_EXPR;
2278     case COMPCODE_ORD:
2279       return ORDERED_EXPR;
2280     case COMPCODE_UNORD:
2281       return UNORDERED_EXPR;
2282     case COMPCODE_UNLT:
2283       return UNLT_EXPR;
2284     case COMPCODE_UNEQ:
2285       return UNEQ_EXPR;
2286     case COMPCODE_UNLE:
2287       return UNLE_EXPR;
2288     case COMPCODE_UNGT:
2289       return UNGT_EXPR;
2290     case COMPCODE_LTGT:
2291       return LTGT_EXPR;
2292     case COMPCODE_UNGE:
2293       return UNGE_EXPR;
2294     default:
2295       gcc_unreachable ();
2296     }
2297 }
2298 
2299 /* Return a tree for the comparison which is the combination of
2300    doing the AND or OR (depending on CODE) of the two operations LCODE
2301    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2302    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2303    if this makes the transformation invalid.  */
2304 
2305 tree
2306 combine_comparisons (location_t loc,
2307 		     enum tree_code code, enum tree_code lcode,
2308 		     enum tree_code rcode, tree truth_type,
2309 		     tree ll_arg, tree lr_arg)
2310 {
2311   bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2312   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2313   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2314   int compcode;
2315 
2316   switch (code)
2317     {
2318     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2319       compcode = lcompcode & rcompcode;
2320       break;
2321 
2322     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2323       compcode = lcompcode | rcompcode;
2324       break;
2325 
2326     default:
2327       return NULL_TREE;
2328     }
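
  /* For example, combining (x < y) || (x == y) gives
     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, i.e. x <= y, which is
     built below when the trap and NaN checks allow it.  */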
2329 
2330   if (!honor_nans)
2331     {
2332       /* Eliminate unordered comparisons, as well as LTGT and ORD
2333 	 which are not used unless the mode has NaNs.  */
2334       compcode &= ~COMPCODE_UNORD;
2335       if (compcode == COMPCODE_LTGT)
2336 	compcode = COMPCODE_NE;
2337       else if (compcode == COMPCODE_ORD)
2338 	compcode = COMPCODE_TRUE;
2339     }
2340    else if (flag_trapping_math)
2341      {
2342 	/* Check that the original operation and the optimized ones will trap
2343 	   under the same condition.  */
2344 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2345 		     && (lcompcode != COMPCODE_EQ)
2346 		     && (lcompcode != COMPCODE_ORD);
2347 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2348 		     && (rcompcode != COMPCODE_EQ)
2349 		     && (rcompcode != COMPCODE_ORD);
2350 	bool trap = (compcode & COMPCODE_UNORD) == 0
2351 		    && (compcode != COMPCODE_EQ)
2352 		    && (compcode != COMPCODE_ORD);
2353 
2354         /* In a short-circuited boolean expression the LHS might be
2355 	   such that the RHS, if evaluated, will never trap.  For
2356 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2357 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2358 	   example, the expression above will never trap, hence
2359 	   optimizing it to x < y would be invalid).  */
2360         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2361             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2362           rtrap = false;
2363 
2364         /* If the comparison was short-circuited, and only the RHS
2365 	   trapped, we may now generate a spurious trap.  */
2366 	if (rtrap && !ltrap
2367 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2368 	  return NULL_TREE;
2369 
2370 	/* If we changed the conditions that cause a trap, we lose.  */
2371 	if ((ltrap || rtrap) != trap)
2372 	  return NULL_TREE;
2373       }
2374 
2375   if (compcode == COMPCODE_TRUE)
2376     return constant_boolean_node (true, truth_type);
2377   else if (compcode == COMPCODE_FALSE)
2378     return constant_boolean_node (false, truth_type);
2379   else
2380     {
2381       enum tree_code tcode;
2382 
2383       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2384       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2385     }
2386 }
2387 
2388 /* Return nonzero if two operands (typically of the same tree node)
2389    are necessarily equal.  If either argument has side-effects this
2390    function returns zero.  FLAGS modifies behavior as follows:
2391 
2392    If OEP_ONLY_CONST is set, only return nonzero for constants.
2393    This function tests whether the operands are indistinguishable;
2394    it does not test whether they are equal using C's == operation.
2395    The distinction is important for IEEE floating point, because
2396    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2397    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2398 
2399    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2400    even though it may hold multiple values during a function.
2401    This is because a GCC tree node guarantees that nothing else is
2402    executed between the evaluation of its "operands" (which may often
2403    be evaluated in arbitrary order).  Hence if the operands themselves
2404    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2405    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2406    unset means assuming isochronic (or instantaneous) tree equivalence.
2407    Unless comparing arbitrary expression trees, such as from different
2408    statements, this flag can usually be left unset.
2409 
2410    If OEP_PURE_SAME is set, then pure functions with identical arguments
2411    are considered the same.  It is used when the caller has other ways
2412    to ensure that global memory is unchanged in between.  */
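
/* As an illustrative example: for side-effect-free X and Y of the same
   type, operand_equal_p (x + y, y + x, 0) returns nonzero, since
   PLUS_EXPR is commutative (see the tcc_binary handling below).  */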
2413 
2414 int
2415 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2416 {
2417   /* If either is ERROR_MARK, they aren't equal.  */
2418   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2419       || TREE_TYPE (arg0) == error_mark_node
2420       || TREE_TYPE (arg1) == error_mark_node)
2421     return 0;
2422 
2423   /* Similar, if either does not have a type (like a released SSA name),
2424      they aren't equal.  */
2425   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2426     return 0;
2427 
2428   /* Check equality of integer constants before bailing out due to
2429      precision differences.  */
2430   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2431     return tree_int_cst_equal (arg0, arg1);
2432 
2433   /* If both types don't have the same signedness, then we can't consider
2434      them equal.  We must check this before the STRIP_NOPS calls
2435      because they may change the signedness of the arguments.  As pointers
2436      strictly don't have a signedness, require either two pointers or
2437      two non-pointers as well.  */
2438   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2439       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2440     return 0;
2441 
2442   /* We cannot consider pointers to different address spaces equal.  */
2443   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2444       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2445 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2446     return 0;
2447 
2448   /* If both types don't have the same precision, then it is not safe
2449      to strip NOPs.  */
2450   if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2451     return 0;
2452 
2453   STRIP_NOPS (arg0);
2454   STRIP_NOPS (arg1);
2455 
2456   /* In case both args are comparisons but with different comparison
2457      code, try to swap the comparison operands of one arg to produce
2458      a match and compare that variant.  */
2459   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2460       && COMPARISON_CLASS_P (arg0)
2461       && COMPARISON_CLASS_P (arg1))
2462     {
2463       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2464 
2465       if (TREE_CODE (arg0) == swap_code)
2466 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2467 			        TREE_OPERAND (arg1, 1), flags)
2468 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 				   TREE_OPERAND (arg1, 0), flags);
2470     }
2471 
2472   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2473       /* This is needed for conversions and for COMPONENT_REF.
2474 	 Might as well play it safe and always test this.  */
2475       || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2476       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2477       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2478     return 0;
2479 
2480   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2481      We don't care about side effects in that case because the SAVE_EXPR
2482      takes care of that for us. In all other cases, two expressions are
2483      equal if they have no side effects.  If we have two identical
2484      expressions with side effects that should be treated the same due
2485      to the only side effects being identical SAVE_EXPR's, that will
2486      be detected in the recursive calls below.
2487      If we are taking an invariant address of two identical objects
2488      they are necessarily equal as well.  */
2489   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2490       && (TREE_CODE (arg0) == SAVE_EXPR
2491 	  || (flags & OEP_CONSTANT_ADDRESS_OF)
2492 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2493     return 1;
2494 
2495   /* Next handle constant cases, those for which we can return 1 even
2496      if ONLY_CONST is set.  */
2497   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2498     switch (TREE_CODE (arg0))
2499       {
2500       case INTEGER_CST:
2501 	return tree_int_cst_equal (arg0, arg1);
2502 
2503       case FIXED_CST:
2504 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2505 				       TREE_FIXED_CST (arg1));
2506 
2507       case REAL_CST:
2508 	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2509 				   TREE_REAL_CST (arg1)))
2510 	  return 1;
2511 
2512 
2513 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2514 	  {
2515 	    /* If we do not distinguish between signed and unsigned zero,
2516 	       consider them equal.  */
2517 	    if (real_zerop (arg0) && real_zerop (arg1))
2518 	      return 1;
2519 	  }
2520 	return 0;
2521 
2522       case VECTOR_CST:
2523 	{
2524 	  unsigned i;
2525 
2526 	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2527 	    return 0;
2528 
2529 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2530 	    {
2531 	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2532 				    VECTOR_CST_ELT (arg1, i), flags))
2533 		return 0;
2534 	    }
2535 	  return 1;
2536 	}
2537 
2538       case COMPLEX_CST:
2539 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2540 				 flags)
2541 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2542 				    flags));
2543 
2544       case STRING_CST:
2545 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2546 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2547 			      TREE_STRING_POINTER (arg1),
2548 			      TREE_STRING_LENGTH (arg0)));
2549 
2550       case ADDR_EXPR:
2551 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2552 				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2553 				? OEP_CONSTANT_ADDRESS_OF : 0);
2554       default:
2555 	break;
2556       }
2557 
2558   if (flags & OEP_ONLY_CONST)
2559     return 0;
2560 
2561 /* Define macros to test an operand from arg0 and arg1 for equality and a
2562    variant that allows null and views null as being different from any
2563    non-null value.  In the latter case, if either is null, then both
2564    must be; otherwise, do the normal comparison.  */
2565 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2566 				    TREE_OPERAND (arg1, N), flags)
2567 
2568 #define OP_SAME_WITH_NULL(N)				\
2569   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2570    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2571 
2572   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2573     {
2574     case tcc_unary:
2575       /* Two conversions are equal only if signedness and modes match.  */
2576       switch (TREE_CODE (arg0))
2577         {
2578 	CASE_CONVERT:
2579         case FIX_TRUNC_EXPR:
2580 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2581 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2582 	    return 0;
2583 	  break;
2584 	default:
2585 	  break;
2586 	}
2587 
2588       return OP_SAME (0);
2589 
2590 
2591     case tcc_comparison:
2592     case tcc_binary:
2593       if (OP_SAME (0) && OP_SAME (1))
2594 	return 1;
2595 
2596       /* For commutative ops, allow the other order.  */
2597       return (commutative_tree_code (TREE_CODE (arg0))
2598 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2599 				  TREE_OPERAND (arg1, 1), flags)
2600 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 				  TREE_OPERAND (arg1, 0), flags));
2602 
2603     case tcc_reference:
2604       /* If either of the pointer (or reference) expressions we are
2605 	 dereferencing contain a side effect, these cannot be equal,
2606 	 but their addresses can be.  */
2607       if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2608 	  && (TREE_SIDE_EFFECTS (arg0)
2609 	      || TREE_SIDE_EFFECTS (arg1)))
2610 	return 0;
2611 
2612       switch (TREE_CODE (arg0))
2613 	{
2614 	case INDIRECT_REF:
2615 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2616 	  return OP_SAME (0);
2617 
2618 	case REALPART_EXPR:
2619 	case IMAGPART_EXPR:
2620 	  return OP_SAME (0);
2621 
2622 	case TARGET_MEM_REF:
2623 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2624 	  /* Require equal extra operands and then fall through to MEM_REF
2625 	     handling of the two common operands.  */
2626 	  if (!OP_SAME_WITH_NULL (2)
2627 	      || !OP_SAME_WITH_NULL (3)
2628 	      || !OP_SAME_WITH_NULL (4))
2629 	    return 0;
2630 	  /* Fallthru.  */
2631 	case MEM_REF:
2632 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2633 	  /* Require equal access sizes, and similar pointer types.
2634 	     We can have incomplete types for array references of
2635 		     variable-sized arrays from the Fortran frontend
2636 	     though.  */
2637 	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2638 		   || (TYPE_SIZE (TREE_TYPE (arg0))
2639 		       && TYPE_SIZE (TREE_TYPE (arg1))
2640 		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2641 					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2642 		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2643 		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2644 		  && OP_SAME (0) && OP_SAME (1));
2645 
2646 	case ARRAY_REF:
2647 	case ARRAY_RANGE_REF:
2648 	  /* Operands 2 and 3 may be null.
2649 	     Compare the array index by value first if it is constant, as the
2650 	     indices may have different types but the same value here.  */
2651 	  if (!OP_SAME (0))
2652 	    return 0;
2653 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2654 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2655 				       TREE_OPERAND (arg1, 1))
2656 		   || OP_SAME (1))
2657 		  && OP_SAME_WITH_NULL (2)
2658 		  && OP_SAME_WITH_NULL (3));
2659 
2660 	case COMPONENT_REF:
2661 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2662 	     may be NULL when we're called to compare MEM_EXPRs.  */
2663 	  if (!OP_SAME_WITH_NULL (0)
2664 	      || !OP_SAME (1))
2665 	    return 0;
2666 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2667 	  return OP_SAME_WITH_NULL (2);
2668 
2669 	case BIT_FIELD_REF:
2670 	  if (!OP_SAME (0))
2671 	    return 0;
2672 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2673 	  return OP_SAME (1) && OP_SAME (2);
2674 
2675 	default:
2676 	  return 0;
2677 	}
2678 
2679     case tcc_expression:
2680       switch (TREE_CODE (arg0))
2681 	{
2682 	case ADDR_EXPR:
2683 	case TRUTH_NOT_EXPR:
2684 	  return OP_SAME (0);
2685 
2686 	case TRUTH_ANDIF_EXPR:
2687 	case TRUTH_ORIF_EXPR:
2688 	  return OP_SAME (0) && OP_SAME (1);
2689 
2690 	case FMA_EXPR:
2691 	case WIDEN_MULT_PLUS_EXPR:
2692 	case WIDEN_MULT_MINUS_EXPR:
2693 	  if (!OP_SAME (2))
2694 	    return 0;
2695 	  /* The multiplication operands are commutative.  */
2696 	  /* FALLTHRU */
2697 
2698 	case TRUTH_AND_EXPR:
2699 	case TRUTH_OR_EXPR:
2700 	case TRUTH_XOR_EXPR:
2701 	  if (OP_SAME (0) && OP_SAME (1))
2702 	    return 1;
2703 
2704 	  /* Otherwise take into account this is a commutative operation.  */
2705 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2706 				   TREE_OPERAND (arg1, 1), flags)
2707 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2708 				      TREE_OPERAND (arg1, 0), flags));
2709 
2710 	case COND_EXPR:
2711 	case VEC_COND_EXPR:
2712 	case DOT_PROD_EXPR:
2713 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2714 
2715 	default:
2716 	  return 0;
2717 	}
2718 
2719     case tcc_vl_exp:
2720       switch (TREE_CODE (arg0))
2721 	{
2722 	case CALL_EXPR:
2723 	  /* If the CALL_EXPRs call different functions, then they
2724 	     clearly cannot be equal.  */
2725 	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2726 				 flags))
2727 	    return 0;
2728 
2729 	  {
2730 	    unsigned int cef = call_expr_flags (arg0);
2731 	    if (flags & OEP_PURE_SAME)
2732 	      cef &= ECF_CONST | ECF_PURE;
2733 	    else
2734 	      cef &= ECF_CONST;
2735 	    if (!cef)
2736 	      return 0;
2737 	  }
2738 
2739 	  /* Now see if all the arguments are the same.  */
2740 	  {
2741 	    const_call_expr_arg_iterator iter0, iter1;
2742 	    const_tree a0, a1;
2743 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
2744 		   a1 = first_const_call_expr_arg (arg1, &iter1);
2745 		 a0 && a1;
2746 		 a0 = next_const_call_expr_arg (&iter0),
2747 		   a1 = next_const_call_expr_arg (&iter1))
2748 	      if (! operand_equal_p (a0, a1, flags))
2749 		return 0;
2750 
2751 	    /* If we get here and both argument lists are exhausted
2752 	       then the CALL_EXPRs are equal.  */
2753 	    return ! (a0 || a1);
2754 	  }
2755 	default:
2756 	  return 0;
2757 	}
2758 
2759     case tcc_declaration:
2760       /* Consider __builtin_sqrt equal to sqrt.  */
2761       return (TREE_CODE (arg0) == FUNCTION_DECL
2762 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2763 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2764 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2765 
2766     default:
2767       return 0;
2768     }
2769 
2770 #undef OP_SAME
2771 #undef OP_SAME_WITH_NULL
2772 }
2773 
2774 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2775    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2776 
2777    When in doubt, return 0.  */
2778 
2779 static int
2780 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2781 {
2782   int unsignedp1, unsignedpo;
2783   tree primarg0, primarg1, primother;
2784   unsigned int correct_width;
2785 
2786   if (operand_equal_p (arg0, arg1, 0))
2787     return 1;
2788 
2789   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2790       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2791     return 0;
2792 
2793   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2794      and see if the inner values are the same.  This removes any
2795      signedness comparison, which doesn't matter here.  */
2796   primarg0 = arg0, primarg1 = arg1;
2797   STRIP_NOPS (primarg0);
2798   STRIP_NOPS (primarg1);
2799   if (operand_equal_p (primarg0, primarg1, 0))
2800     return 1;
2801 
2802   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2803      actual comparison operand, ARG0.
2804 
2805      First throw away any conversions to wider types
2806      already present in the operands.  */
2807 
2808   primarg1 = get_narrower (arg1, &unsignedp1);
2809   primother = get_narrower (other, &unsignedpo);
2810 
2811   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2812   if (unsignedp1 == unsignedpo
2813       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2814       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2815     {
2816       tree type = TREE_TYPE (arg0);
2817 
2818       /* Make sure shorter operand is extended the right way
2819 	 to match the longer operand.  */
2820       primarg1 = fold_convert (signed_or_unsigned_type_for
2821 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2822 
2823       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2824 	return 1;
2825     }
2826 
2827   return 0;
2828 }
2829 
2830 /* See if ARG is an expression that is either a comparison or is performing
2831    arithmetic on comparisons.  The comparisons must only be comparing
2832    two different values, which will be stored in *CVAL1 and *CVAL2; if
2833    they are nonzero it means that some operands have already been found.
2834    No variables may be used anywhere else in the expression except in the
2835    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2836    the expression and save_expr needs to be called with CVAL1 and CVAL2.
2837 
2838    If this is true, return 1.  Otherwise, return zero.  */
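
/* For example, for ARG = (X < Y) | (X == Y) this returns 1 with
   *CVAL1 == X and *CVAL2 == Y, while (X < Y) | (X == Z) fails
   because a third value Z appears (an illustrative sketch).  */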
2839 
2840 static int
2841 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2842 {
2843   enum tree_code code = TREE_CODE (arg);
2844   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2845 
2846   /* We can handle some of the tcc_expression cases here.  */
2847   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2848     tclass = tcc_unary;
2849   else if (tclass == tcc_expression
2850 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2851 	       || code == COMPOUND_EXPR))
2852     tclass = tcc_binary;
2853 
2854   else if (tclass == tcc_expression && code == SAVE_EXPR
2855 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2856     {
2857       /* If we've already found a CVAL1 or CVAL2, this expression is
2858 	 too complex to handle.  */
2859       if (*cval1 || *cval2)
2860 	return 0;
2861 
2862       tclass = tcc_unary;
2863       *save_p = 1;
2864     }
2865 
2866   switch (tclass)
2867     {
2868     case tcc_unary:
2869       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2870 
2871     case tcc_binary:
2872       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2873 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2874 				      cval1, cval2, save_p));
2875 
2876     case tcc_constant:
2877       return 1;
2878 
2879     case tcc_expression:
2880       if (code == COND_EXPR)
2881 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2882 				     cval1, cval2, save_p)
2883 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2884 					cval1, cval2, save_p)
2885 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2886 					cval1, cval2, save_p));
2887       return 0;
2888 
2889     case tcc_comparison:
2890       /* First see if we can handle the first operand, then the second.  For
2891 	 the second operand, we know *CVAL1 can't be zero.  Each of the two
2892 	 values must appear on one side of the comparison; catch the case
2893 	 where this can't hold by failing if the two operands
2894 	 are the same.  */
2895 
2896       if (operand_equal_p (TREE_OPERAND (arg, 0),
2897 			   TREE_OPERAND (arg, 1), 0))
2898 	return 0;
2899 
2900       if (*cval1 == 0)
2901 	*cval1 = TREE_OPERAND (arg, 0);
2902       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2903 	;
2904       else if (*cval2 == 0)
2905 	*cval2 = TREE_OPERAND (arg, 0);
2906       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2907 	;
2908       else
2909 	return 0;
2910 
2911       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2912 	;
2913       else if (*cval2 == 0)
2914 	*cval2 = TREE_OPERAND (arg, 1);
2915       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2916 	;
2917       else
2918 	return 0;
2919 
2920       return 1;
2921 
2922     default:
2923       return 0;
2924     }
2925 }
2926 
2927 /* ARG is a tree that is known to contain just arithmetic operations and
2928    comparisons.  Evaluate the operations in the tree substituting NEW0 for
2929    any occurrence of OLD0 as an operand of a comparison and likewise for
2930    NEW1 and OLD1.  */
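
/* For example, eval_subst on (a < b) && (a == c) with OLD0 = a,
   NEW0 = x, OLD1 = b, NEW1 = y rebuilds (x < y) && (x == c):
   comparison operands equal to OLD0/OLD1 are replaced, the rest are
   kept (an illustrative sketch).  */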
2931 
2932 static tree
2933 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2934 	    tree old1, tree new1)
2935 {
2936   tree type = TREE_TYPE (arg);
2937   enum tree_code code = TREE_CODE (arg);
2938   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2939 
2940   /* We can handle some of the tcc_expression cases here.  */
2941   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2942     tclass = tcc_unary;
2943   else if (tclass == tcc_expression
2944 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2945     tclass = tcc_binary;
2946 
2947   switch (tclass)
2948     {
2949     case tcc_unary:
2950       return fold_build1_loc (loc, code, type,
2951 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2952 				      old0, new0, old1, new1));
2953 
2954     case tcc_binary:
2955       return fold_build2_loc (loc, code, type,
2956 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2957 				      old0, new0, old1, new1),
2958 			  eval_subst (loc, TREE_OPERAND (arg, 1),
2959 				      old0, new0, old1, new1));
2960 
2961     case tcc_expression:
2962       switch (code)
2963 	{
2964 	case SAVE_EXPR:
2965 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2966 			     old1, new1);
2967 
2968 	case COMPOUND_EXPR:
2969 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2970 			     old1, new1);
2971 
2972 	case COND_EXPR:
2973 	  return fold_build3_loc (loc, code, type,
2974 			      eval_subst (loc, TREE_OPERAND (arg, 0),
2975 					  old0, new0, old1, new1),
2976 			      eval_subst (loc, TREE_OPERAND (arg, 1),
2977 					  old0, new0, old1, new1),
2978 			      eval_subst (loc, TREE_OPERAND (arg, 2),
2979 					  old0, new0, old1, new1));
2980 	default:
2981 	  break;
2982 	}
2983       /* Fall through - ???  */
2984 
2985     case tcc_comparison:
2986       {
2987 	tree arg0 = TREE_OPERAND (arg, 0);
2988 	tree arg1 = TREE_OPERAND (arg, 1);
2989 
2990 	/* We need to check both for exact equality and tree equality.  The
2991 	   former will be true if the operand has a side-effect.  In that
2992 	   case, we know the operand occurred exactly once.  */
2993 
2994 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2995 	  arg0 = new0;
2996 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2997 	  arg0 = new1;
2998 
2999 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3000 	  arg1 = new0;
3001 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3002 	  arg1 = new1;
3003 
3004 	return fold_build2_loc (loc, code, type, arg0, arg1);
3005       }
3006 
3007     default:
3008       return arg;
3009     }
3010 }
3011 
3012 /* Return a tree for the case when the result of an expression is RESULT
3013    converted to TYPE and OMITTED was previously an operand of the expression
3014    but is now not needed (e.g., we folded OMITTED * 0).
3015 
3016    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3017    the conversion of RESULT to TYPE.  */
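
/* For example, when folding F () * 0 a caller passes the call as
   OMITTED and zero as RESULT, obtaining the COMPOUND_EXPR (F (), 0)
   so that the call's side effects are preserved (illustrative).  */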
3018 
3019 tree
3020 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3021 {
3022   tree t = fold_convert_loc (loc, type, result);
3023 
3024   /* If the resulting operand is an empty statement, just return the omitted
3025      statement cast to void.  */
3026   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3027     return build1_loc (loc, NOP_EXPR, void_type_node,
3028 		       fold_ignored_result (omitted));
3029 
3030   if (TREE_SIDE_EFFECTS (omitted))
3031     return build2_loc (loc, COMPOUND_EXPR, type,
3032 		       fold_ignored_result (omitted), t);
3033 
3034   return non_lvalue_loc (loc, t);
3035 }
3036 
3037 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3038 
3039 static tree
3040 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3041 			       tree omitted)
3042 {
3043   tree t = fold_convert_loc (loc, type, result);
3044 
3045   /* If the resulting operand is an empty statement, just return the omitted
3046      statement cast to void.  */
3047   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3048     return build1_loc (loc, NOP_EXPR, void_type_node,
3049 		       fold_ignored_result (omitted));
3050 
3051   if (TREE_SIDE_EFFECTS (omitted))
3052     return build2_loc (loc, COMPOUND_EXPR, type,
3053 		       fold_ignored_result (omitted), t);
3054 
3055   return pedantic_non_lvalue_loc (loc, t);
3056 }
3057 
3058 /* Return a tree for the case when the result of an expression is RESULT
3059    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3060    of the expression but are now not needed.
3061 
3062    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3063    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3064    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3065    just do the conversion of RESULT to TYPE.  */
3066 
3067 tree
3068 omit_two_operands_loc (location_t loc, tree type, tree result,
3069 		       tree omitted1, tree omitted2)
3070 {
3071   tree t = fold_convert_loc (loc, type, result);
3072 
3073   if (TREE_SIDE_EFFECTS (omitted2))
3074     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3075   if (TREE_SIDE_EFFECTS (omitted1))
3076     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3077 
3078   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3079 }
3080 
3081 
3082 /* Return a simplified tree node for the truth-negation of ARG.  This
3083    never alters ARG itself.  We assume that ARG is an operation that
3084    returns a truth value (0 or 1).
3085 
3086    FIXME: one would think we would fold the result, but it causes
3087    problems with the dominator optimizer.  */
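
/* For example, !(x < y) folds to x >= y for integer operands, while
   for floating point with -ftrapping-math the non-equality cases are
   left alone and the caller wraps a TRUTH_NOT_EXPR instead.  */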
3088 
3089 tree
3090 fold_truth_not_expr (location_t loc, tree arg)
3091 {
3092   tree type = TREE_TYPE (arg);
3093   enum tree_code code = TREE_CODE (arg);
3094   location_t loc1, loc2;
3095 
3096   /* If this is a comparison, we can simply invert it, except for
3097      floating-point non-equality comparisons, in which case we just
3098      enclose a TRUTH_NOT_EXPR around what we have.  */
3099 
3100   if (TREE_CODE_CLASS (code) == tcc_comparison)
3101     {
3102       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3103       if (FLOAT_TYPE_P (op_type)
3104 	  && flag_trapping_math
3105 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3106 	  && code != NE_EXPR && code != EQ_EXPR)
3107 	return NULL_TREE;
3108 
3109       code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3110       if (code == ERROR_MARK)
3111 	return NULL_TREE;
3112 
3113       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3114 			 TREE_OPERAND (arg, 1));
3115     }
3116 
3117   switch (code)
3118     {
3119     case INTEGER_CST:
3120       return constant_boolean_node (integer_zerop (arg), type);
3121 
3122     case TRUTH_AND_EXPR:
3123       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3125       return build2_loc (loc, TRUTH_OR_EXPR, type,
3126 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3127 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3128 
3129     case TRUTH_OR_EXPR:
3130       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3131       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3132       return build2_loc (loc, TRUTH_AND_EXPR, type,
3133 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3134 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3135 
3136     case TRUTH_XOR_EXPR:
3137       /* Here we can invert either operand.  We invert the first operand
3138 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3139 	 result is the XOR of the first operand with the inside of the
3140 	 negation of the second operand.  */
3141 
3142       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3143 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3144 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3145       else
3146 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3147 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3148 			   TREE_OPERAND (arg, 1));
3149 
3150     case TRUTH_ANDIF_EXPR:
3151       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3152       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3153       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3154 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3155 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3156 
3157     case TRUTH_ORIF_EXPR:
3158       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3159       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3160       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3161 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3162 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3163 
3164     case TRUTH_NOT_EXPR:
3165       return TREE_OPERAND (arg, 0);
3166 
3167     case COND_EXPR:
3168       {
3169 	tree arg1 = TREE_OPERAND (arg, 1);
3170 	tree arg2 = TREE_OPERAND (arg, 2);
3171 
3172 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3174 
3175 	/* A COND_EXPR may have a throw as one operand, which
3176 	   then has void type.  Just leave void operands
3177 	   as they are.  */
3178 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3179 			   VOID_TYPE_P (TREE_TYPE (arg1))
3180 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3181 			   VOID_TYPE_P (TREE_TYPE (arg2))
3182 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3183       }
3184 
3185     case COMPOUND_EXPR:
3186       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3187       return build2_loc (loc, COMPOUND_EXPR, type,
3188 			 TREE_OPERAND (arg, 0),
3189 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3190 
3191     case NON_LVALUE_EXPR:
3192       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3193       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3194 
3195     CASE_CONVERT:
3196       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3197 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3198 
3199       /* ... fall through ...  */
3200 
3201     case FLOAT_EXPR:
3202       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203       return build1_loc (loc, TREE_CODE (arg), type,
3204 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3205 
3206     case BIT_AND_EXPR:
3207       if (!integer_onep (TREE_OPERAND (arg, 1)))
3208 	return NULL_TREE;
3209       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3210 
3211     case SAVE_EXPR:
3212       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3213 
3214     case CLEANUP_POINT_EXPR:
3215       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3216       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3217 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3218 
3219     default:
3220       return NULL_TREE;
3221     }
3222 }
3223 
3224 /* Return a simplified tree node for the truth-negation of ARG.  This
3225    never alters ARG itself.  We assume that ARG is an operation that
3226    returns a truth value (0 or 1).
3227 
3228    FIXME: one would think we would fold the result, but it causes
3229    problems with the dominator optimizer.  */
3230 
3231 tree
3232 invert_truthvalue_loc (location_t loc, tree arg)
3233 {
3234   tree tem;
3235 
3236   if (TREE_CODE (arg) == ERROR_MARK)
3237     return arg;
3238 
3239   tem = fold_truth_not_expr (loc, arg);
3240   if (!tem)
3241     tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3242 
3243   return tem;
3244 }
3245 
3246 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3247    operands are another bit-wise operation with a common input.  If so,
3248    distribute the bit operations to save an operation and possibly two if
3249    constants are involved.  For example, convert
3250 	(A | B) & (A | C) into A | (B & C)
3251    Further simplification will occur if B and C are constants.
3252 
3253    If this optimization cannot be done, 0 will be returned.  */
3254 
3255 static tree
3256 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3257 		     tree arg0, tree arg1)
3258 {
3259   tree common;
3260   tree left, right;
3261 
3262   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3263       || TREE_CODE (arg0) == code
3264       || (TREE_CODE (arg0) != BIT_AND_EXPR
3265 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3266     return 0;
3267 
3268   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3269     {
3270       common = TREE_OPERAND (arg0, 0);
3271       left = TREE_OPERAND (arg0, 1);
3272       right = TREE_OPERAND (arg1, 1);
3273     }
3274   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3275     {
3276       common = TREE_OPERAND (arg0, 0);
3277       left = TREE_OPERAND (arg0, 1);
3278       right = TREE_OPERAND (arg1, 0);
3279     }
3280   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3281     {
3282       common = TREE_OPERAND (arg0, 1);
3283       left = TREE_OPERAND (arg0, 0);
3284       right = TREE_OPERAND (arg1, 1);
3285     }
3286   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3287     {
3288       common = TREE_OPERAND (arg0, 1);
3289       left = TREE_OPERAND (arg0, 0);
3290       right = TREE_OPERAND (arg1, 0);
3291     }
3292   else
3293     return 0;
3294 
3295   common = fold_convert_loc (loc, type, common);
3296   left = fold_convert_loc (loc, type, left);
3297   right = fold_convert_loc (loc, type, right);
3298   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3299 		      fold_build2_loc (loc, code, type, left, right));
3300 }
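
/* A worked instance of the transformation above (illustrative only):
   (X | 3) & (X | 5) becomes X | (3 & 5), which further folds to X | 1,
   replacing two IORs and an AND by a single IOR.  */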
3301 
/* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
   simplify a binary operation with code CODE.  This optimization is
   unsafe, since it may change rounding of the result.  */
3304 static tree
3305 distribute_real_division (location_t loc, enum tree_code code, tree type,
3306 			  tree arg0, tree arg1)
3307 {
3308   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3309   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3310 
  /* (A / C) +- (B / C) -> (A +- B) / C, and likewise when both operands
     are MULT_EXPRs.  */
3312   if (mul0 == mul1
3313       && operand_equal_p (TREE_OPERAND (arg0, 1),
3314 		       TREE_OPERAND (arg1, 1), 0))
3315     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3316 			fold_build2_loc (loc, code, type,
3317 				     TREE_OPERAND (arg0, 0),
3318 				     TREE_OPERAND (arg1, 0)),
3319 			TREE_OPERAND (arg0, 1));
3320 
3321   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3322   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3323 		       TREE_OPERAND (arg1, 0), 0)
3324       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3325       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3326     {
3327       REAL_VALUE_TYPE r0, r1;
3328       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3329       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3330       if (!mul0)
3331 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3332       if (!mul1)
3333         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3334       real_arithmetic (&r0, code, &r0, &r1);
3335       return fold_build2_loc (loc, MULT_EXPR, type,
3336 			  TREE_OPERAND (arg0, 0),
3337 			  build_real (type, r0));
3338     }
3339 
3340   return NULL_TREE;
3341 }
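
/* Illustrative examples for the two transformations above (valid only
   under unsafe math assumptions): (a / 2.0) - (b / 2.0) becomes
   (a - b) / 2.0, and (x / 2.0) + (x / 4.0) becomes x * (0.5 + 0.25),
   i.e. x * 0.75.  */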
3342 
3343 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3344    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3345 
3346 static tree
3347 make_bit_field_ref (location_t loc, tree inner, tree type,
3348 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3349 {
3350   tree result, bftype;
3351 
3352   if (bitpos == 0)
3353     {
3354       tree size = TYPE_SIZE (TREE_TYPE (inner));
3355       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3356 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3357 	  && host_integerp (size, 0)
3358 	  && tree_low_cst (size, 0) == bitsize)
3359 	return fold_convert_loc (loc, type, inner);
3360     }
3361 
3362   bftype = type;
3363   if (TYPE_PRECISION (bftype) != bitsize
3364       || TYPE_UNSIGNED (bftype) == !unsignedp)
3365     bftype = build_nonstandard_integer_type (bitsize, 0);
3366 
3367   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3368 		       size_int (bitsize), bitsize_int (bitpos));
3369 
3370   if (bftype != type)
3371     result = fold_convert_loc (loc, type, result);
3372 
3373   return result;
3374 }
3375 
3376 /* Optimize a bit-field compare.
3377 
   There are two cases:  the first is a compare against a constant, and the
3379    second is a comparison of two items where the fields are at the same
3380    bit position relative to the start of a chunk (byte, halfword, word)
3381    large enough to contain it.  In these cases we can avoid the shift
3382    implicit in bitfield extractions.
3383 
3384    For constants, we emit a compare of the shifted constant with the
3385    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3386    compared.  For two fields at the same position, we do the ANDs with the
   same mask and compare the results of the ANDs.
3388 
3389    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3390    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3391    are the left and right operands of the comparison, respectively.
3392 
3393    If the optimization described above can be done, we return the resulting
3394    tree.  Otherwise we return zero.  */
3395 
3396 static tree
3397 optimize_bit_field_compare (location_t loc, enum tree_code code,
3398 			    tree compare_type, tree lhs, tree rhs)
3399 {
3400   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3401   tree type = TREE_TYPE (lhs);
3402   tree signed_type, unsigned_type;
3403   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3404   enum machine_mode lmode, rmode, nmode;
3405   int lunsignedp, runsignedp;
3406   int lvolatilep = 0, rvolatilep = 0;
3407   tree linner, rinner = NULL_TREE;
3408   tree mask;
3409   tree offset;
3410 
3411   /* In the strict volatile bitfields case, doing code changes here may prevent
3412      other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
3413   if (flag_strict_volatile_bitfields > 0)
3414     return 0;
3415 
3416   /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
3418      extraction at all and so can do nothing.  We also don't want to
3419      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3420      then will no longer be able to replace it.  */
3421   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3422 				&lunsignedp, &lvolatilep, false);
3423   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3424       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3425     return 0;
3426 
3427  if (!const_p)
3428    {
3429      /* If this is not a constant, we can only do something if bit positions,
3430 	sizes, and signedness are the same.  */
3431      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3432 				   &runsignedp, &rvolatilep, false);
3433 
3434      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3435 	 || lunsignedp != runsignedp || offset != 0
3436 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3437        return 0;
3438    }
3439 
3440   /* See if we can find a mode to refer to this field.  We should be able to,
3441      but fail if we can't.  */
3442   if (lvolatilep
3443       && GET_MODE_BITSIZE (lmode) > 0
3444       && flag_strict_volatile_bitfields > 0)
3445     nmode = lmode;
3446   else
3447     nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3448 			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3449 			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3450 				  TYPE_ALIGN (TREE_TYPE (rinner))),
3451 			   word_mode, lvolatilep || rvolatilep);
3452   if (nmode == VOIDmode)
3453     return 0;
3454 
3455   /* Set signed and unsigned types of the precision of this mode for the
3456      shifts below.  */
3457   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3458   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3459 
3460   /* Compute the bit position and size for the new reference and our offset
3461      within it. If the new reference is the same size as the original, we
3462      won't optimize anything, so return zero.  */
3463   nbitsize = GET_MODE_BITSIZE (nmode);
3464   nbitpos = lbitpos & ~ (nbitsize - 1);
3465   lbitpos -= nbitpos;
3466   if (nbitsize == lbitsize)
3467     return 0;
3468 
3469   if (BYTES_BIG_ENDIAN)
3470     lbitpos = nbitsize - lbitsize - lbitpos;
3471 
3472   /* Make the mask to be used against the extracted field.  */
3473   mask = build_int_cst_type (unsigned_type, -1);
3474   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3475   mask = const_binop (RSHIFT_EXPR, mask,
3476 		      size_int (nbitsize - lbitsize - lbitpos));
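
  /* For instance (a sketch assuming an 8-bit chunk): with nbitsize == 8,
     lbitsize == 3 and lbitpos == 2, the all-ones value is shifted left
     by 5 giving 0xe0, then (logically) right by 3 giving 0x1c, i.e.
     three one bits at positions 2..4, exactly covering the field.  */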
3477 
3478   if (! const_p)
3479     /* If not comparing with constant, just rework the comparison
3480        and return.  */
3481     return fold_build2_loc (loc, code, compare_type,
3482 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3483 				     make_bit_field_ref (loc, linner,
3484 							 unsigned_type,
3485 							 nbitsize, nbitpos,
3486 							 1),
3487 				     mask),
3488 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3489 				     make_bit_field_ref (loc, rinner,
3490 							 unsigned_type,
3491 							 nbitsize, nbitpos,
3492 							 1),
3493 				     mask));
3494 
  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
3497      this not only for its own sake, but to avoid having to test for this
3498      error case below.  If we didn't, we might generate wrong code.
3499 
3500      For unsigned fields, the constant shifted right by the field length should
3501      be all zero.  For signed fields, the high-order bits should agree with
3502      the sign bit.  */
3503 
3504   if (lunsignedp)
3505     {
3506       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3507 					fold_convert_loc (loc,
3508 							  unsigned_type, rhs),
3509 					size_int (lbitsize))))
3510 	{
3511 	  warning (0, "comparison is always %d due to width of bit-field",
3512 		   code == NE_EXPR);
3513 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3514 	}
3515     }
3516   else
3517     {
3518       tree tem = const_binop (RSHIFT_EXPR,
3519 			      fold_convert_loc (loc, signed_type, rhs),
3520 			      size_int (lbitsize - 1));
3521       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3522 	{
3523 	  warning (0, "comparison is always %d due to width of bit-field",
3524 		   code == NE_EXPR);
3525 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3526 	}
3527     }
3528 
3529   /* Single-bit compares should always be against zero.  */
3530   if (lbitsize == 1 && ! integer_zerop (rhs))
3531     {
3532       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3533       rhs = build_int_cst (type, 0);
3534     }
3535 
3536   /* Make a new bitfield reference, shift the constant over the
3537      appropriate number of bits and mask it with the computed mask
3538      (in case this was a signed field).  If we changed it, make a new one.  */
3539   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3540   if (lvolatilep)
3541     {
3542       TREE_SIDE_EFFECTS (lhs) = 1;
3543       TREE_THIS_VOLATILE (lhs) = 1;
3544     }
3545 
3546   rhs = const_binop (BIT_AND_EXPR,
3547 		     const_binop (LSHIFT_EXPR,
3548 				  fold_convert_loc (loc, unsigned_type, rhs),
3549 				  size_int (lbitpos)),
3550 		     mask);
3551 
3552   lhs = build2_loc (loc, code, compare_type,
3553 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3554   return lhs;
3555 }
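
/* End-to-end sketch of the constant case (assuming !BYTES_BIG_ENDIAN and
   an 8-bit chunk): for a 3-bit unsigned bit-field B at bit position 2
   whose containing byte is W, the comparison B == 5 folds to
   (W & 0x1c) == 0x14, since 5 << 2 == 0x14 and the mask is 0x1c.  */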
3556 
3557 /* Subroutine for fold_truth_andor_1: decode a field reference.
3558 
3559    If EXP is a comparison reference, we return the innermost reference.
3560 
3561    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3562    set to the starting bit number.
3563 
3564    If the innermost field can be completely contained in a mode-sized
3565    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3566 
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
3568    otherwise it is not changed.
3569 
3570    *PUNSIGNEDP is set to the signedness of the field.
3571 
3572    *PMASK is set to the mask used.  This is either contained in a
3573    BIT_AND_EXPR or derived from the width of the field.
3574 
3575    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3576 
3577    Return 0 if this is not a component reference or is one that we can't
3578    do anything with.  */
3579 
3580 static tree
3581 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3582 			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3583 			int *punsignedp, int *pvolatilep,
3584 			tree *pmask, tree *pand_mask)
3585 {
3586   tree outer_type = 0;
3587   tree and_mask = 0;
3588   tree mask, inner, offset;
3589   tree unsigned_type;
3590   unsigned int precision;
3591 
3592   /* All the optimizations using this function assume integer fields.
3593      There are problems with FP fields since the type_for_size call
3594      below can fail for, e.g., XFmode.  */
3595   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3596     return 0;
3597 
3598   /* We are interested in the bare arrangement of bits, so strip everything
3599      that doesn't affect the machine mode.  However, record the type of the
3600      outermost expression if it may matter below.  */
3601   if (CONVERT_EXPR_P (exp)
3602       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3603     outer_type = TREE_TYPE (exp);
3604   STRIP_NOPS (exp);
3605 
3606   if (TREE_CODE (exp) == BIT_AND_EXPR)
3607     {
3608       and_mask = TREE_OPERAND (exp, 1);
3609       exp = TREE_OPERAND (exp, 0);
3610       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3611       if (TREE_CODE (and_mask) != INTEGER_CST)
3612 	return 0;
3613     }
3614 
3615   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3616 			       punsignedp, pvolatilep, false);
3617   if ((inner == exp && and_mask == 0)
3618       || *pbitsize < 0 || offset != 0
3619       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3620     return 0;
3621 
3622   /* If the number of bits in the reference is the same as the bitsize of
3623      the outer type, then the outer type gives the signedness. Otherwise
3624      (in case of a small bitfield) the signedness is unchanged.  */
3625   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3626     *punsignedp = TYPE_UNSIGNED (outer_type);
3627 
3628   /* Compute the mask to access the bitfield.  */
3629   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3630   precision = TYPE_PRECISION (unsigned_type);
3631 
3632   mask = build_int_cst_type (unsigned_type, -1);
3633 
3634   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3635   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3636 
3637   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3638   if (and_mask != 0)
3639     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3640 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
3641 
3642   *pmask = mask;
3643   *pand_mask = and_mask;
3644   return inner;
3645 }
3646 
3647 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3648    bit positions.  */
3649 
3650 static int
3651 all_ones_mask_p (const_tree mask, int size)
3652 {
3653   tree type = TREE_TYPE (mask);
3654   unsigned int precision = TYPE_PRECISION (type);
3655   tree tmask;
3656 
3657   tmask = build_int_cst_type (signed_type_for (type), -1);
3658 
3659   return
3660     tree_int_cst_equal (mask,
3661 			const_binop (RSHIFT_EXPR,
3662 				     const_binop (LSHIFT_EXPR, tmask,
3663 						  size_int (precision - size)),
3664 				     size_int (precision - size)));
3665 }
3666 
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
3668    represents the sign bit of EXP's type.  If EXP represents a sign
3669    or zero extension, also test VAL against the unextended type.
3670    The return value is the (sub)expression whose sign bit is VAL,
3671    or NULL_TREE otherwise.  */
3672 
3673 static tree
3674 sign_bit_p (tree exp, const_tree val)
3675 {
3676   unsigned HOST_WIDE_INT mask_lo, lo;
3677   HOST_WIDE_INT mask_hi, hi;
3678   int width;
3679   tree t;
3680 
3681   /* Tree EXP must have an integral type.  */
3682   t = TREE_TYPE (exp);
3683   if (! INTEGRAL_TYPE_P (t))
3684     return NULL_TREE;
3685 
3686   /* Tree VAL must be an integer constant.  */
3687   if (TREE_CODE (val) != INTEGER_CST
3688       || TREE_OVERFLOW (val))
3689     return NULL_TREE;
3690 
3691   width = TYPE_PRECISION (t);
3692   if (width > HOST_BITS_PER_WIDE_INT)
3693     {
3694       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3695       lo = 0;
3696 
3697       mask_hi = ((unsigned HOST_WIDE_INT) -1
3698 		 >> (HOST_BITS_PER_DOUBLE_INT - width));
3699       mask_lo = -1;
3700     }
3701   else
3702     {
3703       hi = 0;
3704       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3705 
3706       mask_hi = 0;
3707       mask_lo = ((unsigned HOST_WIDE_INT) -1
3708 		 >> (HOST_BITS_PER_WIDE_INT - width));
3709     }
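
  /* E.g., a sketch for a 32-bit type on a host with 64-bit
     HOST_WIDE_INT: width == 32, so hi == 0, lo == 0x80000000 and
     mask_lo == 0xffffffff; VAL matches iff its low 32 bits are exactly
     the sign bit.  */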
3710 
3711   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3712      treat VAL as if it were unsigned.  */
3713   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3714       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3715     return exp;
3716 
3717   /* Handle extension from a narrower type.  */
3718   if (TREE_CODE (exp) == NOP_EXPR
3719       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3720     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3721 
3722   return NULL_TREE;
3723 }
3724 
3725 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3726    to be evaluated unconditionally.  */
3727 
3728 static int
3729 simple_operand_p (const_tree exp)
3730 {
3731   /* Strip any conversions that don't change the machine mode.  */
3732   STRIP_NOPS (exp);
3733 
3734   return (CONSTANT_CLASS_P (exp)
3735   	  || TREE_CODE (exp) == SSA_NAME
3736 	  || (DECL_P (exp)
3737 	      && ! TREE_ADDRESSABLE (exp)
3738 	      && ! TREE_THIS_VOLATILE (exp)
3739 	      && ! DECL_NONLOCAL (exp)
3740 	      /* Don't regard global variables as simple.  They may be
3741 		 allocated in ways unknown to the compiler (shared memory,
3742 		 #pragma weak, etc).  */
3743 	      && ! TREE_PUBLIC (exp)
3744 	      && ! DECL_EXTERNAL (exp)
3745 	      /* Loading a static variable is unduly expensive, but global
3746 		 registers aren't expensive.  */
3747 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3748 }
3749 
3750 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3751    to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
3753    and logic-not operations are simple, if their operands are simple, too.  */
3754 
3755 static bool
3756 simple_operand_p_2 (tree exp)
3757 {
3758   enum tree_code code;
3759 
3760   if (TREE_SIDE_EFFECTS (exp)
3761       || tree_could_trap_p (exp))
3762     return false;
3763 
3764   while (CONVERT_EXPR_P (exp))
3765     exp = TREE_OPERAND (exp, 0);
3766 
3767   code = TREE_CODE (exp);
3768 
3769   if (TREE_CODE_CLASS (code) == tcc_comparison)
3770     return (simple_operand_p (TREE_OPERAND (exp, 0))
3771 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
3772 
3773   if (code == TRUTH_NOT_EXPR)
3774       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3775 
3776   return simple_operand_p (exp);
3777 }
3778 
3779 
3780 /* The following functions are subroutines to fold_range_test and allow it to
3781    try to change a logical combination of comparisons into a range test.
3782 
3783    For example, both
3784 	X == 2 || X == 3 || X == 4 || X == 5
3785    and
3786 	X >= 2 && X <= 5
3787    are converted to
3788 	(unsigned) (X - 2) <= 3
3789 
3790    We describe each set of comparisons as being either inside or outside
3791    a range, using a variable named like IN_P, and then describe the
3792    range with a lower and upper bound.  If one of the bounds is omitted,
3793    it represents either the highest or lowest value of the type.
3794 
3795    In the comments below, we represent a range by two numbers in brackets
3796    preceded by a "+" to designate being inside that range, or a "-" to
3797    designate being outside that range, so the condition can be inverted by
3798    flipping the prefix.  An omitted bound is represented by a "-".  For
3799    example, "- [-, 10]" means being outside the range starting at the lowest
3800    possible value and ending at 10, in other words, being greater than 10.
3801    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3802    always false.
3803 
3804    We set up things so that the missing bounds are handled in a consistent
3805    manner so neither a missing bound nor "true" and "false" need to be
3806    handled using a special case.  */
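
/* Checking the example above with concrete values (assuming 32-bit
   unsigned arithmetic): for X == 1, (unsigned) (X - 2) wraps to
   0xffffffff, which is greater than 3, so the test correctly fails;
   for X == 5, (unsigned) (X - 2) is 3, so the test correctly holds.  */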
3807 
3808 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3809    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3810    and UPPER1_P are nonzero if the respective argument is an upper bound
3811    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3812    must be specified for a comparison.  ARG1 will be converted to ARG0's
3813    type if both are specified.  */
3814 
3815 static tree
3816 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3817 	     tree arg1, int upper1_p)
3818 {
3819   tree tem;
3820   int result;
3821   int sgn0, sgn1;
3822 
3823   /* If neither arg represents infinity, do the normal operation.
3824      Else, if not a comparison, return infinity.  Else handle the special
3825      comparison rules. Note that most of the cases below won't occur, but
3826      are handled for consistency.  */
3827 
3828   if (arg0 != 0 && arg1 != 0)
3829     {
3830       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3831 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3832       STRIP_NOPS (tem);
3833       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3834     }
3835 
3836   if (TREE_CODE_CLASS (code) != tcc_comparison)
3837     return 0;
3838 
3839   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real mathematics, we cannot assume open-ended ranges
     are the same.  But this is computer arithmetic, where numbers are
     finite.  We can therefore treat any unbounded end of a range as if it
     were the value Z, Z being greater in magnitude than any representable
     number.  This permits us to treat unbounded ranges as equal.  */
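  /* For example, two missing upper bounds compare as equal below:
     sgn0 == sgn1 == 1, so EQ_EXPR yields true, as if both ranges
     ended at the same fictitious value Z.  */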
3845   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3846   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3847   switch (code)
3848     {
3849     case EQ_EXPR:
3850       result = sgn0 == sgn1;
3851       break;
3852     case NE_EXPR:
3853       result = sgn0 != sgn1;
3854       break;
3855     case LT_EXPR:
3856       result = sgn0 < sgn1;
3857       break;
3858     case LE_EXPR:
3859       result = sgn0 <= sgn1;
3860       break;
3861     case GT_EXPR:
3862       result = sgn0 > sgn1;
3863       break;
3864     case GE_EXPR:
3865       result = sgn0 >= sgn1;
3866       break;
3867     default:
3868       gcc_unreachable ();
3869     }
3870 
3871   return constant_boolean_node (result, type);
3872 }
3873 
3874 /* Helper routine for make_range.  Perform one step for it, return
3875    new expression if the loop should continue or NULL_TREE if it should
3876    stop.  */
3877 
3878 tree
3879 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3880 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3881 		 bool *strict_overflow_p)
3882 {
3883   tree arg0_type = TREE_TYPE (arg0);
3884   tree n_low, n_high, low = *p_low, high = *p_high;
3885   int in_p = *p_in_p, n_in_p;
3886 
3887   switch (code)
3888     {
3889     case TRUTH_NOT_EXPR:
3890       /* We can only do something if the range is testing for zero.  */
3891       if (low == NULL_TREE || high == NULL_TREE
3892 	  || ! integer_zerop (low) || ! integer_zerop (high))
3893 	return NULL_TREE;
3894       *p_in_p = ! in_p;
3895       return arg0;
3896 
3897     case EQ_EXPR: case NE_EXPR:
3898     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3899       /* We can only do something if the range is testing for zero
3900 	 and if the second operand is an integer constant.  Note that
3901 	 saying something is "in" the range we make is done by
	 complementing IN_P, since it is initially set up for the case of
3903 	 being not equal to zero; "out" is leaving it alone.  */
3904       if (low == NULL_TREE || high == NULL_TREE
3905 	  || ! integer_zerop (low) || ! integer_zerop (high)
3906 	  || TREE_CODE (arg1) != INTEGER_CST)
3907 	return NULL_TREE;
3908 
3909       switch (code)
3910 	{
3911 	case NE_EXPR:  /* - [c, c]  */
3912 	  low = high = arg1;
3913 	  break;
3914 	case EQ_EXPR:  /* + [c, c]  */
3915 	  in_p = ! in_p, low = high = arg1;
3916 	  break;
3917 	case GT_EXPR:  /* - [-, c] */
3918 	  low = 0, high = arg1;
3919 	  break;
3920 	case GE_EXPR:  /* + [c, -] */
3921 	  in_p = ! in_p, low = arg1, high = 0;
3922 	  break;
3923 	case LT_EXPR:  /* - [c, -] */
3924 	  low = arg1, high = 0;
3925 	  break;
3926 	case LE_EXPR:  /* + [-, c] */
3927 	  in_p = ! in_p, low = 0, high = arg1;
3928 	  break;
3929 	default:
3930 	  gcc_unreachable ();
3931 	}
3932 
3933       /* If this is an unsigned comparison, we also know that EXP is
3934 	 greater than or equal to zero.  We base the range tests we make
3935 	 on that fact, so we record it here so we can parse existing
3936 	 range tests.  We test arg0_type since often the return type
3937 	 of, e.g. EQ_EXPR, is boolean.  */
3938       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3939 	{
3940 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3941 			      in_p, low, high, 1,
3942 			      build_int_cst (arg0_type, 0),
3943 			      NULL_TREE))
3944 	    return NULL_TREE;
3945 
3946 	  in_p = n_in_p, low = n_low, high = n_high;
3947 
3948 	  /* If the high bound is missing, but we have a nonzero low
3949 	     bound, reverse the range so it goes from zero to the low bound
3950 	     minus 1.  */
3951 	  if (high == 0 && low && ! integer_zerop (low))
3952 	    {
3953 	      in_p = ! in_p;
3954 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3955 				  integer_one_node, 0);
3956 	      low = build_int_cst (arg0_type, 0);
3957 	    }
3958 	}
3959 
3960       *p_low = low;
3961       *p_high = high;
3962       *p_in_p = in_p;
3963       return arg0;
3964 
3965     case NEGATE_EXPR:
3966       /* If flag_wrapv and ARG0_TYPE is signed, make sure
3967 	 low and high are non-NULL, then normalize will DTRT.  */
3968       if (!TYPE_UNSIGNED (arg0_type)
3969 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3970 	{
3971 	  if (low == NULL_TREE)
3972 	    low = TYPE_MIN_VALUE (arg0_type);
3973 	  if (high == NULL_TREE)
3974 	    high = TYPE_MAX_VALUE (arg0_type);
3975 	}
3976 
3977       /* (-x) IN [a,b] -> x in [-b, -a]  */
3978       n_low = range_binop (MINUS_EXPR, exp_type,
3979 			   build_int_cst (exp_type, 0),
3980 			   0, high, 1);
3981       n_high = range_binop (MINUS_EXPR, exp_type,
3982 			    build_int_cst (exp_type, 0),
3983 			    0, low, 0);
3984       if (n_high != 0 && TREE_OVERFLOW (n_high))
3985 	return NULL_TREE;
3986       goto normalize;
3987 
3988     case BIT_NOT_EXPR:
3989       /* ~ X -> -X - 1  */
3990       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3991 			 build_int_cst (exp_type, 1));
3992 
3993     case PLUS_EXPR:
3994     case MINUS_EXPR:
3995       if (TREE_CODE (arg1) != INTEGER_CST)
3996 	return NULL_TREE;
3997 
3998       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3999 	 move a constant to the other side.  */
4000       if (!TYPE_UNSIGNED (arg0_type)
4001 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4002 	return NULL_TREE;
4003 
4004       /* If EXP is signed, any overflow in the computation is undefined,
4005 	 so we don't worry about it so long as our computations on
4006 	 the bounds don't overflow.  For unsigned, overflow is defined
4007 	 and this is exactly the right thing.  */
4008       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4009 			   arg0_type, low, 0, arg1, 0);
4010       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4011 			    arg0_type, high, 1, arg1, 0);
4012       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4013 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4014 	return NULL_TREE;
4015 
4016       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4017 	*strict_overflow_p = true;
4018 
4019       normalize:
4020 	/* Check for an unsigned range which has wrapped around the maximum
4021 	   value thus making n_high < n_low, and normalize it.  */
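	/* A sketch with an 8-bit unsigned type: if the bounds came out as
	   n_low == 250 and n_high == 5, the wrapped range [250, 5] is
	   rewritten as the complement of [6, 249], flipping IN_P.  */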
4022 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4023 	  {
4024 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4025 			       integer_one_node, 0);
4026 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4027 				integer_one_node, 0);
4028 
4029 	    /* If the range is of the form +/- [ x+1, x ], we won't
4030 	       be able to normalize it.  But then, it represents the
4031 	       whole range or the empty set, so make it
4032 	       +/- [ -, - ].  */
4033 	    if (tree_int_cst_equal (n_low, low)
4034 		&& tree_int_cst_equal (n_high, high))
4035 	      low = high = 0;
4036 	    else
4037 	      in_p = ! in_p;
4038 	  }
4039 	else
4040 	  low = n_low, high = n_high;
4041 
4042 	*p_low = low;
4043 	*p_high = high;
4044 	*p_in_p = in_p;
4045 	return arg0;
4046 
4047     CASE_CONVERT:
4048     case NON_LVALUE_EXPR:
4049       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4050 	return NULL_TREE;
4051 
4052       if (! INTEGRAL_TYPE_P (arg0_type)
4053 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4054 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4055 	return NULL_TREE;
4056 
4057       n_low = low, n_high = high;
4058 
4059       if (n_low != 0)
4060 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4061 
4062       if (n_high != 0)
4063 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4064 
      /* If we're converting arg0 from an unsigned type to exp's
	 signed type, we will be doing the comparison as unsigned.
4067 	 The tests above have already verified that LOW and HIGH
4068 	 are both positive.
4069 
4070 	 So we have to ensure that we will handle large unsigned
4071 	 values the same way that the current signed bounds treat
4072 	 negative values.  */
4073 
4074       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4075 	{
4076 	  tree high_positive;
4077 	  tree equiv_type;
4078 	  /* For fixed-point modes, we need to pass the saturating flag
4079 	     as the 2nd parameter.  */
4080 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4081 	    equiv_type
4082 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4083 						TYPE_SATURATING (arg0_type));
4084 	  else
4085 	    equiv_type
4086 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4087 
4088 	  /* A range without an upper bound is, naturally, unbounded.
4089 	     Since convert would have cropped a very large value, use
4090 	     the max value for the destination type.  */
4091 	  high_positive
4092 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4093 	      : TYPE_MAX_VALUE (arg0_type);
4094 
4095 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4096 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4097 					     fold_convert_loc (loc, arg0_type,
4098 							       high_positive),
4099 					     build_int_cst (arg0_type, 1));
4100 
4101 	  /* If the low bound is specified, "and" the range with the
4102 	     range for which the original unsigned value will be
4103 	     positive.  */
4104 	  if (low != 0)
4105 	    {
4106 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4107 				  1, fold_convert_loc (loc, arg0_type,
4108 						       integer_zero_node),
4109 				  high_positive))
4110 		return NULL_TREE;
4111 
4112 	      in_p = (n_in_p == in_p);
4113 	    }
4114 	  else
4115 	    {
4116 	      /* Otherwise, "or" the range with the range of the input
4117 		 that will be interpreted as negative.  */
4118 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4119 				  1, fold_convert_loc (loc, arg0_type,
4120 						       integer_zero_node),
4121 				  high_positive))
4122 		return NULL_TREE;
4123 
4124 	      in_p = (in_p != n_in_p);
4125 	    }
4126 	}
4127 
4128       *p_low = n_low;
4129       *p_high = n_high;
4130       *p_in_p = in_p;
4131       return arg0;
4132 
4133     default:
4134       return NULL_TREE;
4135     }
4136 }
4137 
4138 /* Given EXP, a logical expression, set the range it is testing into
4139    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4140    actually being tested.  *PLOW and *PHIGH will be made of the same
4141    type as the returned expression.  If EXP is not a comparison, we
4142    will most likely not be returning a useful value and range.  Set
4143    *STRICT_OVERFLOW_P to true if the return value is only valid
4144    because signed overflow is undefined; otherwise, do not change
4145    *STRICT_OVERFLOW_P.  */
4146 
4147 tree
4148 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4149 	    bool *strict_overflow_p)
4150 {
4151   enum tree_code code;
4152   tree arg0, arg1 = NULL_TREE;
4153   tree exp_type, nexp;
4154   int in_p;
4155   tree low, high;
4156   location_t loc = EXPR_LOCATION (exp);
4157 
  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases handled may not
     happen, but it doesn't seem worth worrying about this.  We loop as
     long as make_range_step keeps refining the range; when it returns
     NULL_TREE we stop and use the range computed so far.  */
4163 
4164   in_p = 0;
4165   low = high = build_int_cst (TREE_TYPE (exp), 0);
4166 
4167   while (1)
4168     {
4169       code = TREE_CODE (exp);
4170       exp_type = TREE_TYPE (exp);
4171       arg0 = NULL_TREE;
4172 
4173       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4174 	{
4175 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4176 	    arg0 = TREE_OPERAND (exp, 0);
4177 	  if (TREE_CODE_CLASS (code) == tcc_binary
4178 	      || TREE_CODE_CLASS (code) == tcc_comparison
4179 	      || (TREE_CODE_CLASS (code) == tcc_expression
4180 		  && TREE_OPERAND_LENGTH (exp) > 1))
4181 	    arg1 = TREE_OPERAND (exp, 1);
4182 	}
4183       if (arg0 == NULL_TREE)
4184 	break;
4185 
4186       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4187 			      &high, &in_p, strict_overflow_p);
4188       if (nexp == NULL_TREE)
4189 	break;
4190       exp = nexp;
4191     }
4192 
4193   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4194   if (TREE_CODE (exp) == INTEGER_CST)
4195     {
4196       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4197 						 exp, 0, low, 0))
4198 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4199 						    exp, 1, high, 1)));
4200       low = high = 0;
4201       exp = 0;
4202     }
4203 
4204   *pin_p = in_p, *plow = low, *phigh = high;
4205   return exp;
4206 }
4207 
4208 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4209    type, TYPE, return an expression to test if EXP is in (or out of, depending
4210    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4211 
4212 tree
4213 build_range_check (location_t loc, tree type, tree exp, int in_p,
4214 		   tree low, tree high)
4215 {
4216   tree etype = TREE_TYPE (exp), value;
4217 
4218 #ifdef HAVE_canonicalize_funcptr_for_compare
4219   /* Disable this optimization for function pointer expressions
4220      on targets that require function pointer canonicalization.  */
4221   if (HAVE_canonicalize_funcptr_for_compare
4222       && TREE_CODE (etype) == POINTER_TYPE
4223       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4224     return NULL_TREE;
4225 #endif
4226 
4227   if (! in_p)
4228     {
4229       value = build_range_check (loc, type, exp, 1, low, high);
4230       if (value != 0)
4231         return invert_truthvalue_loc (loc, value);
4232 
4233       return 0;
4234     }
4235 
4236   if (low == 0 && high == 0)
4237     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4238 
4239   if (low == 0)
4240     return fold_build2_loc (loc, LE_EXPR, type, exp,
4241 			fold_convert_loc (loc, etype, high));
4242 
4243   if (high == 0)
4244     return fold_build2_loc (loc, GE_EXPR, type, exp,
4245 			fold_convert_loc (loc, etype, low));
4246 
4247   if (operand_equal_p (low, high, 0))
4248     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4249 			fold_convert_loc (loc, etype, low));
4250 
4251   if (integer_zerop (low))
4252     {
4253       if (! TYPE_UNSIGNED (etype))
4254 	{
4255 	  etype = unsigned_type_for (etype);
4256 	  high = fold_convert_loc (loc, etype, high);
4257 	  exp = fold_convert_loc (loc, etype, exp);
4258 	}
4259       return build_range_check (loc, type, exp, 1, 0, high);
4260     }
4261 
4262   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4263   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4264     {
4265       unsigned HOST_WIDE_INT lo;
4266       HOST_WIDE_INT hi;
4267       int prec;
4268 
4269       prec = TYPE_PRECISION (etype);
4270       if (prec <= HOST_BITS_PER_WIDE_INT)
4271 	{
4272 	  hi = 0;
4273 	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4274 	}
4275       else
4276 	{
4277 	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4278 	  lo = (unsigned HOST_WIDE_INT) -1;
4279 	}
4280 
4281       if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4282 	{
4283 	  if (TYPE_UNSIGNED (etype))
4284 	    {
4285 	      tree signed_etype = signed_type_for (etype);
4286 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4287 		etype
4288 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4289 	      else
4290 		etype = signed_etype;
4291 	      exp = fold_convert_loc (loc, etype, exp);
4292 	    }
4293 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4294 			      build_int_cst (etype, 0));
4295 	}
4296     }
4297 
4298   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
4301      that it wraps around.  */
4302   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4303     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4304 					    TYPE_UNSIGNED (etype));
4305 
4306   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4307     {
4308       tree utype, minv, maxv;
4309 
4310       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4311 	 for the type in question, as we rely on this here.  */
4312       utype = unsigned_type_for (etype);
4313       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4314       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4315 			  integer_one_node, 1);
4316       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4317 
4318       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4319 				      minv, 1, maxv, 1)))
4320 	etype = utype;
4321       else
4322 	return 0;
4323     }
4324 
4325   high = fold_convert_loc (loc, etype, high);
4326   low = fold_convert_loc (loc, etype, low);
4327   exp = fold_convert_loc (loc, etype, exp);
4328 
4329   value = const_binop (MINUS_EXPR, high, low);
4330 
4332   if (POINTER_TYPE_P (etype))
4333     {
4334       if (value != 0 && !TREE_OVERFLOW (value))
4335 	{
4336 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4337           return build_range_check (loc, type,
4338 			     	    fold_build_pointer_plus_loc (loc, exp, low),
4339 			            1, build_int_cst (etype, 0), value);
4340 	}
4341       return 0;
4342     }
4343 
4344   if (value != 0 && !TREE_OVERFLOW (value))
4345     return build_range_check (loc, type,
4346 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4347 			      1, build_int_cst (etype, 0), value);
4348 
4349   return 0;
4350 }
4351 
4352 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4353 
4354 static tree
4355 range_predecessor (tree val)
4356 {
4357   tree type = TREE_TYPE (val);
4358 
4359   if (INTEGRAL_TYPE_P (type)
4360       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4361     return 0;
4362   else
4363     return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4364 }
4365 
4366 /* Return the successor of VAL in its type, handling the infinite case.  */
4367 
4368 static tree
4369 range_successor (tree val)
4370 {
4371   tree type = TREE_TYPE (val);
4372 
4373   if (INTEGRAL_TYPE_P (type)
4374       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4375     return 0;
4376   else
4377     return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4378 }
4379 
4380 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4381    can, 0 if we can't.  Set the output range into the specified parameters.  */
4382 
4383 bool
4384 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4385 	      tree high0, int in1_p, tree low1, tree high1)
4386 {
4387   int no_overlap;
4388   int subset;
4389   int temp;
4390   tree tem;
4391   int in_p;
4392   tree low, high;
4393   int lowequal = ((low0 == 0 && low1 == 0)
4394 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4395 						low0, 0, low1, 0)));
4396   int highequal = ((high0 == 0 && high1 == 0)
4397 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 						 high0, 1, high1, 1)));
4399 
4400   /* Make range 0 be the range that starts first, or ends last if they
4401      start at the same value.  Swap them if it isn't.  */
4402   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4403 				 low0, 0, low1, 0))
4404       || (lowequal
4405 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4406 					high1, 1, high0, 1))))
4407     {
4408       temp = in0_p, in0_p = in1_p, in1_p = temp;
4409       tem = low0, low0 = low1, low1 = tem;
4410       tem = high0, high0 = high1, high1 = tem;
4411     }
4412 
4413   /* Now flag two cases, whether the ranges are disjoint or whether the
4414      second range is totally subsumed in the first.  Note that the tests
4415      below are simplified by the ones above.  */
4416   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4417 					  high0, 1, low1, 0));
4418   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4419 				      high1, 1, high0, 1));
4420 
4421   /* We now have four cases, depending on whether we are including or
4422      excluding the two ranges.  */
4423   if (in0_p && in1_p)
4424     {
4425       /* If they don't overlap, the result is false.  If the second range
4426 	 is a subset it is the result.  Otherwise, the range is from the start
4427 	 of the second to the end of the first.  */
4428       if (no_overlap)
4429 	in_p = 0, low = high = 0;
4430       else if (subset)
4431 	in_p = 1, low = low1, high = high1;
4432       else
4433 	in_p = 1, low = low1, high = high0;
4434     }
4435 
4436   else if (in0_p && ! in1_p)
4437     {
4438       /* If they don't overlap, the result is the first range.  If they are
4439 	 equal, the result is false.  If the second range is a subset of the
4440 	 first, and the ranges begin at the same place, we go from just after
4441 	 the end of the second range to the end of the first.  If the second
4442 	 range is not a subset of the first, or if it is a subset and both
4443 	 ranges end at the same place, the range starts at the start of the
4444 	 first range and ends just before the second range.
4445 	 Otherwise, we can't describe this as a single range.  */
4446       if (no_overlap)
4447 	in_p = 1, low = low0, high = high0;
4448       else if (lowequal && highequal)
4449 	in_p = 0, low = high = 0;
4450       else if (subset && lowequal)
4451 	{
4452 	  low = range_successor (high1);
4453 	  high = high0;
4454 	  in_p = 1;
4455 	  if (low == 0)
4456 	    {
4457 	      /* We are in the weird situation where high0 > high1 but
4458 		 high1 has no successor.  Punt.  */
4459 	      return 0;
4460 	    }
4461 	}
4462       else if (! subset || highequal)
4463 	{
4464 	  low = low0;
4465 	  high = range_predecessor (low1);
4466 	  in_p = 1;
4467 	  if (high == 0)
4468 	    {
4469 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4470 	      return 0;
4471 	    }
4472 	}
4473       else
4474 	return 0;
4475     }
4476 
4477   else if (! in0_p && in1_p)
4478     {
4479       /* If they don't overlap, the result is the second range.  If the second
4480 	 is a subset of the first, the result is false.  Otherwise,
4481 	 the range starts just after the first range and ends at the
4482 	 end of the second.  */
4483       if (no_overlap)
4484 	in_p = 1, low = low1, high = high1;
4485       else if (subset || highequal)
4486 	in_p = 0, low = high = 0;
4487       else
4488 	{
4489 	  low = range_successor (high0);
4490 	  high = high1;
4491 	  in_p = 1;
4492 	  if (low == 0)
4493 	    {
4494 	      /* high1 > high0 but high0 has no successor.  Punt.  */
4495 	      return 0;
4496 	    }
4497 	}
4498     }
4499 
4500   else
4501     {
4502       /* The case where we are excluding both ranges.  Here the complex case
4503 	 is if they don't overlap.  In that case, the only time we have a
4504 	 range is if they are adjacent.  If the second is a subset of the
4505 	 first, the result is the first.  Otherwise, the range to exclude
4506 	 starts at the beginning of the first range and ends at the end of the
4507 	 second.  */
4508       if (no_overlap)
4509 	{
4510 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4511 					 range_successor (high0),
4512 					 1, low1, 0)))
4513 	    in_p = 0, low = low0, high = high1;
4514 	  else
4515 	    {
4516 	      /* Canonicalize - [min, x] into - [-, x].  */
4517 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4518 		switch (TREE_CODE (TREE_TYPE (low0)))
4519 		  {
4520 		  case ENUMERAL_TYPE:
4521 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4522 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4523 		      break;
4524 		    /* FALLTHROUGH */
4525 		  case INTEGER_TYPE:
4526 		    if (tree_int_cst_equal (low0,
4527 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4528 		      low0 = 0;
4529 		    break;
4530 		  case POINTER_TYPE:
4531 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4532 			&& integer_zerop (low0))
4533 		      low0 = 0;
4534 		    break;
4535 		  default:
4536 		    break;
4537 		  }
4538 
4539 	      /* Canonicalize - [x, max] into - [x, -].  */
4540 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4541 		switch (TREE_CODE (TREE_TYPE (high1)))
4542 		  {
4543 		  case ENUMERAL_TYPE:
4544 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4545 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4546 		      break;
4547 		    /* FALLTHROUGH */
4548 		  case INTEGER_TYPE:
4549 		    if (tree_int_cst_equal (high1,
4550 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4551 		      high1 = 0;
4552 		    break;
4553 		  case POINTER_TYPE:
4554 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4555 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4556 						       high1, 1,
4557 						       integer_one_node, 1)))
4558 		      high1 = 0;
4559 		    break;
4560 		  default:
4561 		    break;
4562 		  }
4563 
	      /* The ranges might also be adjacent between the maximum and
4565 	         minimum values of the given type.  For
4566 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4567 	         return + [x + 1, y - 1].  */
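	      /* E.g. excluding [-, 3] and excluding [7, -] over an
	         integer type leaves exactly + [4, 6]; here x == 3 and
	         y == 7, so x + 1 < y holds.  */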
4568 	      if (low0 == 0 && high1 == 0)
4569 	        {
4570 		  low = range_successor (high0);
4571 		  high = range_predecessor (low1);
4572 		  if (low == 0 || high == 0)
4573 		    return 0;
4574 
4575 		  in_p = 1;
4576 		}
4577 	      else
4578 		return 0;
4579 	    }
4580 	}
4581       else if (subset)
4582 	in_p = 0, low = low0, high = high0;
4583       else
4584 	in_p = 0, low = low0, high = high1;
4585     }
4586 
4587   *pin_p = in_p, *plow = low, *phigh = high;
4588   return 1;
4589 }
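
/* A worked merge (illustrative): ANDing + [2, 8] with + [5, 12] is the
   overlapping, non-subset case above, so the result is + [5, 8], the
   start of the second range to the end of the first.  */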
4590 
4591 
4592 /* Subroutine of fold, looking inside expressions of the form
4593    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4594    of the COND_EXPR.  This function is being used also to optimize
4595    A op B ? C : A, by reversing the comparison first.
4596 
4597    Return a folded expression whose code is not a COND_EXPR
4598    anymore, or NULL_TREE if no folding opportunity is found.  */
4599 
4600 static tree
4601 fold_cond_expr_with_comparison (location_t loc, tree type,
4602 				tree arg0, tree arg1, tree arg2)
4603 {
4604   enum tree_code comp_code = TREE_CODE (arg0);
4605   tree arg00 = TREE_OPERAND (arg0, 0);
4606   tree arg01 = TREE_OPERAND (arg0, 1);
4607   tree arg1_type = TREE_TYPE (arg1);
4608   tree tem;
4609 
4610   STRIP_NOPS (arg1);
4611   STRIP_NOPS (arg2);
4612 
4613   /* If we have A op 0 ? A : -A, consider applying the following
4614      transformations:
4615 
4616      A == 0? A : -A    same as -A
4617      A != 0? A : -A    same as A
4618      A >= 0? A : -A    same as abs (A)
4619      A > 0?  A : -A    same as abs (A)
4620      A <= 0? A : -A    same as -abs (A)
4621      A < 0?  A : -A    same as -abs (A)
4622 
4623      None of these transformations work for modes with signed
4624      zeros.  If A is +/-0, the first two transformations will
4625      change the sign of the result (from +0 to -0, or vice
4626      versa).  The last four will fix the sign of the result,
4627      even though the original expressions could be positive or
4628      negative, depending on the sign of A.
4629 
4630      Note that all these transformations are correct if A is
4631      NaN, since the two alternatives (A and -A) are also NaNs.  */
4632   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4633       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4634 	  ? real_zerop (arg01)
4635 	  : integer_zerop (arg01))
4636       && ((TREE_CODE (arg2) == NEGATE_EXPR
4637 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4638 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X; check for that.  */
4640 	  || (TREE_CODE (arg1) == MINUS_EXPR
4641 	      && TREE_CODE (arg2) == MINUS_EXPR
4642 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4643 				  TREE_OPERAND (arg2, 1), 0)
4644 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4645 				  TREE_OPERAND (arg2, 0), 0))))
4646     switch (comp_code)
4647       {
4648       case EQ_EXPR:
4649       case UNEQ_EXPR:
4650 	tem = fold_convert_loc (loc, arg1_type, arg1);
4651 	return pedantic_non_lvalue_loc (loc,
4652 				    fold_convert_loc (loc, type,
4653 						  negate_expr (tem)));
4654       case NE_EXPR:
4655       case LTGT_EXPR:
4656 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4657       case UNGE_EXPR:
4658       case UNGT_EXPR:
4659 	if (flag_trapping_math)
4660 	  break;
4661 	/* Fall through.  */
4662       case GE_EXPR:
4663       case GT_EXPR:
4664 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4665 	  arg1 = fold_convert_loc (loc, signed_type_for
4666 			       (TREE_TYPE (arg1)), arg1);
4667 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4668 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4669       case UNLE_EXPR:
4670       case UNLT_EXPR:
4671 	if (flag_trapping_math)
4672 	  break;
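	/* Fall through.  */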
4673       case LE_EXPR:
4674       case LT_EXPR:
4675 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4676 	  arg1 = fold_convert_loc (loc, signed_type_for
4677 			       (TREE_TYPE (arg1)), arg1);
4678 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4679 	return negate_expr (fold_convert_loc (loc, type, tem));
4680       default:
4681 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4682 	break;
4683       }
4684 
4685   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4686      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4687      both transformations are correct when A is NaN: A != 0
4688      is then true, and A == 0 is false.  */
4689 
4690   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4691       && integer_zerop (arg01) && integer_zerop (arg2))
4692     {
4693       if (comp_code == NE_EXPR)
4694 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4695       else if (comp_code == EQ_EXPR)
4696 	return build_int_cst (type, 0);
4697     }
4698 
4699   /* Try some transformations of A op B ? A : B.
4700 
4701      A == B? A : B    same as B
4702      A != B? A : B    same as A
4703      A >= B? A : B    same as max (A, B)
4704      A > B?  A : B    same as max (B, A)
4705      A <= B? A : B    same as min (A, B)
4706      A < B?  A : B    same as min (B, A)
4707 
4708      As above, these transformations don't work in the presence
4709      of signed zeros.  For example, if A and B are zeros of
4710      opposite sign, the first two transformations will change
4711      the sign of the result.  In the last four, the original
4712      expressions give different results for (A=+0, B=-0) and
4713      (A=-0, B=+0), but the transformed expressions do not.
4714 
4715      The first two transformations are correct if either A or B
4716      is a NaN.  In the first transformation, the condition will
4717      be false, and B will indeed be chosen.  In the case of the
4718      second transformation, the condition A != B will be true,
4719      and A will be chosen.
4720 
4721      The conversions to max() and min() are not correct if B is
4722      a number and A is not.  The conditions in the original
4723      expressions will be false, so all four give B.  The min()
4724      and max() versions would give a NaN instead.  */
4725   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4726       && operand_equal_for_comparison_p (arg01, arg2, arg00)
4727       /* Avoid these transformations if the COND_EXPR may be used
4728 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4729       && (in_gimple_form
4730 	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4731 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4732 	  || ! maybe_lvalue_p (arg1)
4733 	  || ! maybe_lvalue_p (arg2)))
4734     {
4735       tree comp_op0 = arg00;
4736       tree comp_op1 = arg01;
4737       tree comp_type = TREE_TYPE (comp_op0);
4738 
4739       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4740       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4741 	{
4742 	  comp_type = type;
4743 	  comp_op0 = arg1;
4744 	  comp_op1 = arg2;
4745 	}
4746 
4747       switch (comp_code)
4748 	{
4749 	case EQ_EXPR:
4750 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4751 	case NE_EXPR:
4752 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4753 	case LE_EXPR:
4754 	case LT_EXPR:
4755 	case UNLE_EXPR:
4756 	case UNLT_EXPR:
4757 	  /* In C++ a ?: expression can be an lvalue, so put the
4758 	     operand which will be used if they are equal first
4759 	     so that we can convert this back to the
4760 	     corresponding COND_EXPR.  */
4761 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4762 	    {
4763 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4764 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4765 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4766 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4767 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
4768 				   comp_op1, comp_op0);
4769 	      return pedantic_non_lvalue_loc (loc,
4770 					  fold_convert_loc (loc, type, tem));
4771 	    }
4772 	  break;
4773 	case GE_EXPR:
4774 	case GT_EXPR:
4775 	case UNGE_EXPR:
4776 	case UNGT_EXPR:
4777 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4778 	    {
4779 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4780 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4781 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4782 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4783 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
4784 				   comp_op1, comp_op0);
4785 	      return pedantic_non_lvalue_loc (loc,
4786 					  fold_convert_loc (loc, type, tem));
4787 	    }
4788 	  break;
4789 	case UNEQ_EXPR:
4790 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 	    return pedantic_non_lvalue_loc (loc,
4792 					fold_convert_loc (loc, type, arg2));
4793 	  break;
4794 	case LTGT_EXPR:
4795 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4796 	    return pedantic_non_lvalue_loc (loc,
4797 					fold_convert_loc (loc, type, arg1));
4798 	  break;
4799 	default:
4800 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4801 	  break;
4802 	}
4803     }
4804 
4805   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4806      we might still be able to simplify this.  For example,
4807      if C1 is one less or one more than C2, this might have started
4808      out as a MIN or MAX and been transformed by this function.
4809      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4810 
4811   if (INTEGRAL_TYPE_P (type)
4812       && TREE_CODE (arg01) == INTEGER_CST
4813       && TREE_CODE (arg2) == INTEGER_CST)
4814     switch (comp_code)
4815       {
4816       case EQ_EXPR:
4817 	if (TREE_CODE (arg1) == INTEGER_CST)
4818 	  break;
4819 	/* We can replace A with C1 in this case.  */
4820 	arg1 = fold_convert_loc (loc, type, arg01);
4821 	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4822 
4823       case LT_EXPR:
4824 	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4825 	   MIN_EXPR, to preserve the signedness of the comparison.  */
4826 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4827 			       OEP_ONLY_CONST)
4828 	    && operand_equal_p (arg01,
4829 				const_binop (PLUS_EXPR, arg2,
4830 					     build_int_cst (type, 1)),
4831 				OEP_ONLY_CONST))
4832 	  {
4833 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4834 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4835 						     arg2));
4836 	    return pedantic_non_lvalue_loc (loc,
4837 					    fold_convert_loc (loc, type, tem));
4838 	  }
4839 	break;
4840 
4841       case LE_EXPR:
4842 	/* If C1 is C2 - 1, this is min(A, C2), with the same care
4843 	   as above.  */
4844 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4845 			       OEP_ONLY_CONST)
4846 	    && operand_equal_p (arg01,
4847 				const_binop (MINUS_EXPR, arg2,
4848 					     build_int_cst (type, 1)),
4849 				OEP_ONLY_CONST))
4850 	  {
4851 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4853 						     arg2));
4854 	    return pedantic_non_lvalue_loc (loc,
4855 					    fold_convert_loc (loc, type, tem));
4856 	  }
4857 	break;
4858 
4859       case GT_EXPR:
4860 	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4861 	   MAX_EXPR, to preserve the signedness of the comparison.  */
4862 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4863 			       OEP_ONLY_CONST)
4864 	    && operand_equal_p (arg01,
4865 				const_binop (MINUS_EXPR, arg2,
4866 					     build_int_cst (type, 1)),
4867 				OEP_ONLY_CONST))
4868 	  {
4869 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4870 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4871 						     arg2));
4872 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4873 	  }
4874 	break;
4875 
4876       case GE_EXPR:
4877 	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
4878 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4879 			       OEP_ONLY_CONST)
4880 	    && operand_equal_p (arg01,
4881 				const_binop (PLUS_EXPR, arg2,
4882 					     build_int_cst (type, 1)),
4883 				OEP_ONLY_CONST))
4884 	  {
4885 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4886 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4887 						     arg2));
4888 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4889 	  }
4890 	break;
4891       case NE_EXPR:
4892 	break;
4893       default:
4894 	gcc_unreachable ();
4895       }
4896 
4897   return NULL_TREE;
4898 }
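/* Editorial sketch, not part of the upstream source: at the C level the
   transformations above behave roughly as follows, assuming NaNs and
   signed zeros need not be honored:

       a <= b ? a : b      =>  MIN_EXPR <a, b>
       a >= b ? a : b      =>  MAX_EXPR <a, b>
       a < 5  ? a : 4      =>  MIN_EXPR <a, 4>    (C1 == C2 + 1)
       a > 4  ? a : 5      =>  MAX_EXPR <a, 5>    (C1 == C2 - 1)

   The last two are the INTEGER_CST cases handled just above: a comparison
   against C1 that selects between A and C2 is recognized as a MIN/MAX that
   an earlier pass over the expression may itself have produced.  */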
4899 
4900 
4901 
4902 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4903 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4904   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4905 		false) >= 2)
4906 #endif
4907 
4908 /* See if OP0 and OP1, the operands of a logical combination CODE of
4909    boolean tests, can be merged into some range test.  Return the new tree if so.  */
4910 
4911 static tree
4912 fold_range_test (location_t loc, enum tree_code code, tree type,
4913 		 tree op0, tree op1)
4914 {
4915   int or_op = (code == TRUTH_ORIF_EXPR
4916 	       || code == TRUTH_OR_EXPR);
4917   int in0_p, in1_p, in_p;
4918   tree low0, low1, low, high0, high1, high;
4919   bool strict_overflow_p = false;
4920   tree tem, lhs, rhs;
4921   const char * const warnmsg = G_("assuming signed overflow does not occur "
4922 				  "when simplifying range test");
4923 
4924   if (!INTEGRAL_TYPE_P (type))
4925     return 0;
4926 
4927   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4928   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4929 
4930   /* If this is an OR operation, invert both sides; we will invert
4931      again at the end.  */
4932   if (or_op)
4933     in0_p = ! in0_p, in1_p = ! in1_p;
4934 
4935   /* If both expressions are the same, if we can merge the ranges, and we
4936      can build the range test, return it or its inversion.  If one of the
4937      ranges is always true or always false, consider it to be the same
4938      expression as the other.  */
4939   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4940       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4941 		       in1_p, low1, high1)
4942       && 0 != (tem = (build_range_check (loc, type,
4943 					 lhs != 0 ? lhs
4944 					 : rhs != 0 ? rhs : integer_zero_node,
4945 					 in_p, low, high))))
4946     {
4947       if (strict_overflow_p)
4948 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4949       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4950     }
4951 
4952   /* On machines where the branch cost is expensive, if this is a
4953      short-circuited branch and the underlying object on both sides
4954      is the same, make a non-short-circuit operation.  */
4955   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4956 	   && lhs != 0 && rhs != 0
4957 	   && (code == TRUTH_ANDIF_EXPR
4958 	       || code == TRUTH_ORIF_EXPR)
4959 	   && operand_equal_p (lhs, rhs, 0))
4960     {
4961       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4962 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4963 	 which cases we can't do this.  */
4964       if (simple_operand_p (lhs))
4965 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4966 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4967 			   type, op0, op1);
4968 
4969       else if (!lang_hooks.decls.global_bindings_p ()
4970 	       && !CONTAINS_PLACEHOLDER_P (lhs))
4971 	{
4972 	  tree common = save_expr (lhs);
4973 
4974 	  if (0 != (lhs = build_range_check (loc, type, common,
4975 					     or_op ? ! in0_p : in0_p,
4976 					     low0, high0))
4977 	      && (0 != (rhs = build_range_check (loc, type, common,
4978 						 or_op ? ! in1_p : in1_p,
4979 						 low1, high1))))
4980 	    {
4981 	      if (strict_overflow_p)
4982 		fold_overflow_warning (warnmsg,
4983 				       WARN_STRICT_OVERFLOW_COMPARISON);
4984 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4985 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4986 				 type, lhs, rhs);
4987 	    }
4988 	}
4989     }
4990 
4991   return 0;
4992 }
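/* Editorial sketch, not part of the upstream source: the classic range
   test built above replaces a pair of comparisons with one unsigned
   comparison, e.g.

       ch >= '0' && ch <= '9'   =>   (unsigned char) (ch - '0') <= 9

   and when the ranges cannot be merged but LOGICAL_OP_NON_SHORT_CIRCUIT
   holds, e.g. (a == 2) || (a == 7), the two checks on the common operand
   may be reissued as a non-short-circuit TRUTH_OR_EXPR.  */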
4993 
4994 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4995    P-bit value.  Arrange things so the extra bits will be set to zero if and
4996    only if C is sign-extended to its full width.  If MASK is nonzero,
4997    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4998 
4999 static tree
5000 unextend (tree c, int p, int unsignedp, tree mask)
5001 {
5002   tree type = TREE_TYPE (c);
5003   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5004   tree temp;
5005 
5006   if (p == modesize || unsignedp)
5007     return c;
5008 
5009   /* We work by getting just the sign bit into the low-order bit, then
5010      into the high-order bit, then sign-extend.  We then XOR that value
5011      with C.  */
5012   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5013   temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5014 
5015   /* We must use a signed type in order to get an arithmetic right shift.
5016      However, we must also avoid introducing accidental overflows, so that
5017      a subsequent call to integer_zerop will work.  Hence we must
5018      do the type conversion here.  At this point, the constant is either
5019      zero or one, and the conversion to a signed type can never overflow.
5020      We could get an overflow if this conversion is done anywhere else.  */
5021   if (TYPE_UNSIGNED (type))
5022     temp = fold_convert (signed_type_for (type), temp);
5023 
5024   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5025   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5026   if (mask != 0)
5027     temp = const_binop (BIT_AND_EXPR, temp,
5028 			fold_convert (TREE_TYPE (c), mask));
5029   /* If necessary, convert the type back to match the type of C.  */
5030   if (TYPE_UNSIGNED (type))
5031     temp = fold_convert (type, temp);
5032 
5033   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5034 }
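/* Editorial worked example, not part of the upstream source.  Assume an
   8-bit mode and P == 4, with C == 0x0A (the 4-bit pattern 1010, i.e. -6):
   the sign bit is isolated (1), moved to bit 7 (0x80), arithmetic right
   shifted by modesize - p - 1 == 3 (0xF0), and XORed with C, giving 0xFA,
   which is exactly -6 sign-extended to the full byte.  */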
5035 
5036 /* For an expression that has the form
5037      (A && B) || ~B
5038    or
5039      (A || B) && ~B,
5040    we can drop one of the inner expressions and simplify to
5041      A || ~B
5042    or
5043      A && ~B
5044    LOC is the location of the resulting expression.  OP is the inner
5045    logical operation (the left-hand side in the examples above), while CMPOP
5046    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5047    removing a condition that guards another, as in
5048      (A != NULL && A->...) || A == NULL
5049    which we must not transform.  If RHS_ONLY is true, only eliminate the
5050    right-most operand of the inner logical operation.  */
5051 
5052 static tree
5053 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5054 				 bool rhs_only)
5055 {
5056   tree type = TREE_TYPE (cmpop);
5057   enum tree_code code = TREE_CODE (cmpop);
5058   enum tree_code truthop_code = TREE_CODE (op);
5059   tree lhs = TREE_OPERAND (op, 0);
5060   tree rhs = TREE_OPERAND (op, 1);
5061   tree orig_lhs = lhs, orig_rhs = rhs;
5062   enum tree_code rhs_code = TREE_CODE (rhs);
5063   enum tree_code lhs_code = TREE_CODE (lhs);
5064   enum tree_code inv_code;
5065 
5066   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5067     return NULL_TREE;
5068 
5069   if (TREE_CODE_CLASS (code) != tcc_comparison)
5070     return NULL_TREE;
5071 
5072   if (rhs_code == truthop_code)
5073     {
5074       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5075       if (newrhs != NULL_TREE)
5076 	{
5077 	  rhs = newrhs;
5078 	  rhs_code = TREE_CODE (rhs);
5079 	}
5080     }
5081   if (lhs_code == truthop_code && !rhs_only)
5082     {
5083       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5084       if (newlhs != NULL_TREE)
5085 	{
5086 	  lhs = newlhs;
5087 	  lhs_code = TREE_CODE (lhs);
5088 	}
5089     }
5090 
5091   inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5092   if (inv_code == rhs_code
5093       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5094       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095     return lhs;
5096   if (!rhs_only && inv_code == lhs_code
5097       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5098       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5099     return rhs;
5100   if (rhs != orig_rhs || lhs != orig_lhs)
5101     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5102 			    lhs, rhs);
5103   return NULL_TREE;
5104 }
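/* Editorial sketch, not part of the upstream source: given

       (c != 0 && a < b) || a >= b

   the inner a < b is the exact inverse of the a >= b arm, so the routine
   above drops it, leaving (c != 0) || a >= b.  The RHS_ONLY restriction
   keeps the fold from breaking pointer guards such as

       (p != 0 && p->x > 0) || p == 0

   where eliminating the left-hand p != 0 would unguard the dereference.  */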
5105 
5106 /* Find ways of folding logical expressions of LHS and RHS:
5107    Try to merge two comparisons to the same innermost item.
5108    Look for range tests like "ch >= '0' && ch <= '9'".
5109    Look for combinations of simple terms on machines with expensive branches
5110    and evaluate the RHS unconditionally.
5111 
5112    For example, if we have p->a == 2 && p->b == 4 and we can make an
5113    object large enough to span both A and B, we can do this with a comparison
5114    against the object ANDed with the a mask.
5115 
5116    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5117    operations to do this with one comparison.
5118 
5119    We check for both normal comparisons and the BIT_AND_EXPRs made by
5120    this function and the one above.
5121 
5122    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5123    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5124 
5125    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5126    two operands.
5127 
5128    We return the simplified tree or 0 if no optimization is possible.  */
5129 
5130 static tree
5131 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5132 		    tree lhs, tree rhs)
5133 {
5134   /* If this is the "or" of two comparisons, we can do something if
5135      the comparisons are NE_EXPR.  If this is the "and", we can do something
5136      if the comparisons are EQ_EXPR.  I.e.,
5137 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5138 
5139      WANTED_CODE is this operation code.  For single bit fields, we can
5140      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5141      comparison for one-bit fields.  */
5142 
5143   enum tree_code wanted_code;
5144   enum tree_code lcode, rcode;
5145   tree ll_arg, lr_arg, rl_arg, rr_arg;
5146   tree ll_inner, lr_inner, rl_inner, rr_inner;
5147   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5148   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5149   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5150   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5151   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5152   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5153   enum machine_mode lnmode, rnmode;
5154   tree ll_mask, lr_mask, rl_mask, rr_mask;
5155   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5156   tree l_const, r_const;
5157   tree lntype, rntype, result;
5158   HOST_WIDE_INT first_bit, end_bit;
5159   int volatilep;
5160 
5161   /* Start by getting the comparison codes.  Fail if anything is volatile.
5162      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5163      it were surrounded with a NE_EXPR.  */
5164 
5165   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5166     return 0;
5167 
5168   lcode = TREE_CODE (lhs);
5169   rcode = TREE_CODE (rhs);
5170 
5171   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5172     {
5173       lhs = build2 (NE_EXPR, truth_type, lhs,
5174 		    build_int_cst (TREE_TYPE (lhs), 0));
5175       lcode = NE_EXPR;
5176     }
5177 
5178   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5179     {
5180       rhs = build2 (NE_EXPR, truth_type, rhs,
5181 		    build_int_cst (TREE_TYPE (rhs), 0));
5182       rcode = NE_EXPR;
5183     }
5184 
5185   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5186       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5187     return 0;
5188 
5189   ll_arg = TREE_OPERAND (lhs, 0);
5190   lr_arg = TREE_OPERAND (lhs, 1);
5191   rl_arg = TREE_OPERAND (rhs, 0);
5192   rr_arg = TREE_OPERAND (rhs, 1);
5193 
5194   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5195   if (simple_operand_p (ll_arg)
5196       && simple_operand_p (lr_arg))
5197     {
5198       if (operand_equal_p (ll_arg, rl_arg, 0)
5199           && operand_equal_p (lr_arg, rr_arg, 0))
5200 	{
5201           result = combine_comparisons (loc, code, lcode, rcode,
5202 					truth_type, ll_arg, lr_arg);
5203 	  if (result)
5204 	    return result;
5205 	}
5206       else if (operand_equal_p (ll_arg, rr_arg, 0)
5207                && operand_equal_p (lr_arg, rl_arg, 0))
5208 	{
5209           result = combine_comparisons (loc, code, lcode,
5210 					swap_tree_comparison (rcode),
5211 					truth_type, ll_arg, lr_arg);
5212 	  if (result)
5213 	    return result;
5214 	}
5215     }
5216 
5217   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5218 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5219 
5220   /* If the RHS can be evaluated unconditionally and its operands are
5221      simple, it wins to evaluate the RHS unconditionally on machines
5222      with expensive branches.  In this case, this isn't a comparison
5223      that can be merged.  */
5224 
5225   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5226 		   false) >= 2
5227       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5228       && simple_operand_p (rl_arg)
5229       && simple_operand_p (rr_arg))
5230     {
5231       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5232       if (code == TRUTH_OR_EXPR
5233 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5234 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5235 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5236 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5237 	return build2_loc (loc, NE_EXPR, truth_type,
5238 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5239 				   ll_arg, rl_arg),
5240 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5241 
5242       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5243       if (code == TRUTH_AND_EXPR
5244 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5245 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5246 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5247 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5248 	return build2_loc (loc, EQ_EXPR, truth_type,
5249 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5250 				   ll_arg, rl_arg),
5251 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5252     }
5253 
5254   /* See if the comparisons can be merged.  Then get all the parameters for
5255      each side.  */
5256 
5257   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5258       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5259     return 0;
5260 
5261   volatilep = 0;
5262   ll_inner = decode_field_reference (loc, ll_arg,
5263 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5264 				     &ll_unsignedp, &volatilep, &ll_mask,
5265 				     &ll_and_mask);
5266   lr_inner = decode_field_reference (loc, lr_arg,
5267 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5268 				     &lr_unsignedp, &volatilep, &lr_mask,
5269 				     &lr_and_mask);
5270   rl_inner = decode_field_reference (loc, rl_arg,
5271 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5272 				     &rl_unsignedp, &volatilep, &rl_mask,
5273 				     &rl_and_mask);
5274   rr_inner = decode_field_reference (loc, rr_arg,
5275 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5276 				     &rr_unsignedp, &volatilep, &rr_mask,
5277 				     &rr_and_mask);
5278 
5279   /* The inner operation on the lhs of each comparison must be the same
5280      if we are to be able to do anything.
5281      Then see if we have constants.  If not, the same must be true for
5282      the rhs's.  */
5283   if (volatilep || ll_inner == 0 || rl_inner == 0
5284       || ! operand_equal_p (ll_inner, rl_inner, 0))
5285     return 0;
5286 
5287   if (TREE_CODE (lr_arg) == INTEGER_CST
5288       && TREE_CODE (rr_arg) == INTEGER_CST)
5289     l_const = lr_arg, r_const = rr_arg;
5290   else if (lr_inner == 0 || rr_inner == 0
5291 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5292     return 0;
5293   else
5294     l_const = r_const = 0;
5295 
5296   /* If either comparison code is not correct for our logical operation,
5297      fail.  However, we can convert a one-bit comparison against zero into
5298      the opposite comparison against that bit being set in the field.  */
5299 
5300   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5301   if (lcode != wanted_code)
5302     {
5303       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5304 	{
5305 	  /* Make the left operand unsigned, since we are only interested
5306 	     in the value of one bit.  Otherwise we are doing the wrong
5307 	     thing below.  */
5308 	  ll_unsignedp = 1;
5309 	  l_const = ll_mask;
5310 	}
5311       else
5312 	return 0;
5313     }
5314 
5315   /* This is analogous to the code for l_const above.  */
5316   if (rcode != wanted_code)
5317     {
5318       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5319 	{
5320 	  rl_unsignedp = 1;
5321 	  r_const = rl_mask;
5322 	}
5323       else
5324 	return 0;
5325     }
5326 
5327   /* See if we can find a mode that contains both fields being compared on
5328      the left.  If we can't, fail.  Otherwise, update all constants and masks
5329      to be relative to a field of that size.  */
5330   first_bit = MIN (ll_bitpos, rl_bitpos);
5331   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5332   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5333 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5334 			  volatilep);
5335   if (lnmode == VOIDmode)
5336     return 0;
5337 
5338   lnbitsize = GET_MODE_BITSIZE (lnmode);
5339   lnbitpos = first_bit & ~ (lnbitsize - 1);
5340   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5341   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5342 
5343   if (BYTES_BIG_ENDIAN)
5344     {
5345       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5346       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5347     }
5348 
5349   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5350 			 size_int (xll_bitpos));
5351   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5352 			 size_int (xrl_bitpos));
5353 
5354   if (l_const)
5355     {
5356       l_const = fold_convert_loc (loc, lntype, l_const);
5357       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5358       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5359       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5360 					fold_build1_loc (loc, BIT_NOT_EXPR,
5361 						     lntype, ll_mask))))
5362 	{
5363 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364 
5365 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5366 	}
5367     }
5368   if (r_const)
5369     {
5370       r_const = fold_convert_loc (loc, lntype, r_const);
5371       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5372       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5373       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5374 					fold_build1_loc (loc, BIT_NOT_EXPR,
5375 						     lntype, rl_mask))))
5376 	{
5377 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5378 
5379 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5380 	}
5381     }
5382 
5383   /* If the right sides are not constant, do the same for them.  Also,
5384      disallow this optimization if a size or signedness mismatch occurs
5385      between the left and right sides.  */
5386   if (l_const == 0)
5387     {
5388       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5389 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5390 	  /* Make sure the two fields on the right
5391 	     correspond to the left without being swapped.  */
5392 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5393 	return 0;
5394 
5395       first_bit = MIN (lr_bitpos, rr_bitpos);
5396       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5397       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5398 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5399 			      volatilep);
5400       if (rnmode == VOIDmode)
5401 	return 0;
5402 
5403       rnbitsize = GET_MODE_BITSIZE (rnmode);
5404       rnbitpos = first_bit & ~ (rnbitsize - 1);
5405       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5406       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5407 
5408       if (BYTES_BIG_ENDIAN)
5409 	{
5410 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5411 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5412 	}
5413 
5414       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 							    rntype, lr_mask),
5416 			     size_int (xlr_bitpos));
5417       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 							    rntype, rr_mask),
5419 			     size_int (xrr_bitpos));
5420 
5421       /* Make a mask that corresponds to both fields being compared.
5422 	 Do this for both items being compared.  If the operands are the
5423 	 same size and the bits being compared are in the same position
5424 	 then we can do this by masking both and comparing the masked
5425 	 results.  */
5426       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5427       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5428       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5429 	{
5430 	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5431 				    ll_unsignedp || rl_unsignedp);
5432 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5433 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5434 
5435 	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5436 				    lr_unsignedp || rr_unsignedp);
5437 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5438 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5439 
5440 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5441 	}
5442 
5443       /* There is still another way we can do something:  If both pairs of
5444 	 fields being compared are adjacent, we may be able to make a wider
5445 	 field containing them both.
5446 
5447 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5448 	 the mask must be shifted to account for the shift done by
5449 	 make_bit_field_ref.  */
5450       if ((ll_bitsize + ll_bitpos == rl_bitpos
5451 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5452 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5453 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5454 	{
5455 	  tree type;
5456 
5457 	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
5458 				    ll_bitsize + rl_bitsize,
5459 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5460 	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
5461 				    lr_bitsize + rr_bitsize,
5462 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5463 
5464 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5465 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5466 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5467 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5468 
5469 	  /* Convert to the smaller type before masking out unwanted bits.  */
5470 	  type = lntype;
5471 	  if (lntype != rntype)
5472 	    {
5473 	      if (lnbitsize > rnbitsize)
5474 		{
5475 		  lhs = fold_convert_loc (loc, rntype, lhs);
5476 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5477 		  type = rntype;
5478 		}
5479 	      else if (lnbitsize < rnbitsize)
5480 		{
5481 		  rhs = fold_convert_loc (loc, lntype, rhs);
5482 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5483 		  type = lntype;
5484 		}
5485 	    }
5486 
5487 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5488 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5489 
5490 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5491 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5492 
5493 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5494 	}
5495 
5496       return 0;
5497     }
5498 
5499   /* Handle the case of comparisons with constants.  If there is something in
5500      common between the masks, those bits of the constants must be the same.
5501      If not, the combined test has a known result (0 for AND, 1 for OR).
5502      Test for this to avoid generating incorrect code below.  */
5503   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5504   if (! integer_zerop (result)
5505       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5506 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5507     {
5508       if (wanted_code == NE_EXPR)
5509 	{
5510 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5511 	  return constant_boolean_node (true, truth_type);
5512 	}
5513       else
5514 	{
5515 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5516 	  return constant_boolean_node (false, truth_type);
5517 	}
5518     }
5519 
5520   /* Construct the expression we will return.  First get the component
5521      reference we will make.  Unless the mask is all ones the width of
5522      that field, perform the mask operation.  Then compare with the
5523      merged constant.  */
5524   result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5525 			       ll_unsignedp || rl_unsignedp);
5526 
5527   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5528   if (! all_ones_mask_p (ll_mask, lnbitsize))
5529     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5530 
5531   return build2_loc (loc, wanted_code, truth_type, result,
5532 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5533 }
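/* Editorial sketch, not part of the upstream source: for adjacent
   bit-fields compared against constants, e.g.

       struct s { unsigned a : 4; unsigned b : 4; };
       ... sp->a == 2 && sp->b == 4 ...

   the code above looks for one mode covering both fields, builds a single
   bit-field reference, masks it if needed, and compares against the merged
   constant, turning two tests into one load, at most one AND and one
   compare.  */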
5534 
5535 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5536    constant.  */
5537 
5538 static tree
5539 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5540 			    tree op0, tree op1)
5541 {
5542   tree arg0 = op0;
5543   enum tree_code op_code;
5544   tree comp_const;
5545   tree minmax_const;
5546   int consts_equal, consts_lt;
5547   tree inner;
5548 
5549   STRIP_SIGN_NOPS (arg0);
5550 
5551   op_code = TREE_CODE (arg0);
5552   minmax_const = TREE_OPERAND (arg0, 1);
5553   comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5554   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5555   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5556   inner = TREE_OPERAND (arg0, 0);
5557 
5558   /* If something does not permit us to optimize, return the original tree.  */
5559   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5560       || TREE_CODE (comp_const) != INTEGER_CST
5561       || TREE_OVERFLOW (comp_const)
5562       || TREE_CODE (minmax_const) != INTEGER_CST
5563       || TREE_OVERFLOW (minmax_const))
5564     return NULL_TREE;
5565 
5566   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5567      and GT_EXPR, doing the rest with recursive calls using logical
5568      simplifications.  */
5569   switch (code)
5570     {
5571     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5572       {
5573 	tree tem
5574 	  = optimize_minmax_comparison (loc,
5575 					invert_tree_comparison (code, false),
5576 					type, op0, op1);
5577 	if (tem)
5578 	  return invert_truthvalue_loc (loc, tem);
5579 	return NULL_TREE;
5580       }
5581 
5582     case GE_EXPR:
5583       return
5584 	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5585 		     optimize_minmax_comparison
5586 		     (loc, EQ_EXPR, type, arg0, comp_const),
5587 		     optimize_minmax_comparison
5588 		     (loc, GT_EXPR, type, arg0, comp_const));
5589 
5590     case EQ_EXPR:
5591       if (op_code == MAX_EXPR && consts_equal)
5592 	/* MAX (X, 0) == 0  ->  X <= 0  */
5593 	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594 
5595       else if (op_code == MAX_EXPR && consts_lt)
5596 	/* MAX (X, 0) == 5  ->  X == 5   */
5597 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 
5599       else if (op_code == MAX_EXPR)
5600 	/* MAX (X, 0) == -1  ->  false  */
5601 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602 
5603       else if (consts_equal)
5604 	/* MIN (X, 0) == 0  ->  X >= 0  */
5605 	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5606 
5607       else if (consts_lt)
5608 	/* MIN (X, 0) == 5  ->  false  */
5609 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5610 
5611       else
5612 	/* MIN (X, 0) == -1  ->  X == -1  */
5613 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5614 
5615     case GT_EXPR:
5616       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5617 	/* MAX (X, 0) > 0  ->  X > 0
5618 	   MAX (X, 0) > 5  ->  X > 5  */
5619 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620 
5621       else if (op_code == MAX_EXPR)
5622 	/* MAX (X, 0) > -1  ->  true  */
5623 	return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624 
5625       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5626 	/* MIN (X, 0) > 0  ->  false
5627 	   MIN (X, 0) > 5  ->  false  */
5628 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629 
5630       else
5631 	/* MIN (X, 0) > -1  ->  X > -1  */
5632 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5633 
5634     default:
5635       return NULL_TREE;
5636     }
5637 }
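/* Editorial sketch, not part of the upstream source: only EQ_EXPR and
   GT_EXPR are folded directly above; the rest are derived, e.g.

       MIN (x, 10) >= 5   =>   MIN (x, 10) == 5 || MIN (x, 10) > 5
       MIN (x, 10) < 5    =>   ! (MIN (x, 10) >= 5)   =>   x < 5

   with the recursion bottoming out in the EQ_EXPR/GT_EXPR cases.  */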
5638 
5639 /* T is an integer expression that is being multiplied, divided, or taken a
5640    modulus (CODE says which and what kind of divide or modulus) by a
5641    constant C.  See if we can eliminate that operation by folding it with
5642    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5643    should be used for the computation if wider than our type.
5644 
5645    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5646    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5647    expression would not overflow or that overflow is undefined for the type
5648    in the language in question.
5649 
5650    If we return a non-null expression, it is an equivalent form of the
5651    original computation, but need not be in the original type.
5652 
5653    We set *STRICT_OVERFLOW_P to true if the return value depends on
5654    signed overflow being undefined.  Otherwise we do not change
5655    *STRICT_OVERFLOW_P.  */
5656 
5657 static tree
5658 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5659 		bool *strict_overflow_p)
5660 {
5661   /* To avoid exponential search depth, refuse to allow recursion past
5662      three levels.  Beyond that (1) it's highly unlikely that we'll find
5663      something interesting and (2) we've probably processed it before
5664      when we built the inner expression.  */
5665 
5666   static int depth;
5667   tree ret;
5668 
5669   if (depth > 3)
5670     return NULL;
5671 
5672   depth++;
5673   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5674   depth--;
5675 
5676   return ret;
5677 }
5678 
5679 static tree
5680 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5681 		  bool *strict_overflow_p)
5682 {
5683   tree type = TREE_TYPE (t);
5684   enum tree_code tcode = TREE_CODE (t);
5685   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5686 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5687 		? wide_type : type);
5688   tree t1, t2;
5689   int same_p = tcode == code;
5690   tree op0 = NULL_TREE, op1 = NULL_TREE;
5691   bool sub_strict_overflow_p;
5692 
5693   /* Don't deal with constants of zero here; they confuse the code below.  */
5694   if (integer_zerop (c))
5695     return NULL_TREE;
5696 
5697   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5698     op0 = TREE_OPERAND (t, 0);
5699 
5700   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5701     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702 
5703   /* Note that we need not handle conditional operations here since fold
5704      already handles those cases.  So just do arithmetic here.  */
5705   switch (tcode)
5706     {
5707     case INTEGER_CST:
5708       /* For a constant, we can always simplify if we are a multiply
5709 	 or (for divide and modulus) if it is a multiple of our constant.  */
5710       if (code == MULT_EXPR
5711 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5712 	return const_binop (code, fold_convert (ctype, t),
5713 			    fold_convert (ctype, c));
5714       break;
5715 
5716     CASE_CONVERT: case NON_LVALUE_EXPR:
5717       /* If op0 is an expression ...  */
5718       if ((COMPARISON_CLASS_P (op0)
5719 	   || UNARY_CLASS_P (op0)
5720 	   || BINARY_CLASS_P (op0)
5721 	   || VL_EXP_CLASS_P (op0)
5722 	   || EXPRESSION_CLASS_P (op0))
5723 	  /* ... and has wrapping overflow, and its type is smaller
5724 	     than ctype, then we cannot pass through as widening.  */
5725 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5726 	       && (TYPE_PRECISION (ctype)
5727 	           > TYPE_PRECISION (TREE_TYPE (op0))))
5728 	      /* ... or this is a truncation (t is narrower than op0),
5729 		 then we cannot pass through this narrowing.  */
5730 	      || (TYPE_PRECISION (type)
5731 		  < TYPE_PRECISION (TREE_TYPE (op0)))
5732 	      /* ... or signedness changes for division or modulus,
5733 		 then we cannot pass through this conversion.  */
5734 	      || (code != MULT_EXPR
5735 		  && (TYPE_UNSIGNED (ctype)
5736 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
5737 	      /* ... or has undefined overflow while the converted to
5738 		 type has not, we cannot do the operation in the inner type
5739 		 as that would introduce undefined overflow.  */
5740 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5741 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
5742 	break;
5743 
5744       /* Pass the constant down and see if we can make a simplification.  If
5745 	 we can, replace this expression with the inner simplification for
5746 	 possible later conversion to our or some other type.  */
5747       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5748 	  && TREE_CODE (t2) == INTEGER_CST
5749 	  && !TREE_OVERFLOW (t2)
5750 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5751 					 code == MULT_EXPR
5752 					 ? ctype : NULL_TREE,
5753 					 strict_overflow_p))))
5754 	return t1;
5755       break;
5756 
5757     case ABS_EXPR:
5758       /* If widening the type changes it from signed to unsigned, then we
5759          must avoid building ABS_EXPR itself as unsigned.  */
5760       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5761         {
5762           tree cstype = (*signed_type_for) (ctype);
5763           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5764 	      != 0)
5765             {
5766               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5767               return fold_convert (ctype, t1);
5768             }
5769           break;
5770         }
5771       /* If the constant is negative, we cannot simplify this.  */
5772       if (tree_int_cst_sgn (c) == -1)
5773         break;
5774       /* FALLTHROUGH */
5775     case NEGATE_EXPR:
5776       /* For division and modulus, type can't be unsigned, as e.g.
5777 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5778 	 For signed types, even with wrapping overflow, this is fine.  */
5779       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5780 	break;
5781       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5782 	  != 0)
5783 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5784       break;
5785 
5786     case MIN_EXPR:  case MAX_EXPR:
5787       /* If widening the type changes the signedness, then we can't perform
5788 	 this optimization as that changes the result.  */
5789       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5790 	break;
5791 
5792       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5793       sub_strict_overflow_p = false;
5794       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5795 				&sub_strict_overflow_p)) != 0
5796 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5797 				   &sub_strict_overflow_p)) != 0)
5798 	{
5799 	  if (tree_int_cst_sgn (c) < 0)
5800 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5801 	  if (sub_strict_overflow_p)
5802 	    *strict_overflow_p = true;
5803 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5804 			      fold_convert (ctype, t2));
5805 	}
5806       break;
5807 
5808     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5809       /* If the second operand is constant, this is a multiplication
5810 	 or floor division, by a power of two, so we can treat it that
5811 	 way unless the multiplier or divisor overflows.  Signed
5812 	 left-shift overflow is implementation-defined rather than
5813 	 undefined in C90, so do not convert signed left shift into
5814 	 multiplication.  */
5815       if (TREE_CODE (op1) == INTEGER_CST
5816 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5817 	  /* const_binop may not detect overflow correctly,
5818 	     so check for it explicitly here.  */
5819 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5820 	  && TREE_INT_CST_HIGH (op1) == 0
5821 	  && 0 != (t1 = fold_convert (ctype,
5822 				      const_binop (LSHIFT_EXPR,
5823 						   size_one_node,
5824 						   op1)))
5825 	  && !TREE_OVERFLOW (t1))
5826 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5827 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5828 				       ctype,
5829 				       fold_convert (ctype, op0),
5830 				       t1),
5831 			       c, code, wide_type, strict_overflow_p);
5832       break;
5833 
5834     case PLUS_EXPR:  case MINUS_EXPR:
5835       /* See if we can eliminate the operation on both sides.  If we can, we
5836 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5837 	 cases where we can do anything are if the second operand is a
5838 	 constant.  */
5839       sub_strict_overflow_p = false;
5840       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5841       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5842       if (t1 != 0 && t2 != 0
5843 	  && (code == MULT_EXPR
5844 	      /* If not multiplication, we can only do this if both operands
5845 		 are divisible by c.  */
5846 	      || (multiple_of_p (ctype, op0, c)
5847 	          && multiple_of_p (ctype, op1, c))))
5848 	{
5849 	  if (sub_strict_overflow_p)
5850 	    *strict_overflow_p = true;
5851 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5852 			      fold_convert (ctype, t2));
5853 	}
5854 
5855       /* If this was a subtraction, negate OP1 and set it to be an addition.
5856 	 This simplifies the logic below.  */
5857       if (tcode == MINUS_EXPR)
5858 	{
5859 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
5860 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
5861 	  if (TREE_CODE (op0) == INTEGER_CST)
5862 	    {
5863 	      tree tem = op0;
5864 	      op0 = op1;
5865 	      op1 = tem;
5866 	      tem = t1;
5867 	      t1 = t2;
5868 	      t2 = tem;
5869 	    }
5870 	}
5871 
5872       if (TREE_CODE (op1) != INTEGER_CST)
5873 	break;
5874 
5875       /* If either OP1 or C are negative, this optimization is not safe for
5876 	 some of the division and remainder types while for others we need
5877 	 to change the code.  */
5878       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5879 	{
5880 	  if (code == CEIL_DIV_EXPR)
5881 	    code = FLOOR_DIV_EXPR;
5882 	  else if (code == FLOOR_DIV_EXPR)
5883 	    code = CEIL_DIV_EXPR;
5884 	  else if (code != MULT_EXPR
5885 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5886 	    break;
5887 	}
5888 
5889       /* If it's a multiply or a division/modulus operation of a multiple
5890          of our constant, do the operation and verify it doesn't overflow.  */
5891       if (code == MULT_EXPR
5892 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5893 	{
5894 	  op1 = const_binop (code, fold_convert (ctype, op1),
5895 			     fold_convert (ctype, c));
5896 	  /* We allow the constant to overflow with wrapping semantics.  */
5897 	  if (op1 == 0
5898 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5899 	    break;
5900 	}
5901       else
5902 	break;
5903 
5904       /* If we have an unsigned type, we cannot widen the operation since it
5905 	 will change the result if the original computation overflowed.  */
5906       if (TYPE_UNSIGNED (ctype) && ctype != type)
5907 	break;
5908 
5909       /* If we were able to eliminate our operation from the first side,
5910 	 apply our operation to the second side and reform the PLUS.  */
5911       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5912 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5913 
5914       /* The last case is if we are a multiply.  In that case, we can
5915 	 apply the distributive law to commute the multiply and addition
5916 	 if the multiplication of the constants doesn't overflow
5917 	 and overflow is defined.  With undefined overflow
5918 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
5919       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5920 	return fold_build2 (tcode, ctype,
5921 			    fold_build2 (code, ctype,
5922 					 fold_convert (ctype, op0),
5923 					 fold_convert (ctype, c)),
5924 			    op1);
5925 
5926       break;
5927 
5928     case MULT_EXPR:
5929       /* We have a special case here if we are doing something like
5930 	 (C * 8) % 4 since we know that's zero.  */
5931       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 	  /* If the multiplication can overflow we cannot optimize this.  */
5934 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5935 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5936 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5937 	{
5938 	  *strict_overflow_p = true;
5939 	  return omit_one_operand (type, integer_zero_node, op0);
5940 	}
5941 
5942       /* ... fall through ...  */
5943 
5944     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5945     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5946       /* If we can extract our operation from the LHS, do so and return a
5947 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5948 	 do something only if the second operand is a constant.  */
5949       if (same_p
5950 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5951 				   strict_overflow_p)) != 0)
5952 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5953 			    fold_convert (ctype, op1));
5954       else if (tcode == MULT_EXPR && code == MULT_EXPR
5955 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5956 					strict_overflow_p)) != 0)
5957 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5958 			    fold_convert (ctype, t1));
5959       else if (TREE_CODE (op1) != INTEGER_CST)
5960 	return 0;
5961 
5962       /* If these are the same operation types, we can associate them
5963 	 assuming no overflow.  */
5964       if (tcode == code)
5965 	{
5966 	  double_int mul;
5967 	  bool overflow_p;
5968 	  unsigned prec = TYPE_PRECISION (ctype);
5969 	  bool uns = TYPE_UNSIGNED (ctype);
5970 	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5971 	  double_int dic = tree_to_double_int (c).ext (prec, uns);
5972 	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
5973 	  overflow_p = ((!uns && overflow_p)
5974 			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5975 	  if (!double_int_fits_to_tree_p (ctype, mul)
5976 	      && ((uns && tcode != MULT_EXPR) || !uns))
5977 	    overflow_p = 1;
5978 	  if (!overflow_p)
5979 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5980 				double_int_to_tree (ctype, mul));
5981 	}
5982 
5983       /* If these operations "cancel" each other, we have the main
5984 	 optimizations of this pass, which occur when either constant is a
5985 	 multiple of the other, in which case we replace this with a single
5986 	 operation in either CODE or TCODE.
5987 
5988 	 If we have an unsigned type, we cannot do this since it will change
5989 	 the result if the original computation overflowed.  */
5990       if (TYPE_OVERFLOW_UNDEFINED (ctype)
5991 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5992 	      || (tcode == MULT_EXPR
5993 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5994 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5995 		  && code != MULT_EXPR)))
5996 	{
5997 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5998 	    {
5999 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 		*strict_overflow_p = true;
6001 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6002 				  fold_convert (ctype,
6003 						const_binop (TRUNC_DIV_EXPR,
6004 							     op1, c)));
6005 	    }
6006 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6007 	    {
6008 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6009 		*strict_overflow_p = true;
6010 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6011 				  fold_convert (ctype,
6012 						const_binop (TRUNC_DIV_EXPR,
6013 							     c, op1)));
6014 	    }
6015 	}
6016       break;
6017 
6018     default:
6019       break;
6020     }
6021 
6022   return 0;
6023 }
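/* Editorial sketch, not part of the upstream source: with signed operands
   (where overflow is undefined), extract_muldiv can rewrite e.g.

       (x * 8 + y * 16) / 4   =>   x * 2 + y * 4
       (x * 4) % 4            =>   0   (and sets *STRICT_OVERFLOW_P)

   while unsigned variants are refused wherever widening or reassociation
   could change a wrapped result.  */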
6024 
6025 /* Return a node which has the indicated constant VALUE (either 0 or
6026    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6027    and is of the indicated TYPE.  */
6028 
6029 tree
6030 constant_boolean_node (bool value, tree type)
6031 {
6032   if (type == integer_type_node)
6033     return value ? integer_one_node : integer_zero_node;
6034   else if (type == boolean_type_node)
6035     return value ? boolean_true_node : boolean_false_node;
6036   else if (TREE_CODE (type) == VECTOR_TYPE)
6037     return build_vector_from_val (type,
6038 				  build_int_cst (TREE_TYPE (type),
6039 						 value ? -1 : 0));
6040   else
6041     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6042 }
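/* Editorial note, not part of the upstream source: the vector convention
   follows vector comparisons, which produce all-ones in each true lane, so
   for a 4-lane integer vector type the "true" node is { -1, -1, -1, -1 }.  */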
6043 
6044 
6045 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6046    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6047    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6048    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6049    COND is the first argument to CODE; otherwise (as in the example
6050    given here), it is the second argument.  TYPE is the type of the
6051    original expression.  Return NULL_TREE if no simplification is
6052    possible.  */
6053 
6054 static tree
6055 fold_binary_op_with_conditional_arg (location_t loc,
6056 				     enum tree_code code,
6057 				     tree type, tree op0, tree op1,
6058 				     tree cond, tree arg, int cond_first_p)
6059 {
6060   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6061   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6062   tree test, true_value, false_value;
6063   tree lhs = NULL_TREE;
6064   tree rhs = NULL_TREE;
6065   enum tree_code cond_code = COND_EXPR;
6066 
6067   if (TREE_CODE (cond) == COND_EXPR
6068       || TREE_CODE (cond) == VEC_COND_EXPR)
6069     {
6070       test = TREE_OPERAND (cond, 0);
6071       true_value = TREE_OPERAND (cond, 1);
6072       false_value = TREE_OPERAND (cond, 2);
6073       /* If an arm has void type (e.g. it throws an exception), it does
6074 	 not make sense to try to perform a logical or arithmetic
6075 	 operation involving it.  */
6076       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6077 	lhs = true_value;
6078       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6079 	rhs = false_value;
6080     }
6081   else
6082     {
6083       tree testtype = TREE_TYPE (cond);
6084       test = cond;
6085       true_value = constant_boolean_node (true, testtype);
6086       false_value = constant_boolean_node (false, testtype);
6087     }
6088 
6089   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6090     cond_code = VEC_COND_EXPR;
6091 
6092   /* This transformation is only worthwhile if we don't have to wrap ARG
6093      in a SAVE_EXPR and the operation can be simplified without recursing
6094      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6095   if (!TREE_CONSTANT (arg)
6096       && (TREE_SIDE_EFFECTS (arg)
6097 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6098 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6099     return NULL_TREE;
6100 
6101   arg = fold_convert_loc (loc, arg_type, arg);
6102   if (lhs == 0)
6103     {
6104       true_value = fold_convert_loc (loc, cond_type, true_value);
6105       if (cond_first_p)
6106 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6107       else
6108 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6109     }
6110   if (rhs == 0)
6111     {
6112       false_value = fold_convert_loc (loc, cond_type, false_value);
6113       if (cond_first_p)
6114 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6115       else
6116 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6117     }
6118 
6119   /* Check that we have simplified at least one of the branches.  */
6120   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6121     return NULL_TREE;
6122 
6123   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6124 }
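/* Editorial sketch, not part of the upstream source: with a constant ARG
   the distribution pays off at once, e.g.

       1 + (x < y)   =>   (x < y) ? 2 : 1

   since both new arms fold to constants.  A non-constant ARG with side
   effects is rejected above because it would otherwise appear, and so be
   evaluated, in both arms.  */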
6125 
6126 
6127 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6128 
6129    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6130    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6131    ADDEND is the same as X.
6132 
6133    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6134    and finite.  The problematic cases are when X is zero, and its mode
6135    has signed zeros.  In the case of rounding towards -infinity,
6136    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6137    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6138 
6139 bool
6140 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6141 {
6142   if (!real_zerop (addend))
6143     return false;
6144 
6145   /* Don't allow the fold with -fsignaling-nans.  */
6146   if (HONOR_SNANS (TYPE_MODE (type)))
6147     return false;
6148 
6149   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6150   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6151     return true;
6152 
6153   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6154   if (TREE_CODE (addend) == REAL_CST
6155       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6156     negate = !negate;
6157 
6158   /* The mode has signed zeros, and we have to honor their sign.
6159      In this situation, there is only one case we can return true for.
6160      X - 0 is the same as X unless rounding towards -infinity is
6161      supported.  */
6162   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6163 }
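/* Editorial sketch, not part of the upstream source, assuming IEEE
   semantics: with signed zeros honored, x + 0.0 cannot fold to x because
   (-0.0) + 0.0 is +0.0; x - 0.0 still folds unless sign-dependent rounding
   is honored, since rounding toward -infinity makes 0.0 - 0.0 yield -0.0.
   Hence with default flags only the subtraction form folds, and with
   -fno-signed-zeros both forms do.  */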
6164 
6165 /* Subroutine of fold() that checks comparisons of built-in math
6166    functions against real constants.
6167 
6168    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6169    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6170    is the type of the result and ARG0 and ARG1 are the operands of the
6171    comparison.  ARG1 must be a TREE_REAL_CST.
6172 
6173    The function returns the constant folded tree if a simplification
6174    can be made, and NULL_TREE otherwise.  */
6175 
6176 static tree
6177 fold_mathfn_compare (location_t loc,
6178 		     enum built_in_function fcode, enum tree_code code,
6179 		     tree type, tree arg0, tree arg1)
6180 {
6181   REAL_VALUE_TYPE c;
6182 
6183   if (BUILTIN_SQRT_P (fcode))
6184     {
6185       tree arg = CALL_EXPR_ARG (arg0, 0);
6186       enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6187 
6188       c = TREE_REAL_CST (arg1);
6189       if (REAL_VALUE_NEGATIVE (c))
6190 	{
6191 	  /* sqrt(x) == y, < y or <= y is always false, if y is negative.  */
6192 	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6193 	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6194 
6195 	  /* sqrt(x) > y is always true, if y is negative and we
6196 	     don't care about NaNs, i.e. negative values of x.  */
6197 	  if (code == NE_EXPR || !HONOR_NANS (mode))
6198 	    return omit_one_operand_loc (loc, type, integer_one_node, arg);
6199 
6200 	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6201 	  return fold_build2_loc (loc, GE_EXPR, type, arg,
6202 			      build_real (TREE_TYPE (arg), dconst0));
6203 	}
6204       else if (code == GT_EXPR || code == GE_EXPR)
6205 	{
6206 	  REAL_VALUE_TYPE c2;
6207 
6208 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6209 	  real_convert (&c2, mode, &c2);
6210 
6211 	  if (REAL_VALUE_ISINF (c2))
6212 	    {
6213 	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6214 	      if (HONOR_INFINITIES (mode))
6215 		return fold_build2_loc (loc, EQ_EXPR, type, arg,
6216 				    build_real (TREE_TYPE (arg), c2));
6217 
6218 	      /* sqrt(x) > y is always false, when y is very large
6219 		 and we don't care about infinities.  */
6220 	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6221 	    }
6222 
6223 	  /* sqrt(x) > c is the same as x > c*c.  */
6224 	  return fold_build2_loc (loc, code, type, arg,
6225 			      build_real (TREE_TYPE (arg), c2));
6226 	}
6227       else if (code == LT_EXPR || code == LE_EXPR)
6228 	{
6229 	  REAL_VALUE_TYPE c2;
6230 
6231 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6232 	  real_convert (&c2, mode, &c2);
6233 
6234 	  if (REAL_VALUE_ISINF (c2))
6235 	    {
6236 	      /* sqrt(x) < y is always true, when y is a very large
6237 		 value and we don't care about NaNs or Infinities.  */
6238 	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6239 		return omit_one_operand_loc (loc, type, integer_one_node, arg);
6240 
6241 	      /* sqrt(x) < y is x != +Inf when y is very large and we
6242 		 don't care about NaNs.  */
6243 	      if (! HONOR_NANS (mode))
6244 		return fold_build2_loc (loc, NE_EXPR, type, arg,
6245 				    build_real (TREE_TYPE (arg), c2));
6246 
6247 	      /* sqrt(x) < y is x >= 0 when y is very large and we
6248 		 don't care about Infinities.  */
6249 	      if (! HONOR_INFINITIES (mode))
6250 		return fold_build2_loc (loc, GE_EXPR, type, arg,
6251 				    build_real (TREE_TYPE (arg), dconst0));
6252 
6253 	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6254 	      arg = save_expr (arg);
6255 	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6256 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6257 					       build_real (TREE_TYPE (arg),
6258 							   dconst0)),
6259 				  fold_build2_loc (loc, NE_EXPR, type, arg,
6260 					       build_real (TREE_TYPE (arg),
6261 							   c2)));
6262 	    }
6263 
6264 	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6265 	  if (! HONOR_NANS (mode))
6266 	    return fold_build2_loc (loc, code, type, arg,
6267 				build_real (TREE_TYPE (arg), c2));
6268 
6269 	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6270 	  arg = save_expr (arg);
6271 	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6272 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6273 					       build_real (TREE_TYPE (arg),
6274 							   dconst0)),
6275 				  fold_build2_loc (loc, code, type, arg,
6276 					       build_real (TREE_TYPE (arg),
6277 							   c2)));
6278 	}
6279     }
6280 
6281   return NULL_TREE;
6282 }
6283 
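/* Editor's illustration of the sqrt folds above (hypothetical user
   code, not from this file), assuming the caller's unsafe-math guards
   are satisfied, e.g. under -ffast-math:

     int f (double x) { return __builtin_sqrt (x) > 3.0; }

   folds to 'x > 9.0' when NaNs and infinities are ignored, while
   'sqrt (x) < -1.0' folds to constant 0.  With NaNs honored,
   'sqrt (x) < 3.0' becomes 'x >= 0.0 && x < 9.0' via the save_expr
   path.  */
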
6284 /* Subroutine of fold() that optimizes comparisons against Infinities,
6285    either +Inf or -Inf.
6286 
6287    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6288    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6289    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6290 
6291    The function returns the constant folded tree if a simplification
6292    can be made, and NULL_TREE otherwise.  */
6293 
6294 static tree
6295 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6296 		  tree arg0, tree arg1)
6297 {
6298   enum machine_mode mode;
6299   REAL_VALUE_TYPE max;
6300   tree temp;
6301   bool neg;
6302 
6303   mode = TYPE_MODE (TREE_TYPE (arg0));
6304 
6305   /* For negative infinity swap the sense of the comparison.  */
6306   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6307   if (neg)
6308     code = swap_tree_comparison (code);
6309 
6310   switch (code)
6311     {
6312     case GT_EXPR:
6313       /* x > +Inf is always false, if we ignore sNaNs.  */
6314       if (HONOR_SNANS (mode))
6315         return NULL_TREE;
6316       return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6317 
6318     case LE_EXPR:
6319       /* x <= +Inf is always true, if we don't care about NaNs.  */
6320       if (! HONOR_NANS (mode))
6321 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6322 
6323       /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
6324       arg0 = save_expr (arg0);
6325       return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6326 
6327     case EQ_EXPR:
6328     case GE_EXPR:
6329       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6330       real_maxval (&max, neg, mode);
6331       return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6332 			  arg0, build_real (TREE_TYPE (arg0), max));
6333 
6334     case LT_EXPR:
6335       /* x < +Inf is always equal to x <= DBL_MAX.  */
6336       real_maxval (&max, neg, mode);
6337       return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6338 			  arg0, build_real (TREE_TYPE (arg0), max));
6339 
6340     case NE_EXPR:
6341       /* x != +Inf is always equal to !(x > DBL_MAX).  */
6342       real_maxval (&max, neg, mode);
6343       if (! HONOR_NANS (mode))
6344 	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6345 			    arg0, build_real (TREE_TYPE (arg0), max));
6346 
6347       temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6348 			  arg0, build_real (TREE_TYPE (arg0), max));
6349       return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6350 
6351     default:
6352       break;
6353     }
6354 
6355   return NULL_TREE;
6356 }
6357 
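/* Editor's illustration (not from the source), assuming IEEE double:

     int f (double x) { return x < __builtin_inf (); }

   folds to 'x <= DBL_MAX' unconditionally, while 'x <= +Inf' folds to
   constant 1 under -ffinite-math-only and to 'x == x' (i.e. !isnan)
   when NaNs are honored.  */
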
6358 /* Subroutine of fold() that optimizes comparisons of a division by
6359    a nonzero integer constant against an integer constant, i.e.
6360    X/C1 op C2.
6361 
6362    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6363    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6364    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6365 
6366    The function returns the constant folded tree if a simplification
6367    can be made, and NULL_TREE otherwise.  */
6368 
6369 static tree
6370 fold_div_compare (location_t loc,
6371 		  enum tree_code code, tree type, tree arg0, tree arg1)
6372 {
6373   tree prod, tmp, hi, lo;
6374   tree arg00 = TREE_OPERAND (arg0, 0);
6375   tree arg01 = TREE_OPERAND (arg0, 1);
6376   double_int val;
6377   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6378   bool neg_overflow;
6379   bool overflow;
6380 
6381   /* We have to do this the hard way to detect unsigned overflow.
6382      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6383   val = TREE_INT_CST (arg01)
6384 	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6385   prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6386   neg_overflow = false;
6387 
6388   if (unsigned_p)
6389     {
6390       tmp = int_const_binop (MINUS_EXPR, arg01,
6391                              build_int_cst (TREE_TYPE (arg01), 1));
6392       lo = prod;
6393 
6394       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6395       val = TREE_INT_CST (prod)
6396 	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6397       hi = force_fit_type_double (TREE_TYPE (arg00), val,
6398 				  -1, overflow | TREE_OVERFLOW (prod));
6399     }
6400   else if (tree_int_cst_sgn (arg01) >= 0)
6401     {
6402       tmp = int_const_binop (MINUS_EXPR, arg01,
6403 			     build_int_cst (TREE_TYPE (arg01), 1));
6404       switch (tree_int_cst_sgn (arg1))
6405 	{
6406 	case -1:
6407 	  neg_overflow = true;
6408 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6409 	  hi = prod;
6410 	  break;
6411 
6412 	case  0:
6413 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 	  hi = tmp;
6415 	  break;
6416 
6417 	case  1:
6418           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6419 	  lo = prod;
6420 	  break;
6421 
6422 	default:
6423 	  gcc_unreachable ();
6424 	}
6425     }
6426   else
6427     {
6428       /* A negative divisor reverses the relational operators.  */
6429       code = swap_tree_comparison (code);
6430 
6431       tmp = int_const_binop (PLUS_EXPR, arg01,
6432 			     build_int_cst (TREE_TYPE (arg01), 1));
6433       switch (tree_int_cst_sgn (arg1))
6434 	{
6435 	case -1:
6436 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6437 	  lo = prod;
6438 	  break;
6439 
6440 	case  0:
6441 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 	  lo = tmp;
6443 	  break;
6444 
6445 	case  1:
6446 	  neg_overflow = true;
6447 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6448 	  hi = prod;
6449 	  break;
6450 
6451 	default:
6452 	  gcc_unreachable ();
6453 	}
6454     }
6455 
6456   switch (code)
6457     {
6458     case EQ_EXPR:
6459       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6461       if (TREE_OVERFLOW (hi))
6462 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6463       if (TREE_OVERFLOW (lo))
6464 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6465       return build_range_check (loc, type, arg00, 1, lo, hi);
6466 
6467     case NE_EXPR:
6468       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6470       if (TREE_OVERFLOW (hi))
6471 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6472       if (TREE_OVERFLOW (lo))
6473 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6474       return build_range_check (loc, type, arg00, 0, lo, hi);
6475 
6476     case LT_EXPR:
6477       if (TREE_OVERFLOW (lo))
6478 	{
6479 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6481 	}
6482       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6483 
6484     case LE_EXPR:
6485       if (TREE_OVERFLOW (hi))
6486 	{
6487 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6489 	}
6490       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6491 
6492     case GT_EXPR:
6493       if (TREE_OVERFLOW (hi))
6494 	{
6495 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6497 	}
6498       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6499 
6500     case GE_EXPR:
6501       if (TREE_OVERFLOW (lo))
6502 	{
6503 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6505 	}
6506       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6507 
6508     default:
6509       break;
6510     }
6511 
6512   return NULL_TREE;
6513 }
6514 
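/* Editor's worked example for the range construction above
   (illustrative, signed 32-bit int assumed): for 'x / 3 == 2' we get
   prod = 6, tmp = 2, lo = 6 and hi = 8, so the comparison folds to
   the range check '6 <= x && x <= 8' -- exactly the x for which
   truncating division by 3 yields 2.  */
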
6515 
6516 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6517    equality/inequality test, then return a simplified form of the test
6518    using a sign test.  Otherwise return NULL.  TYPE is the desired
6519    result type.  */
6520 
6521 static tree
6522 fold_single_bit_test_into_sign_test (location_t loc,
6523 				     enum tree_code code, tree arg0, tree arg1,
6524 				     tree result_type)
6525 {
6526   /* If this is testing a single bit, we can optimize the test.  */
6527   if ((code == NE_EXPR || code == EQ_EXPR)
6528       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6529       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6530     {
6531       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6532 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6533       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6534 
6535       if (arg00 != NULL_TREE
6536 	  /* This is only a win if casting to a signed type is cheap,
6537 	     i.e. when arg00's type is not a partial mode.  */
6538 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6539 	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6540 	{
6541 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6542 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 			      result_type,
6544 			      fold_convert_loc (loc, stype, arg00),
6545 			      build_int_cst (stype, 0));
6546 	}
6547     }
6548 
6549   return NULL_TREE;
6550 }
6551 
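/* Editor's illustration (not from the source): for an 8-bit unsigned
   char c, '(c & 0x80) != 0' tests exactly the sign bit, so it is
   rewritten as '(signed char) c < 0'; the '==' form becomes
   '(signed char) c >= 0'.  */
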
6552 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6553    equality/inequality test, then return a simplified form of
6554    the test using shifts and logical operations.  Otherwise return
6555    NULL.  TYPE is the desired result type.  */
6556 
6557 tree
6558 fold_single_bit_test (location_t loc, enum tree_code code,
6559 		      tree arg0, tree arg1, tree result_type)
6560 {
6561   /* If this is testing a single bit, we can optimize the test.  */
6562   if ((code == NE_EXPR || code == EQ_EXPR)
6563       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6564       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565     {
6566       tree inner = TREE_OPERAND (arg0, 0);
6567       tree type = TREE_TYPE (arg0);
6568       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6569       enum machine_mode operand_mode = TYPE_MODE (type);
6570       int ops_unsigned;
6571       tree signed_type, unsigned_type, intermediate_type;
6572       tree tem, one;
6573 
6574       /* First, see if we can fold the single bit test into a sign-bit
6575 	 test.  */
6576       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6577 						 result_type);
6578       if (tem)
6579 	return tem;
6580 
6581       /* Otherwise we have (A & C) != 0 where C is a single bit,
6582 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6583 	 Similarly for (A & C) == 0.  */
6584 
6585       /* If INNER is a right shift of a constant and it plus BITNUM does
6586 	 not overflow, adjust BITNUM and INNER.  */
6587       if (TREE_CODE (inner) == RSHIFT_EXPR
6588 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6589 	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6590 	  && bitnum < TYPE_PRECISION (type)
6591 	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6592 				   TYPE_PRECISION (type) - bitnum))
6593 	{
6594 	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6595 	  inner = TREE_OPERAND (inner, 0);
6596 	}
6597 
6598       /* If we are going to be able to omit the AND below, we must do our
6599 	 operations as unsigned.  If we must use the AND, we have a choice.
6600 	 Normally unsigned is faster, but for some machines signed is.  */
6601 #ifdef LOAD_EXTEND_OP
6602       ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 		      && !flag_syntax_only) ? 0 : 1;
6604 #else
6605       ops_unsigned = 1;
6606 #endif
6607 
6608       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611       inner = fold_convert_loc (loc, intermediate_type, inner);
6612 
6613       if (bitnum != 0)
6614 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 			inner, size_int (bitnum));
6616 
6617       one = build_int_cst (intermediate_type, 1);
6618 
6619       if (code == EQ_EXPR)
6620 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6621 
6622       /* Put the AND last so it can combine with more things.  */
6623       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6624 
6625       /* Make sure to return the proper type.  */
6626       inner = fold_convert_loc (loc, result_type, inner);
6627 
6628       return inner;
6629     }
6630   return NULL_TREE;
6631 }
6632 
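/* Editor's illustration of the shift rewrite above: for a 32-bit
   unsigned x, '(x & 8) != 0' becomes '(x >> 3) & 1', and the '=='
   form gains an XOR with 1 before the final AND, i.e.
   '((x >> 3) ^ 1) & 1'.  */
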
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634    such that the evaluation of arg1 occurs before arg0.  */
6635 
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6638 {
6639   if (! flag_evaluation_order)
6640     return true;
6641   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642     return true;
6643   return ! TREE_SIDE_EFFECTS (arg0)
6644 	 && ! TREE_SIDE_EFFECTS (arg1);
6645 }
6646 
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648    ARG1, for example because ARG0 is an integer constant and ARG1
6649    isn't.  If REORDER is true, only recommend swapping if we can
6650    evaluate the operands in reverse order.  */
6651 
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6654 {
6655   STRIP_SIGN_NOPS (arg0);
6656   STRIP_SIGN_NOPS (arg1);
6657 
6658   if (TREE_CODE (arg1) == INTEGER_CST)
6659     return 0;
6660   if (TREE_CODE (arg0) == INTEGER_CST)
6661     return 1;
6662 
6663   if (TREE_CODE (arg1) == REAL_CST)
6664     return 0;
6665   if (TREE_CODE (arg0) == REAL_CST)
6666     return 1;
6667 
6668   if (TREE_CODE (arg1) == FIXED_CST)
6669     return 0;
6670   if (TREE_CODE (arg0) == FIXED_CST)
6671     return 1;
6672 
6673   if (TREE_CODE (arg1) == COMPLEX_CST)
6674     return 0;
6675   if (TREE_CODE (arg0) == COMPLEX_CST)
6676     return 1;
6677 
6678   if (TREE_CONSTANT (arg1))
6679     return 0;
6680   if (TREE_CONSTANT (arg0))
6681     return 1;
6682 
6683   if (optimize_function_for_size_p (cfun))
6684     return 0;
6685 
6686   if (reorder && flag_evaluation_order
6687       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6688     return 0;
6689 
6690   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6691      for commutative and comparison operators.  Ensuring a canonical
6692      form allows the optimizers to find additional redundancies without
6693      having to explicitly check for both orderings.  */
6694   if (TREE_CODE (arg0) == SSA_NAME
6695       && TREE_CODE (arg1) == SSA_NAME
6696       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6697     return 1;
6698 
6699   /* Put SSA_NAMEs last.  */
6700   if (TREE_CODE (arg1) == SSA_NAME)
6701     return 0;
6702   if (TREE_CODE (arg0) == SSA_NAME)
6703     return 1;
6704 
6705   /* Put variables last.  */
6706   if (DECL_P (arg1))
6707     return 0;
6708   if (DECL_P (arg0))
6709     return 1;
6710 
6711   return 0;
6712 }
6713 
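/* Editor's usage sketch (hypothetical): for the comparison '5 < x',
   ARG0 is an INTEGER_CST, so this returns true and a caller honoring
   the hint canonicalizes the tree to 'x > 5', putting the constant
   second.  Two SSA_NAMEs are likewise ordered by version number so
   each pair has a single canonical operand order.  */
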
6714 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6715    ARG0 is extended to a wider type.  */
6716 
6717 static tree
6718 fold_widened_comparison (location_t loc, enum tree_code code,
6719 			 tree type, tree arg0, tree arg1)
6720 {
6721   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6722   tree arg1_unw;
6723   tree shorter_type, outer_type;
6724   tree min, max;
6725   bool above, below;
6726 
6727   if (arg0_unw == arg0)
6728     return NULL_TREE;
6729   shorter_type = TREE_TYPE (arg0_unw);
6730 
6731 #ifdef HAVE_canonicalize_funcptr_for_compare
6732   /* Disable this optimization if we're casting a function pointer
6733      type on targets that require function pointer canonicalization.  */
6734   if (HAVE_canonicalize_funcptr_for_compare
6735       && TREE_CODE (shorter_type) == POINTER_TYPE
6736       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6737     return NULL_TREE;
6738 #endif
6739 
6740   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6741     return NULL_TREE;
6742 
6743   arg1_unw = get_unwidened (arg1, NULL_TREE);
6744 
6745   /* If possible, express the comparison in the shorter mode.  */
6746   if ((code == EQ_EXPR || code == NE_EXPR
6747        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6748       && (TREE_TYPE (arg1_unw) == shorter_type
6749 	  || ((TYPE_PRECISION (shorter_type)
6750 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6751 	      && (TYPE_UNSIGNED (shorter_type)
6752 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6753 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6754 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6755 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6756 	      && int_fits_type_p (arg1_unw, shorter_type))))
6757     return fold_build2_loc (loc, code, type, arg0_unw,
6758 			fold_convert_loc (loc, shorter_type, arg1_unw));
6759 
6760   if (TREE_CODE (arg1_unw) != INTEGER_CST
6761       || TREE_CODE (shorter_type) != INTEGER_TYPE
6762       || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6763     return NULL_TREE;
6764 
6765   /* If we are comparing with the integer that does not fit into the range
6766      of the shorter type, the result is known.  */
6767   outer_type = TREE_TYPE (arg1_unw);
6768   min = lower_bound_in_type (outer_type, shorter_type);
6769   max = upper_bound_in_type (outer_type, shorter_type);
6770 
6771   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6772 						   max, arg1_unw));
6773   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6774 						   arg1_unw, min));
6775 
6776   switch (code)
6777     {
6778     case EQ_EXPR:
6779       if (above || below)
6780 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781       break;
6782 
6783     case NE_EXPR:
6784       if (above || below)
6785 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6786       break;
6787 
6788     case LT_EXPR:
6789     case LE_EXPR:
6790       if (above)
6791 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6792       else if (below)
6793 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6794       break;

6795     case GT_EXPR:
6796     case GE_EXPR:
6797       if (above)
6798 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6799       else if (below)
6800 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6801       break;

6802     default:
6803       break;
6804     }
6805 
6806   return NULL_TREE;
6807 }
6808 
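/* Editor's illustration (not from the source): with a 16-bit short s,
   '(int) s == 100000' compares against a value above the range of
   short, so 'above' is true and the comparison folds to constant 0;
   '(int) s < 100000' likewise folds to constant 1.  */
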
6809 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6810    ARG0 just the signedness is changed.  */
6811 
6812 static tree
6813 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6814 			      tree arg0, tree arg1)
6815 {
6816   tree arg0_inner;
6817   tree inner_type, outer_type;
6818 
6819   if (!CONVERT_EXPR_P (arg0))
6820     return NULL_TREE;
6821 
6822   outer_type = TREE_TYPE (arg0);
6823   arg0_inner = TREE_OPERAND (arg0, 0);
6824   inner_type = TREE_TYPE (arg0_inner);
6825 
6826 #ifdef HAVE_canonicalize_funcptr_for_compare
6827   /* Disable this optimization if we're casting a function pointer
6828      type on targets that require function pointer canonicalization.  */
6829   if (HAVE_canonicalize_funcptr_for_compare
6830       && TREE_CODE (inner_type) == POINTER_TYPE
6831       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6832     return NULL_TREE;
6833 #endif
6834 
6835   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6836     return NULL_TREE;
6837 
6838   if (TREE_CODE (arg1) != INTEGER_CST
6839       && !(CONVERT_EXPR_P (arg1)
6840 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6841     return NULL_TREE;
6842 
6843   if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6844       && code != NE_EXPR
6845       && code != EQ_EXPR)
6846     return NULL_TREE;
6847 
6848   if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6849     return NULL_TREE;
6850 
6851   if (TREE_CODE (arg1) == INTEGER_CST)
6852     arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6853 				  0, TREE_OVERFLOW (arg1));
6854   else
6855     arg1 = fold_convert_loc (loc, inner_type, arg1);
6856 
6857   return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6858 }
6859 
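/* Editor's illustration (not from the source): for a signed int i,
   '(unsigned int) i == 5u' only changes the signedness of i, so it is
   rewritten as 'i == 5' with the constant refitted to the inner type.
   Ordering comparisons are deliberately skipped when the signedness
   differs, since '(unsigned) i < 5u' and 'i < 5' disagree for
   negative i.  */
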
6860 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6861    step of the array.  Reconstructs s and delta in the case of s *
6862    delta being an integer constant (and thus already folded).  ADDR is
6863    the address.  OP1 is the multiplicative expression.  If the
6864    function succeeds, the new address expression is returned.
6865    Otherwise NULL_TREE is returned.  LOC is the location of the
6866    resulting expression.  */
6867 
6868 static tree
6869 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6870 {
6871   tree s, delta, step;
6872   tree ref = TREE_OPERAND (addr, 0), pref;
6873   tree ret, pos;
6874   tree itype;
6875   bool mdim = false;
6876 
6877   /* Strip the nops that might be added when converting op1 to sizetype.  */
6878   STRIP_NOPS (op1);
6879 
6880   /* Canonicalize op1 into a possibly non-constant delta
6881      and an INTEGER_CST s.  */
6882   if (TREE_CODE (op1) == MULT_EXPR)
6883     {
6884       tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6885 
6886       STRIP_NOPS (arg0);
6887       STRIP_NOPS (arg1);
6888 
6889       if (TREE_CODE (arg0) == INTEGER_CST)
6890         {
6891           s = arg0;
6892           delta = arg1;
6893         }
6894       else if (TREE_CODE (arg1) == INTEGER_CST)
6895         {
6896           s = arg1;
6897           delta = arg0;
6898         }
6899       else
6900         return NULL_TREE;
6901     }
6902   else if (TREE_CODE (op1) == INTEGER_CST)
6903     {
6904       delta = op1;
6905       s = NULL_TREE;
6906     }
6907   else
6908     {
6909       /* Act as if op1 were delta * 1.  */
6910       delta = op1;
6911       s = integer_one_node;
6912     }
6913 
6914   /* Handle &x.array the same as we would handle &x.array[0].  */
6915   if (TREE_CODE (ref) == COMPONENT_REF
6916       && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6917     {
6918       tree domain;
6919 
6920       /* Remember if this was a multi-dimensional array.  */
6921       if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6922 	mdim = true;
6923 
6924       domain = TYPE_DOMAIN (TREE_TYPE (ref));
6925       if (! domain)
6926 	goto cont;
6927       itype = TREE_TYPE (domain);
6928 
6929       step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6930       if (TREE_CODE (step) != INTEGER_CST)
6931 	goto cont;
6932 
6933       if (s)
6934 	{
6935 	  if (! tree_int_cst_equal (step, s))
6936 	    goto cont;
6937 	}
6938       else
6939 	{
6940 	  /* Check whether delta is a multiple of step.  */
6941 	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6942 	  if (! tmp)
6943 	    goto cont;
6944 	  delta = tmp;
6945 	}
6946 
6947       /* Only fold here if we can verify we do not overflow one
6948 	 dimension of a multi-dimensional array.  */
6949       if (mdim)
6950 	{
6951 	  tree tmp;
6952 
6953 	  if (!TYPE_MIN_VALUE (domain)
6954 	      || !TYPE_MAX_VALUE (domain)
6955 	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6956 	    goto cont;
6957 
6958 	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6959 				 fold_convert_loc (loc, itype,
6960 						   TYPE_MIN_VALUE (domain)),
6961 				 fold_convert_loc (loc, itype, delta));
6962 	  if (TREE_CODE (tmp) != INTEGER_CST
6963 	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6964 	    goto cont;
6965 	}
6966 
6967       /* We found a suitable component reference.  */
6968 
6969       pref = TREE_OPERAND (addr, 0);
6970       ret = copy_node (pref);
6971       SET_EXPR_LOCATION (ret, loc);
6972 
6973       ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6974 			fold_build2_loc
6975 			  (loc, PLUS_EXPR, itype,
6976 			   fold_convert_loc (loc, itype,
6977 					     TYPE_MIN_VALUE
6978 					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
6979 			   fold_convert_loc (loc, itype, delta)),
6980 			NULL_TREE, NULL_TREE);
6981       return build_fold_addr_expr_loc (loc, ret);
6982     }
6983 
6984 cont:
6985 
6986   for (;; ref = TREE_OPERAND (ref, 0))
6987     {
6988       if (TREE_CODE (ref) == ARRAY_REF)
6989 	{
6990 	  tree domain;
6991 
6992 	  /* Remember if this was a multi-dimensional array.  */
6993 	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6994 	    mdim = true;
6995 
6996 	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6997 	  if (! domain)
6998 	    continue;
6999 	  itype = TREE_TYPE (domain);
7000 
7001 	  step = array_ref_element_size (ref);
7002 	  if (TREE_CODE (step) != INTEGER_CST)
7003 	    continue;
7004 
7005 	  if (s)
7006 	    {
7007 	      if (! tree_int_cst_equal (step, s))
7008                 continue;
7009 	    }
7010 	  else
7011 	    {
7012 	      /* Check whether delta is a multiple of step.  */
7013 	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7014 	      if (! tmp)
7015 		continue;
7016 	      delta = tmp;
7017 	    }
7018 
7019 	  /* Only fold here if we can verify we do not overflow one
7020 	     dimension of a multi-dimensional array.  */
7021 	  if (mdim)
7022 	    {
7023 	      tree tmp;
7024 
7025 	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7026 		  || !TYPE_MAX_VALUE (domain)
7027 		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7028 		continue;
7029 
7030 	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7031 				     fold_convert_loc (loc, itype,
7032 						       TREE_OPERAND (ref, 1)),
7033 				     fold_convert_loc (loc, itype, delta));
7034 	      if (!tmp
7035 		  || TREE_CODE (tmp) != INTEGER_CST
7036 		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7037 		continue;
7038 	    }
7039 
7040 	  break;
7041 	}
7042       else
7043 	mdim = false;
7044 
7045       if (!handled_component_p (ref))
7046 	return NULL_TREE;
7047     }
7048 
7049   /* We found a suitable array reference.  So copy everything up to it,
7050      and replace the index.  */
7051 
7052   pref = TREE_OPERAND (addr, 0);
7053   ret = copy_node (pref);
7054   SET_EXPR_LOCATION (ret, loc);
7055   pos = ret;
7056 
7057   while (pref != ref)
7058     {
7059       pref = TREE_OPERAND (pref, 0);
7060       TREE_OPERAND (pos, 0) = copy_node (pref);
7061       pos = TREE_OPERAND (pos, 0);
7062     }
7063 
7064   TREE_OPERAND (pos, 1)
7065     = fold_build2_loc (loc, PLUS_EXPR, itype,
7066 		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7067 		       fold_convert_loc (loc, itype, delta));
7068   return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7069 }
7070 
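/* Editor's illustration (not from the source, 4-byte int assumed):
   for 'int a[10]', the element step is 4, so '&a[1] p+ 4 * d' is
   rewritten as '&a[1 + d]'; a constant offset such as '&a[1] p+ 8'
   likewise becomes '&a[3]', 8 being an exact multiple of the step.  */
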
7071 
7072 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7073    means A >= Y && A != MAX, but in this case we know that
7074    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7075 
7076 static tree
7077 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7078 {
7079   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7080 
7081   if (TREE_CODE (bound) == LT_EXPR)
7082     a = TREE_OPERAND (bound, 0);
7083   else if (TREE_CODE (bound) == GT_EXPR)
7084     a = TREE_OPERAND (bound, 1);
7085   else
7086     return NULL_TREE;
7087 
7088   typea = TREE_TYPE (a);
7089   if (!INTEGRAL_TYPE_P (typea)
7090       && !POINTER_TYPE_P (typea))
7091     return NULL_TREE;
7092 
7093   if (TREE_CODE (ineq) == LT_EXPR)
7094     {
7095       a1 = TREE_OPERAND (ineq, 1);
7096       y = TREE_OPERAND (ineq, 0);
7097     }
7098   else if (TREE_CODE (ineq) == GT_EXPR)
7099     {
7100       a1 = TREE_OPERAND (ineq, 0);
7101       y = TREE_OPERAND (ineq, 1);
7102     }
7103   else
7104     return NULL_TREE;
7105 
7106   if (TREE_TYPE (a1) != typea)
7107     return NULL_TREE;
7108 
7109   if (POINTER_TYPE_P (typea))
7110     {
7111       /* Convert the pointers to integers before taking the difference.  */
7112       tree ta = fold_convert_loc (loc, ssizetype, a);
7113       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7114       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7115     }
7116   else
7117     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7118 
7119   if (!diff || !integer_onep (diff))
7120    return NULL_TREE;
7121 
7122   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7123 }
7124 
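/* Editor's illustration (not from the source): given BOUND 'a < x'
   and INEQ 'a + 1 > y', the difference (a + 1) - a folds to 1, so the
   routine returns 'a >= y', turning 'a < x && a + 1 > y' into
   'a < x && a >= y' without the wraparound caveat at a == MAX.  */
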
7125 /* Fold a sum or difference of at least one multiplication.
7126    Returns the folded tree or NULL if no simplification could be made.  */
7127 
7128 static tree
7129 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7130 			  tree arg0, tree arg1)
7131 {
7132   tree arg00, arg01, arg10, arg11;
7133   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7134 
7135   /* (A * C) +- (B * C) -> (A+-B) * C.
7136      (A * C) +- A -> A * (C+-1).
7137      We are most concerned about the case where C is a constant,
7138      but other combinations show up during loop reduction.  Since
7139      it is not difficult, try all four possibilities.  */
7140 
7141   if (TREE_CODE (arg0) == MULT_EXPR)
7142     {
7143       arg00 = TREE_OPERAND (arg0, 0);
7144       arg01 = TREE_OPERAND (arg0, 1);
7145     }
7146   else if (TREE_CODE (arg0) == INTEGER_CST)
7147     {
7148       arg00 = build_one_cst (type);
7149       arg01 = arg0;
7150     }
7151   else
7152     {
7153       /* We cannot generate constant 1 for fract.  */
7154       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7155 	return NULL_TREE;
7156       arg00 = arg0;
7157       arg01 = build_one_cst (type);
7158     }
7159   if (TREE_CODE (arg1) == MULT_EXPR)
7160     {
7161       arg10 = TREE_OPERAND (arg1, 0);
7162       arg11 = TREE_OPERAND (arg1, 1);
7163     }
7164   else if (TREE_CODE (arg1) == INTEGER_CST)
7165     {
7166       arg10 = build_one_cst (type);
7167       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7168 	 the purpose of this canonicalization.  */
7169       if (TREE_INT_CST_HIGH (arg1) == -1
7170 	  && negate_expr_p (arg1)
7171 	  && code == PLUS_EXPR)
7172 	{
7173 	  arg11 = negate_expr (arg1);
7174 	  code = MINUS_EXPR;
7175 	}
7176       else
7177 	arg11 = arg1;
7178     }
7179   else
7180     {
7181       /* We cannot generate constant 1 for fract.  */
7182       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7183 	return NULL_TREE;
7184       arg10 = arg1;
7185       arg11 = build_one_cst (type);
7186     }
7187   same = NULL_TREE;
7188 
7189   if (operand_equal_p (arg01, arg11, 0))
7190     same = arg01, alt0 = arg00, alt1 = arg10;
7191   else if (operand_equal_p (arg00, arg10, 0))
7192     same = arg00, alt0 = arg01, alt1 = arg11;
7193   else if (operand_equal_p (arg00, arg11, 0))
7194     same = arg00, alt0 = arg01, alt1 = arg10;
7195   else if (operand_equal_p (arg01, arg10, 0))
7196     same = arg01, alt0 = arg00, alt1 = arg11;
7197 
7198   /* No identical multiplicands; see if we can find a common
7199      power-of-two factor in non-power-of-two multiplies.  This
7200      can help in multi-dimensional array access.  */
7201   else if (host_integerp (arg01, 0)
7202 	   && host_integerp (arg11, 0))
7203     {
7204       HOST_WIDE_INT int01, int11, tmp;
7205       bool swap = false;
7206       tree maybe_same;
7207       int01 = TREE_INT_CST_LOW (arg01);
7208       int11 = TREE_INT_CST_LOW (arg11);
7209 
7210       /* Move min of absolute values to int11.  */
7211       if (absu_hwi (int01) < absu_hwi (int11))
7212         {
7213 	  tmp = int01, int01 = int11, int11 = tmp;
7214 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7215 	  maybe_same = arg01;
7216 	  swap = true;
7217 	}
7218       else
7219 	maybe_same = arg11;
7220 
7221       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7222 	  /* The remainder should not be a constant, otherwise we
7223 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7224 	     increase the number of multiplications necessary.  */
7225 	  && TREE_CODE (arg10) != INTEGER_CST)
7226         {
7227 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7228 			      build_int_cst (TREE_TYPE (arg00),
7229 					     int01 / int11));
7230 	  alt1 = arg10;
7231 	  same = maybe_same;
7232 	  if (swap)
7233 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7234 	}
7235     }
7236 
7237   if (same)
7238     return fold_build2_loc (loc, MULT_EXPR, type,
7239 			fold_build2_loc (loc, code, type,
7240 				     fold_convert_loc (loc, type, alt0),
7241 				     fold_convert_loc (loc, type, alt1)),
7242 			fold_convert_loc (loc, type, same));
7243 
7244   return NULL_TREE;
7245 }
7246 
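/* Editor's worked examples (illustrative): 'a * c + b * c' factors to
   '(a + b) * c' through the operand_equal_p matches; on the
   power-of-two path, 'i * 12 + j * 4' has int11 = 4 dividing
   int01 = 12, so it becomes '(i * 3 + j) * 4', exposing the common
   factor that multi-dimensional array indexing produces.  */
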
7247 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7248    specified by EXPR into the buffer PTR of length LEN bytes.
7249    Return the number of bytes placed in the buffer, or zero
7250    upon failure.  */
7251 
7252 static int
7253 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7254 {
7255   tree type = TREE_TYPE (expr);
7256   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7257   int byte, offset, word, words;
7258   unsigned char value;
7259 
7260   if (total_bytes > len)
7261     return 0;
7262   words = total_bytes / UNITS_PER_WORD;
7263 
7264   for (byte = 0; byte < total_bytes; byte++)
7265     {
7266       int bitpos = byte * BITS_PER_UNIT;
7267       if (bitpos < HOST_BITS_PER_WIDE_INT)
7268 	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7269       else
7270 	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7271 				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7272 
7273       if (total_bytes > UNITS_PER_WORD)
7274 	{
7275 	  word = byte / UNITS_PER_WORD;
7276 	  if (WORDS_BIG_ENDIAN)
7277 	    word = (words - 1) - word;
7278 	  offset = word * UNITS_PER_WORD;
7279 	  if (BYTES_BIG_ENDIAN)
7280 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7281 	  else
7282 	    offset += byte % UNITS_PER_WORD;
7283 	}
7284       else
7285 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7286       ptr[offset] = value;
7287     }
7288   return total_bytes;
7289 }
7290 
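/* Editor's illustration (assuming a 32-bit int on a little-endian
   target with 8-bit units): encoding the INTEGER_CST 0x01020304
   writes the bytes 04 03 02 01 into PTR and returns 4; a big-endian
   target lays the same value out as 01 02 03 04.  */
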
7291 
7292 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7293    specified by EXPR into the buffer PTR of length LEN bytes.
7294    Return the number of bytes placed in the buffer, or zero
7295    upon failure.  */
7296 
7297 static int
7298 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7299 {
7300   tree type = TREE_TYPE (expr);
7301   enum machine_mode mode = TYPE_MODE (type);
7302   int total_bytes = GET_MODE_SIZE (mode);
7303   FIXED_VALUE_TYPE value;
7304   tree i_value, i_type;
7305 
7306   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7307     return 0;
7308 
7309   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7310 
7311   if (NULL_TREE == i_type
7312       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7313     return 0;
7314 
7315   value = TREE_FIXED_CST (expr);
7316   i_value = double_int_to_tree (i_type, value.data);
7317 
7318   return native_encode_int (i_value, ptr, len);
7319 }
7320 
7321 
7322 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7323    specified by EXPR into the buffer PTR of length LEN bytes.
7324    Return the number of bytes placed in the buffer, or zero
7325    upon failure.  */
7326 
7327 static int
7328 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7329 {
7330   tree type = TREE_TYPE (expr);
7331   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7332   int byte, offset, word, words, bitpos;
7333   unsigned char value;
7334 
7335   /* There are always 32 bits in each long, no matter the size of
7336      the host's long.  We handle floating point representations with
7337      up to 192 bits.  */
7338   long tmp[6];
7339 
7340   if (total_bytes > len)
7341     return 0;
7342   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7343 
7344   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7345 
7346   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347        bitpos += BITS_PER_UNIT)
7348     {
7349       byte = (bitpos / BITS_PER_UNIT) & 3;
7350       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7351 
7352       if (UNITS_PER_WORD < 4)
7353 	{
7354 	  word = byte / UNITS_PER_WORD;
7355 	  if (WORDS_BIG_ENDIAN)
7356 	    word = (words - 1) - word;
7357 	  offset = word * UNITS_PER_WORD;
7358 	  if (BYTES_BIG_ENDIAN)
7359 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 	  else
7361 	    offset += byte % UNITS_PER_WORD;
7362 	}
7363       else
7364 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7365       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7366     }
7367   return total_bytes;
7368 }
7369 
7370 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7371    specified by EXPR into the buffer PTR of length LEN bytes.
7372    Return the number of bytes placed in the buffer, or zero
7373    upon failure.  */
7374 
7375 static int
7376 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7377 {
7378   int rsize, isize;
7379   tree part;
7380 
7381   part = TREE_REALPART (expr);
7382   rsize = native_encode_expr (part, ptr, len);
7383   if (rsize == 0)
7384     return 0;
7385   part = TREE_IMAGPART (expr);
7386   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7387   if (isize != rsize)
7388     return 0;
7389   return rsize + isize;
7390 }
7391 
7392 
7393 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7394    specified by EXPR into the buffer PTR of length LEN bytes.
7395    Return the number of bytes placed in the buffer, or zero
7396    upon failure.  */
7397 
7398 static int
7399 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7400 {
7401   unsigned i, count;
7402   int size, offset;
7403   tree itype, elem;
7404 
7405   offset = 0;
7406   count = VECTOR_CST_NELTS (expr);
7407   itype = TREE_TYPE (TREE_TYPE (expr));
7408   size = GET_MODE_SIZE (TYPE_MODE (itype));
7409   for (i = 0; i < count; i++)
7410     {
7411       elem = VECTOR_CST_ELT (expr, i);
7412       if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7413 	return 0;
7414       offset += size;
7415     }
7416   return offset;
7417 }
7418 
7419 
7420 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7421    specified by EXPR into the buffer PTR of length LEN bytes.
7422    Return the number of bytes placed in the buffer, or zero
7423    upon failure.  */
7424 
7425 static int
7426 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7427 {
7428   tree type = TREE_TYPE (expr);
7429   HOST_WIDE_INT total_bytes;
7430 
7431   if (TREE_CODE (type) != ARRAY_TYPE
7432       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7433       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7434       || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7435     return 0;
7436   total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7437   if (total_bytes > len)
7438     return 0;
7439   if (TREE_STRING_LENGTH (expr) < total_bytes)
7440     {
7441       memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7442       memset (ptr + TREE_STRING_LENGTH (expr), 0,
7443 	      total_bytes - TREE_STRING_LENGTH (expr));
7444     }
7445   else
7446     memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7447   return total_bytes;
7448 }
7449 
7450 
7451 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7452    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7453    buffer PTR of length LEN bytes.  Return the number of bytes
7454    placed in the buffer, or zero upon failure.  */
7455 
7456 int
7457 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7458 {
7459   switch (TREE_CODE (expr))
7460     {
7461     case INTEGER_CST:
7462       return native_encode_int (expr, ptr, len);
7463 
7464     case REAL_CST:
7465       return native_encode_real (expr, ptr, len);
7466 
7467     case FIXED_CST:
7468       return native_encode_fixed (expr, ptr, len);
7469 
7470     case COMPLEX_CST:
7471       return native_encode_complex (expr, ptr, len);
7472 
7473     case VECTOR_CST:
7474       return native_encode_vector (expr, ptr, len);
7475 
7476     case STRING_CST:
7477       return native_encode_string (expr, ptr, len);
7478 
7479     default:
7480       return 0;
7481     }
7482 }
7483 
7484 
7485 /* Subroutine of native_interpret_expr.  Interpret the contents of
7486    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7487    If the buffer cannot be interpreted, return NULL_TREE.  */
7488 
7489 static tree
7490 native_interpret_int (tree type, const unsigned char *ptr, int len)
7491 {
7492   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7493   double_int result;
7494 
7495   if (total_bytes > len
7496       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7497     return NULL_TREE;
7498 
7499   result = double_int::from_buffer (ptr, total_bytes);
7500 
7501   return double_int_to_tree (type, result);
7502 }
7503 
7504 
7505 /* Subroutine of native_interpret_expr.  Interpret the contents of
7506    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7507    If the buffer cannot be interpreted, return NULL_TREE.  */
7508 
7509 static tree
7510 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7511 {
7512   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7513   double_int result;
7514   FIXED_VALUE_TYPE fixed_value;
7515 
7516   if (total_bytes > len
7517       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7518     return NULL_TREE;
7519 
7520   result = double_int::from_buffer (ptr, total_bytes);
7521   fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7522 
7523   return build_fixed (type, fixed_value);
7524 }
7525 
7526 
7527 /* Subroutine of native_interpret_expr.  Interpret the contents of
7528    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7529    If the buffer cannot be interpreted, return NULL_TREE.  */
7530 
7531 static tree
7532 native_interpret_real (tree type, const unsigned char *ptr, int len)
7533 {
7534   enum machine_mode mode = TYPE_MODE (type);
7535   int total_bytes = GET_MODE_SIZE (mode);
7536   int byte, offset, word, words, bitpos;
7537   unsigned char value;
7538   /* There are always 32 bits in each long, no matter the size of
7539      the host's long.  We handle floating point representations with
7540      up to 192 bits.  */
7541   REAL_VALUE_TYPE r;
7542   long tmp[6];
7543 
7545   if (total_bytes > len || total_bytes > 24)
7546     return NULL_TREE;
7547   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7548 
7549   memset (tmp, 0, sizeof (tmp));
7550   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7551        bitpos += BITS_PER_UNIT)
7552     {
7553       byte = (bitpos / BITS_PER_UNIT) & 3;
7554       if (UNITS_PER_WORD < 4)
7555 	{
7556 	  word = byte / UNITS_PER_WORD;
7557 	  if (WORDS_BIG_ENDIAN)
7558 	    word = (words - 1) - word;
7559 	  offset = word * UNITS_PER_WORD;
7560 	  if (BYTES_BIG_ENDIAN)
7561 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7562 	  else
7563 	    offset += byte % UNITS_PER_WORD;
7564 	}
7565       else
7566 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7567       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7568 
7569       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7570     }
7571 
7572   real_from_target (&r, tmp, mode);
7573   return build_real (type, r);
7574 }
7575 
7576 
7577 /* Subroutine of native_interpret_expr.  Interpret the contents of
7578    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7579    If the buffer cannot be interpreted, return NULL_TREE.  */
7580 
7581 static tree
7582 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7583 {
7584   tree etype, rpart, ipart;
7585   int size;
7586 
7587   etype = TREE_TYPE (type);
7588   size = GET_MODE_SIZE (TYPE_MODE (etype));
7589   if (size * 2 > len)
7590     return NULL_TREE;
7591   rpart = native_interpret_expr (etype, ptr, size);
7592   if (!rpart)
7593     return NULL_TREE;
7594   ipart = native_interpret_expr (etype, ptr+size, size);
7595   if (!ipart)
7596     return NULL_TREE;
7597   return build_complex (type, rpart, ipart);
7598 }
7599 
7600 
7601 /* Subroutine of native_interpret_expr.  Interpret the contents of
7602    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7603    If the buffer cannot be interpreted, return NULL_TREE.  */
7604 
7605 static tree
7606 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7607 {
7608   tree etype, elem;
7609   int i, size, count;
7610   tree *elements;
7611 
7612   etype = TREE_TYPE (type);
7613   size = GET_MODE_SIZE (TYPE_MODE (etype));
7614   count = TYPE_VECTOR_SUBPARTS (type);
7615   if (size * count > len)
7616     return NULL_TREE;
7617 
7618   elements = XALLOCAVEC (tree, count);
7619   for (i = count - 1; i >= 0; i--)
7620     {
7621       elem = native_interpret_expr (etype, ptr+(i*size), size);
7622       if (!elem)
7623 	return NULL_TREE;
7624       elements[i] = elem;
7625     }
7626   return build_vector (type, elements);
7627 }
7628 
7629 
7630 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7631    the buffer PTR of length LEN as a constant of type TYPE.  For
7632    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7633    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7634    return NULL_TREE.  */
7635 
7636 tree
7637 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7638 {
7639   switch (TREE_CODE (type))
7640     {
7641     case INTEGER_TYPE:
7642     case ENUMERAL_TYPE:
7643     case BOOLEAN_TYPE:
7644     case POINTER_TYPE:
7645     case REFERENCE_TYPE:
7646       return native_interpret_int (type, ptr, len);
7647 
7648     case REAL_TYPE:
7649       return native_interpret_real (type, ptr, len);
7650 
7651     case FIXED_POINT_TYPE:
7652       return native_interpret_fixed (type, ptr, len);
7653 
7654     case COMPLEX_TYPE:
7655       return native_interpret_complex (type, ptr, len);
7656 
7657     case VECTOR_TYPE:
7658       return native_interpret_vector (type, ptr, len);
7659 
7660     default:
7661       return NULL_TREE;
7662     }
7663 }
7664 
7665 /* Returns true if we can interpret the contents of a native encoding
7666    as TYPE.  */
7667 
7668 static bool
7669 can_native_interpret_type_p (tree type)
7670 {
7671   switch (TREE_CODE (type))
7672     {
7673     case INTEGER_TYPE:
7674     case ENUMERAL_TYPE:
7675     case BOOLEAN_TYPE:
7676     case POINTER_TYPE:
7677     case REFERENCE_TYPE:
7678     case FIXED_POINT_TYPE:
7679     case REAL_TYPE:
7680     case COMPLEX_TYPE:
7681     case VECTOR_TYPE:
7682       return true;
7683     default:
7684       return false;
7685     }
7686 }
7687 
7688 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7689    TYPE at compile-time.  If we're unable to perform the conversion
7690    return NULL_TREE.  */
7691 
7692 static tree
7693 fold_view_convert_expr (tree type, tree expr)
7694 {
7695   /* We support up to 512-bit values (for V8DFmode).  */
7696   unsigned char buffer[64];
7697   int len;
7698 
7699   /* Check that the host and target are sane.  */
7700   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7701     return NULL_TREE;
7702 
7703   len = native_encode_expr (expr, buffer, sizeof (buffer));
7704   if (len == 0)
7705     return NULL_TREE;
7706 
7707   return native_interpret_expr (type, buffer, len);
7708 }
7709 
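/* Editor's illustration (assuming IEEE single precision and a 32-bit
   int): folding VIEW_CONVERT_EXPR<float>(0x3f800000) round-trips the
   bits through the buffer -- native_encode_int followed by
   native_interpret_real -- and yields the REAL_CST 1.0.  */
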
7710 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7711    to avoid confusing the gimplify process.  */
7712 
7713 tree
7714 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7715 {
7716   /* The size of the object is not relevant when talking about its address.  */
7717   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7718     t = TREE_OPERAND (t, 0);
7719 
7720   if (TREE_CODE (t) == INDIRECT_REF)
7721     {
7722       t = TREE_OPERAND (t, 0);
7723 
7724       if (TREE_TYPE (t) != ptrtype)
7725 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7726     }
7727   else if (TREE_CODE (t) == MEM_REF
7728 	   && integer_zerop (TREE_OPERAND (t, 1)))
7729     return TREE_OPERAND (t, 0);
7730   else if (TREE_CODE (t) == MEM_REF
7731 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7732     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7733 			TREE_OPERAND (t, 0),
7734 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7735   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7736     {
7737       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7738 
7739       if (TREE_TYPE (t) != ptrtype)
7740 	t = fold_convert_loc (loc, ptrtype, t);
7741     }
7742   else
7743     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7744 
7745   return t;
7746 }
7747 
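/* Editor's illustration (not from the source): taking the address of
   an INDIRECT_REF cancels the dereference, so '&*p' folds straight
   back to 'p' (with a NOP_EXPR cast if the requested pointer type
   differs); likewise '&MEM[p + 0]' folds to 'p'.  */
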
7748 /* Build an expression for the address of T.  */
7749 
7750 tree
7751 build_fold_addr_expr_loc (location_t loc, tree t)
7752 {
7753   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7754 
7755   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7756 }
7757 
7758 static bool vec_cst_ctor_to_array (tree, tree *);
7759 
7760 /* Fold a unary expression of code CODE and type TYPE with operand
7761    OP0.  Return the folded expression if folding is successful.
7762    Otherwise, return NULL_TREE.  */
7763 
7764 tree
7765 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7766 {
7767   tree tem;
7768   tree arg0;
7769   enum tree_code_class kind = TREE_CODE_CLASS (code);
7770 
7771   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7772 	      && TREE_CODE_LENGTH (code) == 1);
7773 
7774   arg0 = op0;
7775   if (arg0)
7776     {
7777       if (CONVERT_EXPR_CODE_P (code)
7778 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7779 	{
7780 	  /* Don't use STRIP_NOPS, because signedness of argument type
7781 	     matters.  */
7782 	  STRIP_SIGN_NOPS (arg0);
7783 	}
7784       else
7785 	{
7786 	  /* Strip any conversions that don't change the mode.  This
7787 	     is safe for every expression, except for a comparison
7788 	     expression because its signedness is derived from its
7789 	     operands.
7790 
7791 	     Note that this is done as an internal manipulation within
7792 	     the constant folder, in order to find the simplest
7793 	     representation of the arguments so that their form can be
7794 	     studied.  In any cases, the appropriate type conversions
7795 	     should be put back in the tree that will get out of the
7796 	     constant folder.  */
7797 	  STRIP_NOPS (arg0);
7798 	}
7799     }
7800 
7801   if (TREE_CODE_CLASS (code) == tcc_unary)
7802     {
7803       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7804 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7805 		       fold_build1_loc (loc, code, type,
7806 				    fold_convert_loc (loc, TREE_TYPE (op0),
7807 						      TREE_OPERAND (arg0, 1))));
7808       else if (TREE_CODE (arg0) == COND_EXPR)
7809 	{
7810 	  tree arg01 = TREE_OPERAND (arg0, 1);
7811 	  tree arg02 = TREE_OPERAND (arg0, 2);
7812 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7813 	    arg01 = fold_build1_loc (loc, code, type,
7814 				 fold_convert_loc (loc,
7815 						   TREE_TYPE (op0), arg01));
7816 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7817 	    arg02 = fold_build1_loc (loc, code, type,
7818 				 fold_convert_loc (loc,
7819 						   TREE_TYPE (op0), arg02));
7820 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7821 			     arg01, arg02);
7822 
7823 	  /* If this was a conversion, and all we did was to move it
7824 	     inside the COND_EXPR, bring it back out.  But leave it if
7825 	     it is a conversion from integer to integer and the
7826 	     result precision is no wider than a word since such a
7827 	     conversion is cheap and may be optimized away by combine,
7828 	     while it couldn't if it were outside the COND_EXPR.  Then return
7829 	     so we don't get into an infinite recursion loop taking the
7830 	     conversion out and then back in.  */
7831 
7832 	  if ((CONVERT_EXPR_CODE_P (code)
7833 	       || code == NON_LVALUE_EXPR)
7834 	      && TREE_CODE (tem) == COND_EXPR
7835 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7836 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7837 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7838 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7839 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7840 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7841 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7842 		     && (INTEGRAL_TYPE_P
7843 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7844 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7845 		  || flag_syntax_only))
7846 	    tem = build1_loc (loc, code, type,
7847 			      build3 (COND_EXPR,
7848 				      TREE_TYPE (TREE_OPERAND
7849 						 (TREE_OPERAND (tem, 1), 0)),
7850 				      TREE_OPERAND (tem, 0),
7851 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7852 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7853 						    0)));
7854 	  return tem;
7855 	}
7856     }
7857 
7858   switch (code)
7859     {
7860     case PAREN_EXPR:
7861       /* Re-association barriers around constants and other re-association
7862 	 barriers can be removed.  */
7863       if (CONSTANT_CLASS_P (op0)
7864 	  || TREE_CODE (op0) == PAREN_EXPR)
7865 	return fold_convert_loc (loc, type, op0);
7866       return NULL_TREE;
7867 
7868     case NON_LVALUE_EXPR:
7869       if (!maybe_lvalue_p (op0))
7870 	return fold_convert_loc (loc, type, op0);
7871       return NULL_TREE;
7872 
7873     CASE_CONVERT:
7874     case FLOAT_EXPR:
7875     case FIX_TRUNC_EXPR:
7876       if (TREE_TYPE (op0) == type)
7877 	return op0;
7878 
7879       if (COMPARISON_CLASS_P (op0))
7880 	{
7881 	  /* If we have (type) (a CMP b) and type is an integral type, return
7882 	     new expression involving the new type.  Canonicalize
7883 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7884 	     non-integral type.
7885 	     Do not fold the result as that would not simplify further;
7886 	     folding again would also result in infinite recursion.  */
7887 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7888 	    return build2_loc (loc, TREE_CODE (op0), type,
7889 			       TREE_OPERAND (op0, 0),
7890 			       TREE_OPERAND (op0, 1));
7891 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7892 		   && TREE_CODE (type) != VECTOR_TYPE)
7893 	    return build3_loc (loc, COND_EXPR, type, op0,
7894 			       constant_boolean_node (true, type),
7895 			       constant_boolean_node (false, type));
7896 	}
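
      /* E.g. (illustrative): (_Bool) (a < b) becomes the comparison
	 a < b computed directly in _Bool, while (float) (a < b)
	 becomes a < b ? 1.0f : 0.0f via the COND_EXPR form above.  */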
7897 
7898       /* Handle cases of two conversions in a row.  */
7899       if (CONVERT_EXPR_P (op0))
7900 	{
7901 	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7902 	  tree inter_type = TREE_TYPE (op0);
7903 	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7904 	  int inside_ptr = POINTER_TYPE_P (inside_type);
7905 	  int inside_float = FLOAT_TYPE_P (inside_type);
7906 	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7907 	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7908 	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7909 	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7910 	  int inter_ptr = POINTER_TYPE_P (inter_type);
7911 	  int inter_float = FLOAT_TYPE_P (inter_type);
7912 	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7913 	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7914 	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7915 	  int final_int = INTEGRAL_TYPE_P (type);
7916 	  int final_ptr = POINTER_TYPE_P (type);
7917 	  int final_float = FLOAT_TYPE_P (type);
7918 	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7919 	  unsigned int final_prec = TYPE_PRECISION (type);
7920 	  int final_unsignedp = TYPE_UNSIGNED (type);
7921 
7922 	  /* In addition to the cases of two conversions in a row
7923 	     handled below, if we are converting something to its own
7924 	     type via an object of identical or wider precision, neither
7925 	     conversion is needed.  */
7926 	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7927 	      && (((inter_int || inter_ptr) && final_int)
7928 		  || (inter_float && final_float))
7929 	      && inter_prec >= final_prec)
7930 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7931 
7932 	  /* Likewise, if the intermediate and initial types are either both
7933 	     float or both integer, we don't need the middle conversion if the
7934 	     former is wider than the latter and doesn't change the signedness
7935 	     (for integers).  Avoid this if the final type is a pointer since
7936 	     then we sometimes need the middle conversion.  Likewise if the
7937 	     final type has a precision not equal to the size of its mode.  */
7938 	  if (((inter_int && inside_int) || (inter_float && inside_float))
7939 	      && (final_int || final_float)
7940 	      && inter_prec >= inside_prec
7941 	      && (inter_float || inter_unsignedp == inside_unsignedp)
7942 	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7943 		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7944 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7945 
7946 	  /* If we have a sign-extension of a zero-extended value, we can
7947 	     replace that by a single zero-extension.  Likewise if the
7948 	     final conversion does not change precision we can drop the
7949 	     intermediate conversion.  */
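	  /* E.g. (illustrative): for unsigned char UC, (long) (int) UC
	     is a sign-extension of a zero-extended value and folds to
	     the single widening (long) UC; and (unsigned int) (int) L,
	     for long L, drops the intermediate conversion since the
	     final precision matches.  */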
7950 	  if (inside_int && inter_int && final_int
7951 	      && ((inside_prec < inter_prec && inter_prec < final_prec
7952 		   && inside_unsignedp && !inter_unsignedp)
7953 		  || final_prec == inter_prec))
7954 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7955 
7956 	  /* Two conversions in a row are not needed unless:
7957 	     - some conversion is floating-point (overstrict for now), or
7958 	     - some conversion is a vector (overstrict for now), or
7959 	     - the intermediate type is narrower than both initial and
7960 	       final, or
7961 	     - the intermediate type and innermost type differ in signedness,
7962 	       and the outermost type is wider than the intermediate, or
7963 	     - the initial type is a pointer type and the precisions of the
7964 	       intermediate and final types differ, or
7965 	     - the final type is a pointer type and the precisions of the
7966 	       initial and intermediate types differ.  */
7967 	  if (! inside_float && ! inter_float && ! final_float
7968 	      && ! inside_vec && ! inter_vec && ! final_vec
7969 	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7970 	      && ! (inside_int && inter_int
7971 		    && inter_unsignedp != inside_unsignedp
7972 		    && inter_prec < final_prec)
7973 	      && ((inter_unsignedp && inter_prec > inside_prec)
7974 		  == (final_unsignedp && final_prec > inter_prec))
7975 	      && ! (inside_ptr && inter_prec != final_prec)
7976 	      && ! (final_ptr && inside_prec != inter_prec)
7977 	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7978 		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7979 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7980 	}
7981 
7982       /* Handle (T *)&A.B.C for A being of type T and B and C
7983 	 living at offset zero.  This occurs frequently in
7984 	 C++ upcasting and then accessing the base.  */
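      /* E.g. (illustrative): given struct Outer o whose first member
	 inner sits at offset zero, (struct Outer *) &o.inner folds
	 to &o.  */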
7985       if (TREE_CODE (op0) == ADDR_EXPR
7986 	  && POINTER_TYPE_P (type)
7987 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7988         {
7989 	  HOST_WIDE_INT bitsize, bitpos;
7990 	  tree offset;
7991 	  enum machine_mode mode;
7992 	  int unsignedp, volatilep;
7993           tree base = TREE_OPERAND (op0, 0);
7994 	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7995 				      &mode, &unsignedp, &volatilep, false);
7996 	  /* If the reference was to a (constant) zero offset, we can use
7997 	     the address of the base if it has the same base type
7998 	     as the result type and the pointer type is unqualified.  */
7999 	  if (! offset && bitpos == 0
8000 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8001 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8002 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8003 	    return fold_convert_loc (loc, type,
8004 				     build_fold_addr_expr_loc (loc, base));
8005         }
8006 
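      /* E.g. (illustrative): (long) (x = 5) becomes the pair
	 (x = 5, 5L): the assignment is kept and the converted
	 constant becomes the value of the whole expression.  */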
8007       if (TREE_CODE (op0) == MODIFY_EXPR
8008 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8009 	  /* Detect assigning a bitfield.  */
8010 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8011 	       && DECL_BIT_FIELD
8012 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8013 	{
8014 	  /* Don't leave an assignment inside a conversion
8015 	     unless assigning a bitfield.  */
8016 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8017 	  /* First do the assignment, then return converted constant.  */
8018 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8019 	  TREE_NO_WARNING (tem) = 1;
8020 	  TREE_USED (tem) = 1;
8021 	  return tem;
8022 	}
8023 
8024       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8025 	 constant (if x has signed type, the sign bit cannot be set
8026 	 in c).  This folds extension into the BIT_AND_EXPR.
8027 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8028 	 very likely don't have maximal range for their precision and this
8029 	 transformation effectively doesn't preserve non-maximal ranges.  */
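      /* E.g. (illustrative): for unsigned char X,
	 (unsigned int) (X & 0x7f) folds to (unsigned int) X & 0x7f,
	 moving the widening past the mask.  */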
8030       if (TREE_CODE (type) == INTEGER_TYPE
8031 	  && TREE_CODE (op0) == BIT_AND_EXPR
8032 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8033 	{
8034 	  tree and_expr = op0;
8035 	  tree and0 = TREE_OPERAND (and_expr, 0);
8036 	  tree and1 = TREE_OPERAND (and_expr, 1);
8037 	  int change = 0;
8038 
8039 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8040 	      || (TYPE_PRECISION (type)
8041 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8042 	    change = 1;
8043 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
8044 		   <= HOST_BITS_PER_WIDE_INT
8045 		   && host_integerp (and1, 1))
8046 	    {
8047 	      unsigned HOST_WIDE_INT cst;
8048 
8049 	      cst = tree_low_cst (and1, 1);
8050 	      cst &= (HOST_WIDE_INT) -1
8051 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8052 	      change = (cst == 0);
8053 #ifdef LOAD_EXTEND_OP
8054 	      if (change
8055 		  && !flag_syntax_only
8056 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8057 		      == ZERO_EXTEND))
8058 		{
8059 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
8060 		  and0 = fold_convert_loc (loc, uns, and0);
8061 		  and1 = fold_convert_loc (loc, uns, and1);
8062 		}
8063 #endif
8064 	    }
8065 	  if (change)
8066 	    {
8067 	      tem = force_fit_type_double (type, tree_to_double_int (and1),
8068 					   0, TREE_OVERFLOW (and1));
8069 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8070 				  fold_convert_loc (loc, type, and0), tem);
8071 	    }
8072 	}
8073 
8074       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8075          when one of the new casts will fold away. Conservatively we assume
8076 	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
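      /* E.g. (illustrative): for int *p, (char *) (p p+ 4) becomes
	 (char *) p p+ 4, so the inner cast can combine with later
	 folds.  */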
8077       if (POINTER_TYPE_P (type)
8078 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8079 	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8080 	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8081 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8082 	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8083 	{
8084 	  tree arg00 = TREE_OPERAND (arg0, 0);
8085 	  tree arg01 = TREE_OPERAND (arg0, 1);
8086 
8087 	  return fold_build_pointer_plus_loc
8088 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
8089 	}
8090 
8091       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8092 	 of the same precision, and X has an integral type not narrower
8093 	 than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
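      /* E.g. (illustrative): for long X, (int) ~(unsigned int) X
	 folds to ~(int) X, as int and unsigned int share a precision
	 and the inner cast is not an extension.  */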
8094       if (INTEGRAL_TYPE_P (type)
8095 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8096 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8097 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8098 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8099 	{
8100 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8101 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8102 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8103 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8104 				fold_convert_loc (loc, type, tem));
8105 	}
8106 
8107       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8108 	 type of X and Y (integer types only).  */
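      /* E.g. (illustrative): for long A and B, (int) (A * B) becomes
	 (int) A * (int) B if int overflow wraps; otherwise the
	 product is formed in unsigned int and then converted, so no
	 new signed overflow is introduced.  */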
8109       if (INTEGRAL_TYPE_P (type)
8110 	  && TREE_CODE (op0) == MULT_EXPR
8111 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8112 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8113 	{
8114 	  /* Be careful not to introduce new overflows.  */
8115 	  tree mult_type;
8116           if (TYPE_OVERFLOW_WRAPS (type))
8117 	    mult_type = type;
8118 	  else
8119 	    mult_type = unsigned_type_for (type);
8120 
8121 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8122 	    {
8123 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8124 				 fold_convert_loc (loc, mult_type,
8125 						   TREE_OPERAND (op0, 0)),
8126 				 fold_convert_loc (loc, mult_type,
8127 						   TREE_OPERAND (op0, 1)));
8128 	      return fold_convert_loc (loc, type, tem);
8129 	    }
8130 	}
8131 
8132       tem = fold_convert_const (code, type, arg0);
8133       return tem ? tem : NULL_TREE;
8134 
8135     case ADDR_SPACE_CONVERT_EXPR:
8136       if (integer_zerop (arg0))
8137 	return fold_convert_const (code, type, arg0);
8138       return NULL_TREE;
8139 
8140     case FIXED_CONVERT_EXPR:
8141       tem = fold_convert_const (code, type, arg0);
8142       return tem ? tem : NULL_TREE;
8143 
8144     case VIEW_CONVERT_EXPR:
8145       if (TREE_TYPE (op0) == type)
8146 	return op0;
8147       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8148 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8149 			    type, TREE_OPERAND (op0, 0));
8150       if (TREE_CODE (op0) == MEM_REF)
8151 	return fold_build2_loc (loc, MEM_REF, type,
8152 				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8153 
8154       /* For integral conversions with the same precision or pointer
8155 	 conversions, use a NOP_EXPR instead.  */
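      /* E.g. (illustrative): VIEW_CONVERT_EXPR<unsigned int>(I) for
	 int I is the value-preserving (unsigned int) I, so a plain
	 conversion is built instead.  */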
8156       if ((INTEGRAL_TYPE_P (type)
8157 	   || POINTER_TYPE_P (type))
8158 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8159 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8160 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8161 	return fold_convert_loc (loc, type, op0);
8162 
8163       /* Strip inner integral conversions that do not change the precision.  */
8164       if (CONVERT_EXPR_P (op0)
8165 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8166 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8167 	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8168 	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8169 	  && (TYPE_PRECISION (TREE_TYPE (op0))
8170 	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8171 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8172 			    type, TREE_OPERAND (op0, 0));
8173 
8174       return fold_view_convert_expr (type, op0);
8175 
8176     case NEGATE_EXPR:
8177       tem = fold_negate_expr (loc, arg0);
8178       if (tem)
8179 	return fold_convert_loc (loc, type, tem);
8180       return NULL_TREE;
8181 
8182     case ABS_EXPR:
8183       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8184 	return fold_abs_const (arg0, type);
8185       else if (TREE_CODE (arg0) == NEGATE_EXPR)
8186 	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8187       /* Convert fabs((double)float) into (double)fabsf(float).  */
8188       else if (TREE_CODE (arg0) == NOP_EXPR
8189 	       && TREE_CODE (type) == REAL_TYPE)
8190 	{
8191 	  tree targ0 = strip_float_extensions (arg0);
8192 	  if (targ0 != arg0)
8193 	    return fold_convert_loc (loc, type,
8194 				     fold_build1_loc (loc, ABS_EXPR,
8195 						  TREE_TYPE (targ0),
8196 						  targ0));
8197 	}
8198       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8199       else if (TREE_CODE (arg0) == ABS_EXPR)
8200 	return arg0;
8201       else if (tree_expr_nonnegative_p (arg0))
8202 	return arg0;
8203 
8204       /* Strip sign ops from argument.  */
8205       if (TREE_CODE (type) == REAL_TYPE)
8206 	{
8207 	  tem = fold_strip_sign_ops (arg0);
8208 	  if (tem)
8209 	    return fold_build1_loc (loc, ABS_EXPR, type,
8210 				fold_convert_loc (loc, type, tem));
8211 	}
8212       return NULL_TREE;
8213 
8214     case CONJ_EXPR:
8215       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8216 	return fold_convert_loc (loc, type, arg0);
8217       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8218 	{
8219 	  tree itype = TREE_TYPE (type);
8220 	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8221 	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8222 	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8223 			      negate_expr (ipart));
8224 	}
8225       if (TREE_CODE (arg0) == COMPLEX_CST)
8226 	{
8227 	  tree itype = TREE_TYPE (type);
8228 	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8229 	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8230 	  return build_complex (type, rpart, negate_expr (ipart));
8231 	}
8232       if (TREE_CODE (arg0) == CONJ_EXPR)
8233 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8234       return NULL_TREE;
8235 
8236     case BIT_NOT_EXPR:
8237       if (TREE_CODE (arg0) == INTEGER_CST)
8238         return fold_not_const (arg0, type);
8239       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8240 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8241       /* Convert ~ (-A) to A - 1.  */
8242       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8243 	return fold_build2_loc (loc, MINUS_EXPR, type,
8244 			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8245 			    build_int_cst (type, 1));
8246       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8247       else if (INTEGRAL_TYPE_P (type)
8248 	       && ((TREE_CODE (arg0) == MINUS_EXPR
8249 		    && integer_onep (TREE_OPERAND (arg0, 1)))
8250 		   || (TREE_CODE (arg0) == PLUS_EXPR
8251 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8252 	{
8253 	  /* Perform the negation in ARG0's type and only then convert
8254 	     to TYPE as to avoid introducing undefined behavior.  */
8255 	  tree t = fold_build1_loc (loc, NEGATE_EXPR,
8256 				    TREE_TYPE (TREE_OPERAND (arg0, 0)),
8257 				    TREE_OPERAND (arg0, 0));
8258 	  return fold_convert_loc (loc, type, t);
8259 	}
8260       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
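      /* E.g. (illustrative): ~(X ^ 5) becomes X ^ ~5, since ~5 folds
	 to a constant while ~X would not simplify.  */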
8261       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8262 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8263 			       	     fold_convert_loc (loc, type,
8264 						       TREE_OPERAND (arg0, 0)))))
8265 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8266 			    fold_convert_loc (loc, type,
8267 					      TREE_OPERAND (arg0, 1)));
8268       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8269 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8270 			       	     fold_convert_loc (loc, type,
8271 						       TREE_OPERAND (arg0, 1)))))
8272 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8273 			    fold_convert_loc (loc, type,
8274 					      TREE_OPERAND (arg0, 0)), tem);
8275       /* Perform BIT_NOT_EXPR on each element individually.  */
8276       else if (TREE_CODE (arg0) == VECTOR_CST)
8277 	{
8278 	  tree *elements;
8279 	  tree elem;
8280 	  unsigned count = VECTOR_CST_NELTS (arg0), i;
8281 
8282 	  elements = XALLOCAVEC (tree, count);
8283 	  for (i = 0; i < count; i++)
8284 	    {
8285 	      elem = VECTOR_CST_ELT (arg0, i);
8286 	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8287 	      if (elem == NULL_TREE)
8288 		break;
8289 	      elements[i] = elem;
8290 	    }
8291 	  if (i == count)
8292 	    return build_vector (type, elements);
8293 	}
8294 
8295       return NULL_TREE;
8296 
8297     case TRUTH_NOT_EXPR:
8298       /* The argument to invert_truthvalue must have Boolean type.  */
8299       if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8300           arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8301 
8302       /* Note that the operand of this must be an int
8303 	 and its values must be 0 or 1.
8304 	 ("true" is a fixed value perhaps depending on the language,
8305 	 but we don't handle values other than 1 correctly yet.)  */
8306       tem = fold_truth_not_expr (loc, arg0);
8307       if (!tem)
8308 	return NULL_TREE;
8309       return fold_convert_loc (loc, type, tem);
8310 
8311     case REALPART_EXPR:
8312       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8313 	return fold_convert_loc (loc, type, arg0);
8314       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8315 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8316 				 TREE_OPERAND (arg0, 1));
8317       if (TREE_CODE (arg0) == COMPLEX_CST)
8318 	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8319       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8320 	{
8321 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8322 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8323 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8324 					  TREE_OPERAND (arg0, 0)),
8325 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8326 					  TREE_OPERAND (arg0, 1)));
8327 	  return fold_convert_loc (loc, type, tem);
8328 	}
8329       if (TREE_CODE (arg0) == CONJ_EXPR)
8330 	{
8331 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8332 	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8333 			     TREE_OPERAND (arg0, 0));
8334 	  return fold_convert_loc (loc, type, tem);
8335 	}
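      /* E.g. (illustrative): REALPART_EXPR <cexpi (x)> folds to
	 cos (x) below, matching cexpi (x) == cos (x) + i*sin (x).  */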
8336       if (TREE_CODE (arg0) == CALL_EXPR)
8337 	{
8338 	  tree fn = get_callee_fndecl (arg0);
8339 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8340 	    switch (DECL_FUNCTION_CODE (fn))
8341 	      {
8342 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8343 	        fn = mathfn_built_in (type, BUILT_IN_COS);
8344 		if (fn)
8345 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8346 		break;
8347 
8348 	      default:
8349 		break;
8350 	      }
8351 	}
8352       return NULL_TREE;
8353 
8354     case IMAGPART_EXPR:
8355       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8356 	return build_zero_cst (type);
8357       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8358 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8359 				 TREE_OPERAND (arg0, 0));
8360       if (TREE_CODE (arg0) == COMPLEX_CST)
8361 	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8362       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8363 	{
8364 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8365 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8366 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8367 					  TREE_OPERAND (arg0, 0)),
8368 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8369 					  TREE_OPERAND (arg0, 1)));
8370 	  return fold_convert_loc (loc, type, tem);
8371 	}
8372       if (TREE_CODE (arg0) == CONJ_EXPR)
8373 	{
8374 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8375 	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8376 	  return fold_convert_loc (loc, type, negate_expr (tem));
8377 	}
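      /* E.g. (illustrative): IMAGPART_EXPR <cexpi (x)> likewise
	 folds to sin (x) below.  */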
8378       if (TREE_CODE (arg0) == CALL_EXPR)
8379 	{
8380 	  tree fn = get_callee_fndecl (arg0);
8381 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8382 	    switch (DECL_FUNCTION_CODE (fn))
8383 	      {
8384 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8385 	        fn = mathfn_built_in (type, BUILT_IN_SIN);
8386 		if (fn)
8387 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8388 		break;
8389 
8390 	      default:
8391 		break;
8392 	      }
8393 	}
8394       return NULL_TREE;
8395 
8396     case INDIRECT_REF:
8397       /* Fold *&X to X if X is an lvalue.  */
8398       if (TREE_CODE (op0) == ADDR_EXPR)
8399 	{
8400 	  tree op00 = TREE_OPERAND (op0, 0);
8401 	  if ((TREE_CODE (op00) == VAR_DECL
8402 	       || TREE_CODE (op00) == PARM_DECL
8403 	       || TREE_CODE (op00) == RESULT_DECL)
8404 	      && !TREE_READONLY (op00))
8405 	    return op00;
8406 	}
8407       return NULL_TREE;
8408 
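    /* E.g. (illustrative): on a little-endian target, unpacking the
       V4HI constant { 1, 2, 3, 4 } to V2SI yields { 1, 2 } for the
       LO variant and { 3, 4 } for the HI variant, each element
       widened via NOP_EXPR (or FLOAT_EXPR for the FLOAT forms).  */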
8409     case VEC_UNPACK_LO_EXPR:
8410     case VEC_UNPACK_HI_EXPR:
8411     case VEC_UNPACK_FLOAT_LO_EXPR:
8412     case VEC_UNPACK_FLOAT_HI_EXPR:
8413       {
8414 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8415 	tree *elts;
8416 	enum tree_code subcode;
8417 
8418 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8419 	if (TREE_CODE (arg0) != VECTOR_CST)
8420 	  return NULL_TREE;
8421 
8422 	elts = XALLOCAVEC (tree, nelts * 2);
8423 	if (!vec_cst_ctor_to_array (arg0, elts))
8424 	  return NULL_TREE;
8425 
8426 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8427 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
8428 	  elts += nelts;
8429 
8430 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8431 	  subcode = NOP_EXPR;
8432 	else
8433 	  subcode = FLOAT_EXPR;
8434 
8435 	for (i = 0; i < nelts; i++)
8436 	  {
8437 	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8438 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8439 	      return NULL_TREE;
8440 	  }
8441 
8442 	return build_vector (type, elts);
8443       }
8444 
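    /* E.g. (illustrative): REDUC_PLUS_EXPR on the V4SI constant
       { 1, 2, 3, 4 } accumulates the sum into element 0, yielding
       { 10, 0, 0, 0 }.  */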
8445     case REDUC_MIN_EXPR:
8446     case REDUC_MAX_EXPR:
8447     case REDUC_PLUS_EXPR:
8448       {
8449 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8450 	tree *elts;
8451 	enum tree_code subcode;
8452 
8453 	if (TREE_CODE (op0) != VECTOR_CST)
8454 	  return NULL_TREE;
8455 
8456 	elts = XALLOCAVEC (tree, nelts);
8457 	if (!vec_cst_ctor_to_array (op0, elts))
8458 	  return NULL_TREE;
8459 
8460 	switch (code)
8461 	  {
8462 	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8463 	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8464 	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8465 	  default: gcc_unreachable ();
8466 	  }
8467 
8468 	for (i = 1; i < nelts; i++)
8469 	  {
8470 	    elts[0] = const_binop (subcode, elts[0], elts[i]);
8471 	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8472 	      return NULL_TREE;
8473 	    elts[i] = build_zero_cst (TREE_TYPE (type));
8474 	  }
8475 
8476 	return build_vector (type, elts);
8477       }
8478 
8479     default:
8480       return NULL_TREE;
8481     } /* switch (code) */
8482 }
8483 
8484 
8485 /* If the operation was a conversion do _not_ mark a resulting constant
8486    with TREE_OVERFLOW if the original constant was not.  These conversions
8487    have implementation defined behavior and retaining the TREE_OVERFLOW
8488    flag here would confuse later passes such as VRP.  */
8489 tree
8490 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8491 				tree type, tree op0)
8492 {
8493   tree res = fold_unary_loc (loc, code, type, op0);
8494   if (res
8495       && TREE_CODE (res) == INTEGER_CST
8496       && TREE_CODE (op0) == INTEGER_CST
8497       && CONVERT_EXPR_CODE_P (code))
8498     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8499 
8500   return res;
8501 }
8502 
8503 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8504    operands OP0 and OP1.  LOC is the location of the resulting expression.
8505    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8506    Return the folded expression if folding is successful.  Otherwise,
8507    return NULL_TREE.  */
8508 static tree
8509 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8510 		  tree arg0, tree arg1, tree op0, tree op1)
8511 {
8512   tree tem;
8513 
8514   /* We only do these simplifications if we are optimizing.  */
8515   if (!optimize)
8516     return NULL_TREE;
8517 
8518   /* Check for things like (A || B) && (A || C).  We can convert this
8519      to A || (B && C).  Note that either operator can be any of the four
8520      truth and/or operations and the transformation will still be
8521      valid.   Also note that we only care about order for the
8522      ANDIF and ORIF operators.  If B contains side effects, this
8523      might change the truth-value of A.  */
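  /* E.g. (illustrative): (a || b) && (a || c) becomes
     a || (b && c) when b has no side effects.  */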
8524   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8525       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8526 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8527 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8528 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8529       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8530     {
8531       tree a00 = TREE_OPERAND (arg0, 0);
8532       tree a01 = TREE_OPERAND (arg0, 1);
8533       tree a10 = TREE_OPERAND (arg1, 0);
8534       tree a11 = TREE_OPERAND (arg1, 1);
8535       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8536 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8537 			 && (code == TRUTH_AND_EXPR
8538 			     || code == TRUTH_OR_EXPR));
8539 
8540       if (operand_equal_p (a00, a10, 0))
8541 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8542 			    fold_build2_loc (loc, code, type, a01, a11));
8543       else if (commutative && operand_equal_p (a00, a11, 0))
8544 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8545 			    fold_build2_loc (loc, code, type, a01, a10));
8546       else if (commutative && operand_equal_p (a01, a10, 0))
8547 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8548 			    fold_build2_loc (loc, code, type, a00, a11));
8549 
8550       /* This case is tricky because we must either have commutative
8551 	 operators or else A10 must not have side-effects.  */
8552 
8553       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8554 	       && operand_equal_p (a01, a11, 0))
8555 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8556 			    fold_build2_loc (loc, code, type, a00, a10),
8557 			    a01);
8558     }
8559 
8560   /* See if we can build a range comparison.  */
8561   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8562     return tem;
8563 
8564   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8565       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8566     {
8567       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8568       if (tem)
8569 	return fold_build2_loc (loc, code, type, tem, arg1);
8570     }
8571 
8572   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8573       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8574     {
8575       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8576       if (tem)
8577 	return fold_build2_loc (loc, code, type, arg0, tem);
8578     }
8579 
8580   /* Check for the possibility of merging component references.  If our
8581      lhs is another similar operation, try to merge its rhs with our
8582      rhs.  Then try to merge our lhs and rhs.  */
8583   if (TREE_CODE (arg0) == code
8584       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8585 					 TREE_OPERAND (arg0, 1), arg1)))
8586     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8587 
8588   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8589     return tem;
8590 
8591   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8592       && (code == TRUTH_AND_EXPR
8593           || code == TRUTH_ANDIF_EXPR
8594           || code == TRUTH_OR_EXPR
8595           || code == TRUTH_ORIF_EXPR))
8596     {
8597       enum tree_code ncode, icode;
8598 
8599       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8600 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8601       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8602 
8603       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8604 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8605 	 We don't want to pack more than two leaves into a non-IF
8606 	 AND/OR expression.
8607 	 If the tree code of the left-hand operand isn't an AND/OR-IF
8608 	 code and isn't equal to IF-CODE, we don't add the right-hand
8609 	 operand.  If the inner right-hand side of the left-hand
8610 	 operand has side effects, or isn't simple, then we can't add
8611 	 to it, as otherwise we might destroy the if-sequence.  */
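      /* E.g. (illustrative): (a && b) && c, with b and c simple and
	 free of side effects, is repacked as a ANDIF (b AND c), so
	 only the first operand keeps its short-circuit branch.  */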
8612       if (TREE_CODE (arg0) == icode
8613 	  && simple_operand_p_2 (arg1)
8614 	  /* Needed for sequence points to handle trappings, and
8615 	     side-effects.  */
8616 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8617 	{
8618 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8619 				 arg1);
8620 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8621 				  tem);
8622 	}
8623 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8624 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8625       else if (TREE_CODE (arg1) == icode
8626 	  && simple_operand_p_2 (arg0)
8627 	  /* Needed for sequence points to handle trappings, and
8628 	     side-effects.  */
8629 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8630 	{
8631 	  tem = fold_build2_loc (loc, ncode, type,
8632 				 arg0, TREE_OPERAND (arg1, 0));
8633 	  return fold_build2_loc (loc, icode, type, tem,
8634 				  TREE_OPERAND (arg1, 1));
8635 	}
8636       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8637 	 into (A OR B).
8638 	 For sequence point consistency, we need to check for trapping
8639 	 and side effects.  */
8640       else if (code == icode && simple_operand_p_2 (arg0)
8641                && simple_operand_p_2 (arg1))
8642 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8643     }
8644 
8645   return NULL_TREE;
8646 }
8647 
8648 /* Fold a binary expression of code CODE and type TYPE with operands
8649    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8650    Return the folded expression if folding is successful.  Otherwise,
8651    return NULL_TREE.  */
8652 
8653 static tree
8654 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8655 {
8656   enum tree_code compl_code;
8657 
8658   if (code == MIN_EXPR)
8659     compl_code = MAX_EXPR;
8660   else if (code == MAX_EXPR)
8661     compl_code = MIN_EXPR;
8662   else
8663     gcc_unreachable ();
8664 
8665   /* MIN (MAX (a, b), b) == b.  */
8666   if (TREE_CODE (op0) == compl_code
8667       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8668     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8669 
8670   /* MIN (MAX (b, a), b) == b.  */
8671   if (TREE_CODE (op0) == compl_code
8672       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8673       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8674     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8675 
8676   /* MIN (a, MAX (a, b)) == a.  */
8677   if (TREE_CODE (op1) == compl_code
8678       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8679       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8680     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8681 
8682   /* MIN (a, MAX (b, a)) == a.  */
8683   if (TREE_CODE (op1) == compl_code
8684       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8685       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8686     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8687 
8688   return NULL_TREE;
8689 }
8690 
8691 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8692    by changing CODE to reduce the magnitude of constants involved in
8693    ARG0 of the comparison.
8694    Returns a canonicalized comparison tree if a simplification was
8695    possible, otherwise returns NULL_TREE.
8696    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8697    valid if signed overflow is undefined.  */
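/* E.g. (illustrative): X + 2 > Y canonicalizes to X + 1 >= Y, and
   3 <= Y canonicalizes to Y > 2 after the final swap.  */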
8698 
8699 static tree
8700 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8701 				 tree arg0, tree arg1,
8702 				 bool *strict_overflow_p)
8703 {
8704   enum tree_code code0 = TREE_CODE (arg0);
8705   tree t, cst0 = NULL_TREE;
8706   int sgn0;
8707   bool swap = false;
8708 
8709   /* Match A +- CST code arg1 and CST code arg1.  We can change the
8710      first form only if overflow is undefined.  */
8711   if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8712 	 /* In principle pointers also have undefined overflow behavior,
8713 	    but that causes problems elsewhere.  */
8714 	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8715 	 && (code0 == MINUS_EXPR
8716 	     || code0 == PLUS_EXPR)
8717          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8718 	|| code0 == INTEGER_CST))
8719     return NULL_TREE;
8720 
8721   /* Identify the constant in arg0 and its sign.  */
8722   if (code0 == INTEGER_CST)
8723     cst0 = arg0;
8724   else
8725     cst0 = TREE_OPERAND (arg0, 1);
8726   sgn0 = tree_int_cst_sgn (cst0);
8727 
8728   /* Overflowed constants and zero will cause problems.  */
8729   if (integer_zerop (cst0)
8730       || TREE_OVERFLOW (cst0))
8731     return NULL_TREE;
8732 
8733   /* See if we can reduce the magnitude of the constant in
8734      arg0 by changing the comparison code.  */
8735   if (code0 == INTEGER_CST)
8736     {
8737       /* CST <= arg1  ->  CST-1 < arg1.  */
8738       if (code == LE_EXPR && sgn0 == 1)
8739 	code = LT_EXPR;
8740       /* -CST < arg1  ->  -CST-1 <= arg1.  */
8741       else if (code == LT_EXPR && sgn0 == -1)
8742 	code = LE_EXPR;
8743       /* CST > arg1  ->  CST-1 >= arg1.  */
8744       else if (code == GT_EXPR && sgn0 == 1)
8745 	code = GE_EXPR;
8746       /* -CST >= arg1  ->  -CST-1 > arg1.  */
8747       else if (code == GE_EXPR && sgn0 == -1)
8748 	code = GT_EXPR;
8749       else
8750         return NULL_TREE;
8751       /* arg1 code' CST' might be more canonical.  */
8752       swap = true;
8753     }
8754   else
8755     {
8756       /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8757       if (code == LT_EXPR
8758 	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8759 	code = LE_EXPR;
8760       /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8761       else if (code == GT_EXPR
8762 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8763 	code = GE_EXPR;
8764       /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8765       else if (code == LE_EXPR
8766 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8767 	code = LT_EXPR;
8768       /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8769       else if (code == GE_EXPR
8770 	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8771 	code = GT_EXPR;
8772       else
8773 	return NULL_TREE;
8774       *strict_overflow_p = true;
8775     }
8776 
8777   /* Now build the constant reduced in magnitude.  But not if that
8778      would produce one outside of its type's range.  */
8779   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8780       && ((sgn0 == 1
8781 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8782 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8783 	  || (sgn0 == -1
8784 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8785 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8786     /* We cannot swap the comparison here as that would cause us to
8787        endlessly recurse.  */
8788     return NULL_TREE;
8789 
8790   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8791 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8792   if (code0 != INTEGER_CST)
8793     t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8794   t = fold_convert (TREE_TYPE (arg1), t);
8795 
8796   /* If swapping might yield a more canonical form, do so.  */
8797   if (swap)
8798     return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8799   else
8800     return fold_build2_loc (loc, code, type, t, arg1);
8801 }
8802 
8803 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8804    overflow further.  Try to decrease the magnitude of constants involved
8805    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8806    and put sole constants at the second argument position.
8807    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8808 
8809 static tree
8810 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8811 			       tree arg0, tree arg1)
8812 {
8813   tree t;
8814   bool strict_overflow_p;
8815   const char * const warnmsg = G_("assuming signed overflow does not occur "
8816 				  "when reducing constant in comparison");
8817 
8818   /* Try canonicalization by simplifying arg0.  */
8819   strict_overflow_p = false;
8820   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8821 				       &strict_overflow_p);
8822   if (t)
8823     {
8824       if (strict_overflow_p)
8825 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8826       return t;
8827     }
8828 
8829   /* Try canonicalization by simplifying arg1 using the swapped
8830      comparison.  */
8831   code = swap_tree_comparison (code);
8832   strict_overflow_p = false;
8833   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8834 				       &strict_overflow_p);
8835   if (t && strict_overflow_p)
8836     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8837   return t;
8838 }
8839 
8840 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8841    space.  This is used to avoid issuing overflow warnings for
8842    expressions like &p->x which cannot wrap.  */
8843 
8844 static bool
8845 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8846 {
8847   double_int di_offset, total;
8848 
8849   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8850     return true;
8851 
8852   if (bitpos < 0)
8853     return true;
8854 
8855   if (offset == NULL_TREE)
8856     di_offset = double_int_zero;
8857   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8858     return true;
8859   else
8860     di_offset = TREE_INT_CST (offset);
8861 
8862   bool overflow;
8863   double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8864   total = di_offset.add_with_sign (units, true, &overflow);
8865   if (overflow)
8866     return true;
8867 
8868   if (total.high != 0)
8869     return true;
8870 
8871   HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8872   if (size <= 0)
8873     return true;
8874 
8875   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8876      array.  */
8877   if (TREE_CODE (base) == ADDR_EXPR)
8878     {
8879       HOST_WIDE_INT base_size;
8880 
8881       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8882       if (base_size > 0 && size < base_size)
8883 	size = base_size;
8884     }
8885 
8886   return total.low > (unsigned HOST_WIDE_INT) size;
8887 }
8888 
8889 /* Subroutine of fold_binary.  This routine performs all of the
8890    transformations that are common to the equality/inequality
8891    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8892    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8893    fold_binary should call fold_binary instead.  Fold a comparison with
8894    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8895    the folded comparison or NULL_TREE.  */
8896 
8897 static tree
8898 fold_comparison (location_t loc, enum tree_code code, tree type,
8899 		 tree op0, tree op1)
8900 {
8901   tree arg0, arg1, tem;
8902 
8903   arg0 = op0;
8904   arg1 = op1;
8905 
8906   STRIP_SIGN_NOPS (arg0);
8907   STRIP_SIGN_NOPS (arg1);
8908 
8909   tem = fold_relational_const (code, type, arg0, arg1);
8910   if (tem != NULL_TREE)
8911     return tem;
8912 
8913   /* If one arg is a real or integer constant, put it last.  */
8914   if (tree_swap_operands_p (arg0, arg1, true))
8915     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8916 
8917   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
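  /* E.g. (illustrative): X + 10 < 20 becomes X < 10, and
     X - 10 < 20 becomes X < 30, when signed overflow is treated as
     undefined.  */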
8918   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8919       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8920 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8921 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8922       && (TREE_CODE (arg1) == INTEGER_CST
8923 	  && !TREE_OVERFLOW (arg1)))
8924     {
8925       tree const1 = TREE_OPERAND (arg0, 1);
8926       tree const2 = arg1;
8927       tree variable = TREE_OPERAND (arg0, 0);
8928       tree lhs;
8929       int lhs_add;
8930       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8931 
8932       lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8933 			 TREE_TYPE (arg1), const2, const1);
8934 
8935       /* If the constant operation overflowed this can be
8936 	 simplified as a comparison against INT_MAX/INT_MIN.  */
8937       if (TREE_CODE (lhs) == INTEGER_CST
8938 	  && TREE_OVERFLOW (lhs)
8939 	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8940 	{
8941 	  int const1_sgn = tree_int_cst_sgn (const1);
8942 	  enum tree_code code2 = code;
8943 
8944 	  /* Get the sign of the constant on the lhs if the
8945 	     operation were VARIABLE + CONST1.  */
8946 	  if (TREE_CODE (arg0) == MINUS_EXPR)
8947 	    const1_sgn = -const1_sgn;
8948 
8949 	  /* The sign of the constant determines if we overflowed
8950 	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8951 	     Canonicalize to the INT_MIN overflow by swapping the comparison
8952 	     if necessary.  */
8953 	  if (const1_sgn == -1)
8954 	    code2 = swap_tree_comparison (code);
8955 
8956 	  /* We now can look at the canonicalized case
8957 	       VARIABLE + 1  CODE2  INT_MIN
8958 	     and decide on the result.  */
8959 	  if (code2 == LT_EXPR
8960 	      || code2 == LE_EXPR
8961 	      || code2 == EQ_EXPR)
8962 	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8963 	  else if (code2 == NE_EXPR
8964 		   || code2 == GE_EXPR
8965 		   || code2 == GT_EXPR)
8966 	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8967 	}
8968 
8969       if (TREE_CODE (lhs) == TREE_CODE (arg1)
8970 	  && (TREE_CODE (lhs) != INTEGER_CST
8971 	      || !TREE_OVERFLOW (lhs)))
8972 	{
8973 	  if (code != EQ_EXPR && code != NE_EXPR)
8974 	    fold_overflow_warning ("assuming signed overflow does not occur "
8975 				   "when changing X +- C1 cmp C2 to "
8976 				   "X cmp C2 -+ C1",
8977 				   WARN_STRICT_OVERFLOW_COMPARISON);
8978 	  return fold_build2_loc (loc, code, type, variable, lhs);
8979 	}
8980     }
8981 
8982   /* For comparisons of pointers we can decompose them into a compile time
8983      comparison of the base objects and the offsets into the object.
8984      This requires at least one operand being an ADDR_EXPR or a
8985      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
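  /* E.g. (illustrative): &s.a == &s.b folds to false from the two
     distinct constant bit positions, and p p+ 4 < p p+ 8 reduces to
     a comparison of the constant offsets when pointer overflow is
     treated as undefined.  */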
8986   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8987       && (TREE_CODE (arg0) == ADDR_EXPR
8988 	  || TREE_CODE (arg1) == ADDR_EXPR
8989 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8990 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8991     {
8992       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8993       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8994       enum machine_mode mode;
8995       int volatilep, unsignedp;
8996       bool indirect_base0 = false, indirect_base1 = false;
8997 
8998       /* Get base and offset for the access.  Strip ADDR_EXPR for
8999 	 get_inner_reference, but put it back by stripping INDIRECT_REF
9000 	 off the base object if possible.  indirect_baseN will be true
9001 	 if baseN is not an address but refers to the object itself.  */
9002       base0 = arg0;
9003       if (TREE_CODE (arg0) == ADDR_EXPR)
9004 	{
9005 	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9006 				       &bitsize, &bitpos0, &offset0, &mode,
9007 				       &unsignedp, &volatilep, false);
9008 	  if (TREE_CODE (base0) == INDIRECT_REF)
9009 	    base0 = TREE_OPERAND (base0, 0);
9010 	  else
9011 	    indirect_base0 = true;
9012 	}
9013       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9014 	{
9015 	  base0 = TREE_OPERAND (arg0, 0);
9016 	  STRIP_SIGN_NOPS (base0);
9017 	  if (TREE_CODE (base0) == ADDR_EXPR)
9018 	    {
9019 	      base0 = TREE_OPERAND (base0, 0);
9020 	      indirect_base0 = true;
9021 	    }
9022 	  offset0 = TREE_OPERAND (arg0, 1);
9023 	  if (host_integerp (offset0, 0))
9024 	    {
9025 	      HOST_WIDE_INT off = size_low_cst (offset0);
9026 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9027 				   * BITS_PER_UNIT)
9028 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9029 		{
9030 		  bitpos0 = off * BITS_PER_UNIT;
9031 		  offset0 = NULL_TREE;
9032 		}
9033 	    }
9034 	}
9035 
9036       base1 = arg1;
9037       if (TREE_CODE (arg1) == ADDR_EXPR)
9038 	{
9039 	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9040 				       &bitsize, &bitpos1, &offset1, &mode,
9041 				       &unsignedp, &volatilep, false);
9042 	  if (TREE_CODE (base1) == INDIRECT_REF)
9043 	    base1 = TREE_OPERAND (base1, 0);
9044 	  else
9045 	    indirect_base1 = true;
9046 	}
9047       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9048 	{
9049 	  base1 = TREE_OPERAND (arg1, 0);
9050 	  STRIP_SIGN_NOPS (base1);
9051 	  if (TREE_CODE (base1) == ADDR_EXPR)
9052 	    {
9053 	      base1 = TREE_OPERAND (base1, 0);
9054 	      indirect_base1 = true;
9055 	    }
9056 	  offset1 = TREE_OPERAND (arg1, 1);
9057 	  if (host_integerp (offset1, 0))
9058 	    {
9059 	      HOST_WIDE_INT off = size_low_cst (offset1);
9060 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9061 				   * BITS_PER_UNIT)
9062 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9063 		{
9064 		  bitpos1 = off * BITS_PER_UNIT;
9065 		  offset1 = NULL_TREE;
9066 		}
9067 	    }
9068 	}
9069 
9070       /* A local variable can never be pointed to by
9071          the default SSA name of an incoming parameter.  */
9072       if ((TREE_CODE (arg0) == ADDR_EXPR
9073            && indirect_base0
9074            && TREE_CODE (base0) == VAR_DECL
9075            && auto_var_in_fn_p (base0, current_function_decl)
9076            && !indirect_base1
9077            && TREE_CODE (base1) == SSA_NAME
9078            && SSA_NAME_IS_DEFAULT_DEF (base1)
9079 	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9080           || (TREE_CODE (arg1) == ADDR_EXPR
9081               && indirect_base1
9082               && TREE_CODE (base1) == VAR_DECL
9083               && auto_var_in_fn_p (base1, current_function_decl)
9084               && !indirect_base0
9085               && TREE_CODE (base0) == SSA_NAME
9086               && SSA_NAME_IS_DEFAULT_DEF (base0)
9087 	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9088         {
9089           if (code == NE_EXPR)
9090             return constant_boolean_node (1, type);
9091           else if (code == EQ_EXPR)
9092             return constant_boolean_node (0, type);
9093         }
9094       /* If we have equivalent bases we might be able to simplify.  */
9095       else if (indirect_base0 == indirect_base1
9096                && operand_equal_p (base0, base1, 0))
9097 	{
9098 	  /* We can fold this expression to a constant if the non-constant
9099 	     offset parts are equal.  */
9100 	  if ((offset0 == offset1
9101 	       || (offset0 && offset1
9102 		   && operand_equal_p (offset0, offset1, 0)))
9103 	      && (code == EQ_EXPR
9104 		  || code == NE_EXPR
9105 		  || (indirect_base0 && DECL_P (base0))
9106 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
9108 	    {
9109 	      if (code != EQ_EXPR
9110 		  && code != NE_EXPR
9111 		  && bitpos0 != bitpos1
9112 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9113 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9114 		fold_overflow_warning (("assuming pointer wraparound does not "
9115 					"occur when comparing P +- C1 with "
9116 					"P +- C2"),
9117 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9118 
9119 	      switch (code)
9120 		{
9121 		case EQ_EXPR:
9122 		  return constant_boolean_node (bitpos0 == bitpos1, type);
9123 		case NE_EXPR:
9124 		  return constant_boolean_node (bitpos0 != bitpos1, type);
9125 		case LT_EXPR:
9126 		  return constant_boolean_node (bitpos0 < bitpos1, type);
9127 		case LE_EXPR:
9128 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
9129 		case GE_EXPR:
9130 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
9131 		case GT_EXPR:
9132 		  return constant_boolean_node (bitpos0 > bitpos1, type);
9133 		default:;
9134 		}
9135 	    }
9136 	  /* We can simplify the comparison to a comparison of the variable
9137 	     offset parts if the constant offset parts are equal.
9138 	     Be careful to use signed sizetype here because otherwise we
9139 	     mess with array offsets in the wrong way.  This is possible
9140 	     because pointer arithmetic is restricted to remain within an
9141 	     object and overflow on pointer differences is undefined as of
9142 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9143 	  else if (bitpos0 == bitpos1
9144 		   && ((code == EQ_EXPR || code == NE_EXPR)
9145 		       || (indirect_base0 && DECL_P (base0))
9146 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9147 	    {
9148 	      /* By converting to signed sizetype we cover middle-end pointer
9149 	         arithmetic, which operates on unsigned types of sizetype's
9150 	         size, and ARRAY_REF offsets, which are properly sign- or
9151 	         zero-extended from their type in case it is narrower than
9152 	         sizetype.  */
9153 	      if (offset0 == NULL_TREE)
9154 		offset0 = build_int_cst (ssizetype, 0);
9155 	      else
9156 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9157 	      if (offset1 == NULL_TREE)
9158 		offset1 = build_int_cst (ssizetype, 0);
9159 	      else
9160 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9161 
9162 	      if (code != EQ_EXPR
9163 		  && code != NE_EXPR
9164 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9165 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9166 		fold_overflow_warning (("assuming pointer wraparound does not "
9167 					"occur when comparing P +- C1 with "
9168 					"P +- C2"),
9169 				       WARN_STRICT_OVERFLOW_COMPARISON);
9170 
9171 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9172 	    }
9173 	}
9174       /* For non-equal bases we can simplify if they are addresses
9175 	 of local binding decls or constants.  */
9176       else if (indirect_base0 && indirect_base1
9177 	       /* We know that !operand_equal_p (base0, base1, 0)
9178 		  because the if condition was false.  But make
9179 		  sure the two decls are not the same.  */
9180 	       && base0 != base1
9181 	       && TREE_CODE (arg0) == ADDR_EXPR
9182 	       && TREE_CODE (arg1) == ADDR_EXPR
9183 	       && (((TREE_CODE (base0) == VAR_DECL
9184 		     || TREE_CODE (base0) == PARM_DECL)
9185 		    && (targetm.binds_local_p (base0)
9186 			|| CONSTANT_CLASS_P (base1)))
9187 		   || CONSTANT_CLASS_P (base0))
9188 	       && (((TREE_CODE (base1) == VAR_DECL
9189 		     || TREE_CODE (base1) == PARM_DECL)
9190 		    && (targetm.binds_local_p (base1)
9191 			|| CONSTANT_CLASS_P (base0)))
9192 		   || CONSTANT_CLASS_P (base1)))
9193 	{
9194 	  if (code == EQ_EXPR)
9195 	    return omit_two_operands_loc (loc, type, boolean_false_node,
9196 				      arg0, arg1);
9197 	  else if (code == NE_EXPR)
9198 	    return omit_two_operands_loc (loc, type, boolean_true_node,
9199 				      arg0, arg1);
9200 	}
9201       /* For equal offsets we can simplify to a comparison of the
9202 	 base addresses.  */
9203       else if (bitpos0 == bitpos1
9204 	       && (indirect_base0
9205 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9206 	       && (indirect_base1
9207 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9208 	       && ((offset0 == offset1)
9209 		   || (offset0 && offset1
9210 		       && operand_equal_p (offset0, offset1, 0))))
9211 	{
9212 	  if (indirect_base0)
9213 	    base0 = build_fold_addr_expr_loc (loc, base0);
9214 	  if (indirect_base1)
9215 	    base1 = build_fold_addr_expr_loc (loc, base1);
9216 	  return fold_build2_loc (loc, code, type, base0, base1);
9217 	}
9218     }
9219 
9220   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9221      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9222      the resulting offset is smaller in absolute value than the
9223      original one and has the same sign.  */
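  /* E.g. (illustrative): X + 7 < Y + 3 becomes X + 4 < Y; the new
     constant 4 = 7 - 3 is smaller in magnitude and of the same sign
     as the original 7.  */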
9224   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9225       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9226       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9227 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9228       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9229       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9230 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9231     {
9232       tree const1 = TREE_OPERAND (arg0, 1);
9233       tree const2 = TREE_OPERAND (arg1, 1);
9234       tree variable1 = TREE_OPERAND (arg0, 0);
9235       tree variable2 = TREE_OPERAND (arg1, 0);
9236       tree cst;
9237       const char * const warnmsg = G_("assuming signed overflow does not "
9238 				      "occur when combining constants around "
9239 				      "a comparison");
9240 
9241       /* Put the constant on the side where it doesn't overflow and is
9242 	 of lower absolute value and of the same sign as before.  */
9243       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9244 			     ? MINUS_EXPR : PLUS_EXPR,
9245 			     const2, const1);
9246       if (!TREE_OVERFLOW (cst)
9247 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9248 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9249 	{
9250 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9251 	  return fold_build2_loc (loc, code, type,
9252 				  variable1,
9253 				  fold_build2_loc (loc, TREE_CODE (arg1),
9254 						   TREE_TYPE (arg1),
9255 						   variable2, cst));
9256 	}
9257 
9258       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9259 			     ? MINUS_EXPR : PLUS_EXPR,
9260 			     const1, const2);
9261       if (!TREE_OVERFLOW (cst)
9262 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9263 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9264 	{
9265 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9266 	  return fold_build2_loc (loc, code, type,
9267 				  fold_build2_loc (loc, TREE_CODE (arg0),
9268 						   TREE_TYPE (arg0),
9269 						   variable1, cst),
9270 				  variable2);
9271 	}
9272     }
9273 
9274   /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9275      signed arithmetic case.  That form is created by the compiler
9276      often enough for folding it to be of value.  One example is in
9277      computing loop trip counts after Operator Strength Reduction.  */
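  /* For instance, with signed int i, i * 4 > 0 can become i > 0,
     while i * -4 > 0 becomes i < 0, since the negative multiplier
     flips the sense of the comparison.  */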
9278   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9279       && TREE_CODE (arg0) == MULT_EXPR
9280       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9281           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9282       && integer_zerop (arg1))
9283     {
9284       tree const1 = TREE_OPERAND (arg0, 1);
9285       tree const2 = arg1;                       /* zero */
9286       tree variable1 = TREE_OPERAND (arg0, 0);
9287       enum tree_code cmp_code = code;
9288 
9289       /* Handle unfolded multiplication by zero.  */
9290       if (integer_zerop (const1))
9291 	return fold_build2_loc (loc, cmp_code, type, const1, const2);
9292 
9293       fold_overflow_warning (("assuming signed overflow does not occur when "
9294 			      "eliminating multiplication in comparison "
9295 			      "with zero"),
9296 			     WARN_STRICT_OVERFLOW_COMPARISON);
9297 
9298       /* If const1 is negative we swap the sense of the comparison.  */
9299       if (tree_int_cst_sgn (const1) < 0)
9300         cmp_code = swap_tree_comparison (cmp_code);
9301 
9302       return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9303     }
9304 
9305   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9306   if (tem)
9307     return tem;
9308 
9309   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9310     {
9311       tree targ0 = strip_float_extensions (arg0);
9312       tree targ1 = strip_float_extensions (arg1);
9313       tree newtype = TREE_TYPE (targ0);
9314 
9315       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9316 	newtype = TREE_TYPE (targ1);
9317 
9318       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9319       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9320 	return fold_build2_loc (loc, code, type,
9321 			    fold_convert_loc (loc, newtype, targ0),
9322 			    fold_convert_loc (loc, newtype, targ1));
9323 
9324       /* (-a) CMP (-b) -> b CMP a  */
9325       if (TREE_CODE (arg0) == NEGATE_EXPR
9326 	  && TREE_CODE (arg1) == NEGATE_EXPR)
9327 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9328 			    TREE_OPERAND (arg0, 0));
9329 
9330       if (TREE_CODE (arg1) == REAL_CST)
9331 	{
9332 	  REAL_VALUE_TYPE cst;
9333 	  cst = TREE_REAL_CST (arg1);
9334 
9335 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9336 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
9337 	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
9338 				TREE_OPERAND (arg0, 0),
9339 				build_real (TREE_TYPE (arg1),
9340 					    real_value_negate (&cst)));
9341 
9342 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9343 	  /* a CMP (-0) -> a CMP 0  */
9344 	  if (REAL_VALUE_MINUS_ZERO (cst))
9345 	    return fold_build2_loc (loc, code, type, arg0,
9346 				build_real (TREE_TYPE (arg1), dconst0));
9347 
9348 	  /* x != NaN is always true, other ops are always false.  */
9349 	  if (REAL_VALUE_ISNAN (cst)
9350 	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9351 	    {
9352 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9353 	      return omit_one_operand_loc (loc, type, tem, arg0);
9354 	    }
9355 
9356 	  /* Fold comparisons against infinity.  */
9357 	  if (REAL_VALUE_ISINF (cst)
9358 	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9359 	    {
9360 	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
9361 	      if (tem != NULL_TREE)
9362 		return tem;
9363 	    }
9364 	}
9365 
9366       /* If this is a comparison of a real constant with a PLUS_EXPR
9367 	 or a MINUS_EXPR of a real constant, we can convert it into a
9368 	 comparison with a revised real constant, provided that unsafe
9369 	 math optimizations are enabled and no overflow occurs.  */
9370       if (flag_unsafe_math_optimizations
9371 	  && TREE_CODE (arg1) == REAL_CST
9372 	  && (TREE_CODE (arg0) == PLUS_EXPR
9373 	      || TREE_CODE (arg0) == MINUS_EXPR)
9374 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9375 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9376 				      ? MINUS_EXPR : PLUS_EXPR,
9377 				      arg1, TREE_OPERAND (arg0, 1)))
9378 	  && !TREE_OVERFLOW (tem))
9379 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9380 
9381       /* Likewise, we can simplify a comparison of a real constant with
9382          a MINUS_EXPR whose first operand is also a real constant, i.e.
9383          (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9384          floating-point types only if -fassociative-math is set.  */
9385       if (flag_associative_math
9386 	  && TREE_CODE (arg1) == REAL_CST
9387 	  && TREE_CODE (arg0) == MINUS_EXPR
9388 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9389 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9390 				      arg1))
9391 	  && !TREE_OVERFLOW (tem))
9392 	return fold_build2_loc (loc, swap_tree_comparison (code), type,
9393 			    TREE_OPERAND (arg0, 1), tem);
9394 
9395       /* Fold comparisons against built-in math functions.  */
9396       if (TREE_CODE (arg1) == REAL_CST
9397 	  && flag_unsafe_math_optimizations
9398 	  && ! flag_errno_math)
9399 	{
9400 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
9401 
9402 	  if (fcode != END_BUILTINS)
9403 	    {
9404 	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9405 	      if (tem != NULL_TREE)
9406 		return tem;
9407 	    }
9408 	}
9409     }
9410 
9411   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9412       && CONVERT_EXPR_P (arg0))
9413     {
9414       /* If we are widening one operand of an integer comparison,
9415 	 see if the other operand is similarly being widened.  Perhaps we
9416 	 can do the comparison in the narrower type.  */
9417       tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9418       if (tem)
9419 	return tem;
9420 
9421       /* Or if we are changing signedness.  */
9422       tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9423       if (tem)
9424 	return tem;
9425     }
9426 
9427   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9428      constant, we can simplify it.  */
9429   if (TREE_CODE (arg1) == INTEGER_CST
9430       && (TREE_CODE (arg0) == MIN_EXPR
9431 	  || TREE_CODE (arg0) == MAX_EXPR)
9432       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9433     {
9434       tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9435       if (tem)
9436 	return tem;
9437     }
9438 
9439   /* Simplify comparison of something with itself.  (For IEEE
9440      floating-point, we can only do some of these simplifications.)  */
9441   if (operand_equal_p (arg0, arg1, 0))
9442     {
9443       switch (code)
9444 	{
9445 	case EQ_EXPR:
9446 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9447 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9448 	    return constant_boolean_node (1, type);
9449 	  break;
9450 
9451 	case GE_EXPR:
9452 	case LE_EXPR:
9453 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9454 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9455 	    return constant_boolean_node (1, type);
9456 	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9457 
9458 	case NE_EXPR:
9459 	  /* For NE, we can only do this simplification if the operands
9460 	     are integral or we don't honor IEEE floating point NaNs.  */
9461 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9462 	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9463 	    break;
9464 	  /* ... fall through ...  */
9465 	case GT_EXPR:
9466 	case LT_EXPR:
9467 	  return constant_boolean_node (0, type);
9468 	default:
9469 	  gcc_unreachable ();
9470 	}
9471     }
9472 
9473   /* If we are comparing an expression that just has comparisons
9474      of two integer values, arithmetic expressions of those comparisons,
9475      and constants, we can simplify it.  There are only three cases
9476      to check: the two values can either be equal, the first can be
9477      greater, or the second can be greater.  Fold the expression for
9478      those three values.  Since each value must be 0 or 1, we have
9479      eight possibilities, each of which corresponds to the constant 0
9480      or 1 or one of the six possible comparisons.
9481 
9482      This handles common cases like (a > b) == 0 but also handles
9483      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9484      occur in macroized code.  */
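  /* As an illustration, for ((x > y) - (y > x)) > 0 the three trial
     substitutions below yield high_result = 1, equal_result = 0 and
     low_result = 0, i.e. the 3-bit mask 4, so the whole expression
     folds to x > y.  */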
9485 
9486   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9487     {
9488       tree cval1 = 0, cval2 = 0;
9489       int save_p = 0;
9490 
9491       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9492 	  /* Don't handle degenerate cases here; they should already
9493 	     have been handled anyway.  */
9494 	  && cval1 != 0 && cval2 != 0
9495 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9496 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9497 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9498 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9499 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9500 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9501 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9502 	{
9503 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9504 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9505 
9506 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9507 	     was the same as ARG1.  */
9508 
9509 	  tree high_result
9510 		= fold_build2_loc (loc, code, type,
9511 			       eval_subst (loc, arg0, cval1, maxval,
9512 					   cval2, minval),
9513 			       arg1);
9514 	  tree equal_result
9515 		= fold_build2_loc (loc, code, type,
9516 			       eval_subst (loc, arg0, cval1, maxval,
9517 					   cval2, maxval),
9518 			       arg1);
9519 	  tree low_result
9520 		= fold_build2_loc (loc, code, type,
9521 			       eval_subst (loc, arg0, cval1, minval,
9522 					   cval2, maxval),
9523 			       arg1);
9524 
9525 	  /* All three of these results should be 0 or 1.  Confirm they are.
9526 	     Then use those values to select the proper code to use.  */
9527 
9528 	  if (TREE_CODE (high_result) == INTEGER_CST
9529 	      && TREE_CODE (equal_result) == INTEGER_CST
9530 	      && TREE_CODE (low_result) == INTEGER_CST)
9531 	    {
9532 	      /* Make a 3-bit mask with the high-order bit being the
9533 		 value for `>', the next for '=', and the low for '<'.  */
9534 	      switch ((integer_onep (high_result) * 4)
9535 		      + (integer_onep (equal_result) * 2)
9536 		      + integer_onep (low_result))
9537 		{
9538 		case 0:
9539 		  /* Always false.  */
9540 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9541 		case 1:
9542 		  code = LT_EXPR;
9543 		  break;
9544 		case 2:
9545 		  code = EQ_EXPR;
9546 		  break;
9547 		case 3:
9548 		  code = LE_EXPR;
9549 		  break;
9550 		case 4:
9551 		  code = GT_EXPR;
9552 		  break;
9553 		case 5:
9554 		  code = NE_EXPR;
9555 		  break;
9556 		case 6:
9557 		  code = GE_EXPR;
9558 		  break;
9559 		case 7:
9560 		  /* Always true.  */
9561 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9562 		}
9563 
9564 	      if (save_p)
9565 		{
9566 		  tem = save_expr (build2 (code, type, cval1, cval2));
9567 		  SET_EXPR_LOCATION (tem, loc);
9568 		  return tem;
9569 		}
9570 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9571 	    }
9572 	}
9573     }
9574 
9575   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9576      into a single range test.  */
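  /* For example, with truncating signed division, x / 4 == 3 holds
     exactly for 12 <= x <= 15, so the comparison can become a single
     range test on x.  */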
9577   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9578        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9579       && TREE_CODE (arg1) == INTEGER_CST
9580       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9581       && !integer_zerop (TREE_OPERAND (arg0, 1))
9582       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9583       && !TREE_OVERFLOW (arg1))
9584     {
9585       tem = fold_div_compare (loc, code, type, arg0, arg1);
9586       if (tem != NULL_TREE)
9587 	return tem;
9588     }
9589 
9590   /* Fold ~X op ~Y as Y op X.  */
9591   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9592       && TREE_CODE (arg1) == BIT_NOT_EXPR)
9593     {
9594       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9595       return fold_build2_loc (loc, code, type,
9596 			  fold_convert_loc (loc, cmp_type,
9597 					    TREE_OPERAND (arg1, 0)),
9598 			  TREE_OPERAND (arg0, 0));
9599     }
9600 
9601   /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9602   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9603       && TREE_CODE (arg1) == INTEGER_CST)
9604     {
9605       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9606       return fold_build2_loc (loc, swap_tree_comparison (code), type,
9607 			  TREE_OPERAND (arg0, 0),
9608 			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9609 				       fold_convert_loc (loc, cmp_type, arg1)));
9610     }
9611 
9612   return NULL_TREE;
9613 }
9614 
9615 
9616 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9617    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9618    argument EXPR represents the expression "z" of type TYPE.  */
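/* For instance, for z = a + b*i this builds (a*a + b*b) + 0*i: the
   imaginary part of z * conj(z) is always zero, so only the two
   squares and one addition remain.  */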
9619 
9620 static tree
9621 fold_mult_zconjz (location_t loc, tree type, tree expr)
9622 {
9623   tree itype = TREE_TYPE (type);
9624   tree rpart, ipart, tem;
9625 
9626   if (TREE_CODE (expr) == COMPLEX_EXPR)
9627     {
9628       rpart = TREE_OPERAND (expr, 0);
9629       ipart = TREE_OPERAND (expr, 1);
9630     }
9631   else if (TREE_CODE (expr) == COMPLEX_CST)
9632     {
9633       rpart = TREE_REALPART (expr);
9634       ipart = TREE_IMAGPART (expr);
9635     }
9636   else
9637     {
9638       expr = save_expr (expr);
9639       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9640       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9641     }
9642 
9643   rpart = save_expr (rpart);
9644   ipart = save_expr (ipart);
9645   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9646 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9647 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9648   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9649 			  build_zero_cst (itype));
9650 }
9651 
9652 
9653 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9654    power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9655    guarantees that P and N have the same least significant log2(M) bits.
9656    N is not otherwise constrained.  In particular, N is not normalized to
9657    0 <= N < M as is common.  In general, the precise value of P is unknown.
9658    M is chosen as large as possible such that constant N can be determined.
9659 
9660    Returns M and sets *RESIDUE to N.
9661 
9662    If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9663    account.  This is not always possible due to PR 35705.
9664  */
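/* For instance, for EXPR = &buf p+ 4, where buf is known to be
   16-byte aligned, this returns M = 16 and sets *RESIDUE = 4: every
   possible value of the pointer is congruent to 4 modulo 16.  */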
9665 
9666 static unsigned HOST_WIDE_INT
9667 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9668 				 bool allow_func_align)
9669 {
9670   enum tree_code code;
9671 
9672   *residue = 0;
9673 
9674   code = TREE_CODE (expr);
9675   if (code == ADDR_EXPR)
9676     {
9677       unsigned int bitalign;
9678       get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9679       *residue /= BITS_PER_UNIT;
9680       return bitalign / BITS_PER_UNIT;
9681     }
9682   else if (code == POINTER_PLUS_EXPR)
9683     {
9684       tree op0, op1;
9685       unsigned HOST_WIDE_INT modulus;
9686       enum tree_code inner_code;
9687 
9688       op0 = TREE_OPERAND (expr, 0);
9689       STRIP_NOPS (op0);
9690       modulus = get_pointer_modulus_and_residue (op0, residue,
9691 						 allow_func_align);
9692 
9693       op1 = TREE_OPERAND (expr, 1);
9694       STRIP_NOPS (op1);
9695       inner_code = TREE_CODE (op1);
9696       if (inner_code == INTEGER_CST)
9697 	{
9698 	  *residue += TREE_INT_CST_LOW (op1);
9699 	  return modulus;
9700 	}
9701       else if (inner_code == MULT_EXPR)
9702 	{
9703 	  op1 = TREE_OPERAND (op1, 1);
9704 	  if (TREE_CODE (op1) == INTEGER_CST)
9705 	    {
9706 	      unsigned HOST_WIDE_INT align;
9707 
9708 	      /* Compute the greatest power-of-2 divisor of op1.  */
9709 	      align = TREE_INT_CST_LOW (op1);
9710 	      align &= -align;
9711 
9712 	      /* If align is non-zero and less than modulus, replace
9713 		 modulus with align.  If align is 0, then either op1 is 0
9714 		 or the greatest power-of-2 divisor of op1 doesn't fit in an
9715 		 unsigned HOST_WIDE_INT.  In either case, no additional
9716 		 constraint is imposed.  */
9717 	      if (align)
9718 		modulus = MIN (modulus, align);
9719 
9720 	      return modulus;
9721 	    }
9722 	}
9723     }
9724 
9725   /* If we get here, we were unable to determine anything useful about the
9726      expression.  */
9727   return 1;
9728 }
9729 
9730 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9731    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
9732 
9733 static bool
9734 vec_cst_ctor_to_array (tree arg, tree *elts)
9735 {
9736   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9737 
9738   if (TREE_CODE (arg) == VECTOR_CST)
9739     {
9740       for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9741 	elts[i] = VECTOR_CST_ELT (arg, i);
9742     }
9743   else if (TREE_CODE (arg) == CONSTRUCTOR)
9744     {
9745       constructor_elt *elt;
9746 
9747       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9748 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9749 	  return false;
9750 	else
9751 	  elts[i] = elt->value;
9752     }
9753   else
9754     return false;
9755   for (; i < nelts; i++)
9756     elts[i]
9757       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9758   return true;
9759 }
9760 
9761 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9762    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9763    NULL_TREE otherwise.  */
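/* For example, permuting {1, 2} and {3, 4} with SEL = {0, 3} yields
   {1, 4}: selector values 0 .. nelts-1 pick elements of ARG0 and
   values nelts .. 2*nelts-1 pick elements of ARG1.  */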
9764 
9765 static tree
9766 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9767 {
9768   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9769   tree *elts;
9770   bool need_ctor = false;
9771 
9772   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9773 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9774   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9775       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9776     return NULL_TREE;
9777 
9778   elts = XALLOCAVEC (tree, nelts * 3);
9779   if (!vec_cst_ctor_to_array (arg0, elts)
9780       || !vec_cst_ctor_to_array (arg1, elts + nelts))
9781     return NULL_TREE;
9782 
9783   for (i = 0; i < nelts; i++)
9784     {
9785       if (!CONSTANT_CLASS_P (elts[sel[i]]))
9786 	need_ctor = true;
9787       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9788     }
9789 
9790   if (need_ctor)
9791     {
9792       vec<constructor_elt, va_gc> *v;
9793       vec_alloc (v, nelts);
9794       for (i = 0; i < nelts; i++)
9795 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9796       return build_constructor (type, v);
9797     }
9798   else
9799     return build_vector (type, &elts[2 * nelts]);
9800 }
9801 
9802 /* Try to fold a pointer difference of type TYPE between two address
9803    expressions of array references AREF0 and AREF1 using location LOC.  Return a
9804    simplified expression for the difference or NULL_TREE.  */
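/* For instance, &a[i] - &a[j] becomes (i - j) * sizeof (a[0]) once
   the equal bases cancel, while &a[2][i] - &a[3][j] recurses on the
   outer array references to compute the base offset.  */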
9805 
9806 static tree
9807 fold_addr_of_array_ref_difference (location_t loc, tree type,
9808 				   tree aref0, tree aref1)
9809 {
9810   tree base0 = TREE_OPERAND (aref0, 0);
9811   tree base1 = TREE_OPERAND (aref1, 0);
9812   tree base_offset = build_int_cst (type, 0);
9813 
9814   /* If the bases are array references as well, recurse.  If the bases
9815      are pointer indirections, compute the difference of the pointers.
9816      If the bases are equal, we are set.  */
9817   if ((TREE_CODE (base0) == ARRAY_REF
9818        && TREE_CODE (base1) == ARRAY_REF
9819        && (base_offset
9820 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9821       || (INDIRECT_REF_P (base0)
9822 	  && INDIRECT_REF_P (base1)
9823 	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9824 					     TREE_OPERAND (base0, 0),
9825 					     TREE_OPERAND (base1, 0))))
9826       || operand_equal_p (base0, base1, 0))
9827     {
9828       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9829       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9830       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9831       tree diff = build2 (MINUS_EXPR, type, op0, op1);
9832       return fold_build2_loc (loc, PLUS_EXPR, type,
9833 			      base_offset,
9834 			      fold_build2_loc (loc, MULT_EXPR, type,
9835 					       diff, esz));
9836     }
9837   return NULL_TREE;
9838 }
9839 
9840 /* If the real or vector real constant CST of type TYPE has an exact
9841    inverse, return it, else return NULL.  */
9842 
9843 static tree
9844 exact_inverse (tree type, tree cst)
9845 {
9846   REAL_VALUE_TYPE r;
9847   tree unit_type, *elts;
9848   enum machine_mode mode;
9849   unsigned vec_nelts, i;
9850 
9851   switch (TREE_CODE (cst))
9852     {
9853     case REAL_CST:
9854       r = TREE_REAL_CST (cst);
9855 
9856       if (exact_real_inverse (TYPE_MODE (type), &r))
9857 	return build_real (type, r);
9858 
9859       return NULL_TREE;
9860 
9861     case VECTOR_CST:
9862       vec_nelts = VECTOR_CST_NELTS (cst);
9863       elts = XALLOCAVEC (tree, vec_nelts);
9864       unit_type = TREE_TYPE (type);
9865       mode = TYPE_MODE (unit_type);
9866 
9867       for (i = 0; i < vec_nelts; i++)
9868 	{
9869 	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9870 	  if (!exact_real_inverse (mode, &r))
9871 	    return NULL_TREE;
9872 	  elts[i] = build_real (unit_type, r);
9873 	}
9874 
9875       return build_vector (type, elts);
9876 
9877     default:
9878       return NULL_TREE;
9879     }
9880 }
9881 
9882 /*  Mask out the tz least significant bits of X of type TYPE where
9883     tz is the number of trailing zeroes in Y.  */
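/* For example, if Y ends in three zero bits, X is ANDed with ~7,
   clearing its three least significant bits.  */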
9884 static double_int
9885 mask_with_tz (tree type, double_int x, double_int y)
9886 {
9887   int tz = y.trailing_zeros ();
9888 
9889   if (tz > 0)
9890     {
9891       double_int mask;
9892 
9893       mask = ~double_int::mask (tz);
9894       mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9895       return mask & x;
9896     }
9897   return x;
9898 }
9899 
9900 /* Fold a binary expression of code CODE and type TYPE with operands
9901    OP0 and OP1.  LOC is the location of the resulting expression.
9902    Return the folded expression if folding is successful.  Otherwise,
9903    return NULL_TREE.  */
9904 
9905 tree
9906 fold_binary_loc (location_t loc,
9907 	     enum tree_code code, tree type, tree op0, tree op1)
9908 {
9909   enum tree_code_class kind = TREE_CODE_CLASS (code);
9910   tree arg0, arg1, tem;
9911   tree t1 = NULL_TREE;
9912   bool strict_overflow_p;
9913 
9914   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9915 	      && TREE_CODE_LENGTH (code) == 2
9916 	      && op0 != NULL_TREE
9917 	      && op1 != NULL_TREE);
9918 
9919   arg0 = op0;
9920   arg1 = op1;
9921 
9922   /* Strip any conversions that don't change the mode.  This is
9923      safe for every expression, except for a comparison expression
9924      because its signedness is derived from its operands.  So, in
9925      the latter case, only strip conversions that don't change the
9926      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9927      preserved.
9928 
9929      Note that this is done as an internal manipulation within the
9930      constant folder, in order to find the simplest representation
9931      of the arguments so that their form can be studied.  In any
9932      case, the appropriate type conversions should be put back in
9933      the tree that will get out of the constant folder.  */
9934 
9935   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9936     {
9937       STRIP_SIGN_NOPS (arg0);
9938       STRIP_SIGN_NOPS (arg1);
9939     }
9940   else
9941     {
9942       STRIP_NOPS (arg0);
9943       STRIP_NOPS (arg1);
9944     }
9945 
9946   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9947      constant but we can't do arithmetic on them.  */
9948   if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9949       || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9950       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9951       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9952       || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9953       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9954       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9955     {
9956       if (kind == tcc_binary)
9957 	{
9958 	  /* Make sure type and arg0 have the same saturating flag.  */
9959 	  gcc_assert (TYPE_SATURATING (type)
9960 		      == TYPE_SATURATING (TREE_TYPE (arg0)));
9961 	  tem = const_binop (code, arg0, arg1);
9962 	}
9963       else if (kind == tcc_comparison)
9964 	tem = fold_relational_const (code, type, arg0, arg1);
9965       else
9966 	tem = NULL_TREE;
9967 
9968       if (tem != NULL_TREE)
9969 	{
9970 	  if (TREE_TYPE (tem) != type)
9971 	    tem = fold_convert_loc (loc, type, tem);
9972 	  return tem;
9973 	}
9974     }
9975 
9976   /* If this is a commutative operation, and ARG0 is a constant, move it
9977      to ARG1 to reduce the number of tests below.  */
9978   if (commutative_tree_code (code)
9979       && tree_swap_operands_p (arg0, arg1, true))
9980     return fold_build2_loc (loc, code, type, op1, op0);
9981 
9982   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9983 
9984      First check for cases where an arithmetic operation is applied to a
9985      compound, conditional, or comparison operation.  Push the arithmetic
9986      operation inside the compound or conditional to see if any folding
9987      can then be done.  Convert comparison to conditional for this purpose.
9988      This also optimizes non-constant cases that used to be done in
9989      expand_expr.
9990 
9991      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9992      where one of the operands is a comparison and the other is a comparison, a
9993      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9994      code below would make the expression more complex.  Change it to a
9995      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9996      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
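  /* For example, (a < b) & (c < d) becomes the TRUTH_AND_EXPR of the
     two comparisons, and (a < b) == (c < d) becomes the inversion of
     their TRUTH_XOR_EXPR.  */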
9997 
9998   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9999        || code == EQ_EXPR || code == NE_EXPR)
10000       && TREE_CODE (type) != VECTOR_TYPE
10001       && ((truth_value_p (TREE_CODE (arg0))
10002 	   && (truth_value_p (TREE_CODE (arg1))
10003 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10004 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10005 	  || (truth_value_p (TREE_CODE (arg1))
10006 	      && (truth_value_p (TREE_CODE (arg0))
10007 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10008 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10009     {
10010       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10011 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10012 			 : TRUTH_XOR_EXPR,
10013 			 boolean_type_node,
10014 			 fold_convert_loc (loc, boolean_type_node, arg0),
10015 			 fold_convert_loc (loc, boolean_type_node, arg1));
10016 
10017       if (code == EQ_EXPR)
10018 	tem = invert_truthvalue_loc (loc, tem);
10019 
10020       return fold_convert_loc (loc, type, tem);
10021     }
10022 
10023   if (TREE_CODE_CLASS (code) == tcc_binary
10024       || TREE_CODE_CLASS (code) == tcc_comparison)
10025     {
10026       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10027 	{
10028 	  tem = fold_build2_loc (loc, code, type,
10029 			     fold_convert_loc (loc, TREE_TYPE (op0),
10030 					       TREE_OPERAND (arg0, 1)), op1);
10031 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10032 			     tem);
10033 	}
10034       if (TREE_CODE (arg1) == COMPOUND_EXPR
10035 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10036 	{
10037 	  tem = fold_build2_loc (loc, code, type, op0,
10038 			     fold_convert_loc (loc, TREE_TYPE (op1),
10039 					       TREE_OPERAND (arg1, 1)));
10040 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10041 			     tem);
10042 	}
10043 
10044       if (TREE_CODE (arg0) == COND_EXPR
10045 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10046 	  || COMPARISON_CLASS_P (arg0))
10047 	{
10048 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10049 						     arg0, arg1,
10050 						     /*cond_first_p=*/1);
10051 	  if (tem != NULL_TREE)
10052 	    return tem;
10053 	}
10054 
10055       if (TREE_CODE (arg1) == COND_EXPR
10056 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10057 	  || COMPARISON_CLASS_P (arg1))
10058 	{
10059 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10060 						     arg1, arg0,
10061 					             /*cond_first_p=*/0);
10062 	  if (tem != NULL_TREE)
10063 	    return tem;
10064 	}
10065     }
10066 
10067   switch (code)
10068     {
10069     case MEM_REF:
10070       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10071       if (TREE_CODE (arg0) == ADDR_EXPR
10072 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10073 	{
10074 	  tree iref = TREE_OPERAND (arg0, 0);
10075 	  return fold_build2 (MEM_REF, type,
10076 			      TREE_OPERAND (iref, 0),
10077 			      int_const_binop (PLUS_EXPR, arg1,
10078 					       TREE_OPERAND (iref, 1)));
10079 	}
10080 
10081       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10082       if (TREE_CODE (arg0) == ADDR_EXPR
10083 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10084 	{
10085 	  tree base;
10086 	  HOST_WIDE_INT coffset;
10087 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10088 						&coffset);
10089 	  if (!base)
10090 	    return NULL_TREE;
10091 	  return fold_build2 (MEM_REF, type,
10092 			      build_fold_addr_expr (base),
10093 			      int_const_binop (PLUS_EXPR, arg1,
10094 					       size_int (coffset)));
10095 	}
10096 
10097       return NULL_TREE;
10098 
10099     case POINTER_PLUS_EXPR:
10100       /* 0 +p index -> (type)index */
10101       if (integer_zerop (arg0))
10102 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10103 
10104       /* PTR +p 0 -> PTR */
10105       if (integer_zerop (arg1))
10106 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10107 
10108       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10109       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10110 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10111         return fold_convert_loc (loc, type,
10112 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10113 					      fold_convert_loc (loc, sizetype,
10114 								arg1),
10115 					      fold_convert_loc (loc, sizetype,
10116 								arg0)));
10117 
10118       /* (PTR +p B) +p A -> PTR +p (B + A) */
10119       if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10120 	{
10121 	  tree inner;
10122 	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10123 	  tree arg00 = TREE_OPERAND (arg0, 0);
10124 	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10125 			       arg01, fold_convert_loc (loc, sizetype, arg1));
10126 	  return fold_convert_loc (loc, type,
10127 				   fold_build_pointer_plus_loc (loc,
10128 								arg00, inner));
10129 	}
10130 
10131       /* PTR_CST +p CST -> CST1 */
10132       if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10133 	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10134 			    fold_convert_loc (loc, type, arg1));
10135 
10136      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10137 	of the array.  The loop optimizer sometimes produces this type of
10138 	expression.  */
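      /* For example, &a[1] +p 4 * i, for an array of 4-byte elements,
	 can become &a[1 + i].  */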
10139       if (TREE_CODE (arg0) == ADDR_EXPR)
10140 	{
10141 	  tem = try_move_mult_to_index (loc, arg0,
10142 					fold_convert_loc (loc,
10143 							  ssizetype, arg1));
10144 	  if (tem)
10145 	    return fold_convert_loc (loc, type, tem);
10146 	}
10147 
10148       return NULL_TREE;
10149 
10150     case PLUS_EXPR:
10151       /* A + (-B) -> A - B */
10152       if (TREE_CODE (arg1) == NEGATE_EXPR)
10153 	return fold_build2_loc (loc, MINUS_EXPR, type,
10154 			    fold_convert_loc (loc, type, arg0),
10155 			    fold_convert_loc (loc, type,
10156 					      TREE_OPERAND (arg1, 0)));
10157       /* (-A) + B -> B - A */
10158       if (TREE_CODE (arg0) == NEGATE_EXPR
10159 	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10160 	return fold_build2_loc (loc, MINUS_EXPR, type,
10161 			    fold_convert_loc (loc, type, arg1),
10162 			    fold_convert_loc (loc, type,
10163 					      TREE_OPERAND (arg0, 0)));
10164 
10165       if (INTEGRAL_TYPE_P (type))
10166 	{
10167 	  /* Convert ~A + 1 to -A.  */
10168 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10169 	      && integer_onep (arg1))
10170 	    return fold_build1_loc (loc, NEGATE_EXPR, type,
10171 				fold_convert_loc (loc, type,
10172 						  TREE_OPERAND (arg0, 0)));
10173 
10174 	  /* ~X + X is -1.  */
10175 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10176 	      && !TYPE_OVERFLOW_TRAPS (type))
10177 	    {
10178 	      tree tem = TREE_OPERAND (arg0, 0);
10179 
10180 	      STRIP_NOPS (tem);
10181 	      if (operand_equal_p (tem, arg1, 0))
10182 		{
10183 		  t1 = build_int_cst_type (type, -1);
10184 		  return omit_one_operand_loc (loc, type, t1, arg1);
10185 		}
10186 	    }
10187 
10188 	  /* X + ~X is -1.  */
10189 	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
10190 	      && !TYPE_OVERFLOW_TRAPS (type))
10191 	    {
10192 	      tree tem = TREE_OPERAND (arg1, 0);
10193 
10194 	      STRIP_NOPS (tem);
10195 	      if (operand_equal_p (arg0, tem, 0))
10196 		{
10197 		  t1 = build_int_cst_type (type, -1);
10198 		  return omit_one_operand_loc (loc, type, t1, arg0);
10199 		}
10200 	    }
10201 
10202 	  /* X + (X / CST) * -CST is X % CST.  */
10203 	  if (TREE_CODE (arg1) == MULT_EXPR
10204 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10205 	      && operand_equal_p (arg0,
10206 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10207 	    {
10208 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10209 	      tree cst1 = TREE_OPERAND (arg1, 1);
10210 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10211 				      cst1, cst0);
10212 	      if (sum && integer_zerop (sum))
10213 		return fold_convert_loc (loc, type,
10214 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10215 						      TREE_TYPE (arg0), arg0,
10216 						      cst0));
10217 	    }
10218 	}
10219 
10220       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10221 	 one.  Make sure the type is not saturating and has the signedness of
10222 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10223 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
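      /* For example, fold_plusminus_mult_expr can turn a*3 + a*5 into
	 a*8, or a*c + b*c into (a + b)*c, when the checks above allow
	 the re-association.  */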
10224       if ((TREE_CODE (arg0) == MULT_EXPR
10225 	   || TREE_CODE (arg1) == MULT_EXPR)
10226 	  && !TYPE_SATURATING (type)
10227 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10228 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10229 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10230         {
10231 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10232 	  if (tem)
10233 	    return tem;
10234 	}
10235 
10236       if (! FLOAT_TYPE_P (type))
10237 	{
10238 	  if (integer_zerop (arg1))
10239 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10240 
10241 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10242 	     with a constant, and the two constants have no bits in common,
10243 	     we should treat this as a BIT_IOR_EXPR since this may produce more
10244 	     simplifications.  */
10245 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10246 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10247 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10248 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10249 	      && integer_zerop (const_binop (BIT_AND_EXPR,
10250 					     TREE_OPERAND (arg0, 1),
10251 					     TREE_OPERAND (arg1, 1))))
10252 	    {
10253 	      code = BIT_IOR_EXPR;
10254 	      goto bit_ior;
10255 	    }
10256 
10257 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10258 	     (plus (plus (mult) (mult)) (foo)) so that we can
10259 	     take advantage of the factoring cases below.  */
10260 	  if (TYPE_OVERFLOW_WRAPS (type)
10261 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10262 		    || TREE_CODE (arg0) == MINUS_EXPR)
10263 		   && TREE_CODE (arg1) == MULT_EXPR)
10264 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10265 		       || TREE_CODE (arg1) == MINUS_EXPR)
10266 		      && TREE_CODE (arg0) == MULT_EXPR)))
10267 	    {
10268 	      tree parg0, parg1, parg, marg;
10269 	      enum tree_code pcode;
10270 
10271 	      if (TREE_CODE (arg1) == MULT_EXPR)
10272 		parg = arg0, marg = arg1;
10273 	      else
10274 		parg = arg1, marg = arg0;
10275 	      pcode = TREE_CODE (parg);
10276 	      parg0 = TREE_OPERAND (parg, 0);
10277 	      parg1 = TREE_OPERAND (parg, 1);
10278 	      STRIP_NOPS (parg0);
10279 	      STRIP_NOPS (parg1);
10280 
10281 	      if (TREE_CODE (parg0) == MULT_EXPR
10282 		  && TREE_CODE (parg1) != MULT_EXPR)
10283 		return fold_build2_loc (loc, pcode, type,
10284 				    fold_build2_loc (loc, PLUS_EXPR, type,
10285 						 fold_convert_loc (loc, type,
10286 								   parg0),
10287 						 fold_convert_loc (loc, type,
10288 								   marg)),
10289 				    fold_convert_loc (loc, type, parg1));
10290 	      if (TREE_CODE (parg0) != MULT_EXPR
10291 		  && TREE_CODE (parg1) == MULT_EXPR)
10292 		return
10293 		  fold_build2_loc (loc, PLUS_EXPR, type,
10294 			       fold_convert_loc (loc, type, parg0),
10295 			       fold_build2_loc (loc, pcode, type,
10296 					    fold_convert_loc (loc, type, marg),
10297 					    fold_convert_loc (loc, type,
10298 							      parg1)));
10299 	    }
10300 	}
10301       else
10302 	{
10303 	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10304 	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10305 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10306 
10307 	  /* Likewise if the operands are reversed.  */
10308 	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10309 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10310 
10311 	  /* Convert X + -C into X - C.  */
10312 	  if (TREE_CODE (arg1) == REAL_CST
10313 	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10314 	    {
10315 	      tem = fold_negate_const (arg1, type);
10316 	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10317 		return fold_build2_loc (loc, MINUS_EXPR, type,
10318 				    fold_convert_loc (loc, type, arg0),
10319 				    fold_convert_loc (loc, type, tem));
10320 	    }
10321 
10322 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10323 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10324 	     if signed zeros are involved.  */
10325 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10326               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10327 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10328 	    {
10329 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10330 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10331 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10332 	      bool arg0rz = false, arg0iz = false;
10333 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10334 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10335 		{
10336 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10337 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10338 		  if (arg0rz && arg1i && real_zerop (arg1i))
10339 		    {
10340 		      tree rp = arg1r ? arg1r
10341 				  : build1 (REALPART_EXPR, rtype, arg1);
10342 		      tree ip = arg0i ? arg0i
10343 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10344 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10345 		    }
10346 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10347 		    {
10348 		      tree rp = arg0r ? arg0r
10349 				  : build1 (REALPART_EXPR, rtype, arg0);
10350 		      tree ip = arg1i ? arg1i
10351 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10352 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10353 		    }
10354 		}
10355 	    }
10356 
10357 	  if (flag_unsafe_math_optimizations
10358 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10359 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10360 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10361 	    return tem;
10362 
10363 	  /* Convert x+x into x*2.0.  */
10364 	  if (operand_equal_p (arg0, arg1, 0)
10365 	      && SCALAR_FLOAT_TYPE_P (type))
10366 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10367 				build_real (type, dconst2));
10368 
10369           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10370              We associate floats only if the user has specified
10371              -fassociative-math.  */
10372           if (flag_associative_math
10373               && TREE_CODE (arg1) == PLUS_EXPR
10374               && TREE_CODE (arg0) != MULT_EXPR)
10375             {
10376               tree tree10 = TREE_OPERAND (arg1, 0);
10377               tree tree11 = TREE_OPERAND (arg1, 1);
10378               if (TREE_CODE (tree11) == MULT_EXPR
10379 		  && TREE_CODE (tree10) == MULT_EXPR)
10380                 {
10381                   tree tree0;
10382                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10383                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10384                 }
10385             }
10386           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10387              We associate floats only if the user has specified
10388              -fassociative-math.  */
10389           if (flag_associative_math
10390               && TREE_CODE (arg0) == PLUS_EXPR
10391               && TREE_CODE (arg1) != MULT_EXPR)
10392             {
10393               tree tree00 = TREE_OPERAND (arg0, 0);
10394               tree tree01 = TREE_OPERAND (arg0, 1);
10395               if (TREE_CODE (tree01) == MULT_EXPR
10396 		  && TREE_CODE (tree00) == MULT_EXPR)
10397                 {
10398                   tree tree0;
10399                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10400                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10401                 }
10402             }
10403 	}
10404 
10405      bit_rotate:
10406       /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
10407 	 unsigned and C1+C2 is the size of A.  */
10408       /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
10409 	 unsigned and Z is the size of A.  */
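      /* For example, for unsigned 32-bit x, (x << 3) + (x >> 29) and
	 (x << n) + (x >> (32 - n)) can both become left-rotates of x.  */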
10410       {
10411 	enum tree_code code0, code1;
10412 	tree rtype;
10413 	code0 = TREE_CODE (arg0);
10414 	code1 = TREE_CODE (arg1);
10415 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10416 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10417 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10418 			        TREE_OPERAND (arg1, 0), 0)
10419 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10420 	        TYPE_UNSIGNED (rtype))
10421 	    /* Only create rotates in complete modes.  Other cases are not
10422 	       expanded properly.  */
10423 	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10424 	  {
10425 	    tree tree01, tree11;
10426 	    enum tree_code code01, code11;
10427 
10428 	    tree01 = TREE_OPERAND (arg0, 1);
10429 	    tree11 = TREE_OPERAND (arg1, 1);
10430 	    STRIP_NOPS (tree01);
10431 	    STRIP_NOPS (tree11);
10432 	    code01 = TREE_CODE (tree01);
10433 	    code11 = TREE_CODE (tree11);
10434 	    if (code01 == INTEGER_CST
10435 		&& code11 == INTEGER_CST
10436 		&& TREE_INT_CST_HIGH (tree01) == 0
10437 		&& TREE_INT_CST_HIGH (tree11) == 0
10438 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10439 		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10440 	      {
10441 		tem = build2_loc (loc, LROTATE_EXPR,
10442 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
10443 				  TREE_OPERAND (arg0, 0),
10444 				  code0 == LSHIFT_EXPR ? tree01 : tree11);
10445 		return fold_convert_loc (loc, type, tem);
10446 	      }
10447 	    else if (code11 == MINUS_EXPR)
10448 	      {
10449 		tree tree110, tree111;
10450 		tree110 = TREE_OPERAND (tree11, 0);
10451 		tree111 = TREE_OPERAND (tree11, 1);
10452 		STRIP_NOPS (tree110);
10453 		STRIP_NOPS (tree111);
10454 		if (TREE_CODE (tree110) == INTEGER_CST
10455 		    && 0 == compare_tree_int (tree110,
10456 					      TYPE_PRECISION
10457 					      (TREE_TYPE (TREE_OPERAND
10458 							  (arg0, 0))))
10459 		    && operand_equal_p (tree01, tree111, 0))
10460 		  return
10461 		    fold_convert_loc (loc, type,
10462 				      build2 ((code0 == LSHIFT_EXPR
10463 					       ? LROTATE_EXPR
10464 					       : RROTATE_EXPR),
10465 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10466 					      TREE_OPERAND (arg0, 0), tree01));
10467 	      }
10468 	    else if (code01 == MINUS_EXPR)
10469 	      {
10470 		tree tree010, tree011;
10471 		tree010 = TREE_OPERAND (tree01, 0);
10472 		tree011 = TREE_OPERAND (tree01, 1);
10473 		STRIP_NOPS (tree010);
10474 		STRIP_NOPS (tree011);
10475 		if (TREE_CODE (tree010) == INTEGER_CST
10476 		    && 0 == compare_tree_int (tree010,
10477 					      TYPE_PRECISION
10478 					      (TREE_TYPE (TREE_OPERAND
10479 							  (arg0, 0))))
10480 		    && operand_equal_p (tree11, tree011, 0))
10481 		    return fold_convert_loc
10482 		      (loc, type,
10483 		       build2 ((code0 != LSHIFT_EXPR
10484 				? LROTATE_EXPR
10485 				: RROTATE_EXPR),
10486 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10487 			       TREE_OPERAND (arg0, 0), tree11));
10488 	      }
10489 	  }
10490       }
10491 
10492     associate:
10493       /* In most languages, we can't associate operations on floats through
10494 	 parentheses.  Rather than remember where the parentheses were, we
10495 	 don't associate floats at all, unless the user has specified
10496 	 -fassociative-math.
10497 	 And, we need to make sure the type is not saturating.  */
10498 
10499       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10500 	  && !TYPE_SATURATING (type))
10501 	{
10502 	  tree var0, con0, lit0, minus_lit0;
10503 	  tree var1, con1, lit1, minus_lit1;
10504 	  tree atype = type;
10505 	  bool ok = true;
10506 
10507 	  /* Split both trees into variables, constants, and literals.  Then
10508 	     associate each group together, the constants with literals,
10509 	     then the result with variables.  This increases the chances of
10510 	     literals being recombined later and of generating relocatable
10511 	     expressions for the sum of a constant and literal.  */
10512 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10513 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10514 			     code == MINUS_EXPR);
10515 
10516 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10517 	  if (code == MINUS_EXPR)
10518 	    code = PLUS_EXPR;
10519 
10520 	  /* With undefined overflow prefer doing association in a type
10521 	     which wraps on overflow, if that is one of the operand types.  */
10522 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10523 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10524 	    {
10525 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10526 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10527 		atype = TREE_TYPE (arg0);
10528 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10529 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10530 		atype = TREE_TYPE (arg1);
10531 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10532 	    }
10533 
10534 	  /* With undefined overflow we can only associate constants with one
10535 	     variable, and constants whose association doesn't overflow.  */
10536 	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10537 	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10538 	    {
10539 	      if (var0 && var1)
10540 		{
10541 		  tree tmp0 = var0;
10542 		  tree tmp1 = var1;
10543 
10544 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10545 		    tmp0 = TREE_OPERAND (tmp0, 0);
10546 		  if (CONVERT_EXPR_P (tmp0)
10547 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10548 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10549 			  <= TYPE_PRECISION (atype)))
10550 		    tmp0 = TREE_OPERAND (tmp0, 0);
10551 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10552 		    tmp1 = TREE_OPERAND (tmp1, 0);
10553 		  if (CONVERT_EXPR_P (tmp1)
10554 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10555 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10556 			  <= TYPE_PRECISION (atype)))
10557 		    tmp1 = TREE_OPERAND (tmp1, 0);
10558 		  /* The only case we can still associate with two variables
10559 		     is if they are the same, modulo negation and bit-pattern
10560 		     preserving conversions.  */
10561 		  if (!operand_equal_p (tmp0, tmp1, 0))
10562 		    ok = false;
10563 		}
10564 	    }
10565 
10566 	  /* Only do something if we found more than two objects.  Otherwise,
10567 	     nothing has changed and we risk infinite recursion.  */
10568 	  if (ok
10569 	      && (2 < ((var0 != 0) + (var1 != 0)
10570 		       + (con0 != 0) + (con1 != 0)
10571 		       + (lit0 != 0) + (lit1 != 0)
10572 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10573 	    {
10574 	      bool any_overflows = false;
10575 	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10576 	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10577 	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10578 	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10579 	      var0 = associate_trees (loc, var0, var1, code, atype);
10580 	      con0 = associate_trees (loc, con0, con1, code, atype);
10581 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
10582 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10583 					    code, atype);
10584 
10585 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10586 		 greater than the positive part.  Otherwise, the multiplicative
10587 		 folding code (i.e. extract_muldiv) may be fooled when
10588 		 unsigned constants are subtracted, as in the following
10589 		 example: ((X*2 + 4) - 8U)/2.  */
10590 	      if (minus_lit0 && lit0)
10591 		{
10592 		  if (TREE_CODE (lit0) == INTEGER_CST
10593 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10594 		      && tree_int_cst_lt (lit0, minus_lit0))
10595 		    {
10596 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10597 						    MINUS_EXPR, atype);
10598 		      lit0 = 0;
10599 		    }
10600 		  else
10601 		    {
10602 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10603 					      MINUS_EXPR, atype);
10604 		      minus_lit0 = 0;
10605 		    }
10606 		}
10607 
10608 	      /* Don't introduce overflows through reassociation.  */
10609 	      if (!any_overflows
10610 		  && ((lit0 && TREE_OVERFLOW_P (lit0))
10611 		      || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10612 		return NULL_TREE;
10613 
10614 	      if (minus_lit0)
10615 		{
10616 		  if (con0 == 0)
10617 		    return
10618 		      fold_convert_loc (loc, type,
10619 					associate_trees (loc, var0, minus_lit0,
10620 							 MINUS_EXPR, atype));
10621 		  else
10622 		    {
10623 		      con0 = associate_trees (loc, con0, minus_lit0,
10624 					      MINUS_EXPR, atype);
10625 		      return
10626 			fold_convert_loc (loc, type,
10627 					  associate_trees (loc, var0, con0,
10628 							   PLUS_EXPR, atype));
10629 		    }
10630 		}
10631 
10632 	      con0 = associate_trees (loc, con0, lit0, code, atype);
10633 	      return
10634 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10635 							      code, atype));
10636 	    }
10637 	}
10638 
10639       return NULL_TREE;
10640 
10641     case MINUS_EXPR:
10642       /* Pointer simplifications for subtraction, simple reassociations. */
10643       if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10644 	{
10645 	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10646 	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10647 	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10648 	    {
10649 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10650 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10651 	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10652 	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10653 	      return fold_build2_loc (loc, PLUS_EXPR, type,
10654 				  fold_build2_loc (loc, MINUS_EXPR, type,
10655 					       arg00, arg10),
10656 				  fold_build2_loc (loc, MINUS_EXPR, type,
10657 					       arg01, arg11));
10658 	    }
10659 	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10660 	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10661 	    {
10662 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10663 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10664 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10665 				      fold_convert_loc (loc, type, arg1));
10666 	      if (tmp)
10667 	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10668 	    }
10669 	}
10670       /* A - (-B) -> A + B */
10671       if (TREE_CODE (arg1) == NEGATE_EXPR)
10672 	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10673 			    fold_convert_loc (loc, type,
10674 					      TREE_OPERAND (arg1, 0)));
10675       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10676       if (TREE_CODE (arg0) == NEGATE_EXPR
10677 	  && (FLOAT_TYPE_P (type)
10678 	      || INTEGRAL_TYPE_P (type))
10679 	  && negate_expr_p (arg1)
10680 	  && reorder_operands_p (arg0, arg1))
10681 	return fold_build2_loc (loc, MINUS_EXPR, type,
10682 			    fold_convert_loc (loc, type,
10683 					      negate_expr (arg1)),
10684 			    fold_convert_loc (loc, type,
10685 					      TREE_OPERAND (arg0, 0)));
10686       /* Convert -A - 1 to ~A.  */
10687       if (INTEGRAL_TYPE_P (type)
10688 	  && TREE_CODE (arg0) == NEGATE_EXPR
10689 	  && integer_onep (arg1)
10690 	  && !TYPE_OVERFLOW_TRAPS (type))
10691 	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10692 			    fold_convert_loc (loc, type,
10693 					      TREE_OPERAND (arg0, 0)));
10694 
10695       /* Convert -1 - A to ~A.  */
10696       if (INTEGRAL_TYPE_P (type)
10697 	  && integer_all_onesp (arg0))
10698 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10699 
10700 
10701       /* X - (X / CST) * CST is X % CST.  */
10702       if (INTEGRAL_TYPE_P (type)
10703 	  && TREE_CODE (arg1) == MULT_EXPR
10704 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10705 	  && operand_equal_p (arg0,
10706 			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10707 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10708 			      TREE_OPERAND (arg1, 1), 0))
10709 	return
10710 	  fold_convert_loc (loc, type,
10711 			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10712 					 arg0, TREE_OPERAND (arg1, 1)));
10713 
10714       if (! FLOAT_TYPE_P (type))
10715 	{
10716 	  if (integer_zerop (arg0))
10717 	    return negate_expr (fold_convert_loc (loc, type, arg1));
10718 	  if (integer_zerop (arg1))
10719 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10720 
10721 	  /* Fold A - (A & B) into ~B & A.  */
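	  /* A & B has no bits outside A, so the subtraction never borrows;
	     e.g. A == 0xC, B == 0xA: 0xC - 0x8 == 0x4 == ~0xA & 0xC.  */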
10722 	  if (!TREE_SIDE_EFFECTS (arg0)
10723 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10724 	    {
10725 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10726 		{
10727 		  tree arg10 = fold_convert_loc (loc, type,
10728 						 TREE_OPERAND (arg1, 0));
10729 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10730 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10731 						   type, arg10),
10732 				      fold_convert_loc (loc, type, arg0));
10733 		}
10734 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10735 		{
10736 		  tree arg11 = fold_convert_loc (loc,
10737 						 type, TREE_OPERAND (arg1, 1));
10738 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10739 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10740 						   type, arg11),
10741 				      fold_convert_loc (loc, type, arg0));
10742 		}
10743 	    }
10744 
10745 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10746 	     any power of 2 minus 1.  */
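	  /* E.g. A == 6, B == 3: (6 & ~3) - (6 & 3) == 4 - 2 == 2,
	     matching (6 ^ 3) - 3 == 5 - 3 == 2.  */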
10747 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10748 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10749 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10750 				  TREE_OPERAND (arg1, 0), 0))
10751 	    {
10752 	      tree mask0 = TREE_OPERAND (arg0, 1);
10753 	      tree mask1 = TREE_OPERAND (arg1, 1);
10754 	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10755 
10756 	      if (operand_equal_p (tem, mask1, 0))
10757 		{
10758 		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10759 				     TREE_OPERAND (arg0, 0), mask1);
10760 		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10761 		}
10762 	    }
10763 	}
10764 
10765       /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10766       else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10767 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10768 
10769       /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10770 	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10771 	 (-ARG1 + ARG0) reduces to -ARG1.  */
10772       else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10773 	return negate_expr (fold_convert_loc (loc, type, arg1));
10774 
10775       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10776 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10777 	 signed zeros are involved.  */
10778       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10779 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10780 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10781         {
10782 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10783 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10784 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10785 	  bool arg0rz = false, arg0iz = false;
10786 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10787 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10788 	    {
10789 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10790 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10791 	      if (arg0rz && arg1i && real_zerop (arg1i))
10792 	        {
10793 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10794 					 arg1r ? arg1r
10795 					 : build1 (REALPART_EXPR, rtype, arg1));
10796 		  tree ip = arg0i ? arg0i
10797 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10798 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10799 		}
10800 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10801 	        {
10802 		  tree rp = arg0r ? arg0r
10803 		    : build1 (REALPART_EXPR, rtype, arg0);
10804 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10805 					 arg1i ? arg1i
10806 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10807 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10808 		}
10809 	    }
10810 	}
10811 
10812       /* Fold &x - &x.  This can happen from &x.foo - &x.
10813 	 This is unsafe for certain floats even in non-IEEE formats.
10814 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10815 	 Also note that operand_equal_p is always false if an operand
10816 	 is volatile.  */
10817 
10818       if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10819 	  && operand_equal_p (arg0, arg1, 0))
10820 	return build_zero_cst (type);
10821 
10822       /* A - B -> A + (-B) if B is easily negatable.  */
10823       if (negate_expr_p (arg1)
10824 	  && ((FLOAT_TYPE_P (type)
10825                /* Avoid this transformation if B is a positive REAL_CST.  */
10826 	       && (TREE_CODE (arg1) != REAL_CST
10827 		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10828 	      || INTEGRAL_TYPE_P (type)))
10829 	return fold_build2_loc (loc, PLUS_EXPR, type,
10830 			    fold_convert_loc (loc, type, arg0),
10831 			    fold_convert_loc (loc, type,
10832 					      negate_expr (arg1)));
10833 
10834       /* Try folding difference of addresses.  */
10835       {
10836 	HOST_WIDE_INT diff;
10837 
10838 	if ((TREE_CODE (arg0) == ADDR_EXPR
10839 	     || TREE_CODE (arg1) == ADDR_EXPR)
10840 	    && ptr_difference_const (arg0, arg1, &diff))
10841 	  return build_int_cst_type (type, diff);
10842       }
10843 
10844       /* Fold &a[i] - &a[j] to i-j.  */
10845       if (TREE_CODE (arg0) == ADDR_EXPR
10846 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10847 	  && TREE_CODE (arg1) == ADDR_EXPR
10848 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10849         {
10850 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10851 							TREE_OPERAND (arg0, 0),
10852 							TREE_OPERAND (arg1, 0));
10853 	  if (tem)
10854 	    return tem;
10855 	}
10856 
10857       if (FLOAT_TYPE_P (type)
10858 	  && flag_unsafe_math_optimizations
10859 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10860 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10861 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10862 	return tem;
10863 
10864       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10865 	 one.  Make sure the type is not saturating and has the signedness of
10866 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10867 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10868       if ((TREE_CODE (arg0) == MULT_EXPR
10869 	   || TREE_CODE (arg1) == MULT_EXPR)
10870 	  && !TYPE_SATURATING (type)
10871 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10872 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10873 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10874         {
10875 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10876 	  if (tem)
10877 	    return tem;
10878 	}
10879 
10880       goto associate;
10881 
10882     case MULT_EXPR:
10883       /* (-A) * (-B) -> A * B  */
10884       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10885 	return fold_build2_loc (loc, MULT_EXPR, type,
10886 			    fold_convert_loc (loc, type,
10887 					      TREE_OPERAND (arg0, 0)),
10888 			    fold_convert_loc (loc, type,
10889 					      negate_expr (arg1)));
10890       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10891 	return fold_build2_loc (loc, MULT_EXPR, type,
10892 			    fold_convert_loc (loc, type,
10893 					      negate_expr (arg0)),
10894 			    fold_convert_loc (loc, type,
10895 					      TREE_OPERAND (arg1, 0)));
10896 
10897       if (! FLOAT_TYPE_P (type))
10898 	{
10899 	  if (integer_zerop (arg1))
10900 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10901 	  if (integer_onep (arg1))
10902 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10903 	  /* Transform x * -1 into -x.  Make sure to do the negation
10904 	     on the original operand with conversions not stripped
10905 	     because we can only strip non-sign-changing conversions.  */
10906 	  if (integer_all_onesp (arg1))
10907 	    return fold_convert_loc (loc, type, negate_expr (op0));
10908 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10909 	  if (TREE_CODE (arg1) == INTEGER_CST
10910 	      && tree_int_cst_sgn (arg1) == -1
10911 	      && negate_expr_p (arg0)
10912 	      && (tem = negate_expr (arg1)) != arg1
10913 	      && !TREE_OVERFLOW (tem))
10914 	    return fold_build2_loc (loc, MULT_EXPR, type,
10915 	    			fold_convert_loc (loc, type,
10916 						  negate_expr (arg0)),
10917 				tem);
10918 
10919 	  /* (a * (1 << b)) is (a << b)  */
10920 	  if (TREE_CODE (arg1) == LSHIFT_EXPR
10921 	      && integer_onep (TREE_OPERAND (arg1, 0)))
10922 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10923 				TREE_OPERAND (arg1, 1));
10924 	  if (TREE_CODE (arg0) == LSHIFT_EXPR
10925 	      && integer_onep (TREE_OPERAND (arg0, 0)))
10926 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10927 				TREE_OPERAND (arg0, 1));
10928 
10929 	  /* (A + A) * C -> A * 2 * C  */
10930 	  if (TREE_CODE (arg0) == PLUS_EXPR
10931 	      && TREE_CODE (arg1) == INTEGER_CST
10932 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10933 			          TREE_OPERAND (arg0, 1), 0))
10934 	    return fold_build2_loc (loc, MULT_EXPR, type,
10935 				omit_one_operand_loc (loc, type,
10936 						  TREE_OPERAND (arg0, 0),
10937 						  TREE_OPERAND (arg0, 1)),
10938 				fold_build2_loc (loc, MULT_EXPR, type,
10939 					     build_int_cst (type, 2), arg1));
10940 
10941 	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10942 	     sign-changing only.  */
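	  /* X /[ex] C denotes an EXACT_DIV_EXPR, a division known to leave
	     no remainder, so multiplying the quotient back by C recovers X.  */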
10943 	  if (TREE_CODE (arg1) == INTEGER_CST
10944 	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
10945 	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10946 	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10947 
10948 	  strict_overflow_p = false;
10949 	  if (TREE_CODE (arg1) == INTEGER_CST
10950 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10951 					     &strict_overflow_p)))
10952 	    {
10953 	      if (strict_overflow_p)
10954 		fold_overflow_warning (("assuming signed overflow does not "
10955 					"occur when simplifying "
10956 					"multiplication"),
10957 				       WARN_STRICT_OVERFLOW_MISC);
10958 	      return fold_convert_loc (loc, type, tem);
10959 	    }
10960 
10961 	  /* Optimize z * conj(z) for integer complex numbers.  */
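	  /* z * conj(z) == real(z)*real(z) + imag(z)*imag(z), a purely real
	     value; e.g. (3 + 4i) * (3 - 4i) == 25.  */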
10962 	  if (TREE_CODE (arg0) == CONJ_EXPR
10963 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10964 	    return fold_mult_zconjz (loc, type, arg1);
10965 	  if (TREE_CODE (arg1) == CONJ_EXPR
10966 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10967 	    return fold_mult_zconjz (loc, type, arg0);
10968 	}
10969       else
10970 	{
10971 	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
10972 	     when x is NaN, since x * 0 is also NaN.  Nor are they the
10973 	     same in modes with signed zeros, since multiplying a
10974 	     negative value by 0 gives -0, not +0.  */
10975 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10976 	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10977 	      && real_zerop (arg1))
10978 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10979 	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
10980 	     Likewise for complex arithmetic with signed zeros.  */
10981 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10982 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10983 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10984 	      && real_onep (arg1))
10985 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10986 
10987 	  /* Transform x * -1.0 into -x.  */
10988 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10989 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10990 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10991 	      && real_minus_onep (arg1))
10992 	    return fold_convert_loc (loc, type, negate_expr (arg0));
10993 
10994 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10995              the result for floating-point types due to rounding, so it is
10996              applied only if -fassociative-math was specified.  */
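	  /* E.g. (2.0 / x) * 3.0 becomes 6.0 / x; rounding can make the two
	     forms differ slightly, hence the flag.  */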
10997 	  if (flag_associative_math
10998 	      && TREE_CODE (arg0) == RDIV_EXPR
10999 	      && TREE_CODE (arg1) == REAL_CST
11000 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11001 	    {
11002 	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11003 				      arg1);
11004 	      if (tem)
11005 		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11006 				    TREE_OPERAND (arg0, 1));
11007 	    }
11008 
11009           /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
11010 	  if (operand_equal_p (arg0, arg1, 0))
11011 	    {
11012 	      tree tem = fold_strip_sign_ops (arg0);
11013 	      if (tem != NULL_TREE)
11014 		{
11015 		  tem = fold_convert_loc (loc, type, tem);
11016 		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11017 		}
11018 	    }
11019 
11020 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11021 	     This is not the same for NaNs or if signed zeros are
11022 	     involved.  */
11023 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11024               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11025 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11026 	      && TREE_CODE (arg1) == COMPLEX_CST
11027 	      && real_zerop (TREE_REALPART (arg1)))
11028 	    {
11029 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11030 	      if (real_onep (TREE_IMAGPART (arg1)))
11031 		return
11032 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11033 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11034 							     rtype, arg0)),
11035 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11036 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
11037 		return
11038 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11039 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11040 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11041 							     rtype, arg0)));
11042 	    }
11043 
11044 	  /* Optimize z * conj(z) for floating point complex numbers.
11045 	     Guarded by flag_unsafe_math_optimizations as non-finite
11046 	     imaginary components don't produce scalar results.  */
11047 	  if (flag_unsafe_math_optimizations
11048 	      && TREE_CODE (arg0) == CONJ_EXPR
11049 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11050 	    return fold_mult_zconjz (loc, type, arg1);
11051 	  if (flag_unsafe_math_optimizations
11052 	      && TREE_CODE (arg1) == CONJ_EXPR
11053 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11054 	    return fold_mult_zconjz (loc, type, arg0);
11055 
11056 	  if (flag_unsafe_math_optimizations)
11057 	    {
11058 	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11059 	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11060 
11061 	      /* Optimizations of root(...)*root(...).  */
11062 	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11063 		{
11064 		  tree rootfn, arg;
11065 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11066 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11067 
11068 		  /* Optimize sqrt(x)*sqrt(x) as x.  */
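		  /* Only valid under -funsafe-math-optimizations: for
		     negative x, sqrt(x)*sqrt(x) is NaN rather than x.  */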
11069 		  if (BUILTIN_SQRT_P (fcode0)
11070 		      && operand_equal_p (arg00, arg10, 0)
11071 		      && ! HONOR_SNANS (TYPE_MODE (type)))
11072 		    return arg00;
11073 
11074 	          /* Optimize root(x)*root(y) as root(x*y).  */
11075 		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11076 		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11077 		  return build_call_expr_loc (loc, rootfn, 1, arg);
11078 		}
11079 
11080 	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
11081 	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11082 		{
11083 		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11084 		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11085 					  CALL_EXPR_ARG (arg0, 0),
11086 					  CALL_EXPR_ARG (arg1, 0));
11087 		  return build_call_expr_loc (loc, expfn, 1, arg);
11088 		}
11089 
11090 	      /* Optimizations of pow(...)*pow(...).  */
11091 	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11092 		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11093 		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11094 		{
11095 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11096 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11097 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11098 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11099 
11100 		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
11101 		  if (operand_equal_p (arg01, arg11, 0))
11102 		    {
11103 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11104 		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11105 					      arg00, arg10);
11106 		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11107 		    }
11108 
11109 		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
11110 		  if (operand_equal_p (arg00, arg10, 0))
11111 		    {
11112 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11113 		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11114 					      arg01, arg11);
11115 		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11116 		    }
11117 		}
11118 
11119 	      /* Optimize tan(x)*cos(x) as sin(x).  */
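	      /* Follows from tan(x) == sin(x)/cos(x); the two forms differ
		 where cos(x) == 0, hence the unsafe-math guard.  */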
11120 	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11121 		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11122 		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11123 		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11124 		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11125 		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11126 		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11127 				      CALL_EXPR_ARG (arg1, 0), 0))
11128 		{
11129 		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11130 
11131 		  if (sinfn != NULL_TREE)
11132 		    return build_call_expr_loc (loc, sinfn, 1,
11133 					    CALL_EXPR_ARG (arg0, 0));
11134 		}
11135 
11136 	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
11137 	      if (fcode1 == BUILT_IN_POW
11138 		  || fcode1 == BUILT_IN_POWF
11139 		  || fcode1 == BUILT_IN_POWL)
11140 		{
11141 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11142 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11143 		  if (TREE_CODE (arg11) == REAL_CST
11144 		      && !TREE_OVERFLOW (arg11)
11145 		      && operand_equal_p (arg0, arg10, 0))
11146 		    {
11147 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11148 		      REAL_VALUE_TYPE c;
11149 		      tree arg;
11150 
11151 		      c = TREE_REAL_CST (arg11);
11152 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11153 		      arg = build_real (type, c);
11154 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11155 		    }
11156 		}
11157 
11158 	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
11159 	      if (fcode0 == BUILT_IN_POW
11160 		  || fcode0 == BUILT_IN_POWF
11161 		  || fcode0 == BUILT_IN_POWL)
11162 		{
11163 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11164 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11165 		  if (TREE_CODE (arg01) == REAL_CST
11166 		      && !TREE_OVERFLOW (arg01)
11167 		      && operand_equal_p (arg1, arg00, 0))
11168 		    {
11169 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11170 		      REAL_VALUE_TYPE c;
11171 		      tree arg;
11172 
11173 		      c = TREE_REAL_CST (arg01);
11174 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11175 		      arg = build_real (type, c);
11176 		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11177 		    }
11178 		}
11179 
11180 	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
11181 	      if (!in_gimple_form
11182 		  && optimize
11183 		  && operand_equal_p (arg0, arg1, 0))
11184 		{
11185 		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11186 
11187 		  if (powfn)
11188 		    {
11189 		      tree arg = build_real (type, dconst2);
11190 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11191 		    }
11192 		}
11193 	    }
11194 	}
11195       goto associate;
11196 
11197     case BIT_IOR_EXPR:
11198     bit_ior:
11199       if (integer_all_onesp (arg1))
11200 	return omit_one_operand_loc (loc, type, arg1, arg0);
11201       if (integer_zerop (arg1))
11202 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11203       if (operand_equal_p (arg0, arg1, 0))
11204 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11205 
11206       /* ~X | X is -1.  */
11207       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11208 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11209 	{
11210 	  t1 = build_zero_cst (type);
11211 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11212 	  return omit_one_operand_loc (loc, type, t1, arg1);
11213 	}
11214 
11215       /* X | ~X is -1.  */
11216       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11217 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11218 	{
11219 	  t1 = build_zero_cst (type);
11220 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11221 	  return omit_one_operand_loc (loc, type, t1, arg0);
11222 	}
11223 
11224       /* Canonicalize (X & C1) | C2.  */
11225       if (TREE_CODE (arg0) == BIT_AND_EXPR
11226 	  && TREE_CODE (arg1) == INTEGER_CST
11227 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11228 	{
11229 	  double_int c1, c2, c3, msk;
11230 	  int width = TYPE_PRECISION (type), w;
11231 
11232 	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11233 	  c2 = tree_to_double_int (arg1);
11234 
11235 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
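	  /* E.g. C1 == 0x0f, C2 == 0xff: (x & 0x0f) | 0xff == 0xff
	     regardless of x, so only X's side effects are kept.  */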
11236 	  if ((c1 & c2) == c1)
11237 	    return omit_one_operand_loc (loc, type, arg1,
11238 					 TREE_OPERAND (arg0, 0));
11239 
11240 	  msk = double_int::mask (width);
11241 
11242 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
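	  /* E.g. for 8-bit X, C1 == 0xf0, C2 == 0x0f:
	     (x & 0xf0) | 0x0f == x | 0x0f.  */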
11243 	  if (msk.and_not (c1 | c2).is_zero ())
11244 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11245 				    TREE_OPERAND (arg0, 0), arg1);
11246 
11247 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11248 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11249 	     mode which allows further optimizations.  */
11250 	  c1 &= msk;
11251 	  c2 &= msk;
11252 	  c3 = c1.and_not (c2);
11253 	  for (w = BITS_PER_UNIT;
11254 	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
11255 	       w <<= 1)
11256 	    {
11257 	      unsigned HOST_WIDE_INT mask
11258 		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11259 	      if (((c1.low | c2.low) & mask) == mask
11260 		  && (c1.low & ~mask) == 0 && c1.high == 0)
11261 		{
11262 		  c3 = double_int::from_uhwi (mask);
11263 		  break;
11264 		}
11265 	    }
11266 
11267 	  if (c3 != c1)
11268 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11269 				    fold_build2_loc (loc, BIT_AND_EXPR, type,
11270 						     TREE_OPERAND (arg0, 0),
11271 						     double_int_to_tree (type,
11272 									 c3)),
11273 				    arg1);
11274 	}
11275 
11276       /* (X & Y) | Y is (X, Y).  */
11277       if (TREE_CODE (arg0) == BIT_AND_EXPR
11278 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11279 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11280       /* (X & Y) | X is (Y, X).  */
11281       if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11283 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11284 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11285       /* X | (X & Y) is (Y, X).  */
11286       if (TREE_CODE (arg1) == BIT_AND_EXPR
11287 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11288 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11289 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11290       /* X | (Y & X) is (Y, X).  */
11291       if (TREE_CODE (arg1) == BIT_AND_EXPR
11292 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11293 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11294 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11295 
11296       /* (X & ~Y) | (~X & Y) is X ^ Y */
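      /* The two AND terms select exactly the bits in which X and Y
	 differ, which is the definition of XOR.  */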
11297       if (TREE_CODE (arg0) == BIT_AND_EXPR
11298 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11299         {
11300 	  tree a0, a1, l0, l1, n0, n1;
11301 
11302 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11303 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11304 
11305 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11306 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11307 
11308 	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11309 	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11310 
11311 	  if ((operand_equal_p (n0, a0, 0)
11312 	       && operand_equal_p (n1, a1, 0))
11313 	      || (operand_equal_p (n0, a1, 0)
11314 		  && operand_equal_p (n1, a0, 0)))
11315 	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11316 	}
11317 
11318       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11319       if (t1 != NULL_TREE)
11320 	return t1;
11321 
11322       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11323 
11324 	 This results in more efficient code for machines without a NAND
11325 	 instruction.  Combine will canonicalize to the first form
11326 	 which will allow use of NAND instructions provided by the
11327 	 backend if they exist.  */
11328       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11329 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11330 	{
11331 	  return
11332 	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
11333 			 build2 (BIT_AND_EXPR, type,
11334 				 fold_convert_loc (loc, type,
11335 						   TREE_OPERAND (arg0, 0)),
11336 				 fold_convert_loc (loc, type,
11337 						   TREE_OPERAND (arg1, 0))));
11338 	}
11339 
11340       /* See if this can be simplified into a rotate first.  If that
11341 	 is unsuccessful continue in the association code.  */
11342       goto bit_rotate;
11343 
11344     case BIT_XOR_EXPR:
11345       if (integer_zerop (arg1))
11346 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11347       if (integer_all_onesp (arg1))
11348 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11349       if (operand_equal_p (arg0, arg1, 0))
11350 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11351 
11352       /* ~X ^ X is -1.  */
11353       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11354 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11355 	{
11356 	  t1 = build_zero_cst (type);
11357 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11358 	  return omit_one_operand_loc (loc, type, t1, arg1);
11359 	}
11360 
11361       /* X ^ ~X is -1.  */
11362       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11363 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11364 	{
11365 	  t1 = build_zero_cst (type);
11366 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11367 	  return omit_one_operand_loc (loc, type, t1, arg0);
11368 	}
11369 
11370       /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11371          with a constant, and the two constants have no bits in common,
11372 	 we should treat this as a BIT_IOR_EXPR since this may produce more
11373 	 simplifications.  */
11374       if (TREE_CODE (arg0) == BIT_AND_EXPR
11375 	  && TREE_CODE (arg1) == BIT_AND_EXPR
11376 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11377 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11378 	  && integer_zerop (const_binop (BIT_AND_EXPR,
11379 					 TREE_OPERAND (arg0, 1),
11380 					 TREE_OPERAND (arg1, 1))))
11381 	{
11382 	  code = BIT_IOR_EXPR;
11383 	  goto bit_ior;
11384 	}
11385 
11386       /* (X | Y) ^ X -> Y & ~X.  */
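      /* Bits set in X are cleared by the XOR; bits clear in X pass Y
	 through.  E.g. X == 0xC, Y == 0xA: 0xE ^ 0xC == 0x2 == 0xA & ~0xC.  */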
11387       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11388           && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11389         {
11390 	  tree t2 = TREE_OPERAND (arg0, 1);
11391 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11392 			    arg1);
11393 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11394 			    fold_convert_loc (loc, type, t2),
11395 			    fold_convert_loc (loc, type, t1));
11396 	  return t1;
11397 	}
11398 
11399       /* (Y | X) ^ X -> Y & ~X.  */
11400       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11401           && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11402         {
11403 	  tree t2 = TREE_OPERAND (arg0, 0);
11404 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11405 			    arg1);
11406 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11407 			    fold_convert_loc (loc, type, t2),
11408 			    fold_convert_loc (loc, type, t1));
11409 	  return t1;
11410 	}
11411 
11412       /* X ^ (X | Y) -> Y & ~X.  */
11413       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11414           && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11415         {
11416 	  tree t2 = TREE_OPERAND (arg1, 1);
11417 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11418 			    arg0);
11419 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11420 			    fold_convert_loc (loc, type, t2),
11421 			    fold_convert_loc (loc, type, t1));
11422 	  return t1;
11423 	}
11424 
11425       /* X ^ (Y | X) -> Y & ~X.  */
11426       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11427           && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11428         {
11429 	  tree t2 = TREE_OPERAND (arg1, 0);
11430 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11431 			    arg0);
11432 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11433 			    fold_convert_loc (loc, type, t2),
11434 			    fold_convert_loc (loc, type, t1));
11435 	  return t1;
11436 	}
11437 
11438       /* Convert ~X ^ ~Y to X ^ Y.  */
11439       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11440 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11441 	return fold_build2_loc (loc, code, type,
11442 			    fold_convert_loc (loc, type,
11443 					      TREE_OPERAND (arg0, 0)),
11444 			    fold_convert_loc (loc, type,
11445 					      TREE_OPERAND (arg1, 0)));
11446 
11447       /* Convert ~X ^ C to X ^ ~C.  */
11448       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11449 	  && TREE_CODE (arg1) == INTEGER_CST)
11450 	return fold_build2_loc (loc, code, type,
11451 			    fold_convert_loc (loc, type,
11452 					      TREE_OPERAND (arg0, 0)),
11453 			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11454 
11455       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11456       if (TREE_CODE (arg0) == BIT_AND_EXPR
11457 	  && integer_onep (TREE_OPERAND (arg0, 1))
11458 	  && integer_onep (arg1))
11459 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11460 				build_zero_cst (TREE_TYPE (arg0)));
11461 
11462       /* Fold (X & Y) ^ Y as ~X & Y.  */
11463       if (TREE_CODE (arg0) == BIT_AND_EXPR
11464 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11465 	{
11466 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11467 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11468 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11469 			      fold_convert_loc (loc, type, arg1));
11470 	}
11471       /* Fold (X & Y) ^ X as ~Y & X.  */
11472       if (TREE_CODE (arg0) == BIT_AND_EXPR
11473 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11474 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11475 	{
11476 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11477 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11478 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11479 			      fold_convert_loc (loc, type, arg1));
11480 	}
11481       /* Fold X ^ (X & Y) as X & ~Y.  */
11482       if (TREE_CODE (arg1) == BIT_AND_EXPR
11483 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11484 	{
11485 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11486 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11487 			      fold_convert_loc (loc, type, arg0),
11488 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11489 	}
11490       /* Fold X ^ (Y & X) as ~Y & X.  */
11491       if (TREE_CODE (arg1) == BIT_AND_EXPR
11492 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11493 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11494 	{
11495 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11496 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11497 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11498 			      fold_convert_loc (loc, type, arg0));
11499 	}
11500 
11501       /* See if this can be simplified into a rotate first.  If that
11502 	 is unsuccessful continue in the association code.  */
11503       goto bit_rotate;
11504 
11505     case BIT_AND_EXPR:
11506       if (integer_all_onesp (arg1))
11507 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11508       if (integer_zerop (arg1))
11509 	return omit_one_operand_loc (loc, type, arg1, arg0);
11510       if (operand_equal_p (arg0, arg1, 0))
11511 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11512 
11513       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
11514       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11515 	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11516 	   || (TREE_CODE (arg0) == EQ_EXPR
11517 	       && integer_zerop (TREE_OPERAND (arg0, 1))))
11518 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11519 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11520 
11521       /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11522       if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11523 	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11524 	   || (TREE_CODE (arg1) == EQ_EXPR
11525 	       && integer_zerop (TREE_OPERAND (arg1, 1))))
11526 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11527 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11528 
11529       /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
11530       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11531 	  && TREE_CODE (arg1) == INTEGER_CST
11532 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11533 	{
11534 	  tree tmp1 = fold_convert_loc (loc, type, arg1);
11535 	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11536 	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11537 	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11538 	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11539 	  return
11540 	    fold_convert_loc (loc, type,
11541 			      fold_build2_loc (loc, BIT_IOR_EXPR,
11542 					   type, tmp2, tmp3));
11543 	}
11544 
11545       /* (X | Y) & Y is (X, Y).  */
11546       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11547 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11548 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11549       /* (X | Y) & X is (Y, X).  */
11550       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11551 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11552 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11553 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11554       /* X & (X | Y) is (Y, X).  */
11555       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11556 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11557 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11558 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11559       /* X & (Y | X) is (Y, X).  */
11560       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11561 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11562 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11563 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11564 
11565       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11566       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11567 	  && integer_onep (TREE_OPERAND (arg0, 1))
11568 	  && integer_onep (arg1))
11569 	{
11570 	  tree tem2;
11571 	  tem = TREE_OPERAND (arg0, 0);
11572 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11573 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11574 				  tem, tem2);
11575 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11576 				  build_zero_cst (TREE_TYPE (tem)));
11577 	}
11578       /* Fold ~X & 1 as (X & 1) == 0.  */
11579       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11580 	  && integer_onep (arg1))
11581 	{
11582 	  tree tem2;
11583 	  tem = TREE_OPERAND (arg0, 0);
11584 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11585 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11586 				  tem, tem2);
11587 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11588 				  build_zero_cst (TREE_TYPE (tem)));
11589 	}
11590       /* Fold !X & 1 as X == 0.  */
11591       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11592 	  && integer_onep (arg1))
11593 	{
11594 	  tem = TREE_OPERAND (arg0, 0);
11595 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11596 				  build_zero_cst (TREE_TYPE (tem)));
11597 	}
11598 
11599       /* Fold (X ^ Y) & Y as ~X & Y.  */
11600       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11601 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11602 	{
11603 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11604 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11605 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11606 			      fold_convert_loc (loc, type, arg1));
11607 	}
11608       /* Fold (X ^ Y) & X as ~Y & X.  */
11609       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11610 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11611 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11612 	{
11613 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11614 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11615 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11616 			      fold_convert_loc (loc, type, arg1));
11617 	}
11618       /* Fold X & (X ^ Y) as X & ~Y.  */
11619       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11620 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11621 	{
11622 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11623 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11624 			      fold_convert_loc (loc, type, arg0),
11625 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11626 	}
11627       /* Fold X & (Y ^ X) as ~Y & X.  */
11628       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11629 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11630 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11631 	{
11632 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11633 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11634 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11635 			      fold_convert_loc (loc, type, arg0));
11636 	}
11637 
11638       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11639          multiple of 1 << CST.  */
11640       if (TREE_CODE (arg1) == INTEGER_CST)
11641 	{
11642 	  double_int cst1 = tree_to_double_int (arg1);
11643 	  double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11644 					 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11645 	  if ((cst1 & ncst1) == ncst1
11646 	      && multiple_of_p (type, arg0,
11647 				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11648 	    return fold_convert_loc (loc, type, arg0);
11649 	}
11650 
11651       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11652          bits from CST2.  */
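      /* The trailing zero bits of CST1 are guaranteed zero in X * CST1;
	 e.g. (x * 4) & 3 folds to 0, and (x * 4) & 0x7e becomes
	 (x * 4) & 0x7c.  */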
11653       if (TREE_CODE (arg1) == INTEGER_CST
11654 	  && TREE_CODE (arg0) == MULT_EXPR
11655 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11656 	{
11657 	  double_int darg1 = tree_to_double_int (arg1);
11658 	  double_int masked
11659 	    = mask_with_tz (type, darg1,
11660 	                    tree_to_double_int (TREE_OPERAND (arg0, 1)));
11661 
11662 	  if (masked.is_zero ())
11663 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11664 	                                  arg0, arg1);
11665 	  else if (masked != darg1)
11666 	    {
11667 	      /* Avoid the transform if arg1 is a mask of some
11668 	         mode which allows further optimizations.  */
11669 	      int pop = darg1.popcount ();
11670 	      if (!(pop >= BITS_PER_UNIT
11671 		    && exact_log2 (pop) != -1
11672 		    && double_int::mask (pop) == darg1))
11673 		return fold_build2_loc (loc, code, type, op0,
11674 					double_int_to_tree (type, masked));
11675 	    }
11676 	}
11677 
11678       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11679 	 ((A & N) + B) & M -> (A + B) & M
11680 	 Similarly if (N & M) == 0,
11681 	 ((A | N) + B) & M -> (A + B) & M
11682 	 and for - instead of + (or unary - instead of +)
11683 	 and/or ^ instead of |.
11684 	 If B is constant and (B & M) == 0, fold into A & M.  */
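      /* E.g. M == 0x0f, N == 0xff: ((A & 0xff) + B) & 0x0f folds to
	 (A + B) & 0x0f, since A & 0xff and A agree modulo 16.  */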
11685       if (host_integerp (arg1, 1))
11686 	{
11687 	  unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11688 	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
11689 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11690 	      && (TREE_CODE (arg0) == PLUS_EXPR
11691 		  || TREE_CODE (arg0) == MINUS_EXPR
11692 		  || TREE_CODE (arg0) == NEGATE_EXPR)
11693 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11694 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11695 	    {
11696 	      tree pmop[2];
11697 	      int which = 0;
11698 	      unsigned HOST_WIDE_INT cst0;
11699 
11700 	      /* Now we know that arg0 is (C + D) or (C - D) or
11701 		 -C and arg1 (M) equals (1LL << cst) - 1.
11702 		 Store C into PMOP[0] and D into PMOP[1].  */
11703 	      pmop[0] = TREE_OPERAND (arg0, 0);
11704 	      pmop[1] = NULL;
11705 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
11706 		{
11707 		  pmop[1] = TREE_OPERAND (arg0, 1);
11708 		  which = 1;
11709 		}
11710 
11711 	      if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11712 		  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11713 		      & cst1) != cst1)
11714 		which = -1;
11715 
11716 	      for (; which >= 0; which--)
11717 		switch (TREE_CODE (pmop[which]))
11718 		  {
11719 		  case BIT_AND_EXPR:
11720 		  case BIT_IOR_EXPR:
11721 		  case BIT_XOR_EXPR:
11722 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11723 			!= INTEGER_CST)
11724 		      break;
11725 		    /* tree_low_cst not used, because we don't care about
11726 		       the upper bits.  */
11727 		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11728 		    cst0 &= cst1;
11729 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11730 		      {
11731 			if (cst0 != cst1)
11732 			  break;
11733 		      }
11734 		    else if (cst0 != 0)
11735 		      break;
11736 		    /* If C or D is of the form (A & N) where
11737 		       (N & M) == M, or of the form (A | N) or
11738 		       (A ^ N) where (N & M) == 0, replace it with A.  */
11739 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
11740 		    break;
11741 		  case INTEGER_CST:
11742 		    /* If C or D is an N where (N & M) == 0, it can be
11743 		       omitted (assumed 0).  */
11744 		    if ((TREE_CODE (arg0) == PLUS_EXPR
11745 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11746 			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11747 		      pmop[which] = NULL;
11748 		    break;
11749 		  default:
11750 		    break;
11751 		  }
11752 
11753 	      /* Only build anything new if we optimized one or both arguments
11754 		 above.  */
11755 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
11756 		  || (TREE_CODE (arg0) != NEGATE_EXPR
11757 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
11758 		{
11759 		  tree utype = TREE_TYPE (arg0);
11760 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11761 		    {
11762 		      /* Perform the operations in a type that has defined
11763 			 overflow behavior.  */
11764 		      utype = unsigned_type_for (TREE_TYPE (arg0));
11765 		      if (pmop[0] != NULL)
11766 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11767 		      if (pmop[1] != NULL)
11768 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11769 		    }
11770 
11771 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
11772 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11773 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
11774 		    {
11775 		      if (pmop[0] != NULL && pmop[1] != NULL)
11776 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11777 					       pmop[0], pmop[1]);
11778 		      else if (pmop[0] != NULL)
11779 			tem = pmop[0];
11780 		      else if (pmop[1] != NULL)
11781 			tem = pmop[1];
11782 		      else
11783 			return build_int_cst (type, 0);
11784 		    }
11785 		  else if (pmop[0] == NULL)
11786 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11787 		  else
11788 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11789 					   pmop[0], pmop[1]);
11790 		  /* TEM is now the new binary +, - or unary - replacement.  */
11791 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11792 					 fold_convert_loc (loc, utype, arg1));
11793 		  return fold_convert_loc (loc, type, tem);
11794 		}
11795 	    }
11796 	}
11797 
11798       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11799       if (t1 != NULL_TREE)
11800 	return t1;
11801       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11802       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11803 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11804 	{
11805 	  unsigned int prec
11806 	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11807 
11808 	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11809 	      && (~TREE_INT_CST_LOW (arg1)
11810 		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11811 	    return
11812 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11813 	}
11814 
11815       /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11816 
11817 	 This results in more efficient code for machines without a NOR
11818 	 instruction.  Combine will canonicalize to the first form
11819 	 which will allow use of NOR instructions provided by the
11820 	 backend if they exist.  */
11821       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11822 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11823 	{
11824 	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11825 			      build2 (BIT_IOR_EXPR, type,
11826 				      fold_convert_loc (loc, type,
11827 							TREE_OPERAND (arg0, 0)),
11828 				      fold_convert_loc (loc, type,
11829 							TREE_OPERAND (arg1, 0))));
11830 	}
11831 
11832       /* If arg0 is derived from the address of an object or function, we may
11833 	 be able to fold this expression using the object or function's
11834 	 alignment.  */
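      /* E.g. if ARG0 is the address of an object known to be 8-byte
	 aligned, modulus == 8 and residue == 0, so ARG0 & 7 folds to 0.  */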
11835       if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11836 	{
11837 	  unsigned HOST_WIDE_INT modulus, residue;
11838 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11839 
11840 	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
11841 						     integer_onep (arg1));
11842 
11843 	  /* This works because modulus is a power of 2.  If this weren't the
11844 	     case, we'd have to replace it by its greatest power-of-2
11845 	     divisor: modulus & -modulus.  */
11846 	  if (low < modulus)
11847 	    return build_int_cst (type, residue & low);
11848 	}
11849 
11850       /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11851 	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11852 	 if the new mask might be further optimized.  */
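      /* E.g. for 32-bit x, (x << 8) & 0xffffff00: the low 8 bits are
	 already zero, so the mask widens to 0xffffffff and the AND can
	 then fold away entirely.  */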
11853       if ((TREE_CODE (arg0) == LSHIFT_EXPR
11854 	   || TREE_CODE (arg0) == RSHIFT_EXPR)
11855 	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
11856 	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11857 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11858 	     < TYPE_PRECISION (TREE_TYPE (arg0))
11859 	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11860 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11861 	{
11862 	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11863 	  unsigned HOST_WIDE_INT mask
11864 	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11865 	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
11866 	  tree shift_type = TREE_TYPE (arg0);
11867 
11868 	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
11869 	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11870 	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
11871 		   && TYPE_PRECISION (TREE_TYPE (arg0))
11872 		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11873 	    {
11874 	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11875 	      tree arg00 = TREE_OPERAND (arg0, 0);
11876 	      /* See if more bits can be proven as zero because of
11877 		 zero extension.  */
11878 	      if (TREE_CODE (arg00) == NOP_EXPR
11879 		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11880 		{
11881 		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11882 		  if (TYPE_PRECISION (inner_type)
11883 		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11884 		      && TYPE_PRECISION (inner_type) < prec)
11885 		    {
11886 		      prec = TYPE_PRECISION (inner_type);
11887 		      /* See if we can shorten the right shift.  */
11888 		      if (shiftc < prec)
11889 			shift_type = inner_type;
11890 		    }
11891 		}
11892 	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
11893 	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11894 	      zerobits <<= prec - shiftc;
11895 	      /* For an arithmetic shift, if the sign bit could be set,
11896 		 zerobits may actually contain sign bits, so no transformation
11897 		 is possible unless MASK masks them all away.  In that case
11898 		 the shift needs to be converted into a logical shift.  */
11899 	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11900 		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11901 		{
11902 		  if ((mask & zerobits) == 0)
11903 		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11904 		  else
11905 		    zerobits = 0;
11906 		}
11907 	    }
11908 
11909 	  /* ((X << 16) & 0xff00) is (X, 0).  */
11910 	  if ((mask & zerobits) == mask)
11911 	    return omit_one_operand_loc (loc, type,
11912 				     build_int_cst (type, 0), arg0);
11913 
11914 	  newmask = mask | zerobits;
11915 	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
11916 	    {
11917 	      unsigned int prec;
11918 
11919 	      /* Only do the transformation if NEWMASK is some integer
11920 		 mode's mask.  */
11921 	      for (prec = BITS_PER_UNIT;
11922 		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11923 		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11924 		  break;
11925 	      if (prec < HOST_BITS_PER_WIDE_INT
11926 		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11927 		{
11928 		  tree newmaskt;
11929 
11930 		  if (shift_type != TREE_TYPE (arg0))
11931 		    {
11932 		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11933 					 fold_convert_loc (loc, shift_type,
11934 							   TREE_OPERAND (arg0, 0)),
11935 					 TREE_OPERAND (arg0, 1));
11936 		      tem = fold_convert_loc (loc, type, tem);
11937 		    }
11938 		  else
11939 		    tem = op0;
11940 		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11941 		  if (!tree_int_cst_equal (newmaskt, arg1))
11942 		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11943 		}
11944 	    }
11945 	}
11946 
11947       goto associate;
11948 
11949     case RDIV_EXPR:
11950       /* Don't touch a floating-point divide by zero unless the mode
11951 	 of the constant can represent infinity.  */
11952       if (TREE_CODE (arg1) == REAL_CST
11953 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11954 	  && real_zerop (arg1))
11955 	return NULL_TREE;
11956 
11957       /* Optimize A / A to 1.0 if we don't care about
11958 	 NaNs or Infinities.  Skip the transformation
11959 	 for non-real operands.  */
11960       if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11961 	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11962 	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11963 	  && operand_equal_p (arg0, arg1, 0))
11964 	{
11965 	  tree r = build_real (TREE_TYPE (arg0), dconst1);
11966 
11967 	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
11968 	}
11969 
11970       /* The complex version of the above A / A optimization.  */
11971       if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11972 	  && operand_equal_p (arg0, arg1, 0))
11973 	{
11974 	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11975 	  if (! HONOR_NANS (TYPE_MODE (elem_type))
11976 	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11977 	    {
11978 	      tree r = build_real (elem_type, dconst1);
11979 	      /* omit_two_operands will call fold_convert for us.  */
11980 	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
11981 	    }
11982 	}
11983 
11984       /* (-A) / (-B) -> A / B  */
11985       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11986 	return fold_build2_loc (loc, RDIV_EXPR, type,
11987 			    TREE_OPERAND (arg0, 0),
11988 			    negate_expr (arg1));
11989       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11990 	return fold_build2_loc (loc, RDIV_EXPR, type,
11991 			    negate_expr (arg0),
11992 			    TREE_OPERAND (arg1, 0));
11993 
11994       /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
11995       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11996 	  && real_onep (arg1))
11997 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11998 
11999       /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
12000       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12001 	  && real_minus_onep (arg1))
12002 	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12003 						  negate_expr (arg0)));
12004 
12005       /* If ARG1 is a constant, we can convert this to a multiply by the
12006 	 reciprocal.  This does not have the same rounding properties,
12007 	 so only do this if -freciprocal-math.  We can actually
12008 	 always safely do it if ARG1 is a power of two, but it's hard to
12009 	 tell if it is or not in a portable manner.  */
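      /* E.g. x / 4.0 becomes x * 0.25 whenever optimizing (the reciprocal
	 is exact), while x / 3.0 becomes x * (1.0/3.0) only under
	 -freciprocal-math.  */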
12010       if (optimize
12011 	  && (TREE_CODE (arg1) == REAL_CST
12012 	      || (TREE_CODE (arg1) == COMPLEX_CST
12013 		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12014 	      || (TREE_CODE (arg1) == VECTOR_CST
12015 		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12016 	{
12017 	  if (flag_reciprocal_math
12018 	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12019 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12020 	  /* Find the reciprocal if optimizing and the result is exact.
12021 	     TODO: Complex reciprocal not implemented.  */
12022 	  if (TREE_CODE (arg1) != COMPLEX_CST)
12023 	    {
12024 	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12025 
12026 	      if (inverse)
12027 		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12028 	    }
12029 	}
12030       /* Convert A/B/C to A/(B*C).  */
12031       if (flag_reciprocal_math
12032 	  && TREE_CODE (arg0) == RDIV_EXPR)
12033 	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12034 			    fold_build2_loc (loc, MULT_EXPR, type,
12035 					 TREE_OPERAND (arg0, 1), arg1));
12036 
12037       /* Convert A/(B/C) to (A/B)*C.  */
12038       if (flag_reciprocal_math
12039 	  && TREE_CODE (arg1) == RDIV_EXPR)
12040 	return fold_build2_loc (loc, MULT_EXPR, type,
12041 			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12042 					 TREE_OPERAND (arg1, 0)),
12043 			    TREE_OPERAND (arg1, 1));
12044 
12045       /* Convert C1/(X*C2) into (C1/C2)/X.  */
12046       if (flag_reciprocal_math
12047 	  && TREE_CODE (arg1) == MULT_EXPR
12048 	  && TREE_CODE (arg0) == REAL_CST
12049 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12050 	{
12051 	  tree tem = const_binop (RDIV_EXPR, arg0,
12052 				  TREE_OPERAND (arg1, 1));
12053 	  if (tem)
12054 	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12055 				TREE_OPERAND (arg1, 0));
12056 	}
12057 
12058       if (flag_unsafe_math_optimizations)
12059 	{
12060 	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12061 	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12062 
12063 	  /* Optimize sin(x)/cos(x) as tan(x).  */
12064 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12065 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12066 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12067 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12068 				  CALL_EXPR_ARG (arg1, 0), 0))
12069 	    {
12070 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12071 
12072 	      if (tanfn != NULL_TREE)
12073 		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12074 	    }
12075 
12076 	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
12077 	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12078 	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12079 	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12080 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12081 				  CALL_EXPR_ARG (arg1, 0), 0))
12082 	    {
12083 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12084 
12085 	      if (tanfn != NULL_TREE)
12086 		{
12087 		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
12088 					      CALL_EXPR_ARG (arg0, 0));
12089 		  return fold_build2_loc (loc, RDIV_EXPR, type,
12090 				      build_real (type, dconst1), tmp);
12091 		}
12092 	    }
12093 
12094 	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12095 	     NaNs or Infinities.  */
12096 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12097 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12098 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12099 	    {
12100 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
12101 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
12102 
12103 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12104 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12105 		  && operand_equal_p (arg00, arg01, 0))
12106 		{
12107 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12108 
12109 		  if (cosfn != NULL_TREE)
12110 		    return build_call_expr_loc (loc, cosfn, 1, arg00);
12111 		}
12112 	    }
12113 
12114 	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12115 	     NaNs or Infinities.  */
12116 	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12117 	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12118 	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12119 	    {
12120 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
12121 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
12122 
12123 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12124 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12125 		  && operand_equal_p (arg00, arg01, 0))
12126 		{
12127 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12128 
12129 		  if (cosfn != NULL_TREE)
12130 		    {
12131 		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12132 		      return fold_build2_loc (loc, RDIV_EXPR, type,
12133 					  build_real (type, dconst1),
12134 					  tmp);
12135 		    }
12136 		}
12137 	    }
12138 
12139 	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
12140 	  if (fcode0 == BUILT_IN_POW
12141 	      || fcode0 == BUILT_IN_POWF
12142 	      || fcode0 == BUILT_IN_POWL)
12143 	    {
12144 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
12145 	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
12146 	      if (TREE_CODE (arg01) == REAL_CST
12147 		  && !TREE_OVERFLOW (arg01)
12148 		  && operand_equal_p (arg1, arg00, 0))
12149 		{
12150 		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12151 		  REAL_VALUE_TYPE c;
12152 		  tree arg;
12153 
12154 		  c = TREE_REAL_CST (arg01);
12155 		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12156 		  arg = build_real (type, c);
12157 		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12158 		}
12159 	    }
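	  /* E.g. pow (x, 3.5) / x becomes pow (x, 2.5).  */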
12160 
12161 	  /* Optimize a/root(b/c) into a*root(c/b).  */
12162 	  if (BUILTIN_ROOT_P (fcode1))
12163 	    {
12164 	      tree rootarg = CALL_EXPR_ARG (arg1, 0);
12165 
12166 	      if (TREE_CODE (rootarg) == RDIV_EXPR)
12167 		{
12168 		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12169 		  tree b = TREE_OPERAND (rootarg, 0);
12170 		  tree c = TREE_OPERAND (rootarg, 1);
12171 
12172 		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12173 
12174 		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12175 		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12176 		}
12177 	    }
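	  /* E.g. a / sqrt (b / c) becomes a * sqrt (c / b).  */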
12178 
12179 	  /* Optimize x/expN(y) into x*expN(-y).  */
12180 	  if (BUILTIN_EXPONENT_P (fcode1))
12181 	    {
12182 	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12183 	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12184 	      arg1 = build_call_expr_loc (loc,
12185 				      expfn, 1,
12186 				      fold_convert_loc (loc, type, arg));
12187 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12188 	    }
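	  /* E.g. x / exp (y) becomes x * exp (-y), trading the division
	     for a multiplication.  */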
12189 
12190 	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
12191 	  if (fcode1 == BUILT_IN_POW
12192 	      || fcode1 == BUILT_IN_POWF
12193 	      || fcode1 == BUILT_IN_POWL)
12194 	    {
12195 	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12196 	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
12197 	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
12198 	      tree neg11 = fold_convert_loc (loc, type,
12199 					     negate_expr (arg11));
12200 	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12201 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12202 	    }
12203 	}
12204       return NULL_TREE;
12205 
12206     case TRUNC_DIV_EXPR:
12207       /* Optimize (X & (-A)) / A where A is a power of 2,
12208 	 to X >> log2(A).  */
12209       if (TREE_CODE (arg0) == BIT_AND_EXPR
12210 	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12211 	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12212 	{
12213 	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12214 				      arg1, TREE_OPERAND (arg0, 1));
12215 	  if (sum && integer_zerop (sum)) {
12216 	    unsigned long pow2;
12217 
12218 	    if (TREE_INT_CST_LOW (arg1))
12219 	      pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12220 	    else
12221 	      pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12222 		      + HOST_BITS_PER_WIDE_INT;
12223 
12224 	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
12225 			  TREE_OPERAND (arg0, 0),
12226 			  build_int_cst (integer_type_node, pow2));
12227 	  }
12228 	}
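      /* E.g. for signed x, (x & -16) / 16 becomes x >> 4; the mask
	 guarantees the low bits are zero, so the shift is exact.  */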
12229 
12230       /* Fall through */
12231 
12232     case FLOOR_DIV_EXPR:
12233       /* Simplify A / (B << N) where A and B are positive and B is
12234 	 a power of 2, to A >> (N + log2(B)).  */
12235       strict_overflow_p = false;
12236       if (TREE_CODE (arg1) == LSHIFT_EXPR
12237 	  && (TYPE_UNSIGNED (type)
12238 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12239 	{
12240 	  tree sval = TREE_OPERAND (arg1, 0);
12241 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12242 	    {
12243 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
12244 	      unsigned long pow2;
12245 
12246 	      if (TREE_INT_CST_LOW (sval))
12247 		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12248 	      else
12249 		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12250 		       + HOST_BITS_PER_WIDE_INT;
12251 
12252 	      if (strict_overflow_p)
12253 		fold_overflow_warning (("assuming signed overflow does not "
12254 					"occur when simplifying A / (B << N)"),
12255 				       WARN_STRICT_OVERFLOW_MISC);
12256 
12257 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12258 					sh_cnt,
12259 					build_int_cst (TREE_TYPE (sh_cnt),
12260 						       pow2));
12261 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
12262 				  fold_convert_loc (loc, type, arg0), sh_cnt);
12263 	    }
12264 	}
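      /* E.g. for unsigned a, a / (4 << n) becomes a >> (n + 2).  */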
12265 
12266       /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12267 	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
12268       if (INTEGRAL_TYPE_P (type)
12269 	  && TYPE_UNSIGNED (type)
12270 	  && code == FLOOR_DIV_EXPR)
12271 	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12272 
12273       /* Fall through */
12274 
12275     case ROUND_DIV_EXPR:
12276     case CEIL_DIV_EXPR:
12277     case EXACT_DIV_EXPR:
12278       if (integer_onep (arg1))
12279 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12280       if (integer_zerop (arg1))
12281 	return NULL_TREE;
12282       /* X / -1 is -X.  */
12283       if (!TYPE_UNSIGNED (type)
12284 	  && TREE_CODE (arg1) == INTEGER_CST
12285 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12286 	  && TREE_INT_CST_HIGH (arg1) == -1)
12287 	return fold_convert_loc (loc, type, negate_expr (arg0));
12288 
12289       /* Convert -A / -B to A / B when the type is signed and overflow is
12290 	 undefined.  */
12291       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12292 	  && TREE_CODE (arg0) == NEGATE_EXPR
12293 	  && negate_expr_p (arg1))
12294 	{
12295 	  if (INTEGRAL_TYPE_P (type))
12296 	    fold_overflow_warning (("assuming signed overflow does not occur "
12297 				    "when distributing negation across "
12298 				    "division"),
12299 				   WARN_STRICT_OVERFLOW_MISC);
12300 	  return fold_build2_loc (loc, code, type,
12301 			      fold_convert_loc (loc, type,
12302 						TREE_OPERAND (arg0, 0)),
12303 			      fold_convert_loc (loc, type,
12304 						negate_expr (arg1)));
12305 	}
12306       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12307 	  && TREE_CODE (arg1) == NEGATE_EXPR
12308 	  && negate_expr_p (arg0))
12309 	{
12310 	  if (INTEGRAL_TYPE_P (type))
12311 	    fold_overflow_warning (("assuming signed overflow does not occur "
12312 				    "when distributing negation across "
12313 				    "division"),
12314 				   WARN_STRICT_OVERFLOW_MISC);
12315 	  return fold_build2_loc (loc, code, type,
12316 			      fold_convert_loc (loc, type,
12317 						negate_expr (arg0)),
12318 			      fold_convert_loc (loc, type,
12319 						TREE_OPERAND (arg1, 0)));
12320 	}
12321 
12322       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12323 	 operation, EXACT_DIV_EXPR.
12324 
12325 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12326 	 At one time others generated faster code; it's not clear whether they
12327 	 still do after the last round of changes to the DIV code in expmed.c.  */
12328       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12329 	  && multiple_of_p (type, arg0, arg1))
12330 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
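      /* E.g. a CEIL_DIV_EXPR of 6 * x by 3 always divides evenly, so it
	 becomes an EXACT_DIV_EXPR, which expmed.c can expand with the
	 cheapest division sequence.  */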
12331 
12332       strict_overflow_p = false;
12333       if (TREE_CODE (arg1) == INTEGER_CST
12334 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12335 					 &strict_overflow_p)))
12336 	{
12337 	  if (strict_overflow_p)
12338 	    fold_overflow_warning (("assuming signed overflow does not occur "
12339 				    "when simplifying division"),
12340 				   WARN_STRICT_OVERFLOW_MISC);
12341 	  return fold_convert_loc (loc, type, tem);
12342 	}
12343 
12344       return NULL_TREE;
12345 
12346     case CEIL_MOD_EXPR:
12347     case FLOOR_MOD_EXPR:
12348     case ROUND_MOD_EXPR:
12349     case TRUNC_MOD_EXPR:
12350       /* X % 1 is always zero, but be sure to preserve any side
12351 	 effects in X.  */
12352       if (integer_onep (arg1))
12353 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12354 
12355       /* For X % 0, return X % 0 unchanged so that we can emit the
12356 	 proper warnings and errors.  */
12357       if (integer_zerop (arg1))
12358 	return NULL_TREE;
12359 
12360       /* 0 % X is always zero, but be sure to preserve any side
12361 	 effects in X.  Place this after checking for X == 0.  */
12362       if (integer_zerop (arg0))
12363 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12364 
12365       /* X % -1 is zero.  */
12366       if (!TYPE_UNSIGNED (type)
12367 	  && TREE_CODE (arg1) == INTEGER_CST
12368 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12369 	  && TREE_INT_CST_HIGH (arg1) == -1)
12370 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12371 
12372       /* X % -C is the same as X % C.  */
12373       if (code == TRUNC_MOD_EXPR
12374 	  && !TYPE_UNSIGNED (type)
12375 	  && TREE_CODE (arg1) == INTEGER_CST
12376 	  && !TREE_OVERFLOW (arg1)
12377 	  && TREE_INT_CST_HIGH (arg1) < 0
12378 	  && !TYPE_OVERFLOW_TRAPS (type)
12379 	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
12380 	  && !sign_bit_p (arg1, arg1))
12381 	return fold_build2_loc (loc, code, type,
12382 			    fold_convert_loc (loc, type, arg0),
12383 			    fold_convert_loc (loc, type,
12384 					      negate_expr (arg1)));
12385 
12386       /* X % -Y is the same as X % Y.  */
12387       if (code == TRUNC_MOD_EXPR
12388 	  && !TYPE_UNSIGNED (type)
12389 	  && TREE_CODE (arg1) == NEGATE_EXPR
12390 	  && !TYPE_OVERFLOW_TRAPS (type))
12391 	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12392 			    fold_convert_loc (loc, type,
12393 					      TREE_OPERAND (arg1, 0)));
12394 
12395       strict_overflow_p = false;
12396       if (TREE_CODE (arg1) == INTEGER_CST
12397 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12398 					 &strict_overflow_p)))
12399 	{
12400 	  if (strict_overflow_p)
12401 	    fold_overflow_warning (("assuming signed overflow does not occur "
12402 				    "when simplifying modulus"),
12403 				   WARN_STRICT_OVERFLOW_MISC);
12404 	  return fold_convert_loc (loc, type, tem);
12405 	}
12406 
12407       /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12408          i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
12409       if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12410 	  && (TYPE_UNSIGNED (type)
12411 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12412 	{
12413 	  tree c = arg1;
12414 	  /* Also optimize A % (C << N)  where C is a power of 2,
12415 	     to A & ((C << N) - 1).  */
12416 	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
12417 	    c = TREE_OPERAND (arg1, 0);
12418 
12419 	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12420 	    {
12421 	      tree mask
12422 		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12423 				   build_int_cst (TREE_TYPE (arg1), 1));
12424 	      if (strict_overflow_p)
12425 		fold_overflow_warning (("assuming signed overflow does not "
12426 					"occur when simplifying "
12427 					"X % (power of two)"),
12428 				       WARN_STRICT_OVERFLOW_MISC);
12429 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
12430 				      fold_convert_loc (loc, type, arg0),
12431 				      fold_convert_loc (loc, type, mask));
12432 	    }
12433 	}
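      /* E.g. for unsigned x, x % 8 becomes x & 7, and x % (2 << n)
	 becomes x & ((2 << n) - 1).  */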
12434 
12435       return NULL_TREE;
12436 
12437     case LROTATE_EXPR:
12438     case RROTATE_EXPR:
12439       if (integer_all_onesp (arg0))
12440 	return omit_one_operand_loc (loc, type, arg0, arg1);
12441       goto shift;
12442 
12443     case RSHIFT_EXPR:
12444       /* Optimize -1 >> x for arithmetic right shifts.  */
12445       if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12446 	  && tree_expr_nonnegative_p (arg1))
12447 	return omit_one_operand_loc (loc, type, arg0, arg1);
12448       /* ... fall through ...  */
12449 
12450     case LSHIFT_EXPR:
12451     shift:
12452       if (integer_zerop (arg1))
12453 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12454       if (integer_zerop (arg0))
12455 	return omit_one_operand_loc (loc, type, arg0, arg1);
12456 
12457       /* Since a negative shift count is not well-defined,
12458 	 don't try to compute the result in the compiler.  */
12459       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12460 	return NULL_TREE;
12461 
12462       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
12463       if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12464 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12465 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12466 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12467 	{
12468 	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12469 			       + TREE_INT_CST_LOW (arg1));
12470 
12471 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12472 	     being well defined.  */
12473 	  if (low >= TYPE_PRECISION (type))
12474 	    {
12475 	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12476 	        low = low % TYPE_PRECISION (type);
12477 	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12478 		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12479 					 TREE_OPERAND (arg0, 0));
12480 	      else
12481 		low = TYPE_PRECISION (type) - 1;
12482 	    }
12483 
12484 	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12485 			      build_int_cst (type, low));
12486 	}
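      /* E.g. (x << 3) << 5 becomes x << 8; if the combined count reaches
	 the type precision, a left shift folds to 0 instead.  */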
12487 
12488       /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12489          into x & ((unsigned)-1 >> c) for unsigned types.  */
12490       if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12491            || (TYPE_UNSIGNED (type)
12492 	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12493 	  && host_integerp (arg1, false)
12494 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12495 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12496 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12497 	{
12498 	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12499 	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12500 	  tree lshift;
12501 	  tree arg00;
12502 
12503 	  if (low0 == low1)
12504 	    {
12505 	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12506 
12507 	      lshift = build_int_cst (type, -1);
12508 	      lshift = int_const_binop (code, lshift, arg1);
12509 
12510 	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12511 	    }
12512 	}
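      /* E.g. (x >> 4) << 4 becomes x & -16, and for unsigned x,
	 (x << 4) >> 4 becomes x & ((unsigned) -1 >> 4).  */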
12513 
12514       /* Rewrite an LROTATE_EXPR by a constant into an
12515 	 RROTATE_EXPR by a new constant.  */
12516       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12517 	{
12518 	  tree tem = build_int_cst (TREE_TYPE (arg1),
12519 				    TYPE_PRECISION (type));
12520 	  tem = const_binop (MINUS_EXPR, tem, arg1);
12521 	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12522 	}
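      /* E.g. rotating a 32-bit value left by 8 becomes rotating it right
	 by 24, so later code only has to handle one rotate direction.  */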
12523 
12524       /* If we have a rotate of a bit operation with the rotate count and
12525 	 the second operand of the bit operation both constant,
12526 	 permute the two operations.  */
12527       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12528 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
12529 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
12530 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
12531 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12532 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
12533 			    fold_build2_loc (loc, code, type,
12534 					 TREE_OPERAND (arg0, 0), arg1),
12535 			    fold_build2_loc (loc, code, type,
12536 					 TREE_OPERAND (arg0, 1), arg1));
12537 
12538       /* Two consecutive rotates adding up to the precision of the
12539 	 type can be ignored.  */
12540       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12541 	  && TREE_CODE (arg0) == RROTATE_EXPR
12542 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12543 	  && TREE_INT_CST_HIGH (arg1) == 0
12544 	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12545 	  && ((TREE_INT_CST_LOW (arg1)
12546 	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12547 	      == (unsigned int) TYPE_PRECISION (type)))
12548 	return TREE_OPERAND (arg0, 0);
12549 
12550       /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12551 	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12552 	 if the latter can be further optimized.  */
12553       if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12554 	  && TREE_CODE (arg0) == BIT_AND_EXPR
12555 	  && TREE_CODE (arg1) == INTEGER_CST
12556 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12557 	{
12558 	  tree mask = fold_build2_loc (loc, code, type,
12559 				   fold_convert_loc (loc, type,
12560 						     TREE_OPERAND (arg0, 1)),
12561 				   arg1);
12562 	  tree shift = fold_build2_loc (loc, code, type,
12563 				    fold_convert_loc (loc, type,
12564 						      TREE_OPERAND (arg0, 0)),
12565 				    arg1);
12566 	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12567 	  if (tem)
12568 	    return tem;
12569 	}
12570 
12571       return NULL_TREE;
12572 
12573     case MIN_EXPR:
12574       if (operand_equal_p (arg0, arg1, 0))
12575 	return omit_one_operand_loc (loc, type, arg0, arg1);
12576       if (INTEGRAL_TYPE_P (type)
12577 	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12578 	return omit_one_operand_loc (loc, type, arg1, arg0);
12579       tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12580       if (tem)
12581 	return tem;
12582       goto associate;
12583 
12584     case MAX_EXPR:
12585       if (operand_equal_p (arg0, arg1, 0))
12586 	return omit_one_operand_loc (loc, type, arg0, arg1);
12587       if (INTEGRAL_TYPE_P (type)
12588 	  && TYPE_MAX_VALUE (type)
12589 	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12590 	return omit_one_operand_loc (loc, type, arg1, arg0);
12591       tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12592       if (tem)
12593 	return tem;
12594       goto associate;
12595 
12596     case TRUTH_ANDIF_EXPR:
12597       /* Note that the operands of this must be ints
12598 	 and their values must be 0 or 1.
12599 	 ("true" is a fixed value, perhaps depending on the language.)  */
12600       /* If first arg is constant zero, return it.  */
12601       if (integer_zerop (arg0))
12602 	return fold_convert_loc (loc, type, arg0);
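      /* Fall through */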
12603     case TRUTH_AND_EXPR:
12604       /* If either arg is constant true, drop it.  */
12605       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12606 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12607       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12608 	  /* Preserve sequence points.  */
12609 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12610 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12611       /* If second arg is constant zero, result is zero, but first arg
12612 	 must be evaluated.  */
12613       if (integer_zerop (arg1))
12614 	return omit_one_operand_loc (loc, type, arg1, arg0);
12615       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12616 	 case will be handled here.  */
12617       if (integer_zerop (arg0))
12618 	return omit_one_operand_loc (loc, type, arg0, arg1);
12619 
12620       /* !X && X is always false.  */
12621       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12622 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12623 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12624       /* X && !X is always false.  */
12625       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12626 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12627 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12628 
12629       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12630 	 means A >= Y && A != MAX, but in this case we know that
12631 	 A < X <= MAX.  */
12632 
12633       if (!TREE_SIDE_EFFECTS (arg0)
12634 	  && !TREE_SIDE_EFFECTS (arg1))
12635 	{
12636 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12637 	  if (tem && !operand_equal_p (tem, arg0, 0))
12638 	    return fold_build2_loc (loc, code, type, tem, arg1);
12639 
12640 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12641 	  if (tem && !operand_equal_p (tem, arg1, 0))
12642 	    return fold_build2_loc (loc, code, type, arg0, tem);
12643 	}
12644 
12645       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12646           != NULL_TREE)
12647         return tem;
12648 
12649       return NULL_TREE;
12650 
12651     case TRUTH_ORIF_EXPR:
12652       /* Note that the operands of this must be ints
12653 	 and their values must be 0 or 1.
12654 	 ("true" is a fixed value, perhaps depending on the language.)  */
12655       /* If first arg is constant true, return it.  */
12656       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12657 	return fold_convert_loc (loc, type, arg0);
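      /* Fall through */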
12658     case TRUTH_OR_EXPR:
12659       /* If either arg is constant zero, drop it.  */
12660       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12661 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12662       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12663 	  /* Preserve sequence points.  */
12664 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12665 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12666       /* If second arg is constant true, result is true, but we must
12667 	 evaluate first arg.  */
12668       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12669 	return omit_one_operand_loc (loc, type, arg1, arg0);
12670       /* Likewise for first arg, but note this only occurs here for
12671 	 TRUTH_OR_EXPR.  */
12672       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12673 	return omit_one_operand_loc (loc, type, arg0, arg1);
12674 
12675       /* !X || X is always true.  */
12676       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12677 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12678 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12679       /* X || !X is always true.  */
12680       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12681 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12682 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12683 
12684       /* (X && !Y) || (!X && Y) is X ^ Y */
12685       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12686 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12687         {
12688 	  tree a0, a1, l0, l1, n0, n1;
12689 
12690 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12691 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12692 
12693 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12694 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12695 
12696 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12697 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12698 
12699 	  if ((operand_equal_p (n0, a0, 0)
12700 	       && operand_equal_p (n1, a1, 0))
12701 	      || (operand_equal_p (n0, a1, 0)
12702 		  && operand_equal_p (n1, a0, 0)))
12703 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12704 	}
12705 
12706       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12707           != NULL_TREE)
12708         return tem;
12709 
12710       return NULL_TREE;
12711 
12712     case TRUTH_XOR_EXPR:
12713       /* If the second arg is constant zero, drop it.  */
12714       if (integer_zerop (arg1))
12715 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12716       /* If the second arg is constant true, this is a logical inversion.  */
12717       if (integer_onep (arg1))
12718 	{
12719 	  /* Only call invert_truthvalue if operand is a truth value.  */
12720 	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12721 	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12722 	  else
12723 	    tem = invert_truthvalue_loc (loc, arg0);
12724 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12725 	}
12726       /* Identical arguments cancel to zero.  */
12727       if (operand_equal_p (arg0, arg1, 0))
12728 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12729 
12730       /* !X ^ X is always true.  */
12731       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12732 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12733 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12734 
12735       /* X ^ !X is always true.  */
12736       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12737 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12738 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12739 
12740       return NULL_TREE;
12741 
12742     case EQ_EXPR:
12743     case NE_EXPR:
12744       STRIP_NOPS (arg0);
12745       STRIP_NOPS (arg1);
12746 
12747       tem = fold_comparison (loc, code, type, op0, op1);
12748       if (tem != NULL_TREE)
12749 	return tem;
12750 
12751       /* bool_var != 0 becomes bool_var. */
12752       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12753           && code == NE_EXPR)
12754         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12755 
12756       /* bool_var == 1 becomes bool_var. */
12757       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12758           && code == EQ_EXPR)
12759         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12760 
12761       /* bool_var != 1 becomes !bool_var. */
12762       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12763           && code == NE_EXPR)
12764         return fold_convert_loc (loc, type,
12765 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12766 						  TREE_TYPE (arg0), arg0));
12767 
12768       /* bool_var == 0 becomes !bool_var. */
12769       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12770           && code == EQ_EXPR)
12771         return fold_convert_loc (loc, type,
12772 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12773 						  TREE_TYPE (arg0), arg0));
12774 
12775       /* !exp != 0 becomes !exp */
12776       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12777 	  && code == NE_EXPR)
12778         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12779 
12780       /* If this is an equality comparison of the address of two non-weak,
12781 	 unaliased symbols neither of which are extern (since we do not
12782 	 have access to attributes for externs), then we know the result.  */
12783       if (TREE_CODE (arg0) == ADDR_EXPR
12784 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12785 	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12786 	  && ! lookup_attribute ("alias",
12787 				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12788 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12789 	  && TREE_CODE (arg1) == ADDR_EXPR
12790 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12791 	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12792 	  && ! lookup_attribute ("alias",
12793 				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12794 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12795 	{
12796 	  /* We know that we're looking at the address of two
12797 	     non-weak, unaliased, static _DECL nodes.
12798 
12799 	     It is both wasteful and incorrect to call operand_equal_p
12800 	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
12801 	     all we need to do is test pointer equality for the arguments
12802 	     to the two ADDR_EXPR nodes.  It is incorrect to use
12803 	     operand_equal_p as that function is NOT equivalent to a
12804 	     C equality test.  It can in fact return false for two
12805 	     objects which would test as equal using the C equality
12806 	     operator.  */
12807 	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12808 	  return constant_boolean_node (equal
12809 				        ? code == EQ_EXPR : code != EQ_EXPR,
12810 				        type);
12811 	}
12812 
12813       /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12814 	 a MINUS_EXPR of a constant, we can convert it into a comparison with
12815 	 a revised constant as long as no overflow occurs.  */
12816       if (TREE_CODE (arg1) == INTEGER_CST
12817 	  && (TREE_CODE (arg0) == PLUS_EXPR
12818 	      || TREE_CODE (arg0) == MINUS_EXPR)
12819 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12820 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12821 				      ? MINUS_EXPR : PLUS_EXPR,
12822 				      fold_convert_loc (loc, TREE_TYPE (arg0),
12823 							arg1),
12824 				      TREE_OPERAND (arg0, 1)))
12825 	  && !TREE_OVERFLOW (tem))
12826 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12827 
12828       /* Similarly for a NEGATE_EXPR.  */
12829       if (TREE_CODE (arg0) == NEGATE_EXPR
12830 	  && TREE_CODE (arg1) == INTEGER_CST
12831 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12832 							arg1)))
12833 	  && TREE_CODE (tem) == INTEGER_CST
12834 	  && !TREE_OVERFLOW (tem))
12835 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12836 
12837       /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
12838       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12839 	  && TREE_CODE (arg1) == INTEGER_CST
12840 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12841 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12842 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12843 					 fold_convert_loc (loc,
12844 							   TREE_TYPE (arg0),
12845 							   arg1),
12846 					 TREE_OPERAND (arg0, 1)));
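      /* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */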
12847 
12848       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
12849       if ((TREE_CODE (arg0) == PLUS_EXPR
12850 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12851 	   || TREE_CODE (arg0) == MINUS_EXPR)
12852 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12853 									0)),
12854 			      arg1, 0)
12855 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12856 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
12857 	{
12858 	  tree val = TREE_OPERAND (arg0, 1);
12859 	  return omit_two_operands_loc (loc, type,
12860 				    fold_build2_loc (loc, code, type,
12861 						 val,
12862 						 build_int_cst (TREE_TYPE (val),
12863 								0)),
12864 				    TREE_OPERAND (arg0, 0), arg1);
12865 	}
12866 
12867       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
12868       if (TREE_CODE (arg0) == MINUS_EXPR
12869 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12870 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12871 									1)),
12872 			      arg1, 0)
12873 	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12874 	{
12875 	  return omit_two_operands_loc (loc, type,
12876 				    code == NE_EXPR
12877 				    ? boolean_true_node : boolean_false_node,
12878 				    TREE_OPERAND (arg0, 1), arg1);
12879 	}
12880 
12881       /* If we have X - Y == 0, we can convert that to X == Y and similarly
12882 	 for !=.  Don't do this for ordered comparisons due to overflow.  */
12883       if (TREE_CODE (arg0) == MINUS_EXPR
12884 	  && integer_zerop (arg1))
12885 	return fold_build2_loc (loc, code, type,
12886 			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12887 
12888       /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12889       if (TREE_CODE (arg0) == ABS_EXPR
12890 	  && (integer_zerop (arg1) || real_zerop (arg1)))
12891 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12892 
12893       /* If this is an EQ or NE comparison with zero and ARG0 is
12894 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12895 	 two operations, but the latter can be done in one less insn
12896 	 on machines that have only two-operand insns or on which a
12897 	 constant cannot be the first operand.  */
12898       if (TREE_CODE (arg0) == BIT_AND_EXPR
12899 	  && integer_zerop (arg1))
12900 	{
12901 	  tree arg00 = TREE_OPERAND (arg0, 0);
12902 	  tree arg01 = TREE_OPERAND (arg0, 1);
12903 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12904 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12905 	    {
12906 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12907 				      arg01, TREE_OPERAND (arg00, 1));
12908 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12909 				 build_int_cst (TREE_TYPE (arg0), 1));
12910 	      return fold_build2_loc (loc, code, type,
12911 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12912 				  arg1);
12913 	    }
12914 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12915 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12916 	    {
12917 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12918 				      arg00, TREE_OPERAND (arg01, 1));
12919 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12920 				 build_int_cst (TREE_TYPE (arg0), 1));
12921 	      return fold_build2_loc (loc, code, type,
12922 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12923 				  arg1);
12924 	    }
12925 	}
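      /* E.g. ((1 << n) & b) == 0 becomes ((b >> n) & 1) == 0, which
	 needs one fewer insn on two-operand machines.  */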
12926 
12927       /* If this is an NE or EQ comparison of zero against the result of a
12928 	 signed MOD operation whose second operand is a power of 2, make
12929 	 the MOD operation unsigned since it is simpler and equivalent.  */
12930       if (integer_zerop (arg1)
12931 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12932 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12933 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
12934 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12935 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12936 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12937 	{
12938 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12939 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12940 				     fold_convert_loc (loc, newtype,
12941 						       TREE_OPERAND (arg0, 0)),
12942 				     fold_convert_loc (loc, newtype,
12943 						       TREE_OPERAND (arg0, 1)));
12944 
12945 	  return fold_build2_loc (loc, code, type, newmod,
12946 			      fold_convert_loc (loc, newtype, arg1));
12947 	}
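      /* E.g. for signed x, x % 16 == 0 becomes (unsigned) x % 16 == 0,
	 which can then be reduced to a simple mask test.  */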
12948 
12949       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12950 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12951 	 a single bit.  */
12952       if (TREE_CODE (arg0) == BIT_AND_EXPR
12953 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12954 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12955 	     == INTEGER_CST
12956 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12957 	  && integer_zerop (arg1))
12958 	{
12959 	  tree itype = TREE_TYPE (arg0);
12960 	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12961 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12962 
12963 	  /* Check for a valid shift count.  */
12964 	  if (TREE_INT_CST_HIGH (arg001) == 0
12965 	      && TREE_INT_CST_LOW (arg001) < prec)
12966 	    {
12967 	      tree arg01 = TREE_OPERAND (arg0, 1);
12968 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12969 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12970 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12971 		 can be rewritten as (X & (C2 << C1)) != 0.  */
12972 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12973 		{
12974 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12975 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12976 		  return fold_build2_loc (loc, code, type, tem,
12977 					  fold_convert_loc (loc, itype, arg1));
12978 		}
12979 	      /* Otherwise, for signed (arithmetic) shifts,
12980 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12981 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12982 	      else if (!TYPE_UNSIGNED (itype))
12983 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12984 				    arg000, build_int_cst (itype, 0));
12985 	      /* Otherwise, for unsigned (logical) shifts,
12986 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12987 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12988 	      else
12989 		return omit_one_operand_loc (loc, type,
12990 					 code == EQ_EXPR ? integer_one_node
12991 							 : integer_zero_node,
12992 					 arg000);
12993 	    }
12994 	}
12995 
12996       /* If we have (A & C) == C where C is a power of 2, convert this into
12997 	 (A & C) != 0.  Similarly for NE_EXPR.  */
12998       if (TREE_CODE (arg0) == BIT_AND_EXPR
12999 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13000 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13001 	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13002 			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13003 						    integer_zero_node));
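      /* E.g. (x & 8) == 8 becomes (x & 8) != 0.  */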
13004 
13005       /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13006 	 bit, then fold the expression into A < 0 or A >= 0.  */
13007       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13008       if (tem)
13009 	return tem;
13010 
13011       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13012 	 Similarly for NE_EXPR.  */
13013       if (TREE_CODE (arg0) == BIT_AND_EXPR
13014 	  && TREE_CODE (arg1) == INTEGER_CST
13015 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13016 	{
13017 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13018 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
13019 				   TREE_OPERAND (arg0, 1));
13020 	  tree dandnotc
13021 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13022 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13023 			       notc);
13024 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13025 	  if (integer_nonzerop (dandnotc))
13026 	    return omit_one_operand_loc (loc, type, rslt, arg0);
13027 	}
13028 
13029       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13030 	 Similarly for NE_EXPR.  */
13031       if (TREE_CODE (arg0) == BIT_IOR_EXPR
13032 	  && TREE_CODE (arg1) == INTEGER_CST
13033 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13034 	{
13035 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13036 	  tree candnotd
13037 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13038 			       TREE_OPERAND (arg0, 1),
13039 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13040 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13041 	  if (integer_nonzerop (candnotd))
13042 	    return omit_one_operand_loc (loc, type, rslt, arg0);
13043 	}
13044 
13045       /* If this is a comparison of a field, we may be able to simplify it.  */
13046       if ((TREE_CODE (arg0) == COMPONENT_REF
13047 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
13048 	  /* Handle the constant case even without -O
13049 	     to make sure the warnings are given.  */
13050 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13051 	{
13052 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13053 	  if (t1)
13054 	    return t1;
13055 	}
13056 
13057       /* Optimize comparisons of strlen vs zero to a compare of the
13058 	 first character of the string vs zero.  To wit,
13059 		strlen(ptr) == 0   =>  *ptr == 0
13060 		strlen(ptr) != 0   =>  *ptr != 0
13061 	 Other cases should reduce to one of these two (or a constant)
13062 	 due to the return value of strlen being unsigned.  */
13063       if (TREE_CODE (arg0) == CALL_EXPR
13064 	  && integer_zerop (arg1))
13065 	{
13066 	  tree fndecl = get_callee_fndecl (arg0);
13067 
13068 	  if (fndecl
13069 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13070 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13071 	      && call_expr_nargs (arg0) == 1
13072 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13073 	    {
13074 	      tree iref = build_fold_indirect_ref_loc (loc,
13075 						   CALL_EXPR_ARG (arg0, 0));
13076 	      return fold_build2_loc (loc, code, type, iref,
13077 				  build_int_cst (TREE_TYPE (iref), 0));
13078 	    }
13079 	}
13080 
13081       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13082 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
13083       if (TREE_CODE (arg0) == RSHIFT_EXPR
13084 	  && integer_zerop (arg1)
13085 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13086 	{
13087 	  tree arg00 = TREE_OPERAND (arg0, 0);
13088 	  tree arg01 = TREE_OPERAND (arg0, 1);
13089 	  tree itype = TREE_TYPE (arg00);
13090 	  if (TREE_INT_CST_HIGH (arg01) == 0
13091 	      && !(TREE_CODE (itype) == COMPLEX_TYPE
13092 		   || TREE_CODE (itype) == VECTOR_TYPE)
13093 	      && TREE_INT_CST_LOW (arg01)
13094 		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13095 	    {
13096 	      if (TYPE_UNSIGNED (itype))
13097 		{
13098 		  itype = signed_type_for (itype);
13099 		  arg00 = fold_convert_loc (loc, itype, arg00);
13100 		}
13101 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13102 				  type, arg00, build_zero_cst (itype));
13103 	    }
13104 	}
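      /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0, turning
	 the sign-bit extraction back into an ordinary sign test.  */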
13105 
13106       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
13107       if (integer_zerop (arg1)
13108 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
13109 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13110 			    TREE_OPERAND (arg0, 1));
13111 
13112       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
13113       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13114 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13115 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13116 				build_zero_cst (TREE_TYPE (arg0)));
13117       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
13118       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13119 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13120 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13121 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13122 				build_zero_cst (TREE_TYPE (arg0)));
13123 
13124       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
13125       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13126 	  && TREE_CODE (arg1) == INTEGER_CST
13127 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13128 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13129 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13130 					 TREE_OPERAND (arg0, 1), arg1));
13131 
13132       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13133 	 (X & C) == 0 when C is a single bit.  */
13134       if (TREE_CODE (arg0) == BIT_AND_EXPR
13135 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13136 	  && integer_zerop (arg1)
13137 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
13138 	{
13139 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13140 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13141 				 TREE_OPERAND (arg0, 1));
13142 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13143 				  type, tem,
13144 				  fold_convert_loc (loc, TREE_TYPE (arg0),
13145 						    arg1));
13146 	}
13147 
13148       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13149 	 constant C is a power of two, i.e. a single bit.  */
13150       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13151 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13152 	  && integer_zerop (arg1)
13153 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13154 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13155 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13156 	{
13157 	  tree arg00 = TREE_OPERAND (arg0, 0);
13158 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13159 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
13160 	}
13161 
13162       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13163 	 when C is a power of two, i.e. a single bit.  */
13164       if (TREE_CODE (arg0) == BIT_AND_EXPR
13165 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13166 	  && integer_zerop (arg1)
13167 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13168 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13169 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13170 	{
13171 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13172 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13173 			     arg000, TREE_OPERAND (arg0, 1));
13174 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13175 			      tem, build_int_cst (TREE_TYPE (tem), 0));
13176 	}
13177 
13178       if (integer_zerop (arg1)
13179 	  && tree_expr_nonzero_p (arg0))
13180         {
13181 	  tree res = constant_boolean_node (code == NE_EXPR, type);
13182 	  return omit_one_operand_loc (loc, type, res, arg0);
13183 	}
13184 
13185       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
13186       if (TREE_CODE (arg0) == NEGATE_EXPR
13187           && TREE_CODE (arg1) == NEGATE_EXPR)
13188 	return fold_build2_loc (loc, code, type,
13189 				TREE_OPERAND (arg0, 0),
13190 				fold_convert_loc (loc, TREE_TYPE (arg0),
13191 						  TREE_OPERAND (arg1, 0)));
13192 
13193       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13194       if (TREE_CODE (arg0) == BIT_AND_EXPR
13195 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
13196 	{
13197 	  tree arg00 = TREE_OPERAND (arg0, 0);
13198 	  tree arg01 = TREE_OPERAND (arg0, 1);
13199 	  tree arg10 = TREE_OPERAND (arg1, 0);
13200 	  tree arg11 = TREE_OPERAND (arg1, 1);
13201 	  tree itype = TREE_TYPE (arg0);
13202 
13203 	  if (operand_equal_p (arg01, arg11, 0))
13204 	    return fold_build2_loc (loc, code, type,
13205 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13206 					     fold_build2_loc (loc,
13207 							  BIT_XOR_EXPR, itype,
13208 							  arg00, arg10),
13209 					     arg01),
13210 				build_zero_cst (itype));
13211 
13212 	  if (operand_equal_p (arg01, arg10, 0))
13213 	    return fold_build2_loc (loc, code, type,
13214 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13215 					     fold_build2_loc (loc,
13216 							  BIT_XOR_EXPR, itype,
13217 							  arg00, arg11),
13218 					     arg01),
13219 				build_zero_cst (itype));
13220 
13221 	  if (operand_equal_p (arg00, arg11, 0))
13222 	    return fold_build2_loc (loc, code, type,
13223 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13224 					     fold_build2_loc (loc,
13225 							  BIT_XOR_EXPR, itype,
13226 							  arg01, arg10),
13227 					     arg00),
13228 				build_zero_cst (itype));
13229 
13230 	  if (operand_equal_p (arg00, arg10, 0))
13231 	    return fold_build2_loc (loc, code, type,
13232 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13233 					     fold_build2_loc (loc,
13234 							  BIT_XOR_EXPR, itype,
13235 							  arg01, arg11),
13236 					     arg00),
13237 				build_zero_cst (itype));
13238 	}
13239 
13240       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13241 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
13242 	{
13243 	  tree arg00 = TREE_OPERAND (arg0, 0);
13244 	  tree arg01 = TREE_OPERAND (arg0, 1);
13245 	  tree arg10 = TREE_OPERAND (arg1, 0);
13246 	  tree arg11 = TREE_OPERAND (arg1, 1);
13247 	  tree itype = TREE_TYPE (arg0);
13248 
13249 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13250 	     operand_equal_p guarantees no side-effects so we don't need
13251 	     to use omit_one_operand on Z.  */
13252 	  if (operand_equal_p (arg01, arg11, 0))
13253 	    return fold_build2_loc (loc, code, type, arg00,
13254 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13255 						      arg10));
13256 	  if (operand_equal_p (arg01, arg10, 0))
13257 	    return fold_build2_loc (loc, code, type, arg00,
13258 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13259 						      arg11));
13260 	  if (operand_equal_p (arg00, arg11, 0))
13261 	    return fold_build2_loc (loc, code, type, arg01,
13262 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13263 						      arg10));
13264 	  if (operand_equal_p (arg00, arg10, 0))
13265 	    return fold_build2_loc (loc, code, type, arg01,
13266 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13267 						      arg11));
13268 
13269 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
13270 	  if (TREE_CODE (arg01) == INTEGER_CST
13271 	      && TREE_CODE (arg11) == INTEGER_CST)
13272 	    {
13273 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13274 				     fold_convert_loc (loc, itype, arg11));
13275 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13276 	      return fold_build2_loc (loc, code, type, tem,
13277 				      fold_convert_loc (loc, itype, arg10));
13278 	    }
13279 	}
13280 
13281       /* Attempt to simplify equality/inequality comparisons of complex
13282 	 values.  Only lower the comparison if the result is known or
13283 	 can be simplified to a single scalar comparison.  */
13284       if ((TREE_CODE (arg0) == COMPLEX_EXPR
13285 	   || TREE_CODE (arg0) == COMPLEX_CST)
13286 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
13287 	      || TREE_CODE (arg1) == COMPLEX_CST))
13288 	{
13289 	  tree real0, imag0, real1, imag1;
13290 	  tree rcond, icond;
13291 
13292 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
13293 	    {
13294 	      real0 = TREE_OPERAND (arg0, 0);
13295 	      imag0 = TREE_OPERAND (arg0, 1);
13296 	    }
13297 	  else
13298 	    {
13299 	      real0 = TREE_REALPART (arg0);
13300 	      imag0 = TREE_IMAGPART (arg0);
13301 	    }
13302 
13303 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
13304 	    {
13305 	      real1 = TREE_OPERAND (arg1, 0);
13306 	      imag1 = TREE_OPERAND (arg1, 1);
13307 	    }
13308 	  else
13309 	    {
13310 	      real1 = TREE_REALPART (arg1);
13311 	      imag1 = TREE_IMAGPART (arg1);
13312 	    }
13313 
13314 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
13315 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13316 	    {
13317 	      if (integer_zerop (rcond))
13318 		{
13319 		  if (code == EQ_EXPR)
13320 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13321 					      imag0, imag1);
13322 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13323 		}
13324 	      else
13325 		{
13326 		  if (code == NE_EXPR)
13327 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13328 					      imag0, imag1);
13329 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13330 		}
13331 	    }
13332 
13333 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
13334 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
13335 	    {
13336 	      if (integer_zerop (icond))
13337 		{
13338 		  if (code == EQ_EXPR)
13339 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13340 					      real0, real1);
13341 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13342 		}
13343 	      else
13344 		{
13345 		  if (code == NE_EXPR)
13346 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13347 					      real0, real1);
13348 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13349 		}
13350 	    }
13351 	}
13352 
13353       return NULL_TREE;
13354 
13355     case LT_EXPR:
13356     case GT_EXPR:
13357     case LE_EXPR:
13358     case GE_EXPR:
13359       tem = fold_comparison (loc, code, type, op0, op1);
13360       if (tem != NULL_TREE)
13361 	return tem;
13362 
13363       /* Transform comparisons of the form X +- C CMP X.  */
13364       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13365 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13366 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13367 	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13368 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13369 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13370 	{
13371 	  tree arg01 = TREE_OPERAND (arg0, 1);
13372 	  enum tree_code code0 = TREE_CODE (arg0);
13373 	  int is_positive;
13374 
13375 	  if (TREE_CODE (arg01) == REAL_CST)
13376 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13377 	  else
13378 	    is_positive = tree_int_cst_sgn (arg01);
13379 
13380 	  /* (X - c) > X becomes false.  */
13381 	  if (code == GT_EXPR
13382 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13383 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13384 	    {
13385 	      if (TREE_CODE (arg01) == INTEGER_CST
13386 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13387 		fold_overflow_warning (("assuming signed overflow does not "
13388 					"occur when assuming that (X - c) > X "
13389 					"is always false"),
13390 				       WARN_STRICT_OVERFLOW_ALL);
13391 	      return constant_boolean_node (0, type);
13392 	    }
13393 
13394 	  /* Likewise (X + c) < X becomes false.  */
13395 	  if (code == LT_EXPR
13396 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13397 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13398 	    {
13399 	      if (TREE_CODE (arg01) == INTEGER_CST
13400 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13401 		fold_overflow_warning (("assuming signed overflow does not "
13402 					"occur when assuming that "
13403 					"(X + c) < X is always false"),
13404 				       WARN_STRICT_OVERFLOW_ALL);
13405 	      return constant_boolean_node (0, type);
13406 	    }
13407 
13408 	  /* Convert (X - c) <= X to true.  */
13409 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13410 	      && code == LE_EXPR
13411 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13412 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13413 	    {
13414 	      if (TREE_CODE (arg01) == INTEGER_CST
13415 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13416 		fold_overflow_warning (("assuming signed overflow does not "
13417 					"occur when assuming that "
13418 					"(X - c) <= X is always true"),
13419 				       WARN_STRICT_OVERFLOW_ALL);
13420 	      return constant_boolean_node (1, type);
13421 	    }
13422 
13423 	  /* Convert (X + c) >= X to true.  */
13424 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13425 	      && code == GE_EXPR
13426 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13427 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13428 	    {
13429 	      if (TREE_CODE (arg01) == INTEGER_CST
13430 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13431 		fold_overflow_warning (("assuming signed overflow does not "
13432 					"occur when assuming that "
13433 					"(X + c) >= X is always true"),
13434 				       WARN_STRICT_OVERFLOW_ALL);
13435 	      return constant_boolean_node (1, type);
13436 	    }
13437 
13438 	  if (TREE_CODE (arg01) == INTEGER_CST)
13439 	    {
13440 	      /* Convert X + c > X and X - c < X to true for integers.  */
13441 	      if (code == GT_EXPR
13442 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13443 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13444 		{
13445 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13446 		    fold_overflow_warning (("assuming signed overflow does "
13447 					    "not occur when assuming that "
13448 					    "(X + c) > X is always true"),
13449 					   WARN_STRICT_OVERFLOW_ALL);
13450 		  return constant_boolean_node (1, type);
13451 		}
13452 
13453 	      if (code == LT_EXPR
13454 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13455 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13456 		{
13457 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13458 		    fold_overflow_warning (("assuming signed overflow does "
13459 					    "not occur when assuming that "
13460 					    "(X - c) < X is always true"),
13461 					   WARN_STRICT_OVERFLOW_ALL);
13462 		  return constant_boolean_node (1, type);
13463 		}
13464 
13465 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
13466 	      if (code == LE_EXPR
13467 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13468 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13469 		{
13470 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13471 		    fold_overflow_warning (("assuming signed overflow does "
13472 					    "not occur when assuming that "
13473 					    "(X + c) <= X is always false"),
13474 					   WARN_STRICT_OVERFLOW_ALL);
13475 		  return constant_boolean_node (0, type);
13476 		}
13477 
13478 	      if (code == GE_EXPR
13479 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13480 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13481 		{
13482 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13483 		    fold_overflow_warning (("assuming signed overflow does "
13484 					    "not occur when assuming that "
13485 					    "(X - c) >= X is always false"),
13486 					   WARN_STRICT_OVERFLOW_ALL);
13487 		  return constant_boolean_node (0, type);
13488 		}
13489 	    }
13490 	}
13491 
13492       /* Comparisons with the highest or lowest possible integer of
13493 	 the specified precision will have known values.  */
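      /* For example, if ARG1 has type unsigned char, X > 255 folds to
	 false, X <= 255 folds to true, and X >= 255 becomes X == 255.  */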
13494       {
13495 	tree arg1_type = TREE_TYPE (arg1);
13496 	unsigned int width = TYPE_PRECISION (arg1_type);
13497 
13498 	if (TREE_CODE (arg1) == INTEGER_CST
13499 	    && width <= HOST_BITS_PER_DOUBLE_INT
13500 	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13501 	  {
13502 	    HOST_WIDE_INT signed_max_hi;
13503 	    unsigned HOST_WIDE_INT signed_max_lo;
13504 	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13505 
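	    /* The bounds are computed as (high, low) pairs of
	       HOST_WIDE_INTs, matching the double_int representation
	       of INTEGER_CSTs.  */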
13506 	    if (width <= HOST_BITS_PER_WIDE_INT)
13507 	      {
13508 		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13509 				- 1;
13510 		signed_max_hi = 0;
13511 		max_hi = 0;
13512 
13513 		if (TYPE_UNSIGNED (arg1_type))
13514 		  {
13515 		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13516 		    min_lo = 0;
13517 		    min_hi = 0;
13518 		  }
13519 		else
13520 		  {
13521 		    max_lo = signed_max_lo;
13522 		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13523 		    min_hi = -1;
13524 		  }
13525 	      }
13526 	    else
13527 	      {
13528 		width -= HOST_BITS_PER_WIDE_INT;
13529 		signed_max_lo = -1;
13530 		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13531 				- 1;
13532 		max_lo = -1;
13533 		min_lo = 0;
13534 
13535 		if (TYPE_UNSIGNED (arg1_type))
13536 		  {
13537 		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13538 		    min_hi = 0;
13539 		  }
13540 		else
13541 		  {
13542 		    max_hi = signed_max_hi;
13543 		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13544 		  }
13545 	      }
13546 
13547 	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13548 		&& TREE_INT_CST_LOW (arg1) == max_lo)
13549 	      switch (code)
13550 		{
13551 		case GT_EXPR:
13552 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13553 
13554 		case GE_EXPR:
13555 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13556 
13557 		case LE_EXPR:
13558 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13559 
13560 		case LT_EXPR:
13561 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13562 
13563 		/* The GE_EXPR and LT_EXPR cases above are not normally
13564 		   reached because of previous transformations.  */
13565 
13566 		default:
13567 		  break;
13568 		}
13569 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13570 		     == max_hi
13571 		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13572 	      switch (code)
13573 		{
13574 		case GT_EXPR:
13575 		  arg1 = const_binop (PLUS_EXPR, arg1,
13576 				      build_int_cst (TREE_TYPE (arg1), 1));
13577 		  return fold_build2_loc (loc, EQ_EXPR, type,
13578 				      fold_convert_loc (loc,
13579 							TREE_TYPE (arg1), arg0),
13580 				      arg1);
13581 		case LE_EXPR:
13582 		  arg1 = const_binop (PLUS_EXPR, arg1,
13583 				      build_int_cst (TREE_TYPE (arg1), 1));
13584 		  return fold_build2_loc (loc, NE_EXPR, type,
13585 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13586 							arg0),
13587 				      arg1);
13588 		default:
13589 		  break;
13590 		}
13591 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13592 		     == min_hi
13593 		     && TREE_INT_CST_LOW (arg1) == min_lo)
13594 	      switch (code)
13595 		{
13596 		case LT_EXPR:
13597 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13598 
13599 		case LE_EXPR:
13600 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13601 
13602 		case GE_EXPR:
13603 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13604 
13605 		case GT_EXPR:
13606 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13607 
13608 		default:
13609 		  break;
13610 		}
13611 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13612 		     == min_hi
13613 		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13614 	      switch (code)
13615 		{
13616 		case GE_EXPR:
13617 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13618 		  return fold_build2_loc (loc, NE_EXPR, type,
13619 				      fold_convert_loc (loc,
13620 							TREE_TYPE (arg1), arg0),
13621 				      arg1);
13622 		case LT_EXPR:
13623 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13624 		  return fold_build2_loc (loc, EQ_EXPR, type,
13625 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13626 							arg0),
13627 				      arg1);
13628 		default:
13629 		  break;
13630 		}
13631 
13632 	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13633 		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
13634 		     && TYPE_UNSIGNED (arg1_type)
13635 		     /* We will flip the signedness of the comparison operator
13636 			associated with the mode of arg1, so the sign bit is
13637 			specified by this mode.  Check that arg1 is the signed
13638 			max associated with this sign bit.  */
13639 		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13640 		     /* signed_type does not work on pointer types.  */
13641 		     && INTEGRAL_TYPE_P (arg1_type))
13642 	      {
13643 		/* The following case also applies to X < signed_max+1
13644 		   and X >= signed_max+1 because of previous transformations.  */
13645 		if (code == LE_EXPR || code == GT_EXPR)
13646 		  {
13647 		    tree st;
13648 		    st = signed_type_for (TREE_TYPE (arg1));
13649 		    return fold_build2_loc (loc,
13650 					code == LE_EXPR ? GE_EXPR : LT_EXPR,
13651 					type, fold_convert_loc (loc, st, arg0),
13652 					build_int_cst (st, 0));
13653 		  }
13654 	      }
13655 	  }
13656       }
13657 
13658       /* If we are comparing an ABS_EXPR with a constant, we can
13659 	 convert all the cases into explicit comparisons, but they may
13660 	 well not be faster than doing the ABS and one comparison.
13661 	 However, ABS (X) <= C is a range comparison, which becomes a
13662 	 subtraction and a comparison, and is probably faster.  */
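      /* For example, ABS (X) <= 5 folds to X >= -5 && X <= 5.  */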
13663       if (code == LE_EXPR
13664 	  && TREE_CODE (arg1) == INTEGER_CST
13665 	  && TREE_CODE (arg0) == ABS_EXPR
13666 	  && ! TREE_SIDE_EFFECTS (arg0)
13667 	  && (0 != (tem = negate_expr (arg1)))
13668 	  && TREE_CODE (tem) == INTEGER_CST
13669 	  && !TREE_OVERFLOW (tem))
13670 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13671 			    build2 (GE_EXPR, type,
13672 				    TREE_OPERAND (arg0, 0), tem),
13673 			    build2 (LE_EXPR, type,
13674 				    TREE_OPERAND (arg0, 0), arg1));
13675 
13676       /* Convert ABS_EXPR<x> >= 0 to true.  */
13677       strict_overflow_p = false;
13678       if (code == GE_EXPR
13679 	  && (integer_zerop (arg1)
13680 	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13681 		  && real_zerop (arg1)))
13682 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13683 	{
13684 	  if (strict_overflow_p)
13685 	    fold_overflow_warning (("assuming signed overflow does not occur "
13686 				    "when simplifying comparison of "
13687 				    "absolute value and zero"),
13688 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13689 	  return omit_one_operand_loc (loc, type,
13690 				       constant_boolean_node (true, type),
13691 				       arg0);
13692 	}
13693 
13694       /* Convert ABS_EXPR<x> < 0 to false.  */
13695       strict_overflow_p = false;
13696       if (code == LT_EXPR
13697 	  && (integer_zerop (arg1) || real_zerop (arg1))
13698 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13699 	{
13700 	  if (strict_overflow_p)
13701 	    fold_overflow_warning (("assuming signed overflow does not occur "
13702 				    "when simplifying comparison of "
13703 				    "absolute value and zero"),
13704 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13705 	  return omit_one_operand_loc (loc, type,
13706 				       constant_boolean_node (false, type),
13707 				       arg0);
13708 	}
13709 
13710       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13711 	 and similarly for >= into !=.  */
13712       if ((code == LT_EXPR || code == GE_EXPR)
13713 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13714 	  && TREE_CODE (arg1) == LSHIFT_EXPR
13715 	  && integer_onep (TREE_OPERAND (arg1, 0)))
13716 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13717 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13718 				   TREE_OPERAND (arg1, 1)),
13719 			   build_zero_cst (TREE_TYPE (arg0)));
13720 
13721       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13722 	 otherwise Y might be >= # of bits in X's type and thus e.g.
13723 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13724 	 If the cast is widening, then 1 << Y should have unsigned type,
13725 	 otherwise, if Y is the number of bits in the signed shift type
13726 	 minus 1, we can't optimize this.  E.g. (unsigned long long)
13727 	 (1 << Y) for Y == 31 might be 0xffffffff80000000.  */
13728       if ((code == LT_EXPR || code == GE_EXPR)
13729 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13730 	  && CONVERT_EXPR_P (arg1)
13731 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13732 	  && (TYPE_PRECISION (TREE_TYPE (arg1))
13733 	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13734 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13735 	      || (TYPE_PRECISION (TREE_TYPE (arg1))
13736 		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13737 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13738 	{
13739 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13740 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13741 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13742 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13743 			     build_zero_cst (TREE_TYPE (arg0)));
13744 	}
13745 
13746       return NULL_TREE;
13747 
13748     case UNORDERED_EXPR:
13749     case ORDERED_EXPR:
13750     case UNLT_EXPR:
13751     case UNLE_EXPR:
13752     case UNGT_EXPR:
13753     case UNGE_EXPR:
13754     case UNEQ_EXPR:
13755     case LTGT_EXPR:
13756       if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13757 	{
13758 	  t1 = fold_relational_const (code, type, arg0, arg1);
13759 	  if (t1 != NULL_TREE)
13760 	    return t1;
13761 	}
13762 
13763       /* If the first operand is NaN, the result is constant.  */
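      /* ORDERED and LTGT yield false; the remaining unordered
	 comparisons yield true, regardless of the other operand.  */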
13764       if (TREE_CODE (arg0) == REAL_CST
13765 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13766 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13767 	{
13768 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13769 	       ? integer_zero_node
13770 	       : integer_one_node;
13771 	  return omit_one_operand_loc (loc, type, t1, arg1);
13772 	}
13773 
13774       /* If the second operand is NaN, the result is constant.  */
13775       if (TREE_CODE (arg1) == REAL_CST
13776 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13777 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13778 	{
13779 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13780 	       ? integer_zero_node
13781 	       : integer_one_node;
13782 	  return omit_one_operand_loc (loc, type, t1, arg0);
13783 	}
13784 
13785       /* Simplify unordered comparison of something with itself.  */
13786       if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13787 	  && operand_equal_p (arg0, arg1, 0))
13788 	return constant_boolean_node (1, type);
13789 
13790       if (code == LTGT_EXPR
13791 	  && !flag_trapping_math
13792 	  && operand_equal_p (arg0, arg1, 0))
13793 	return constant_boolean_node (0, type);
13794 
13795       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
13796       {
13797 	tree targ0 = strip_float_extensions (arg0);
13798 	tree targ1 = strip_float_extensions (arg1);
13799 	tree newtype = TREE_TYPE (targ0);
13800 
13801 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13802 	  newtype = TREE_TYPE (targ1);
13803 
13804 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13805 	  return fold_build2_loc (loc, code, type,
13806 			      fold_convert_loc (loc, newtype, targ0),
13807 			      fold_convert_loc (loc, newtype, targ1));
13808       }
13809 
13810       return NULL_TREE;
13811 
13812     case COMPOUND_EXPR:
13813       /* When pedantic, a compound expression can be neither an lvalue
13814 	 nor an integer constant expression.  */
13815       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13816 	return NULL_TREE;
13817       /* Don't let (0, 0) be a null pointer constant.  */
13818       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13819 				 : fold_convert_loc (loc, type, arg1);
13820       return pedantic_non_lvalue_loc (loc, tem);
13821 
13822     case COMPLEX_EXPR:
13823       if ((TREE_CODE (arg0) == REAL_CST
13824 	   && TREE_CODE (arg1) == REAL_CST)
13825 	  || (TREE_CODE (arg0) == INTEGER_CST
13826 	      && TREE_CODE (arg1) == INTEGER_CST))
13827 	return build_complex (type, arg0, arg1);
13828       if (TREE_CODE (arg0) == REALPART_EXPR
13829 	  && TREE_CODE (arg1) == IMAGPART_EXPR
13830 	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13831 	  && operand_equal_p (TREE_OPERAND (arg0, 0),
13832 			      TREE_OPERAND (arg1, 0), 0))
13833 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13834 				     TREE_OPERAND (arg1, 0));
13835       return NULL_TREE;
13836 
13837     case ASSERT_EXPR:
13838       /* An ASSERT_EXPR should never be passed to fold_binary.  */
13839       gcc_unreachable ();
13840 
13841     case VEC_PACK_TRUNC_EXPR:
13842     case VEC_PACK_FIX_TRUNC_EXPR:
13843       {
13844 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13845 	tree *elts;
13846 
13847 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13848 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13849 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13850 	  return NULL_TREE;
13851 
13852 	elts = XALLOCAVEC (tree, nelts);
13853 	if (!vec_cst_ctor_to_array (arg0, elts)
13854 	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13855 	  return NULL_TREE;
13856 
13857 	for (i = 0; i < nelts; i++)
13858 	  {
13859 	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13860 					  ? NOP_EXPR : FIX_TRUNC_EXPR,
13861 					  TREE_TYPE (type), elts[i]);
13862 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13863 	      return NULL_TREE;
13864 	  }
13865 
13866 	return build_vector (type, elts);
13867       }
13868 
13869     case VEC_WIDEN_MULT_LO_EXPR:
13870     case VEC_WIDEN_MULT_HI_EXPR:
13871     case VEC_WIDEN_MULT_EVEN_EXPR:
13872     case VEC_WIDEN_MULT_ODD_EXPR:
13873       {
13874 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13875 	unsigned int out, ofs, scale;
13876 	tree *elts;
13877 
13878 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13879 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13880 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13881 	  return NULL_TREE;
13882 
13883 	elts = XALLOCAVEC (tree, nelts * 4);
13884 	if (!vec_cst_ctor_to_array (arg0, elts)
13885 	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13886 	  return NULL_TREE;
13887 
13888 	if (code == VEC_WIDEN_MULT_LO_EXPR)
13889 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13890 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
13891 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13892 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13893 	  scale = 1, ofs = 0;
13894 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13895 	  scale = 1, ofs = 1;
13896 
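	/* Output element OUT is the product of input elements
	   (OUT << SCALE) + OFS from each operand; e.g. the EVEN variant
	   multiplies elements 0, 2, 4, ... of the two inputs.  */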
13897 	for (out = 0; out < nelts; out++)
13898 	  {
13899 	    unsigned int in1 = (out << scale) + ofs;
13900 	    unsigned int in2 = in1 + nelts * 2;
13901 	    tree t1, t2;
13902 
13903 	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13904 	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13905 
13906 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
13907 	      return NULL_TREE;
13908 	    elts[out] = const_binop (MULT_EXPR, t1, t2);
13909 	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13910 	      return NULL_TREE;
13911 	  }
13912 
13913 	return build_vector (type, elts);
13914       }
13915 
13916     default:
13917       return NULL_TREE;
13918     } /* switch (code) */
13919 }
13920 
13921 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13922    a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13923    of GOTO_EXPR.  */
13924 
13925 static tree
13926 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13927 {
13928   switch (TREE_CODE (*tp))
13929     {
13930     case LABEL_EXPR:
13931       return *tp;
13932 
13933     case GOTO_EXPR:
13934       *walk_subtrees = 0;
13935 
13936       /* ... fall through ...  */
13937 
13938     default:
13939       return NULL_TREE;
13940     }
13941 }
13942 
13943 /* Return whether the sub-tree ST contains a label which is accessible from
13944    outside the sub-tree.  */
13945 
13946 static bool
13947 contains_label_p (tree st)
13948 {
13949   return
13950    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13951 }
13952 
13953 /* Fold a ternary expression of code CODE and type TYPE with operands
13954    OP0, OP1, and OP2.  Return the folded expression if folding is
13955    successful.  Otherwise, return NULL_TREE.  */
13956 
13957 tree
13958 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13959 		  tree op0, tree op1, tree op2)
13960 {
13961   tree tem;
13962   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13963   enum tree_code_class kind = TREE_CODE_CLASS (code);
13964 
13965   gcc_assert (IS_EXPR_CODE_CLASS (kind)
13966 	      && TREE_CODE_LENGTH (code) == 3);
13967 
13968   /* Strip any conversions that don't change the mode.  This is safe
13969      for every expression, except for a comparison expression because
13970      its signedness is derived from its operands.  So, in the latter
13971      case, only strip conversions that don't change the signedness.
13972 
13973      Note that this is done as an internal manipulation within the
13974      constant folder, in order to find the simplest representation of
13975      the arguments so that their form can be studied.  In any case,
13976      the appropriate type conversions should be put back in the tree
13977      that is returned from the constant folder.  */
13978   if (op0)
13979     {
13980       arg0 = op0;
13981       STRIP_NOPS (arg0);
13982     }
13983 
13984   if (op1)
13985     {
13986       arg1 = op1;
13987       STRIP_NOPS (arg1);
13988     }
13989 
13990   if (op2)
13991     {
13992       arg2 = op2;
13993       STRIP_NOPS (arg2);
13994     }
13995 
13996   switch (code)
13997     {
13998     case COMPONENT_REF:
13999       if (TREE_CODE (arg0) == CONSTRUCTOR
14000 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14001 	{
14002 	  unsigned HOST_WIDE_INT idx;
14003 	  tree field, value;
14004 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14005 	    if (field == arg1)
14006 	      return value;
14007 	}
14008       return NULL_TREE;
14009 
14010     case COND_EXPR:
14011       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14012 	 so all simple results must be passed through pedantic_non_lvalue.  */
14013       if (TREE_CODE (arg0) == INTEGER_CST)
14014 	{
14015 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
14016 	  tem = integer_zerop (arg0) ? op2 : op1;
14017 	  /* Only optimize constant conditions when the selected branch
14018 	     has the same type as the COND_EXPR.  This avoids optimizing
14019              away "c ? x : throw", where the throw has a void type.
14020              Also avoid throwing away an operand that contains a label.  */
14021           if ((!TREE_SIDE_EFFECTS (unused_op)
14022                || !contains_label_p (unused_op))
14023               && (! VOID_TYPE_P (TREE_TYPE (tem))
14024                   || VOID_TYPE_P (type)))
14025 	    return pedantic_non_lvalue_loc (loc, tem);
14026 	  return NULL_TREE;
14027 	}
14028       if (operand_equal_p (arg1, op2, 0))
14029 	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14030 
14031       /* If we have A op B ? A : C, we may be able to convert this to a
14032 	 simpler expression, depending on the operation and the values
14033 	 of B and C.  Signed zeros prevent all of these transformations,
14034 	 for reasons given above each one.
14035 
14036          Also try swapping the arguments and inverting the conditional.  */
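      /* For example, A > B ? A : B may fold to MAX_EXPR <A, B> when
	 signed zeros are not a concern (an illustrative case; the full
	 set of transformations lives in fold_cond_expr_with_comparison).  */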
14037       if (COMPARISON_CLASS_P (arg0)
14038 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14039 					     arg1, TREE_OPERAND (arg0, 1))
14040 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14041 	{
14042 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14043 	  if (tem)
14044 	    return tem;
14045 	}
14046 
14047       if (COMPARISON_CLASS_P (arg0)
14048 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14049 					     op2,
14050 					     TREE_OPERAND (arg0, 1))
14051 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14052 	{
14053 	  location_t loc0 = expr_location_or (arg0, loc);
14054 	  tem = fold_truth_not_expr (loc0, arg0);
14055 	  if (tem && COMPARISON_CLASS_P (tem))
14056 	    {
14057 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14058 	      if (tem)
14059 		return tem;
14060 	    }
14061 	}
14062 
14063       /* If the second operand is simpler than the third, swap them
14064 	 since that produces better jump optimization results.  */
14065       if (truth_value_p (TREE_CODE (arg0))
14066 	  && tree_swap_operands_p (op1, op2, false))
14067 	{
14068 	  location_t loc0 = expr_location_or (arg0, loc);
14069 	  /* See if this can be inverted.  If it can't, possibly because
14070 	     it was a floating-point inequality comparison, don't do
14071 	     anything.  */
14072 	  tem = fold_truth_not_expr (loc0, arg0);
14073 	  if (tem)
14074 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
14075 	}
14076 
14077       /* Convert A ? 1 : 0 to simply A.  */
14078       if (integer_onep (op1)
14079 	  && integer_zerop (op2)
14080 	  /* If we try to convert OP0 to our type, the
14081 	     call to fold will try to move the conversion inside
14082 	     a COND, which will recurse.  In that case, the COND_EXPR
14083 	     is probably the best choice, so leave it alone.  */
14084 	  && type == TREE_TYPE (arg0))
14085 	return pedantic_non_lvalue_loc (loc, arg0);
14086 
14087       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
14088 	 over COND_EXPR in cases such as floating point comparisons.  */
14089       if (integer_zerop (op1)
14090 	  && integer_onep (op2)
14091 	  && truth_value_p (TREE_CODE (arg0)))
14092 	return pedantic_non_lvalue_loc (loc,
14093 				    fold_convert_loc (loc, type,
14094 					      invert_truthvalue_loc (loc,
14095 								     arg0)));
14096 
14097       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
14098       if (TREE_CODE (arg0) == LT_EXPR
14099 	  && integer_zerop (TREE_OPERAND (arg0, 1))
14100 	  && integer_zerop (op2)
14101 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14102 	{
14103 	  /* sign_bit_p looks through both zero and sign extensions,
14104 	     but for this optimization only sign extensions are
14105 	     usable.  */
14106 	  tree tem2 = TREE_OPERAND (arg0, 0);
14107 	  while (tem != tem2)
14108 	    {
14109 	      if (TREE_CODE (tem2) != NOP_EXPR
14110 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14111 		{
14112 		  tem = NULL_TREE;
14113 		  break;
14114 		}
14115 	      tem2 = TREE_OPERAND (tem2, 0);
14116 	    }
14117 	  /* sign_bit_p only checks ARG1 bits within A's precision.
14118 	     If <sign bit of A> has wider type than A, bits outside
14119 	     of A's precision in <sign bit of A> need to be checked.
14120 	     If they are all 0, this optimization needs to be done
14121 	     in unsigned A's type; if they are all 1, in signed A's type;
14122 	     otherwise this can't be done.  */
14123 	  if (tem
14124 	      && TYPE_PRECISION (TREE_TYPE (tem))
14125 		 < TYPE_PRECISION (TREE_TYPE (arg1))
14126 	      && TYPE_PRECISION (TREE_TYPE (tem))
14127 		 < TYPE_PRECISION (type))
14128 	    {
14129 	      unsigned HOST_WIDE_INT mask_lo;
14130 	      HOST_WIDE_INT mask_hi;
14131 	      int inner_width, outer_width;
14132 	      tree tem_type;
14133 
14134 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14135 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14136 	      if (outer_width > TYPE_PRECISION (type))
14137 		outer_width = TYPE_PRECISION (type);
14138 
14139 	      if (outer_width > HOST_BITS_PER_WIDE_INT)
14140 		{
14141 		  mask_hi = ((unsigned HOST_WIDE_INT) -1
14142 			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14143 		  mask_lo = -1;
14144 		}
14145 	      else
14146 		{
14147 		  mask_hi = 0;
14148 		  mask_lo = ((unsigned HOST_WIDE_INT) -1
14149 			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
14150 		}
14151 	      if (inner_width > HOST_BITS_PER_WIDE_INT)
14152 		{
14153 		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14154 			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
14155 		  mask_lo = 0;
14156 		}
14157 	      else
14158 		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14159 			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
14160 
14161 	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14162 		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14163 		{
14164 		  tem_type = signed_type_for (TREE_TYPE (tem));
14165 		  tem = fold_convert_loc (loc, tem_type, tem);
14166 		}
14167 	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14168 		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14169 		{
14170 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
14171 		  tem = fold_convert_loc (loc, tem_type, tem);
14172 		}
14173 	      else
14174 		tem = NULL;
14175 	    }
14176 
14177 	  if (tem)
14178 	    return
14179 	      fold_convert_loc (loc, type,
14180 				fold_build2_loc (loc, BIT_AND_EXPR,
14181 					     TREE_TYPE (tem), tem,
14182 					     fold_convert_loc (loc,
14183 							       TREE_TYPE (tem),
14184 							       arg1)));
14185 	}
14186 
14187       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
14188 	 already handled above.  */
14189       if (TREE_CODE (arg0) == BIT_AND_EXPR
14190 	  && integer_onep (TREE_OPERAND (arg0, 1))
14191 	  && integer_zerop (op2)
14192 	  && integer_pow2p (arg1))
14193 	{
14194 	  tree tem = TREE_OPERAND (arg0, 0);
14195 	  STRIP_NOPS (tem);
14196 	  if (TREE_CODE (tem) == RSHIFT_EXPR
14197               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14198               && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
14199 	         == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))))
14200 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
14201 				TREE_OPERAND (tem, 0), arg1);
14202 	}
14203 
14204       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
14205 	 is probably obsolete because the first operand should be a
14206 	 truth value (that's why we have the two cases above), but let's
14207 	 leave it in until we can confirm this for all front-ends.  */
14208       if (integer_zerop (op2)
14209 	  && TREE_CODE (arg0) == NE_EXPR
14210 	  && integer_zerop (TREE_OPERAND (arg0, 1))
14211 	  && integer_pow2p (arg1)
14212 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14213 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14214 			      arg1, OEP_ONLY_CONST))
14215 	return pedantic_non_lvalue_loc (loc,
14216 				    fold_convert_loc (loc, type,
14217 						      TREE_OPERAND (arg0, 0)));
14218 
14219       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
14220       if (integer_zerop (op2)
14221 	  && truth_value_p (TREE_CODE (arg0))
14222 	  && truth_value_p (TREE_CODE (arg1)))
14223 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14224 			    fold_convert_loc (loc, type, arg0),
14225 			    arg1);
14226 
14227       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
14228       if (integer_onep (op2)
14229 	  && truth_value_p (TREE_CODE (arg0))
14230 	  && truth_value_p (TREE_CODE (arg1)))
14231 	{
14232 	  location_t loc0 = expr_location_or (arg0, loc);
14233 	  /* Only perform transformation if ARG0 is easily inverted.  */
14234 	  tem = fold_truth_not_expr (loc0, arg0);
14235 	  if (tem)
14236 	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14237 				fold_convert_loc (loc, type, tem),
14238 				arg1);
14239 	}
14240 
14241       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
14242       if (integer_zerop (arg1)
14243 	  && truth_value_p (TREE_CODE (arg0))
14244 	  && truth_value_p (TREE_CODE (op2)))
14245 	{
14246 	  location_t loc0 = expr_location_or (arg0, loc);
14247 	  /* Only perform transformation if ARG0 is easily inverted.  */
14248 	  tem = fold_truth_not_expr (loc0, arg0);
14249 	  if (tem)
14250 	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14251 				fold_convert_loc (loc, type, tem),
14252 				op2);
14253 	}
14254 
14255       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14256       if (integer_onep (arg1)
14257 	  && truth_value_p (TREE_CODE (arg0))
14258 	  && truth_value_p (TREE_CODE (op2)))
14259 	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14260 			    fold_convert_loc (loc, type, arg0),
14261 			    op2);
14262 
14263       return NULL_TREE;
14264 
14265     case VEC_COND_EXPR:
14266       if (TREE_CODE (arg0) == VECTOR_CST)
14267 	{
14268 	  if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14269 	    return pedantic_non_lvalue_loc (loc, op1);
14270 	  if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14271 	    return pedantic_non_lvalue_loc (loc, op2);
14272 	}
14273       return NULL_TREE;
14274 
14275     case CALL_EXPR:
14276       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
14277 	 of fold_ternary on them.  */
14278       gcc_unreachable ();
14279 
14280     case BIT_FIELD_REF:
14281       if ((TREE_CODE (arg0) == VECTOR_CST
14282 	   || (TREE_CODE (arg0) == CONSTRUCTOR
14283 	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14284 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
14285 	      || (TREE_CODE (type) == VECTOR_TYPE
14286 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14287 	{
14288 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14289 	  unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14290 	  unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14291 	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14292 
14293 	  if (n != 0
14294 	      && (idx % width) == 0
14295 	      && (n % width) == 0
14296 	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14297 	    {
14298 	      idx = idx / width;
14299 	      n = n / width;
14300 
14301 	      if (TREE_CODE (arg0) == VECTOR_CST)
14302 		{
14303 		  if (n == 1)
14304 		    return VECTOR_CST_ELT (arg0, idx);
14305 
14306 		  tree *vals = XALLOCAVEC (tree, n);
14307 		  for (unsigned i = 0; i < n; ++i)
14308 		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14309 		  return build_vector (type, vals);
14310 		}
14311 
14312 	      /* Constructor elements can be subvectors.  */
14313 	      unsigned HOST_WIDE_INT k = 1;
14314 	      if (CONSTRUCTOR_NELTS (arg0) != 0)
14315 		{
14316 		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14317 		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14318 		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
14319 		}
14320 
14321 	      /* We keep an exact subset of the constructor elements.  */
14322 	      if ((idx % k) == 0 && (n % k) == 0)
14323 		{
14324 		  if (CONSTRUCTOR_NELTS (arg0) == 0)
14325 		    return build_constructor (type, NULL);
14326 		  idx /= k;
14327 		  n /= k;
14328 		  if (n == 1)
14329 		    {
14330 		      if (idx < CONSTRUCTOR_NELTS (arg0))
14331 			return CONSTRUCTOR_ELT (arg0, idx)->value;
14332 		      return build_zero_cst (type);
14333 		    }
14334 
14335 		  vec<constructor_elt, va_gc> *vals;
14336 		  vec_alloc (vals, n);
14337 		  for (unsigned i = 0;
14338 		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14339 		       ++i)
14340 		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14341 					    CONSTRUCTOR_ELT
14342 					      (arg0, idx + i)->value);
14343 		  return build_constructor (type, vals);
14344 		}
14345 	      /* The bitfield references a single constructor element.  */
14346 	      else if (idx + n <= (idx / k + 1) * k)
14347 		{
14348 		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14349 		    return build_zero_cst (type);
14350 		  else if (n == k)
14351 		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14352 		  else
14353 		    return fold_build3_loc (loc, code, type,
14354 		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14355 		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14356 		}
14357 	    }
14358 	}
14359 
14360       /* A bit-field-ref that references the full argument can be stripped.  */
14361       if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14362 	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14363 	  && integer_zerop (op2))
14364 	return fold_convert_loc (loc, type, arg0);
14365 
14366       /* On constants we can use native encode/interpret to constant
14367          fold (nearly) all BIT_FIELD_REFs.  */
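      /* That is, ARG0 is encoded into a byte buffer with
	 native_encode_expr and the selected bytes are re-interpreted
	 in TYPE with native_interpret_expr.  */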
14368       if (CONSTANT_CLASS_P (arg0)
14369 	  && can_native_interpret_type_p (type)
14370 	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14371 	  /* This limitation should not be necessary; we just need to
14372 	     round this up to mode size.  */
14373 	  && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14374 	  /* Need bit-shifting of the buffer to relax the following.  */
14375 	  && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14376 	{
14377 	  unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14378 	  unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14379 	  unsigned HOST_WIDE_INT clen;
14380 	  clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14381 	  /* ???  We cannot tell native_encode_expr to start at
14382 	     some random byte only.  So limit ourselves to a reasonable
14383 	     amount of work.  */
14384 	  if (clen <= 4096)
14385 	    {
14386 	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
14387 	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14388 	      if (len > 0
14389 		  && len * BITS_PER_UNIT >= bitpos + bitsize)
14390 		{
14391 		  tree v = native_interpret_expr (type,
14392 						  b + bitpos / BITS_PER_UNIT,
14393 						  bitsize / BITS_PER_UNIT);
14394 		  if (v)
14395 		    return v;
14396 		}
14397 	    }
14398 	}
14399 
14400       return NULL_TREE;
14401 
14402     case FMA_EXPR:
14403       /* For integers we can decompose the FMA if possible.  */
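      /* FMA_EXPR <A, B, C> computes A * B + C.  */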
14404       if (TREE_CODE (arg0) == INTEGER_CST
14405 	  && TREE_CODE (arg1) == INTEGER_CST)
14406 	return fold_build2_loc (loc, PLUS_EXPR, type,
14407 				const_binop (MULT_EXPR, arg0, arg1), arg2);
14408       if (integer_zerop (arg2))
14409 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14410 
14411       return fold_fma (loc, type, arg0, arg1, arg2);
14412 
14413     case VEC_PERM_EXPR:
14414       if (TREE_CODE (arg2) == VECTOR_CST)
14415 	{
14416 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14417 	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14418 	  tree t;
14419 	  bool need_mask_canon = false;
14420 	  bool all_in_vec0 = true;
14421 	  bool all_in_vec1 = true;
14422 	  bool maybe_identity = true;
14423 	  bool single_arg = (op0 == op1);
14424 	  bool changed = false;
14425 
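	  /* Selector elements index into the concatenation of the two
	     operands, so each is reduced modulo 2*NELTS (or NELTS when
	     both operands are the same vector).  */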
14426 	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14427 	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14428 	  for (i = 0; i < nelts; i++)
14429 	    {
14430 	      tree val = VECTOR_CST_ELT (arg2, i);
14431 	      if (TREE_CODE (val) != INTEGER_CST)
14432 		return NULL_TREE;
14433 
14434 	      sel[i] = TREE_INT_CST_LOW (val) & mask;
14435 	      if (TREE_INT_CST_HIGH (val)
14436 		  || ((unsigned HOST_WIDE_INT)
14437 		      TREE_INT_CST_LOW (val) != sel[i]))
14438 		need_mask_canon = true;
14439 
14440 	      if (sel[i] < nelts)
14441 		all_in_vec1 = false;
14442 	      else
14443 		all_in_vec0 = false;
14444 
14445 	      if ((sel[i] & (nelts-1)) != i)
14446 		maybe_identity = false;
14447 	    }
14448 
14449 	  if (maybe_identity)
14450 	    {
14451 	      if (all_in_vec0)
14452 		return op0;
14453 	      if (all_in_vec1)
14454 		return op1;
14455 	    }
14456 
14457 	  if (all_in_vec0)
14458 	    op1 = op0;
14459 	  else if (all_in_vec1)
14460 	    {
14461 	      op0 = op1;
14462 	      for (i = 0; i < nelts; i++)
14463 		sel[i] -= nelts;
14464 	      need_mask_canon = true;
14465 	    }
14466 
14467 	  if ((TREE_CODE (op0) == VECTOR_CST
14468 	       || TREE_CODE (op0) == CONSTRUCTOR)
14469 	      && (TREE_CODE (op1) == VECTOR_CST
14470 		  || TREE_CODE (op1) == CONSTRUCTOR))
14471 	    {
14472 	      t = fold_vec_perm (type, op0, op1, sel);
14473 	      if (t != NULL_TREE)
14474 		return t;
14475 	    }
14476 
14477 	  if (op0 == op1 && !single_arg)
14478 	    changed = true;
14479 
14480 	  if (need_mask_canon && arg2 == op2)
14481 	    {
14482 	      tree *tsel = XALLOCAVEC (tree, nelts);
14483 	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14484 	      for (i = 0; i < nelts; i++)
14485 		tsel[i] = build_int_cst (eltype, sel[i]);
14486 	      op2 = build_vector (TREE_TYPE (arg2), tsel);
14487 	      changed = true;
14488 	    }
14489 
14490 	  if (changed)
14491 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14492 	}
14493       return NULL_TREE;
14494 
14495     default:
14496       return NULL_TREE;
14497     } /* switch (code) */
14498 }
14499 
14500 /* Perform constant folding and related simplification of EXPR.
14501    The related simplifications include x*1 => x, x*0 => 0, etc.,
14502    and application of the associative law.
14503    NOP_EXPR conversions may be removed freely (as long as we
14504    are careful not to change the type of the overall expression).
14505    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14506    but we can constant-fold them if they have constant operands.  */
14507 
14508 #ifdef ENABLE_FOLD_CHECKING
14509 # define fold(x) fold_1 (x)
14510 static tree fold_1 (tree);
14511 static
14512 #endif
14513 tree
14514 fold (tree expr)
14515 {
14516   const tree t = expr;
14517   enum tree_code code = TREE_CODE (t);
14518   enum tree_code_class kind = TREE_CODE_CLASS (code);
14519   tree tem;
14520   location_t loc = EXPR_LOCATION (expr);
14521 
14522   /* Return right away if a constant.  */
14523   if (kind == tcc_constant)
14524     return t;
14525 
14526   /* CALL_EXPR-like objects with variable numbers of operands are
14527      treated specially.  */
14528   if (kind == tcc_vl_exp)
14529     {
14530       if (code == CALL_EXPR)
14531 	{
14532 	  tem = fold_call_expr (loc, expr, false);
14533 	  return tem ? tem : expr;
14534 	}
14535       return expr;
14536     }
14537 
14538   if (IS_EXPR_CODE_CLASS (kind))
14539     {
14540       tree type = TREE_TYPE (t);
14541       tree op0, op1, op2;
14542 
14543       switch (TREE_CODE_LENGTH (code))
14544 	{
14545 	case 1:
14546 	  op0 = TREE_OPERAND (t, 0);
14547 	  tem = fold_unary_loc (loc, code, type, op0);
14548 	  return tem ? tem : expr;
14549 	case 2:
14550 	  op0 = TREE_OPERAND (t, 0);
14551 	  op1 = TREE_OPERAND (t, 1);
14552 	  tem = fold_binary_loc (loc, code, type, op0, op1);
14553 	  return tem ? tem : expr;
14554 	case 3:
14555 	  op0 = TREE_OPERAND (t, 0);
14556 	  op1 = TREE_OPERAND (t, 1);
14557 	  op2 = TREE_OPERAND (t, 2);
14558 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14559 	  return tem ? tem : expr;
14560 	default:
14561 	  break;
14562 	}
14563     }
14564 
14565   switch (code)
14566     {
14567     case ARRAY_REF:
14568       {
14569 	tree op0 = TREE_OPERAND (t, 0);
14570 	tree op1 = TREE_OPERAND (t, 1);
14571 
14572 	if (TREE_CODE (op1) == INTEGER_CST
14573 	    && TREE_CODE (op0) == CONSTRUCTOR
14574 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14575 	  {
14576 	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14577 	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14578 	    unsigned HOST_WIDE_INT begin = 0;
14579 
14580 	    /* Find a matching index by means of a binary search.  */
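	    /* The CONSTRUCTOR indices are assumed sorted, which is what
	       makes the binary search valid; each index is either an
	       INTEGER_CST or a RANGE_EXPR covering [low, high].  */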
14581 	    while (begin != end)
14582 	      {
14583 		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14584 		tree index = (*elts)[middle].index;
14585 
14586 		if (TREE_CODE (index) == INTEGER_CST
14587 		    && tree_int_cst_lt (index, op1))
14588 		  begin = middle + 1;
14589 		else if (TREE_CODE (index) == INTEGER_CST
14590 			 && tree_int_cst_lt (op1, index))
14591 		  end = middle;
14592 		else if (TREE_CODE (index) == RANGE_EXPR
14593 			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14594 		  begin = middle + 1;
14595 		else if (TREE_CODE (index) == RANGE_EXPR
14596 			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14597 		  end = middle;
14598 		else
14599 		  return (*elts)[middle].value;
14600 	      }
14601 	  }
14602 
14603 	return t;
14604       }
14605 
14606       /* Return a VECTOR_CST if possible.  */
14607     case CONSTRUCTOR:
14608       {
14609 	tree type = TREE_TYPE (t);
14610 	if (TREE_CODE (type) != VECTOR_TYPE)
14611 	  return t;
14612 
14613 	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14614 	unsigned HOST_WIDE_INT idx, pos = 0;
14615 	tree value;
14616 
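	/* Flatten the elements; a constructor element may itself be a
	   VECTOR_CST, and missing trailing elements are implicitly
	   zero.  */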
14617 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14618 	  {
14619 	    if (!CONSTANT_CLASS_P (value))
14620 	      return t;
14621 	    if (TREE_CODE (value) == VECTOR_CST)
14622 	      {
14623 		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14624 		  vec[pos++] = VECTOR_CST_ELT (value, i);
14625 	      }
14626 	    else
14627 	      vec[pos++] = value;
14628 	  }
14629 	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14630 	  vec[pos] = build_zero_cst (TREE_TYPE (type));
14631 
14632 	return build_vector (type, vec);
14633       }
14634 
14635     case CONST_DECL:
14636       return fold (DECL_INITIAL (t));
14637 
14638     default:
14639       return t;
14640     } /* switch (code) */
14641 }
14642 
14643 #ifdef ENABLE_FOLD_CHECKING
14644 #undef fold
14645 
14646 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14647 				hash_table <pointer_hash <tree_node> >);
14648 static void fold_check_failed (const_tree, const_tree);
14649 void print_fold_checksum (const_tree);
14650 
14651 /* When --enable-checking=fold, compute a digest of EXPR before
14652    and after the actual fold call to verify that fold did not
14653    accidentally change the original EXPR.  */
14654 
14655 tree
14656 fold (tree expr)
14657 {
14658   tree ret;
14659   struct md5_ctx ctx;
14660   unsigned char checksum_before[16], checksum_after[16];
14661   hash_table <pointer_hash <tree_node> > ht;
14662 
14663   ht.create (32);
14664   md5_init_ctx (&ctx);
14665   fold_checksum_tree (expr, &ctx, ht);
14666   md5_finish_ctx (&ctx, checksum_before);
14667   ht.empty ();
14668 
14669   ret = fold_1 (expr);
14670 
14671   md5_init_ctx (&ctx);
14672   fold_checksum_tree (expr, &ctx, ht);
14673   md5_finish_ctx (&ctx, checksum_after);
14674   ht.dispose ();
14675 
14676   if (memcmp (checksum_before, checksum_after, 16))
14677     fold_check_failed (expr, ret);
14678 
14679   return ret;
14680 }
14681 
14682 void
14683 print_fold_checksum (const_tree expr)
14684 {
14685   struct md5_ctx ctx;
14686   unsigned char checksum[16], cnt;
14687   hash_table <pointer_hash <tree_node> > ht;
14688 
14689   ht.create (32);
14690   md5_init_ctx (&ctx);
14691   fold_checksum_tree (expr, &ctx, ht);
14692   md5_finish_ctx (&ctx, checksum);
14693   ht.dispose ();
14694   for (cnt = 0; cnt < 16; ++cnt)
14695     fprintf (stderr, "%02x", checksum[cnt]);
14696   putc ('\n', stderr);
14697 }
14698 
14699 static void
14700 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14701 {
14702   internal_error ("fold check: original tree changed by fold");
14703 }
14704 
14705 static void
14706 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14707 		    hash_table <pointer_hash <tree_node> > ht)
14708 {
14709   tree_node **slot;
14710   enum tree_code code;
14711   union tree_node buf;
14712   int i, len;
14713 
14714  recursive_label:
14715   if (expr == NULL)
14716     return;
14717   slot = ht.find_slot (expr, INSERT);
14718   if (*slot != NULL)
14719     return;
14720   *slot = CONST_CAST_TREE (expr);
14721   code = TREE_CODE (expr);
14722   if (TREE_CODE_CLASS (code) == tcc_declaration
14723       && DECL_ASSEMBLER_NAME_SET_P (expr))
14724     {
14725       /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14726       memcpy ((char *) &buf, expr, tree_size (expr));
14727       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14728       expr = (tree) &buf;
14729     }
14730   else if (TREE_CODE_CLASS (code) == tcc_type
14731 	   && (TYPE_POINTER_TO (expr)
14732 	       || TYPE_REFERENCE_TO (expr)
14733 	       || TYPE_CACHED_VALUES_P (expr)
14734 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14735 	       || TYPE_NEXT_VARIANT (expr)))
14736     {
14737       /* Allow these fields to be modified.  */
14738       tree tmp;
14739       memcpy ((char *) &buf, expr, tree_size (expr));
14740       expr = tmp = (tree) &buf;
14741       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14742       TYPE_POINTER_TO (tmp) = NULL;
14743       TYPE_REFERENCE_TO (tmp) = NULL;
14744       TYPE_NEXT_VARIANT (tmp) = NULL;
14745       if (TYPE_CACHED_VALUES_P (tmp))
14746 	{
14747 	  TYPE_CACHED_VALUES_P (tmp) = 0;
14748 	  TYPE_CACHED_VALUES (tmp) = NULL;
14749 	}
14750     }
14751   md5_process_bytes (expr, tree_size (expr), ctx);
14752   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14753     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14754   if (TREE_CODE_CLASS (code) != tcc_type
14755       && TREE_CODE_CLASS (code) != tcc_declaration
14756       && code != TREE_LIST
14757       && code != SSA_NAME
14758       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14759     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14760   switch (TREE_CODE_CLASS (code))
14761     {
14762     case tcc_constant:
14763       switch (code)
14764 	{
14765 	case STRING_CST:
14766 	  md5_process_bytes (TREE_STRING_POINTER (expr),
14767 			     TREE_STRING_LENGTH (expr), ctx);
14768 	  break;
14769 	case COMPLEX_CST:
14770 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14771 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14772 	  break;
14773 	case VECTOR_CST:
14774 	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14775 	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14776 	  break;
14777 	default:
14778 	  break;
14779 	}
14780       break;
14781     case tcc_exceptional:
14782       switch (code)
14783 	{
14784 	case TREE_LIST:
14785 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14786 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14787 	  expr = TREE_CHAIN (expr);
14788 	  goto recursive_label;
14789 	  break;
14790 	case TREE_VEC:
14791 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14792 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14793 	  break;
14794 	default:
14795 	  break;
14796 	}
14797       break;
14798     case tcc_expression:
14799     case tcc_reference:
14800     case tcc_comparison:
14801     case tcc_unary:
14802     case tcc_binary:
14803     case tcc_statement:
14804     case tcc_vl_exp:
14805       len = TREE_OPERAND_LENGTH (expr);
14806       for (i = 0; i < len; ++i)
14807 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14808       break;
14809     case tcc_declaration:
14810       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14811       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14812       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14813 	{
14814 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14815 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14816 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14817 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14818 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14819 	}
14820       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14821 	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14822 
14823       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14824 	{
14825 	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14826 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14827 	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14828 	}
14829       break;
14830     case tcc_type:
14831       if (TREE_CODE (expr) == ENUMERAL_TYPE)
14832         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14833       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14834       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14835       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14836       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14837       if (INTEGRAL_TYPE_P (expr)
14838           || SCALAR_FLOAT_TYPE_P (expr))
14839 	{
14840 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14841 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14842 	}
14843       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14844       if (TREE_CODE (expr) == RECORD_TYPE
14845 	  || TREE_CODE (expr) == UNION_TYPE
14846 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
14847 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14848       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14849       break;
14850     default:
14851       break;
14852     }
14853 }
14854 
14855 /* Helper function for outputting the checksum of a tree T.  When
14856    debugging with gdb, you can "define mynext" to be "next" followed
14857    by "call debug_fold_checksum (op0)", then just trace down until the
14858    outputs differ.  */
14859 
14860 DEBUG_FUNCTION void
14861 debug_fold_checksum (const_tree t)
14862 {
14863   int i;
14864   unsigned char checksum[16];
14865   struct md5_ctx ctx;
14866   hash_table <pointer_hash <tree_node> > ht;
14867   ht.create (32);
14868 
14869   md5_init_ctx (&ctx);
14870   fold_checksum_tree (t, &ctx, ht);
14871   md5_finish_ctx (&ctx, checksum);
14872   ht.empty ();
14873 
14874   for (i = 0; i < 16; i++)
14875     fprintf (stderr, "%d ", checksum[i]);
14876 
14877   fprintf (stderr, "\n");
14878 }
14879 
14880 #endif
14881 
14882 /* Fold a unary tree expression with code CODE of type TYPE with an
14883    operand OP0.  LOC is the location of the resulting expression.
14884    Return a folded expression if successful.  Otherwise, return a tree
14885    expression with code CODE of type TYPE with an operand OP0.  */
14886 
14887 tree
14888 fold_build1_stat_loc (location_t loc,
14889 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14890 {
14891   tree tem;
14892 #ifdef ENABLE_FOLD_CHECKING
14893   unsigned char checksum_before[16], checksum_after[16];
14894   struct md5_ctx ctx;
14895   hash_table <pointer_hash <tree_node> > ht;
14896 
14897   ht.create (32);
14898   md5_init_ctx (&ctx);
14899   fold_checksum_tree (op0, &ctx, ht);
14900   md5_finish_ctx (&ctx, checksum_before);
14901   ht.empty ();
14902 #endif
14903 
14904   tem = fold_unary_loc (loc, code, type, op0);
14905   if (!tem)
14906     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14907 
14908 #ifdef ENABLE_FOLD_CHECKING
14909   md5_init_ctx (&ctx);
14910   fold_checksum_tree (op0, &ctx, ht);
14911   md5_finish_ctx (&ctx, checksum_after);
14912   ht.dispose ();
14913 
14914   if (memcmp (checksum_before, checksum_after, 16))
14915     fold_check_failed (op0, tem);
14916 #endif
14917   return tem;
14918 }
14919 
14920 /* Fold a binary tree expression with code CODE of type TYPE with
14921    operands OP0 and OP1.  LOC is the location of the resulting
14922    expression.  Return a folded expression if successful.  Otherwise,
14923    return a tree expression with code CODE of type TYPE with operands
14924    OP0 and OP1.  */
14925 
14926 tree
14927 fold_build2_stat_loc (location_t loc,
14928 		      enum tree_code code, tree type, tree op0, tree op1
14929 		      MEM_STAT_DECL)
14930 {
14931   tree tem;
14932 #ifdef ENABLE_FOLD_CHECKING
14933   unsigned char checksum_before_op0[16],
14934                 checksum_before_op1[16],
14935 		checksum_after_op0[16],
14936 		checksum_after_op1[16];
14937   struct md5_ctx ctx;
14938   hash_table <pointer_hash <tree_node> > ht;
14939 
14940   ht.create (32);
14941   md5_init_ctx (&ctx);
14942   fold_checksum_tree (op0, &ctx, ht);
14943   md5_finish_ctx (&ctx, checksum_before_op0);
14944   ht.empty ();
14945 
14946   md5_init_ctx (&ctx);
14947   fold_checksum_tree (op1, &ctx, ht);
14948   md5_finish_ctx (&ctx, checksum_before_op1);
14949   ht.empty ();
14950 #endif
14951 
14952   tem = fold_binary_loc (loc, code, type, op0, op1);
14953   if (!tem)
14954     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14955 
14956 #ifdef ENABLE_FOLD_CHECKING
14957   md5_init_ctx (&ctx);
14958   fold_checksum_tree (op0, &ctx, ht);
14959   md5_finish_ctx (&ctx, checksum_after_op0);
14960   ht.empty ();
14961 
14962   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14963     fold_check_failed (op0, tem);
14964 
14965   md5_init_ctx (&ctx);
14966   fold_checksum_tree (op1, &ctx, ht);
14967   md5_finish_ctx (&ctx, checksum_after_op1);
14968   ht.dispose ();
14969 
14970   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14971     fold_check_failed (op1, tem);
14972 #endif
14973   return tem;
14974 }
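/* A companion sketch (editor's addition, not compiled): comparisons of
   constants fold to boolean constants; had either operand been a
   variable, an ordinary LT_EXPR node would be built instead.  */
#if 0
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree cmp = fold_build2_loc (UNKNOWN_LOCATION, LT_EXPR,
			      boolean_type_node, two, three);
  gcc_assert (integer_onep (cmp));	/* 2 < 3 is true.  */
}
#endif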
14975 
14976 /* Fold a ternary tree expression with code CODE of type TYPE with
14977    operands OP0, OP1, and OP2.  Return a folded expression if
14978    successful.  Otherwise, return a tree expression with code CODE of
14979    type TYPE with operands OP0, OP1, and OP2.  */
14980 
14981 tree
14982 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14983 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14984 {
14985   tree tem;
14986 #ifdef ENABLE_FOLD_CHECKING
14987   unsigned char checksum_before_op0[16],
14988                 checksum_before_op1[16],
14989                 checksum_before_op2[16],
14990 		checksum_after_op0[16],
14991 		checksum_after_op1[16],
14992 		checksum_after_op2[16];
14993   struct md5_ctx ctx;
14994   hash_table <pointer_hash <tree_node> > ht;
14995 
14996   ht.create (32);
14997   md5_init_ctx (&ctx);
14998   fold_checksum_tree (op0, &ctx, ht);
14999   md5_finish_ctx (&ctx, checksum_before_op0);
15000   ht.empty ();
15001 
15002   md5_init_ctx (&ctx);
15003   fold_checksum_tree (op1, &ctx, ht);
15004   md5_finish_ctx (&ctx, checksum_before_op1);
15005   ht.empty ();
15006 
15007   md5_init_ctx (&ctx);
15008   fold_checksum_tree (op2, &ctx, ht);
15009   md5_finish_ctx (&ctx, checksum_before_op2);
15010   ht.empty ();
15011 #endif
15012 
15013   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15014   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15015   if (!tem)
15016     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15017 
15018 #ifdef ENABLE_FOLD_CHECKING
15019   md5_init_ctx (&ctx);
15020   fold_checksum_tree (op0, &ctx, ht);
15021   md5_finish_ctx (&ctx, checksum_after_op0);
15022   ht.empty ();
15023 
15024   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15025     fold_check_failed (op0, tem);
15026 
15027   md5_init_ctx (&ctx);
15028   fold_checksum_tree (op1, &ctx, ht);
15029   md5_finish_ctx (&ctx, checksum_after_op1);
15030   ht.empty ();
15031 
15032   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15033     fold_check_failed (op1, tem);
15034 
15035   md5_init_ctx (&ctx);
15036   fold_checksum_tree (op2, &ctx, ht);
15037   md5_finish_ctx (&ctx, checksum_after_op2);
15038   ht.dispose ();
15039 
15040   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15041     fold_check_failed (op2, tem);
15042 #endif
15043   return tem;
15044 }
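/* Sketch (editor's addition, not compiled): a COND_EXPR whose
   condition is a constant folds to the selected arm.  */
#if 0
{
  tree a = build_int_cst (integer_type_node, 10);
  tree b = build_int_cst (integer_type_node, 20);
  tree c = fold_build3_loc (UNKNOWN_LOCATION, COND_EXPR,
			    integer_type_node, boolean_true_node, a, b);
  gcc_assert (tree_int_cst_equal (c, a));	/* true ? 10 : 20 ==> 10.  */
}
#endif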
15045 
15046 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
15047    arguments in ARGARRAY, and a null static chain.
15048    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
15049    of type TYPE from the given operands as constructed by build_call_array.  */
15050 
15051 tree
15052 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15053 			   int nargs, tree *argarray)
15054 {
15055   tree tem;
15056 #ifdef ENABLE_FOLD_CHECKING
15057   unsigned char checksum_before_fn[16],
15058                 checksum_before_arglist[16],
15059 		checksum_after_fn[16],
15060 		checksum_after_arglist[16];
15061   struct md5_ctx ctx;
15062   hash_table <pointer_hash <tree_node> > ht;
15063   int i;
15064 
15065   ht.create (32);
15066   md5_init_ctx (&ctx);
15067   fold_checksum_tree (fn, &ctx, ht);
15068   md5_finish_ctx (&ctx, checksum_before_fn);
15069   ht.empty ();
15070 
15071   md5_init_ctx (&ctx);
15072   for (i = 0; i < nargs; i++)
15073     fold_checksum_tree (argarray[i], &ctx, ht);
15074   md5_finish_ctx (&ctx, checksum_before_arglist);
15075   ht.empty ();
15076 #endif
15077 
15078   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15079 
15080 #ifdef ENABLE_FOLD_CHECKING
15081   md5_init_ctx (&ctx);
15082   fold_checksum_tree (fn, &ctx, ht);
15083   md5_finish_ctx (&ctx, checksum_after_fn);
15084   ht.empty ();
15085 
15086   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15087     fold_check_failed (fn, tem);
15088 
15089   md5_init_ctx (&ctx);
15090   for (i = 0; i < nargs; i++)
15091     fold_checksum_tree (argarray[i], &ctx, ht);
15092   md5_finish_ctx (&ctx, checksum_after_arglist);
15093   ht.dispose ();
15094 
15095   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15096     fold_check_failed (NULL_TREE, tem);
15097 #endif
15098   return tem;
15099 }
15100 
15101 /* Perform constant folding and related simplification of initializer
15102    expression EXPR.  These behave identically to "fold_buildN" but ignore
15103    potential run-time traps and exceptions that fold must preserve.  */
15104 
15105 #define START_FOLD_INIT \
15106   int saved_signaling_nans = flag_signaling_nans;\
15107   int saved_trapping_math = flag_trapping_math;\
15108   int saved_rounding_math = flag_rounding_math;\
15109   int saved_trapv = flag_trapv;\
15110   int saved_folding_initializer = folding_initializer;\
15111   flag_signaling_nans = 0;\
15112   flag_trapping_math = 0;\
15113   flag_rounding_math = 0;\
15114   flag_trapv = 0;\
15115   folding_initializer = 1;
15116 
15117 #define END_FOLD_INIT \
15118   flag_signaling_nans = saved_signaling_nans;\
15119   flag_trapping_math = saved_trapping_math;\
15120   flag_rounding_math = saved_rounding_math;\
15121   flag_trapv = saved_trapv;\
15122   folding_initializer = saved_folding_initializer;
15123 
15124 tree
15125 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15126 			     tree type, tree op)
15127 {
15128   tree result;
15129   START_FOLD_INIT;
15130 
15131   result = fold_build1_loc (loc, code, type, op);
15132 
15133   END_FOLD_INIT;
15134   return result;
15135 }
15136 
15137 tree
15138 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15139 			     tree type, tree op0, tree op1)
15140 {
15141   tree result;
15142   START_FOLD_INIT;
15143 
15144   result = fold_build2_loc (loc, code, type, op0, op1);
15145 
15146   END_FOLD_INIT;
15147   return result;
15148 }
15149 
15150 tree
15151 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15152 			     tree type, tree op0, tree op1, tree op2)
15153 {
15154   tree result;
15155   START_FOLD_INIT;
15156 
15157   result = fold_build3_loc (loc, code, type, op0, op1, op2);
15158 
15159   END_FOLD_INIT;
15160   return result;
15161 }
15162 
15163 tree
15164 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15165 				       int nargs, tree *argarray)
15166 {
15167   tree result;
15168   START_FOLD_INIT;
15169 
15170   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15171 
15172   END_FOLD_INIT;
15173   return result;
15174 }
15175 
15176 #undef START_FOLD_INIT
15177 #undef END_FOLD_INIT
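/* A hedged sketch (editor's addition, not compiled) of why the
   _initializer_ variants exist.  OP0 and OP1 stand for REAL_CST
   operands built elsewhere.  Under -frounding-math the plain
   fold_build2_loc must keep an inexact division unfolded, since its
   value depends on the dynamic rounding mode; a static initializer is
   evaluated at compile time anyway, so the variant below temporarily
   clears the flag and folds.  */
#if 0
tree folded = fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
					   double_type_node, op0, op1);
#endif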
15178 
15179 /* Determine whether the first argument is a multiple of the second.
15180    Return 0 if it is not, or if we cannot easily determine it to be.
15181 
15182    An example of the sort of thing we care about (at this point; this routine
15183    could surely be made more general, and expanded to do what the *_DIV_EXPR's
15184    fold cases do now) is discovering that
15185 
15186      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15187 
15188    is a multiple of
15189 
15190      SAVE_EXPR (J * 8)
15191 
15192    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15193 
15194    This code also handles discovering that
15195 
15196      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15197 
15198    is a multiple of 8 so we don't have to worry about dealing with a
15199    possible remainder.
15200 
15201    Note that we *look* inside a SAVE_EXPR only to determine how it was
15202    calculated; it is not safe for fold to do much of anything else with the
15203    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15204    at run time.  For example, the latter example above *cannot* be implemented
15205    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15206    evaluation time of the original SAVE_EXPR is not necessarily the same at
15207    the time the new expression is evaluated.  The only optimization of this
15208    sort that would be valid is changing
15209 
15210      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15211 
15212    divided by 8 to
15213 
15214      SAVE_EXPR (I) * SAVE_EXPR (J)
15215 
15216    (where the same SAVE_EXPR (J) is used in the original and the
15217    transformed version).  */
15218 
15219 int
15220 multiple_of_p (tree type, const_tree top, const_tree bottom)
15221 {
15222   if (operand_equal_p (top, bottom, 0))
15223     return 1;
15224 
15225   if (TREE_CODE (type) != INTEGER_TYPE)
15226     return 0;
15227 
15228   switch (TREE_CODE (top))
15229     {
15230     case BIT_AND_EXPR:
15231       /* Bitwise and provides a power of two multiple.  If the mask is
15232 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
15233       if (!integer_pow2p (bottom))
15234 	return 0;
15235       /* FALLTHRU */
15236 
15237     case MULT_EXPR:
15238       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15239 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15240 
15241     case PLUS_EXPR:
15242     case MINUS_EXPR:
15243       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15244 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15245 
15246     case LSHIFT_EXPR:
15247       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15248 	{
15249 	  tree op1, t1;
15250 
15251 	  op1 = TREE_OPERAND (top, 1);
15252 	  /* const_binop may not detect overflow correctly,
15253 	     so check for it explicitly here.  */
15254 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15255 	      > TREE_INT_CST_LOW (op1)
15256 	      && TREE_INT_CST_HIGH (op1) == 0
15257 	      && 0 != (t1 = fold_convert (type,
15258 					  const_binop (LSHIFT_EXPR,
15259 						       size_one_node,
15260 						       op1)))
15261 	      && !TREE_OVERFLOW (t1))
15262 	    return multiple_of_p (type, t1, bottom);
15263 	}
15264       return 0;
15265 
15266     case NOP_EXPR:
15267       /* Can't handle conversions from non-integral or wider integral type.  */
15268       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15269 	  || (TYPE_PRECISION (type)
15270 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15271 	return 0;
15272 
15273       /* .. fall through ...  */
15274 
15275     case SAVE_EXPR:
15276       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15277 
15278     case COND_EXPR:
15279       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15280 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15281 
15282     case INTEGER_CST:
15283       if (TREE_CODE (bottom) != INTEGER_CST
15284 	  || integer_zerop (bottom)
15285 	  || (TYPE_UNSIGNED (type)
15286 	      && (tree_int_cst_sgn (top) < 0
15287 		  || tree_int_cst_sgn (bottom) < 0)))
15288 	return 0;
15289       return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15290 					     top, bottom));
15291 
15292     default:
15293       return 0;
15294     }
15295 }
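/* Sketch (editor's addition, not compiled): VAR_I stands for some int
   VAR_DECL or SSA name built elsewhere (hypothetical).  Whatever value
   VAR_I has, VAR_I * 8 is a multiple of 8; so is the constant 24, but
   24 is not a multiple of 7.  */
#if 0
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree prod = build2 (MULT_EXPR, integer_type_node, var_i, eight);
  gcc_assert (multiple_of_p (integer_type_node, prod, eight));
  gcc_assert (multiple_of_p (integer_type_node,
			     build_int_cst (integer_type_node, 24), eight));
  gcc_assert (!multiple_of_p (integer_type_node,
			      build_int_cst (integer_type_node, 24),
			      build_int_cst (integer_type_node, 7)));
}
#endif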
15296 
15297 /* Return true if an operation with code CODE on type TYPE is known to be non-negative. */
15298 
15299 static bool
15300 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15301 {
15302   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15303       && truth_value_p (code))
15304     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15305        have a signed:1 type (where the values are -1 and 0).  */
15306     return true;
15307   return false;
15308 }
15309 
15310 /* Return true if (CODE OP0) is known to be non-negative.  If the return
15311    value is based on the assumption that signed overflow is undefined,
15312    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15313    *STRICT_OVERFLOW_P.  */
15314 
15315 bool
15316 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15317 				bool *strict_overflow_p)
15318 {
15319   if (TYPE_UNSIGNED (type))
15320     return true;
15321 
15322   switch (code)
15323     {
15324     case ABS_EXPR:
15325       /* We can't return 1 if flag_wrapv is set because
15326 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
15327       if (!INTEGRAL_TYPE_P (type))
15328 	return true;
15329       if (TYPE_OVERFLOW_UNDEFINED (type))
15330 	{
15331 	  *strict_overflow_p = true;
15332 	  return true;
15333 	}
15334       break;
15335 
15336     case NON_LVALUE_EXPR:
15337     case FLOAT_EXPR:
15338     case FIX_TRUNC_EXPR:
15339       return tree_expr_nonnegative_warnv_p (op0,
15340 					    strict_overflow_p);
15341 
15342     case NOP_EXPR:
15343       {
15344 	tree inner_type = TREE_TYPE (op0);
15345 	tree outer_type = type;
15346 
15347 	if (TREE_CODE (outer_type) == REAL_TYPE)
15348 	  {
15349 	    if (TREE_CODE (inner_type) == REAL_TYPE)
15350 	      return tree_expr_nonnegative_warnv_p (op0,
15351 						    strict_overflow_p);
15352 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
15353 	      {
15354 		if (TYPE_UNSIGNED (inner_type))
15355 		  return true;
15356 		return tree_expr_nonnegative_warnv_p (op0,
15357 						      strict_overflow_p);
15358 	      }
15359 	  }
15360 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15361 	  {
15362 	    if (TREE_CODE (inner_type) == REAL_TYPE)
15363 	      return tree_expr_nonnegative_warnv_p (op0,
15364 						    strict_overflow_p);
15365 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
15366 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15367 		      && TYPE_UNSIGNED (inner_type);
15368 	  }
15369       }
15370       break;
15371 
15372     default:
15373       return tree_simple_nonnegative_warnv_p (code, type);
15374     }
15375 
15376   /* We don't know sign of `t', so be conservative and return false.  */
15377   return false;
15378 }
15379 
15380 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
15381    value is based on the assumption that signed overflow is undefined,
15382    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15383    *STRICT_OVERFLOW_P.  */
15384 
15385 bool
15386 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15387 				      tree op1, bool *strict_overflow_p)
15388 {
15389   if (TYPE_UNSIGNED (type))
15390     return true;
15391 
15392   switch (code)
15393     {
15394     case POINTER_PLUS_EXPR:
15395     case PLUS_EXPR:
15396       if (FLOAT_TYPE_P (type))
15397 	return (tree_expr_nonnegative_warnv_p (op0,
15398 					       strict_overflow_p)
15399 		&& tree_expr_nonnegative_warnv_p (op1,
15400 						  strict_overflow_p));
15401 
15402       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15403 	 both unsigned and at least 2 bits shorter than the result.  */
15404       if (TREE_CODE (type) == INTEGER_TYPE
15405 	  && TREE_CODE (op0) == NOP_EXPR
15406 	  && TREE_CODE (op1) == NOP_EXPR)
15407 	{
15408 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15409 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15410 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15411 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15412 	    {
15413 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
15414 				       TYPE_PRECISION (inner2)) + 1;
15415 	      return prec < TYPE_PRECISION (type);
15416 	    }
15417 	}
15418       break;
15419 
15420     case MULT_EXPR:
15421       if (FLOAT_TYPE_P (type))
15422 	{
15423 	  /* x * x for floating point x is always non-negative.  */
15424 	  if (operand_equal_p (op0, op1, 0))
15425 	    return true;
15426 	  return (tree_expr_nonnegative_warnv_p (op0,
15427 						 strict_overflow_p)
15428 		  && tree_expr_nonnegative_warnv_p (op1,
15429 						    strict_overflow_p));
15430 	}
15431 
15432       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15433 	 both unsigned and their combined precision is less than the result's.  */
15434       if (TREE_CODE (type) == INTEGER_TYPE
15435 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15436 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15437 	{
15438 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15439 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
15440 	    : TREE_TYPE (op0);
15441 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15442 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
15443 	    : TREE_TYPE (op1);
15444 
15445 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
15446 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
15447 
15448 	  if (TREE_CODE (op0) == INTEGER_CST)
15449 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15450 
15451 	  if (TREE_CODE (op1) == INTEGER_CST)
15452 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15453 
15454 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15455 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15456 	    {
15457 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15458 		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15459 		: TYPE_PRECISION (inner0);
15460 
15461 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15462 		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15463 		: TYPE_PRECISION (inner1);
15464 
15465 	      return precision0 + precision1 < TYPE_PRECISION (type);
15466 	    }
15467 	}
15468       return false;
15469 
15470     case BIT_AND_EXPR:
15471     case MAX_EXPR:
15472       return (tree_expr_nonnegative_warnv_p (op0,
15473 					     strict_overflow_p)
15474 	      || tree_expr_nonnegative_warnv_p (op1,
15475 						strict_overflow_p));
15476 
15477     case BIT_IOR_EXPR:
15478     case BIT_XOR_EXPR:
15479     case MIN_EXPR:
15480     case RDIV_EXPR:
15481     case TRUNC_DIV_EXPR:
15482     case CEIL_DIV_EXPR:
15483     case FLOOR_DIV_EXPR:
15484     case ROUND_DIV_EXPR:
15485       return (tree_expr_nonnegative_warnv_p (op0,
15486 					     strict_overflow_p)
15487 	      && tree_expr_nonnegative_warnv_p (op1,
15488 						strict_overflow_p));
15489 
15490     case TRUNC_MOD_EXPR:
15491     case CEIL_MOD_EXPR:
15492     case FLOOR_MOD_EXPR:
15493     case ROUND_MOD_EXPR:
15494       return tree_expr_nonnegative_warnv_p (op0,
15495 					    strict_overflow_p);
15496     default:
15497       return tree_simple_nonnegative_warnv_p (code, type);
15498     }
15499 
15500   /* We don't know sign of `t', so be conservative and return false.  */
15501   return false;
15502 }
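/* A worked instance (editor's addition, not compiled) of the
   zero_extend rule above.  A_CHAR and B_CHAR stand for unsigned char
   operands built elsewhere (hypothetical); widened to int, each
   summand lies in [0, 255], so the sum lies in [0, 510] and needs only
   9 bits -- at least two fewer than int -- making the signed PLUS_EXPR
   provably non-negative.  */
#if 0
{
  tree wa = fold_convert_loc (UNKNOWN_LOCATION, integer_type_node, a_char);
  tree wb = fold_convert_loc (UNKNOWN_LOCATION, integer_type_node, b_char);
  tree sum = build2 (PLUS_EXPR, integer_type_node, wa, wb);
  gcc_assert (tree_expr_nonnegative_p (sum));
}
#endif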
15503 
15504 /* Return true if T is known to be non-negative.  If the return
15505    value is based on the assumption that signed overflow is undefined,
15506    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15507    *STRICT_OVERFLOW_P.  */
15508 
15509 bool
15510 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15511 {
15512   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15513     return true;
15514 
15515   switch (TREE_CODE (t))
15516     {
15517     case INTEGER_CST:
15518       return tree_int_cst_sgn (t) >= 0;
15519 
15520     case REAL_CST:
15521       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15522 
15523     case FIXED_CST:
15524       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15525 
15526     case COND_EXPR:
15527       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15528 					     strict_overflow_p)
15529 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15530 						strict_overflow_p));
15531     default:
15532       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15533 						   TREE_TYPE (t));
15534     }
15535   /* We don't know sign of `t', so be conservative and return false.  */
15536   return false;
15537 }
15538 
15539 /* Return true if T is known to be non-negative.  If the return
15540    value is based on the assumption that signed overflow is undefined,
15541    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15542    *STRICT_OVERFLOW_P.  */
15543 
15544 bool
15545 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15546 			       tree arg0, tree arg1, bool *strict_overflow_p)
15547 {
15548   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15549     switch (DECL_FUNCTION_CODE (fndecl))
15550       {
15551 	CASE_FLT_FN (BUILT_IN_ACOS):
15552 	CASE_FLT_FN (BUILT_IN_ACOSH):
15553 	CASE_FLT_FN (BUILT_IN_CABS):
15554 	CASE_FLT_FN (BUILT_IN_COSH):
15555 	CASE_FLT_FN (BUILT_IN_ERFC):
15556 	CASE_FLT_FN (BUILT_IN_EXP):
15557 	CASE_FLT_FN (BUILT_IN_EXP10):
15558 	CASE_FLT_FN (BUILT_IN_EXP2):
15559 	CASE_FLT_FN (BUILT_IN_FABS):
15560 	CASE_FLT_FN (BUILT_IN_FDIM):
15561 	CASE_FLT_FN (BUILT_IN_HYPOT):
15562 	CASE_FLT_FN (BUILT_IN_POW10):
15563 	CASE_INT_FN (BUILT_IN_FFS):
15564 	CASE_INT_FN (BUILT_IN_PARITY):
15565 	CASE_INT_FN (BUILT_IN_POPCOUNT):
15566       case BUILT_IN_BSWAP32:
15567       case BUILT_IN_BSWAP64:
15568 	/* Always true.  */
15569 	return true;
15570 
15571 	CASE_FLT_FN (BUILT_IN_SQRT):
15572 	/* sqrt(-0.0) is -0.0.  */
15573 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15574 	  return true;
15575 	return tree_expr_nonnegative_warnv_p (arg0,
15576 					      strict_overflow_p);
15577 
15578 	CASE_FLT_FN (BUILT_IN_ASINH):
15579 	CASE_FLT_FN (BUILT_IN_ATAN):
15580 	CASE_FLT_FN (BUILT_IN_ATANH):
15581 	CASE_FLT_FN (BUILT_IN_CBRT):
15582 	CASE_FLT_FN (BUILT_IN_CEIL):
15583 	CASE_FLT_FN (BUILT_IN_ERF):
15584 	CASE_FLT_FN (BUILT_IN_EXPM1):
15585 	CASE_FLT_FN (BUILT_IN_FLOOR):
15586 	CASE_FLT_FN (BUILT_IN_FMOD):
15587 	CASE_FLT_FN (BUILT_IN_FREXP):
15588 	CASE_FLT_FN (BUILT_IN_ICEIL):
15589 	CASE_FLT_FN (BUILT_IN_IFLOOR):
15590 	CASE_FLT_FN (BUILT_IN_IRINT):
15591 	CASE_FLT_FN (BUILT_IN_IROUND):
15592 	CASE_FLT_FN (BUILT_IN_LCEIL):
15593 	CASE_FLT_FN (BUILT_IN_LDEXP):
15594 	CASE_FLT_FN (BUILT_IN_LFLOOR):
15595 	CASE_FLT_FN (BUILT_IN_LLCEIL):
15596 	CASE_FLT_FN (BUILT_IN_LLFLOOR):
15597 	CASE_FLT_FN (BUILT_IN_LLRINT):
15598 	CASE_FLT_FN (BUILT_IN_LLROUND):
15599 	CASE_FLT_FN (BUILT_IN_LRINT):
15600 	CASE_FLT_FN (BUILT_IN_LROUND):
15601 	CASE_FLT_FN (BUILT_IN_MODF):
15602 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
15603 	CASE_FLT_FN (BUILT_IN_RINT):
15604 	CASE_FLT_FN (BUILT_IN_ROUND):
15605 	CASE_FLT_FN (BUILT_IN_SCALB):
15606 	CASE_FLT_FN (BUILT_IN_SCALBLN):
15607 	CASE_FLT_FN (BUILT_IN_SCALBN):
15608 	CASE_FLT_FN (BUILT_IN_SIGNBIT):
15609 	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15610 	CASE_FLT_FN (BUILT_IN_SINH):
15611 	CASE_FLT_FN (BUILT_IN_TANH):
15612 	CASE_FLT_FN (BUILT_IN_TRUNC):
15613 	/* True if the 1st argument is nonnegative.  */
15614 	return tree_expr_nonnegative_warnv_p (arg0,
15615 					      strict_overflow_p);
15616 
15617 	CASE_FLT_FN (BUILT_IN_FMAX):
15618 	/* True if the 1st OR 2nd arguments are nonnegative.  */
15619 	return (tree_expr_nonnegative_warnv_p (arg0,
15620 					       strict_overflow_p)
15621 		|| (tree_expr_nonnegative_warnv_p (arg1,
15622 						   strict_overflow_p)));
15623 
15624 	CASE_FLT_FN (BUILT_IN_FMIN):
15625 	/* True if the 1st AND 2nd arguments are nonnegative.  */
15626 	return (tree_expr_nonnegative_warnv_p (arg0,
15627 					       strict_overflow_p)
15628 		&& (tree_expr_nonnegative_warnv_p (arg1,
15629 						   strict_overflow_p)));
15630 
15631 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
15632 	/* True if the 2nd argument is nonnegative.  */
15633 	return tree_expr_nonnegative_warnv_p (arg1,
15634 					      strict_overflow_p);
15635 
15636 	CASE_FLT_FN (BUILT_IN_POWI):
15637 	/* True if the 1st argument is nonnegative or the second
15638 	   argument is an even integer.  */
15639 	if (TREE_CODE (arg1) == INTEGER_CST
15640 	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15641 	  return true;
15642 	return tree_expr_nonnegative_warnv_p (arg0,
15643 					      strict_overflow_p);
15644 
15645 	CASE_FLT_FN (BUILT_IN_POW):
15646 	/* True if the 1st argument is nonnegative or the second
15647 	   argument is an even integer valued real.  */
15648 	if (TREE_CODE (arg1) == REAL_CST)
15649 	  {
15650 	    REAL_VALUE_TYPE c;
15651 	    HOST_WIDE_INT n;
15652 
15653 	    c = TREE_REAL_CST (arg1);
15654 	    n = real_to_integer (&c);
15655 	    if ((n & 1) == 0)
15656 	      {
15657 		REAL_VALUE_TYPE cint;
15658 		real_from_integer (&cint, VOIDmode, n,
15659 				   n < 0 ? -1 : 0, 0);
15660 		if (real_identical (&c, &cint))
15661 		  return true;
15662 	      }
15663 	  }
15664 	return tree_expr_nonnegative_warnv_p (arg0,
15665 					      strict_overflow_p);
15666 
15667       default:
15668 	break;
15669       }
15670   return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15671 					  type);
15672 }
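/* Worked instance (editor's addition) of the BUILT_IN_POW case above:
   for pow (x, 2.0) the exponent is a REAL_CST holding the even integer
   2, so the call is non-negative no matter what x is; for pow (x, 3.0)
   the answer instead depends on the sign of x, so the first argument
   is tested.  */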
15673 
15674 /* Return true if T is known to be non-negative.  If the return
15675    value is based on the assumption that signed overflow is undefined,
15676    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15677    *STRICT_OVERFLOW_P.  */
15678 
15679 bool
15680 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15681 {
15682   enum tree_code code = TREE_CODE (t);
15683   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15684     return true;
15685 
15686   switch (code)
15687     {
15688     case TARGET_EXPR:
15689       {
15690 	tree temp = TARGET_EXPR_SLOT (t);
15691 	t = TARGET_EXPR_INITIAL (t);
15692 
15693 	/* If the initializer is non-void, then it's a normal expression
15694 	   that will be assigned to the slot.  */
15695 	if (!VOID_TYPE_P (t))
15696 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15697 
15698 	/* Otherwise, the initializer sets the slot in some way.  One common
15699 	   way is an assignment statement at the end of the initializer.  */
15700 	while (1)
15701 	  {
15702 	    if (TREE_CODE (t) == BIND_EXPR)
15703 	      t = expr_last (BIND_EXPR_BODY (t));
15704 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15705 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15706 	      t = expr_last (TREE_OPERAND (t, 0));
15707 	    else if (TREE_CODE (t) == STATEMENT_LIST)
15708 	      t = expr_last (t);
15709 	    else
15710 	      break;
15711 	  }
15712 	if (TREE_CODE (t) == MODIFY_EXPR
15713 	    && TREE_OPERAND (t, 0) == temp)
15714 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15715 						strict_overflow_p);
15716 
15717 	return false;
15718       }
15719 
15720     case CALL_EXPR:
15721       {
15722 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
15723 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
15724 
15725 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15726 					      get_callee_fndecl (t),
15727 					      arg0,
15728 					      arg1,
15729 					      strict_overflow_p);
15730       }
15731     case COMPOUND_EXPR:
15732     case MODIFY_EXPR:
15733       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15734 					    strict_overflow_p);
15735     case BIND_EXPR:
15736       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15737 					    strict_overflow_p);
15738     case SAVE_EXPR:
15739       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15740 					    strict_overflow_p);
15741 
15742     default:
15743       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15744 						   TREE_TYPE (t));
15745     }
15746 
15747   /* We don't know sign of `t', so be conservative and return false.  */
15748   return false;
15749 }
15750 
15751 /* Return true if T is known to be non-negative.  If the return
15752    value is based on the assumption that signed overflow is undefined,
15753    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15754    *STRICT_OVERFLOW_P.  */
15755 
15756 bool
15757 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15758 {
15759   enum tree_code code;
15760   if (t == error_mark_node)
15761     return false;
15762 
15763   code = TREE_CODE (t);
15764   switch (TREE_CODE_CLASS (code))
15765     {
15766     case tcc_binary:
15767     case tcc_comparison:
15768       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15769 					      TREE_TYPE (t),
15770 					      TREE_OPERAND (t, 0),
15771 					      TREE_OPERAND (t, 1),
15772 					      strict_overflow_p);
15773 
15774     case tcc_unary:
15775       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15776 					     TREE_TYPE (t),
15777 					     TREE_OPERAND (t, 0),
15778 					     strict_overflow_p);
15779 
15780     case tcc_constant:
15781     case tcc_declaration:
15782     case tcc_reference:
15783       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15784 
15785     default:
15786       break;
15787     }
15788 
15789   switch (code)
15790     {
15791     case TRUTH_AND_EXPR:
15792     case TRUTH_OR_EXPR:
15793     case TRUTH_XOR_EXPR:
15794       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15795 					      TREE_TYPE (t),
15796 					      TREE_OPERAND (t, 0),
15797 					      TREE_OPERAND (t, 1),
15798 					      strict_overflow_p);
15799     case TRUTH_NOT_EXPR:
15800       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15801 					     TREE_TYPE (t),
15802 					     TREE_OPERAND (t, 0),
15803 					     strict_overflow_p);
15804 
15805     case COND_EXPR:
15806     case CONSTRUCTOR:
15807     case OBJ_TYPE_REF:
15808     case ASSERT_EXPR:
15809     case ADDR_EXPR:
15810     case WITH_SIZE_EXPR:
15811     case SSA_NAME:
15812       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15813 
15814     default:
15815       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15816     }
15817 }
15818 
15819 /* Return true if `t' is known to be non-negative.  Handle warnings
15820    about undefined signed overflow.  */
15821 
15822 bool
15823 tree_expr_nonnegative_p (tree t)
15824 {
15825   bool ret, strict_overflow_p;
15826 
15827   strict_overflow_p = false;
15828   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15829   if (strict_overflow_p)
15830     fold_overflow_warning (("assuming signed overflow does not occur when "
15831 			    "determining that expression is always "
15832 			    "non-negative"),
15833 			   WARN_STRICT_OVERFLOW_MISC);
15834   return ret;
15835 }
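/* Sketch (editor's addition, not compiled): constants answer
   immediately through tree_single_nonnegative_warnv_p.  */
#if 0
gcc_assert (tree_expr_nonnegative_p (build_int_cst (integer_type_node, 7)));
gcc_assert (!tree_expr_nonnegative_p (build_int_cst (integer_type_node, -7)));
#endif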
15836 
15837 
15838 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15839    For floating point we further ensure that T is not denormal.
15840    Similar logic is present in nonzero_address_p in rtlanal.c.
15841 
15842    If the return value is based on the assumption that signed overflow
15843    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15844    change *STRICT_OVERFLOW_P.  */
15845 
15846 bool
15847 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15848 				 bool *strict_overflow_p)
15849 {
15850   switch (code)
15851     {
15852     case ABS_EXPR:
15853       return tree_expr_nonzero_warnv_p (op0,
15854 					strict_overflow_p);
15855 
15856     case NOP_EXPR:
15857       {
15858 	tree inner_type = TREE_TYPE (op0);
15859 	tree outer_type = type;
15860 
15861 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15862 		&& tree_expr_nonzero_warnv_p (op0,
15863 					      strict_overflow_p));
15864       }
15865       break;
15866 
15867     case NON_LVALUE_EXPR:
15868       return tree_expr_nonzero_warnv_p (op0,
15869 					strict_overflow_p);
15870 
15871     default:
15872       break;
15873   }
15874 
15875   return false;
15876 }
15877 
15878 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15879    For floating point we further ensure that T is not denormal.
15880    Similar logic is present in nonzero_address_p in rtlanal.c.
15881 
15882    If the return value is based on the assumption that signed overflow
15883    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15884    change *STRICT_OVERFLOW_P.  */
15885 
15886 bool
15887 tree_binary_nonzero_warnv_p (enum tree_code code,
15888 			     tree type,
15889 			     tree op0,
15890 			     tree op1, bool *strict_overflow_p)
15891 {
15892   bool sub_strict_overflow_p;
15893   switch (code)
15894     {
15895     case POINTER_PLUS_EXPR:
15896     case PLUS_EXPR:
15897       if (TYPE_OVERFLOW_UNDEFINED (type))
15898 	{
15899 	  /* With the presence of negative values it is hard
15900 	     to say something.  */
15901 	  sub_strict_overflow_p = false;
15902 	  if (!tree_expr_nonnegative_warnv_p (op0,
15903 					      &sub_strict_overflow_p)
15904 	      || !tree_expr_nonnegative_warnv_p (op1,
15905 						 &sub_strict_overflow_p))
15906 	    return false;
15907 	  /* One of the operands must be positive and the other non-negative.  */
15908 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15909 	     overflows, on a two's-complement machine the sum of two
15910 	     nonnegative numbers can never be zero.  */
15911 	  return (tree_expr_nonzero_warnv_p (op0,
15912 					     strict_overflow_p)
15913 		  || tree_expr_nonzero_warnv_p (op1,
15914 						strict_overflow_p));
15915 	}
15916       break;
15917 
15918     case MULT_EXPR:
15919       if (TYPE_OVERFLOW_UNDEFINED (type))
15920 	{
15921 	  if (tree_expr_nonzero_warnv_p (op0,
15922 					 strict_overflow_p)
15923 	      && tree_expr_nonzero_warnv_p (op1,
15924 					    strict_overflow_p))
15925 	    {
15926 	      *strict_overflow_p = true;
15927 	      return true;
15928 	    }
15929 	}
15930       break;
15931 
15932     case MIN_EXPR:
15933       sub_strict_overflow_p = false;
15934       if (tree_expr_nonzero_warnv_p (op0,
15935 				     &sub_strict_overflow_p)
15936 	  && tree_expr_nonzero_warnv_p (op1,
15937 					&sub_strict_overflow_p))
15938 	{
15939 	  if (sub_strict_overflow_p)
15940 	    *strict_overflow_p = true;
	  /* The minimum of two nonzero values is one of them, hence
	     itself nonzero.  */
	  return true;
15941 	}
15942       break;
15943 
15944     case MAX_EXPR:
15945       sub_strict_overflow_p = false;
15946       if (tree_expr_nonzero_warnv_p (op0,
15947 				     &sub_strict_overflow_p))
15948 	{
15949 	  if (sub_strict_overflow_p)
15950 	    *strict_overflow_p = true;
15951 
15952 	  /* When both operands are nonzero, then MAX must be too.  */
15953 	  if (tree_expr_nonzero_warnv_p (op1,
15954 					 strict_overflow_p))
15955 	    return true;
15956 
15957 	  /* MAX where operand 0 is positive is positive.  */
15958 	  return tree_expr_nonnegative_warnv_p (op0,
15959 					       strict_overflow_p);
15960 	}
15961       /* MAX where operand 1 is positive is positive.  */
15962       else if (tree_expr_nonzero_warnv_p (op1,
15963 					  &sub_strict_overflow_p)
15964 	       && tree_expr_nonnegative_warnv_p (op1,
15965 						 &sub_strict_overflow_p))
15966 	{
15967 	  if (sub_strict_overflow_p)
15968 	    *strict_overflow_p = true;
15969 	  return true;
15970 	}
15971       break;
15972 
15973     case BIT_IOR_EXPR:
15974       return (tree_expr_nonzero_warnv_p (op1,
15975 					 strict_overflow_p)
15976 	      || tree_expr_nonzero_warnv_p (op0,
15977 					    strict_overflow_p));
15978 
15979     default:
15980       break;
15981   }
15982 
15983   return false;
15984 }
15985 
15986 /* Return true when T is an address and is known to be nonzero.
15987    For floating point we further ensure that T is not denormal.
15988    Similar logic is present in nonzero_address_p in rtlanal.c.
15989 
15990    If the return value is based on the assumption that signed overflow
15991    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15992    change *STRICT_OVERFLOW_P.  */
15993 
15994 bool
15995 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15996 {
15997   bool sub_strict_overflow_p;
15998   switch (TREE_CODE (t))
15999     {
16000     case INTEGER_CST:
16001       return !integer_zerop (t);
16002 
16003     case ADDR_EXPR:
16004       {
16005 	tree base = TREE_OPERAND (t, 0);
16006 	if (!DECL_P (base))
16007 	  base = get_base_address (base);
16008 
16009 	if (!base)
16010 	  return false;
16011 
16012 	/* Weak declarations may link to NULL.  Other things may also be NULL
16013 	   so protect with -fdelete-null-pointer-checks; but not variables
16014 	   allocated on the stack.  */
16015 	if (DECL_P (base)
16016 	    && (flag_delete_null_pointer_checks
16017 		|| (DECL_CONTEXT (base)
16018 		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16019 		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16020 	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16021 
16022 	/* Constants are never weak.  */
16023 	if (CONSTANT_CLASS_P (base))
16024 	  return true;
16025 
16026 	return false;
16027       }
16028 
16029     case COND_EXPR:
16030       sub_strict_overflow_p = false;
16031       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16032 				     &sub_strict_overflow_p)
16033 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16034 					&sub_strict_overflow_p))
16035 	{
16036 	  if (sub_strict_overflow_p)
16037 	    *strict_overflow_p = true;
16038 	  return true;
16039 	}
16040       break;
16041 
16042     default:
16043       break;
16044     }
16045   return false;
16046 }
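/* Concrete instances (editor's addition) of the ADDR_EXPR reasoning
   above: for "int x;" local to a function, &x is always nonzero, and
   with -fdelete-null-pointer-checks so is the address of an ordinary
   global.  But for "extern int w __attribute__ ((weak));" the symbol
   may resolve to nothing and &w may legitimately compare equal to 0,
   so the function must answer false.  */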
16047 
16048 /* Return true when T is an address and is known to be nonzero.
16049    For floating point we further ensure that T is not denormal.
16050    Similar logic is present in nonzero_address_p in rtlanal.c.
16051 
16052    If the return value is based on the assumption that signed overflow
16053    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16054    change *STRICT_OVERFLOW_P.  */
16055 
16056 bool
16057 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16058 {
16059   tree type = TREE_TYPE (t);
16060   enum tree_code code;
16061 
16062   /* Doing something useful for floating point would need more work.  */
16063   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16064     return false;
16065 
16066   code = TREE_CODE (t);
16067   switch (TREE_CODE_CLASS (code))
16068     {
16069     case tcc_unary:
16070       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16071 					      strict_overflow_p);
16072     case tcc_binary:
16073     case tcc_comparison:
16074       return tree_binary_nonzero_warnv_p (code, type,
16075 					       TREE_OPERAND (t, 0),
16076 					       TREE_OPERAND (t, 1),
16077 					       strict_overflow_p);
16078     case tcc_constant:
16079     case tcc_declaration:
16080     case tcc_reference:
16081       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16082 
16083     default:
16084       break;
16085     }
16086 
16087   switch (code)
16088     {
16089     case TRUTH_NOT_EXPR:
16090       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16091 					      strict_overflow_p);
16092 
16093     case TRUTH_AND_EXPR:
16094     case TRUTH_OR_EXPR:
16095     case TRUTH_XOR_EXPR:
16096       return tree_binary_nonzero_warnv_p (code, type,
16097 					       TREE_OPERAND (t, 0),
16098 					       TREE_OPERAND (t, 1),
16099 					       strict_overflow_p);
16100 
16101     case COND_EXPR:
16102     case CONSTRUCTOR:
16103     case OBJ_TYPE_REF:
16104     case ASSERT_EXPR:
16105     case ADDR_EXPR:
16106     case WITH_SIZE_EXPR:
16107     case SSA_NAME:
16108       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16109 
16110     case COMPOUND_EXPR:
16111     case MODIFY_EXPR:
16112     case BIND_EXPR:
16113       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16114 					strict_overflow_p);
16115 
16116     case SAVE_EXPR:
16117       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16118 					strict_overflow_p);
16119 
16120     case CALL_EXPR:
16121       return alloca_call_p (t);
16122 
16123     default:
16124       break;
16125     }
16126   return false;
16127 }
16128 
16129 /* Return true when T is an address and is known to be nonzero.
16130    Handle warnings about undefined signed overflow.  */
16131 
16132 bool
16133 tree_expr_nonzero_p (tree t)
16134 {
16135   bool ret, strict_overflow_p;
16136 
16137   strict_overflow_p = false;
16138   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16139   if (strict_overflow_p)
16140     fold_overflow_warning (("assuming signed overflow does not occur when "
16141 			    "determining that expression is always "
16142 			    "non-zero"),
16143 			   WARN_STRICT_OVERFLOW_MISC);
16144   return ret;
16145 }
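/* Sketch (editor's addition, not compiled): integer constants answer
   directly; anything genuinely unknown stays false.  */
#if 0
gcc_assert (tree_expr_nonzero_p (build_int_cst (integer_type_node, 5)));
gcc_assert (!tree_expr_nonzero_p (integer_zero_node));
#endif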
16146 
16147 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16148    attempt to fold the expression to a constant without modifying TYPE,
16149    OP0 or OP1.
16150 
16151    If the expression could be simplified to a constant, then return
16152    the constant.  If the expression cannot be simplified to a
16153    constant, then return NULL_TREE.  */
16154 
16155 tree
16156 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16157 {
16158   tree tem = fold_binary (code, type, op0, op1);
16159   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16160 }
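/* Sketch (editor's addition, not compiled): 6 * 7 simplifies to the
   INTEGER_CST 42, while a product involving VAR_X (a hypothetical
   VAR_DECL built elsewhere) yields no constant, so NULL_TREE comes
   back.  */
#if 0
{
  tree six = build_int_cst (integer_type_node, 6);
  tree seven = build_int_cst (integer_type_node, 7);
  tree c = fold_binary_to_constant (MULT_EXPR, integer_type_node,
				    six, seven);
  gcc_assert (c && TREE_INT_CST_LOW (c) == 42);
  gcc_assert (fold_binary_to_constant (MULT_EXPR, integer_type_node,
				       six, var_x) == NULL_TREE);
}
#endif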
16161 
16162 /* Given the components of a unary expression CODE, TYPE and OP0,
16163    attempt to fold the expression to a constant without modifying
16164    TYPE or OP0.
16165 
16166    If the expression could be simplified to a constant, then return
16167    the constant.  If the expression cannot be simplified to a
16168    constant, then return NULL_TREE.  */
16169 
16170 tree
16171 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16172 {
16173   tree tem = fold_unary (code, type, op0);
16174   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16175 }
16176 
16177 /* If EXP represents referencing an element in a constant string
16178    (either via pointer arithmetic or array indexing), return the
16179    tree representing the value accessed, otherwise return NULL.  */
16180 
16181 tree
16182 fold_read_from_constant_string (tree exp)
16183 {
16184   if ((TREE_CODE (exp) == INDIRECT_REF
16185        || TREE_CODE (exp) == ARRAY_REF)
16186       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16187     {
16188       tree exp1 = TREE_OPERAND (exp, 0);
16189       tree index;
16190       tree string;
16191       location_t loc = EXPR_LOCATION (exp);
16192 
16193       if (TREE_CODE (exp) == INDIRECT_REF)
16194 	string = string_constant (exp1, &index);
16195       else
16196 	{
16197 	  tree low_bound = array_ref_low_bound (exp);
16198 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16199 
16200 	  /* Optimize the special case of a zero lower bound.
16201 
16202 	     We convert the low_bound to sizetype to avoid some problems
16203 	     with constant folding.  (E.g. suppose the lower bound is 1,
16204 	     and its mode is QI.  Without the conversion, (ARRAY
16205 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16206 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
16207 	  if (! integer_zerop (low_bound))
16208 	    index = size_diffop_loc (loc, index,
16209 				 fold_convert_loc (loc, sizetype, low_bound));
16210 
16211 	  string = exp1;
16212 	}
16213 
16214       if (string
16215 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16216 	  && TREE_CODE (string) == STRING_CST
16217 	  && TREE_CODE (index) == INTEGER_CST
16218 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16219 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16220 	      == MODE_INT)
16221 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16222 	return build_int_cst_type (TREE_TYPE (exp),
16223 				   (TREE_STRING_POINTER (string)
16224 				    [TREE_INT_CST_LOW (index)]));
16225     }
16226   return NULL;
16227 }
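/* Sketch (editor's addition, not compiled): EXP stands for the tree
   already built for "abc"[1].  The helper reads the byte straight out
   of the STRING_CST.  */
#if 0
{
  tree c = fold_read_from_constant_string (exp);
  gcc_assert (c != NULL_TREE && TREE_INT_CST_LOW (c) == 'b');
}
#endif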
16228 
16229 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16230    an integer constant, real, or fixed-point constant.
16231 
16232    TYPE is the type of the result.  */
16233 
16234 static tree
16235 fold_negate_const (tree arg0, tree type)
16236 {
16237   tree t = NULL_TREE;
16238 
16239   switch (TREE_CODE (arg0))
16240     {
16241     case INTEGER_CST:
16242       {
16243 	double_int val = tree_to_double_int (arg0);
16244 	bool overflow;
16245 	val = val.neg_with_overflow (&overflow);
16246 	t = force_fit_type_double (type, val, 1,
16247 				   (overflow | TREE_OVERFLOW (arg0))
16248 				   && !TYPE_UNSIGNED (type));
16249 	break;
16250       }
16251 
16252     case REAL_CST:
16253       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16254       break;
16255 
16256     case FIXED_CST:
16257       {
16258         FIXED_VALUE_TYPE f;
16259         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16260 					    &(TREE_FIXED_CST (arg0)), NULL,
16261 					    TYPE_SATURATING (type));
16262 	t = build_fixed (type, f);
16263 	/* Propagate overflow flags.  */
16264 	if (overflow_p | TREE_OVERFLOW (arg0))
16265 	  TREE_OVERFLOW (t) = 1;
16266 	break;
16267       }
16268 
16269     default:
16270       gcc_unreachable ();
16271     }
16272 
16273   return t;
16274 }
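/* Sketch (editor's addition, not compiled): negating INT_MIN wraps
   back onto itself, and the function records the overflow on the
   result rather than dropping it.  */
#if 0
{
  tree m = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
			      integer_type_node);
  gcc_assert (TREE_OVERFLOW (m));
}
#endif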
16275 
16276 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16277    an integer constant or real constant.
16278 
16279    TYPE is the type of the result.  */
16280 
16281 tree
16282 fold_abs_const (tree arg0, tree type)
16283 {
16284   tree t = NULL_TREE;
16285 
16286   switch (TREE_CODE (arg0))
16287     {
16288     case INTEGER_CST:
16289       {
16290 	double_int val = tree_to_double_int (arg0);
16291 
16292         /* If the value is unsigned or non-negative, then the absolute value
16293 	   is the same as the ordinary value.  */
16294 	if (TYPE_UNSIGNED (type)
16295 	    || !val.is_negative ())
16296 	  t = arg0;
16297 
16298 	/* If the value is negative, then the absolute value is
16299 	   its negation.  */
16300 	else
16301 	  {
16302 	    bool overflow;
16303 	    val = val.neg_with_overflow (&overflow);
16304 	    t = force_fit_type_double (type, val, -1,
16305 				       overflow | TREE_OVERFLOW (arg0));
16306 	  }
16307       }
16308       break;
16309 
16310     case REAL_CST:
16311       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16312 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16313       else
16314 	t = arg0;
16315       break;
16316 
16317     default:
16318       gcc_unreachable ();
16319     }
16320 
16321   return t;
16322 }
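/* Sketch (editor's addition, not compiled): the absolute value of a
   negative INTEGER_CST is its negation.  */
#if 0
{
  tree a = fold_abs_const (build_int_cst (integer_type_node, -7),
			   integer_type_node);
  gcc_assert (TREE_INT_CST_LOW (a) == 7);	/* abs (-7) ==> 7.  */
}
#endif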
16323 
16324 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16325    constant.  TYPE is the type of the result.  */
16326 
16327 static tree
16328 fold_not_const (const_tree arg0, tree type)
16329 {
16330   double_int val;
16331 
16332   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16333 
16334   val = ~tree_to_double_int (arg0);
16335   return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16336 }
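/* Sketch (editor's addition, not compiled): ~0 is all-ones, i.e. -1
   in a signed type.  */
#if 0
gcc_assert (integer_all_onesp (fold_not_const (integer_zero_node,
					       integer_type_node)));
#endif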
16337 
16338 /* Given CODE, a relational operator, the target type TYPE, and two
16339    constant operands OP0 and OP1, return the result of the
16340    relational operation.  If the result is not a compile time
16341    constant, then return NULL_TREE.  */
16342 
16343 static tree
16344 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16345 {
16346   int result, invert;
16347 
16348   /* From here on, the only cases we handle are when the result is
16349      known to be a constant.  */
16350 
16351   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16352     {
16353       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16354       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16355 
16356       /* Handle the cases where either operand is a NaN.  */
16357       if (real_isnan (c0) || real_isnan (c1))
16358 	{
16359 	  switch (code)
16360 	    {
16361 	    case EQ_EXPR:
16362 	    case ORDERED_EXPR:
16363 	      result = 0;
16364 	      break;
16365 
16366 	    case NE_EXPR:
16367 	    case UNORDERED_EXPR:
16368 	    case UNLT_EXPR:
16369 	    case UNLE_EXPR:
16370 	    case UNGT_EXPR:
16371 	    case UNGE_EXPR:
16372 	    case UNEQ_EXPR:
16373               result = 1;
16374 	      break;
16375 
16376 	    case LT_EXPR:
16377 	    case LE_EXPR:
16378 	    case GT_EXPR:
16379 	    case GE_EXPR:
16380 	    case LTGT_EXPR:
16381 	      if (flag_trapping_math)
16382 		return NULL_TREE;
16383 	      result = 0;
16384 	      break;
16385 
16386 	    default:
16387 	      gcc_unreachable ();
16388 	    }
16389 
16390 	  return constant_boolean_node (result, type);
16391 	}
16392 
16393       return constant_boolean_node (real_compare (code, c0, c1), type);
16394     }
16395 
16396   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16397     {
16398       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16399       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16400       return constant_boolean_node (fixed_compare (code, c0, c1), type);
16401     }
16402 
16403   /* Handle equality/inequality of complex constants.  */
16404   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16405     {
16406       tree rcond = fold_relational_const (code, type,
16407 					  TREE_REALPART (op0),
16408 					  TREE_REALPART (op1));
16409       tree icond = fold_relational_const (code, type,
16410 					  TREE_IMAGPART (op0),
16411 					  TREE_IMAGPART (op1));
16412       if (code == EQ_EXPR)
16413 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16414       else if (code == NE_EXPR)
16415 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16416       else
16417 	return NULL_TREE;
16418     }
16419 
16420   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16421     {
16422       unsigned count = VECTOR_CST_NELTS (op0);
16423       tree *elts = XALLOCAVEC (tree, count);
16424       gcc_assert (VECTOR_CST_NELTS (op1) == count
16425 		  && TYPE_VECTOR_SUBPARTS (type) == count);
16426 
16427       for (unsigned i = 0; i < count; i++)
16428 	{
16429 	  tree elem_type = TREE_TYPE (type);
16430 	  tree elem0 = VECTOR_CST_ELT (op0, i);
16431 	  tree elem1 = VECTOR_CST_ELT (op1, i);
16432 
16433 	  tree tem = fold_relational_const (code, elem_type,
16434 					    elem0, elem1);
16435 
16436 	  if (tem == NULL_TREE)
16437 	    return NULL_TREE;
16438 
16439 	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16440 	}
16441 
16442       return build_vector (type, elts);
16443     }
16444 
16445   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16446 
16447      To compute GT, swap the arguments and do LT.
16448      To compute GE, do LT and invert the result.
16449      To compute LE, swap the arguments, do LT and invert the result.
16450      To compute NE, do EQ and invert the result.
16451 
16452      Therefore, the code below must handle only EQ and LT.  */
16453 
16454   if (code == LE_EXPR || code == GT_EXPR)
16455     {
16456       tree tem = op0;
16457       op0 = op1;
16458       op1 = tem;
16459       code = swap_tree_comparison (code);
16460     }
16461 
16462   /* Note that it is safe to invert for real values here because we
16463      have already handled the one case where it matters.  */
16464 
16465   invert = 0;
16466   if (code == NE_EXPR || code == GE_EXPR)
16467     {
16468       invert = 1;
16469       code = invert_tree_comparison (code, false);
16470     }
16471 
16472   /* Compute a result for LT or EQ if args permit;
16473      otherwise return NULL_TREE.  */
16474   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16475     {
16476       if (code == EQ_EXPR)
16477 	result = tree_int_cst_equal (op0, op1);
16478       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16479 	result = INT_CST_LT_UNSIGNED (op0, op1);
16480       else
16481 	result = INT_CST_LT (op0, op1);
16482     }
16483   else
16484     return NULL_TREE;
16485 
16486   if (invert)
16487     result ^= 1;
16488   return constant_boolean_node (result, type);
16489 }
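/* Sketch (editor's addition, not compiled): C_NAN stands for a
   REAL_CST NaN built elsewhere.  NaN == NaN folds to false and
   NaN unordered NaN to true, while under -ftrapping-math an ordering
   comparison against NaN is left unfolded (NULL_TREE) because it would
   raise "invalid" at run time.  */
#if 0
{
  gcc_assert (integer_zerop (fold_relational_const (EQ_EXPR,
						    boolean_type_node,
						    c_nan, c_nan)));
  gcc_assert (integer_onep (fold_relational_const (UNORDERED_EXPR,
						   boolean_type_node,
						   c_nan, c_nan)));
}
#endif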
16490 
16491 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16492    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
16493    itself.  */
16494 
16495 tree
16496 fold_build_cleanup_point_expr (tree type, tree expr)
16497 {
16498   /* If the expression does not have side effects then we don't have to wrap
16499      it with a cleanup point expression.  */
16500   if (!TREE_SIDE_EFFECTS (expr))
16501     return expr;
16502 
16503   /* If the expression is a return, check whether the expression inside
16504      the return, or the right-hand side of the modify expression inside
16505      the return, has side effects.  If neither does, we don't need to wrap
16506      the expression in a cleanup point expression.  Note we don't check the
16507      left-hand side of the modify because it should always be a return decl.  */
16508   if (TREE_CODE (expr) == RETURN_EXPR)
16509     {
16510       tree op = TREE_OPERAND (expr, 0);
16511       if (!op || !TREE_SIDE_EFFECTS (op))
16512         return expr;
16513       op = TREE_OPERAND (op, 1);
16514       if (!TREE_SIDE_EFFECTS (op))
16515         return expr;
16516     }
16517 
16518   return build1 (CLEANUP_POINT_EXPR, type, expr);
16519 }
16520 
16521 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16522    of an indirection through OP0, or NULL_TREE if no simplification is
16523    possible.  */
16524 
16525 tree
16526 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16527 {
16528   tree sub = op0;
16529   tree subtype;
16530 
16531   STRIP_NOPS (sub);
16532   subtype = TREE_TYPE (sub);
16533   if (!POINTER_TYPE_P (subtype))
16534     return NULL_TREE;
16535 
16536   if (TREE_CODE (sub) == ADDR_EXPR)
16537     {
16538       tree op = TREE_OPERAND (sub, 0);
16539       tree optype = TREE_TYPE (op);
16540       /* *&CONST_DECL -> to the value of the const decl.  */
16541       if (TREE_CODE (op) == CONST_DECL)
16542 	return DECL_INITIAL (op);
16543       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16544       if (type == optype)
16545 	{
16546 	  tree fop = fold_read_from_constant_string (op);
16547 	  if (fop)
16548 	    return fop;
16549 	  else
16550 	    return op;
16551 	}
16552       /* *(foo *)&fooarray => fooarray[0] */
16553       else if (TREE_CODE (optype) == ARRAY_TYPE
16554 	       && type == TREE_TYPE (optype)
16555 	       && (!in_gimple_form
16556 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16557 	{
16558 	  tree type_domain = TYPE_DOMAIN (optype);
16559 	  tree min_val = size_zero_node;
16560 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
16561 	    min_val = TYPE_MIN_VALUE (type_domain);
16562 	  if (in_gimple_form
16563 	      && TREE_CODE (min_val) != INTEGER_CST)
16564 	    return NULL_TREE;
16565 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
16566 			     NULL_TREE, NULL_TREE);
16567 	}
16568       /* *(foo *)&complexfoo => __real__ complexfoo */
16569       else if (TREE_CODE (optype) == COMPLEX_TYPE
16570 	       && type == TREE_TYPE (optype))
16571 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
16572       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16573       else if (TREE_CODE (optype) == VECTOR_TYPE
16574 	       && type == TREE_TYPE (optype))
16575 	{
16576 	  tree part_width = TYPE_SIZE (type);
16577 	  tree index = bitsize_int (0);
16578 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16579 	}
16580     }
16581 
16582   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16583       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16584     {
16585       tree op00 = TREE_OPERAND (sub, 0);
16586       tree op01 = TREE_OPERAND (sub, 1);
16587 
16588       STRIP_NOPS (op00);
16589       if (TREE_CODE (op00) == ADDR_EXPR)
16590 	{
16591 	  tree op00type;
16592 	  op00 = TREE_OPERAND (op00, 0);
16593 	  op00type = TREE_TYPE (op00);
16594 
16595 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16596 	  if (TREE_CODE (op00type) == VECTOR_TYPE
16597 	      && type == TREE_TYPE (op00type))
16598 	    {
16599 	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16600 	      tree part_width = TYPE_SIZE (type);
16601 	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16602 	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16603 	      tree index = bitsize_int (indexi);
16604 
16605 	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16606 		return fold_build3_loc (loc,
16607 					BIT_FIELD_REF, type, op00,
16608 					part_width, index);
16609 
16610 	    }
16611 	  /* ((foo *)&complexfoo)[1] => __imag__ complexfoo */
16612 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
16613 		   && type == TREE_TYPE (op00type))
16614 	    {
16615 	      tree size = TYPE_SIZE_UNIT (type);
16616 	      if (tree_int_cst_equal (size, op01))
16617 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16618 	    }
16619 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
16620 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
16621 		   && type == TREE_TYPE (op00type))
16622 	    {
16623 	      tree type_domain = TYPE_DOMAIN (op00type);
16624 	      tree min_val = size_zero_node;
16625 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
16626 		min_val = TYPE_MIN_VALUE (type_domain);
16627 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16628 				     TYPE_SIZE_UNIT (type));
16629 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16630 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
16631 				 NULL_TREE, NULL_TREE);
16632 	    }
16633 	}
16634     }
16635 
16636   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16637   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16638       && type == TREE_TYPE (TREE_TYPE (subtype))
16639       && (!in_gimple_form
16640 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16641     {
16642       tree type_domain;
16643       tree min_val = size_zero_node;
16644       sub = build_fold_indirect_ref_loc (loc, sub);
16645       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16646       if (type_domain && TYPE_MIN_VALUE (type_domain))
16647 	min_val = TYPE_MIN_VALUE (type_domain);
16648       if (in_gimple_form
16649 	  && TREE_CODE (min_val) != INTEGER_CST)
16650 	return NULL_TREE;
16651       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16652 			 NULL_TREE);
16653     }
16654 
16655   return NULL_TREE;
16656 }
16657 
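/* Editorial sketch, not part of GCC: the source-level equivalences
   fold_indirect_ref_1 exploits, checked in plain GNU C.  The complex
   case relies on C99's guarantee that a _Complex value is laid out as
   { real, imag }.  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  int a[4] = { 1, 2, 3, 4 };
  double _Complex c = 1.0 + 2.0 * I;

  /* *(foo *)&fooarray => fooarray[0]  */
  assert (*(int *) &a == a[0]);
  /* *(foo *)&complexfoo => __real__ complexfoo  */
  assert (((double *) &c)[0] == __real__ c);
  /* ((foo *)&complexfoo)[1] => __imag__ complexfoo  */
  assert (((double *) &c)[1] == __imag__ c);
  return 0;
}
#endif
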
16658 /* Builds an expression for an indirection through T, simplifying some
16659    cases.  */
16660 
16661 tree
16662 build_fold_indirect_ref_loc (location_t loc, tree t)
16663 {
16664   tree type = TREE_TYPE (TREE_TYPE (t));
16665   tree sub = fold_indirect_ref_1 (loc, type, t);
16666 
16667   if (sub)
16668     return sub;
16669 
16670   return build1_loc (loc, INDIRECT_REF, type, t);
16671 }
16672 
16673 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16674 
16675 tree
16676 fold_indirect_ref_loc (location_t loc, tree t)
16677 {
16678   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16679 
16680   if (sub)
16681     return sub;
16682   else
16683     return t;
16684 }
16685 
16686 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16687    whose result is ignored.  The type of the returned tree need not be
16688    the same as the original expression.  */
16689 
16690 tree
16691 fold_ignored_result (tree t)
16692 {
16693   if (!TREE_SIDE_EFFECTS (t))
16694     return integer_zero_node;
16695 
16696   for (;;)
16697     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16698       {
16699       case tcc_unary:
16700 	t = TREE_OPERAND (t, 0);
16701 	break;
16702 
16703       case tcc_binary:
16704       case tcc_comparison:
16705 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16706 	  t = TREE_OPERAND (t, 0);
16707 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16708 	  t = TREE_OPERAND (t, 1);
16709 	else
16710 	  return t;
16711 	break;
16712 
16713       case tcc_expression:
16714 	switch (TREE_CODE (t))
16715 	  {
16716 	  case COMPOUND_EXPR:
16717 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16718 	      return t;
16719 	    t = TREE_OPERAND (t, 0);
16720 	    break;
16721 
16722 	  case COND_EXPR:
16723 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16724 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16725 	      return t;
16726 	    t = TREE_OPERAND (t, 0);
16727 	    break;
16728 
16729 	  default:
16730 	    return t;
16731 	  }
16732 	break;
16733 
16734       default:
16735 	return t;
16736       }
16737 }
16738 
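/* Editorial sketch with hypothetical trees (X and Y would be
   VAR_DECLs built elsewhere): for an ignored (y = 1) + x only the
   left operand has side effects, so the loop above descends into it
   and returns the MODIFY_EXPR, while a side-effect-free operand
   folds all the way to integer_zero_node.  */
#if 0
tree one = build_int_cst (integer_type_node, 1);
tree sum = build2 (PLUS_EXPR, integer_type_node,
		   build2 (MODIFY_EXPR, integer_type_node, y, one),
		   x);
tree core = fold_ignored_result (sum);	/* the MODIFY_EXPR y = 1  */
tree zero = fold_ignored_result (x);	/* integer_zero_node  */
#endif
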
16739 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16740    This can only be applied to objects of a sizetype.  */
16741 
16742 tree
16743 round_up_loc (location_t loc, tree value, int divisor)
16744 {
16745   tree div = NULL_TREE;
16746 
16747   gcc_assert (divisor > 0);
16748   if (divisor == 1)
16749     return value;
16750 
16751   /* See if VALUE is already a multiple of DIVISOR.  If so, there is
16752      nothing to do.  Only perform this check when VALUE is not a
16753      constant, because for a constant the check is more expensive than
16754      simply doing the rounding.  */
16755   if (TREE_CODE (value) != INTEGER_CST)
16756     {
16757       div = build_int_cst (TREE_TYPE (value), divisor);
16758 
16759       if (multiple_of_p (TREE_TYPE (value), value, div))
16760 	return value;
16761     }
16762 
16763   /* If divisor is a power of two, simplify this to bit manipulation.  */
16764   if (divisor == (divisor & -divisor))
16765     {
16766       if (TREE_CODE (value) == INTEGER_CST)
16767 	{
16768 	  double_int val = tree_to_double_int (value);
16769 	  bool overflow_p;
16770 
16771 	  if ((val.low & (divisor - 1)) == 0)
16772 	    return value;
16773 
16774 	  overflow_p = TREE_OVERFLOW (value);
16775 	  val.low &= ~(divisor - 1);
16776 	  val.low += divisor;
16777 	  if (val.low == 0)
16778 	    {
16779 	      val.high++;
16780 	      if (val.high == 0)
16781 		overflow_p = true;
16782 	    }
16783 
16784 	  return force_fit_type_double (TREE_TYPE (value), val,
16785 					-1, overflow_p);
16786 	}
16787       else
16788 	{
16789 	  tree t;
16790 
16791 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16792 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16793 	  t = build_int_cst (TREE_TYPE (value), -divisor);
16794 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16795 	}
16796     }
16797   else
16798     {
16799       if (!div)
16800 	div = build_int_cst (TREE_TYPE (value), divisor);
16801       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16802       value = size_binop_loc (loc, MULT_EXPR, value, div);
16803     }
16804 
16805   return value;
16806 }
16807 
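/* Editorial sketch of the power-of-two path above, as plain host
   arithmetic: for a non-constant VALUE the code emits the classic
   (v + d - 1) & -d sequence (a PLUS_EXPR followed by a BIT_AND_EXPR
   with -DIVISOR).  */
#if 0
#include <assert.h>

static unsigned long
round_up_pow2 (unsigned long v, unsigned long d)  /* d a power of two  */
{
  return (v + d - 1) & ~(d - 1);	/* same bits as (v + d - 1) & -d  */
}

int
main (void)
{
  assert (round_up_pow2 (13, 8) == 16);
  assert (round_up_pow2 (16, 8) == 16);	/* already aligned: unchanged  */
  return 0;
}
#endif
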
16808 /* Likewise, but round down.  */
16809 
16810 tree
16811 round_down_loc (location_t loc, tree value, int divisor)
16812 {
16813   tree div = NULL_TREE;
16814 
16815   gcc_assert (divisor > 0);
16816   if (divisor == 1)
16817     return value;
16818 
16819   /* See if VALUE is already a multiple of DIVISOR.  If so, there is
16820      nothing to do.  Only perform this check when VALUE is not a
16821      constant, because for a constant the check is more expensive than
16822      simply doing the rounding.  */
16823   if (TREE_CODE (value) != INTEGER_CST)
16824     {
16825       div = build_int_cst (TREE_TYPE (value), divisor);
16826 
16827       if (multiple_of_p (TREE_TYPE (value), value, div))
16828 	return value;
16829     }
16830 
16831   /* If divisor is a power of two, simplify this to bit manipulation.  */
16832   if (divisor == (divisor & -divisor))
16833     {
16834       tree t;
16835 
16836       t = build_int_cst (TREE_TYPE (value), -divisor);
16837       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16838     }
16839   else
16840     {
16841       if (!div)
16842 	div = build_int_cst (TREE_TYPE (value), divisor);
16843       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16844       value = size_binop_loc (loc, MULT_EXPR, value, div);
16845     }
16846 
16847   return value;
16848 }
16849 
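/* Likewise, an editorial sketch of rounding down: a power-of-two
   DIVISOR needs only the single mask v & -d, e.g.
   (13 & ~7UL) == 8 and (16 & ~7UL) == 16.  */
#if 0
static unsigned long
round_down_pow2 (unsigned long v, unsigned long d)  /* d a power of two  */
{
  return v & ~(d - 1);			/* same bits as v & -d  */
}
#endif
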
16850 /* Returns a pointer to the base of the object addressed by EXP and
16851    extracts information about the offset of the access, storing it
16852    in *PBITPOS and *POFFSET.  */
16853 
16854 static tree
16855 split_address_to_core_and_offset (tree exp,
16856 				  HOST_WIDE_INT *pbitpos, tree *poffset)
16857 {
16858   tree core;
16859   enum machine_mode mode;
16860   int unsignedp, volatilep;
16861   HOST_WIDE_INT bitsize;
16862   location_t loc = EXPR_LOCATION (exp);
16863 
16864   if (TREE_CODE (exp) == ADDR_EXPR)
16865     {
16866       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16867 				  poffset, &mode, &unsignedp, &volatilep,
16868 				  false);
16869       core = build_fold_addr_expr_loc (loc, core);
16870     }
16871   else
16872     {
16873       core = exp;
16874       *pbitpos = 0;
16875       *poffset = NULL_TREE;
16876     }
16877 
16878   return core;
16879 }
16880 
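/* Editorial usage sketch (struct S and the variable s are
   hypothetical): for EXP == &s.b with struct S { int a; int b; } s,
   the core comes back as &s, *PBITPOS as the constant bit offset of
   the member b, and *POFFSET as NULL_TREE; a variable offset, such as
   a non-constant array index, would appear in *POFFSET instead.  */
#if 0
HOST_WIDE_INT bitpos;
tree offset;
tree core = split_address_to_core_and_offset (exp, &bitpos, &offset);
#endif
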
16881 /* Returns true if the addresses of E1 and E2 differ by a constant,
16882    false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16883 
16884 bool
16885 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16886 {
16887   tree core1, core2;
16888   HOST_WIDE_INT bitpos1, bitpos2;
16889   tree toffset1, toffset2, tdiff, type;
16890 
16891   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16892   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16893 
16894   if (bitpos1 % BITS_PER_UNIT != 0
16895       || bitpos2 % BITS_PER_UNIT != 0
16896       || !operand_equal_p (core1, core2, 0))
16897     return false;
16898 
16899   if (toffset1 && toffset2)
16900     {
16901       type = TREE_TYPE (toffset1);
16902       if (type != TREE_TYPE (toffset2))
16903 	toffset2 = fold_convert (type, toffset2);
16904 
16905       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16906       if (!cst_and_fits_in_hwi (tdiff))
16907 	return false;
16908 
16909       *diff = int_cst_value (tdiff);
16910     }
16911   else if (toffset1 || toffset2)
16912     {
16913       /* If only one of the offsets is non-constant, the difference cannot
16914 	 be a constant.  */
16915       return false;
16916     }
16917   else
16918     *diff = 0;
16919 
16920   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16921   return true;
16922 }
16923 
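/* Editorial usage sketch (E1 and E2 are hypothetical ADDR_EXPRs):
   for e1 == &a[3] and e2 == &a[1] the cores match and both offsets
   are constant, so the call succeeds with *DIFF == 2 * sizeof (elt)
   bytes; with unrelated bases it returns false and *DIFF is left
   untouched.  */
#if 0
HOST_WIDE_INT diff;
if (ptr_difference_const (e1, e2, &diff))
  /* e1 - e2 is the compile-time constant DIFF, in bytes.  */;
#endif
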
16924 /* Simplify the floating-point expression EXP when the sign of the
16925    result is not significant.  Return NULL_TREE if no simplification
16926    is possible.  */
16927 
16928 tree
16929 fold_strip_sign_ops (tree exp)
16930 {
16931   tree arg0, arg1;
16932   location_t loc = EXPR_LOCATION (exp);
16933 
16934   switch (TREE_CODE (exp))
16935     {
16936     case ABS_EXPR:
16937     case NEGATE_EXPR:
16938       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16939       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16940 
16941     case MULT_EXPR:
16942     case RDIV_EXPR:
16943       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16944 	return NULL_TREE;
16945       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16946       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16947       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16948 	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16949 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
16950 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
16951       break;
16952 
16953     case COMPOUND_EXPR:
16954       arg0 = TREE_OPERAND (exp, 0);
16955       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16956       if (arg1)
16957 	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16958       break;
16959 
16960     case COND_EXPR:
16961       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16962       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16963       if (arg0 || arg1)
16964 	return fold_build3_loc (loc,
16965 			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16966 			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
16967 			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
16968       break;
16969 
16970     case CALL_EXPR:
16971       {
16972 	const enum built_in_function fcode = builtin_mathfn_code (exp);
16973 	switch (fcode)
16974 	{
16975 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
16976 	  /* Strip the copysign function call and return its first argument.  */
16977 	  arg0 = CALL_EXPR_ARG (exp, 0);
16978 	  arg1 = CALL_EXPR_ARG (exp, 1);
16979 	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16980 
16981 	default:
16982 	  /* Strip sign ops from the argument of "odd" math functions.  */
16983 	  if (negate_mathfn_p (fcode))
16984             {
16985 	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16986 	      if (arg0)
16987 		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16988 	    }
16989 	  break;
16990 	}
16991       }
16992       break;
16993 
16994     default:
16995       break;
16996     }
16997   return NULL_TREE;
16998 }
16999
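/* Editorial usage sketch: a caller that only cares about the
   magnitude of ARG (say, when folding fabs (x * -y)) can substitute
   the stripped form x * y whenever one is returned.  */
#if 0
tree stripped = fold_strip_sign_ops (arg);
if (stripped)
  arg = stripped;		/* sign-insignificant simplification  */
#endif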