/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
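
/* Editor's illustration (not part of the original sources): a typical
   use of these entry points is

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   which folds directly to a sizetype INTEGER_CST of value 12, while
   fold () applied to a hand-built binary expression of two constants
   collapses it to a single constant node in the same way.  */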

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
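
/* Bit 0 of a comparison code stands for "less", bit 1 for "equal",
   bit 2 for "greater" and bit 3 for "unordered", so for instance
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ).  Combining two
   comparisons of the same operands then becomes bitwise arithmetic
   on the codes (editor's illustration):

     (a <= b) && (a >= b)  ->  COMPCODE_LE & COMPCODE_GE == 3 & 6
			       == 2 == COMPCODE_EQ  ->  a == b
     (a < b) || (a == b)   ->  COMPCODE_LT | COMPCODE_EQ == 1 | 2
			       == 3 == COMPCODE_LE  ->  a <= b  */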

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modifies X in
   place; if the location can and needs to be set, X is unshared
   first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
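
/* For example (editor's note), with INTEGER_CST operands 12 and 4
   div_if_zero_remainder returns the constant 3, whereas with 13 and 4
   it returns NULL_TREE because the remainder is nonzero.  */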

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
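
/* A typical deferral sequence, sketched by the editor (USED_P is a
   hypothetical flag standing for "the caller keeps the result"):

     fold_defer_overflow_warnings ();
     tree val = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (val != NULL_TREE && used_p, stmt, 0);

   so a -Wstrict-overflow diagnostic is only emitted when the folded
   result is actually used.  */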

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
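
/* For example, sin is odd, so -sin(x) may be folded to sin(-x).  The
   rint family is odd only when the dynamic rounding mode can be
   ignored: under FE_DOWNWARD, nearbyint(-0.5) is -1.0 while
   -nearbyint(0.5) is -0.0, hence the !flag_rounding_math check
   above.  (Editor's note.)  */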

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
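
/* E.g. for a 16-bit signed type only the value -32768, with just the
   sign bit set, fails this test, since -(-32768) is not representable;
   every other value can be negated safely.  */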

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one of its
         operands does overflow if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because -(INT_MIN / 1) is itself an
	 overflow and therefore undefined.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's OK to associate the negate with the
	     division, which is not OK for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE is never returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because -(INT_MIN / 1) is itself an
	 overflow and therefore undefined.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's OK to associate the negate with the
	     division, which is not OK for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
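
/* As an editor's illustration, splitting IN = X - 5 with
   CODE == PLUS_EXPR and NEGATE_P == 0 yields

     *LITP = 0, *MINUS_LITP = 5, *CONP = 0, return value X;

   the literal was subtracted, so it is reported through *MINUS_LITP
   rather than *LITP.  */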

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
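
/* Continuing the illustration: with CODE == PLUS_EXPR, T1 == A - B
   and T2 == -C (a NEGATE_EXPR), the code above builds (A - B) - C
   directly instead of calling fold on (A - B) + (-C), which avoids
   the infinite recursion mentioned in the comment.  */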

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
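
/* Shifts and rotates are exempted above because their second operand
   is a count whose type need not match the first operand; e.g.
   size_binop (LSHIFT_EXPR, size_int (1), bitsize_int (3)) is valid
   even though sizetype and bitsizetype may differ in precision.
   (Editor's example.)  */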


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
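
/* Editor's illustration: for two 32-bit signed INTEGER_CSTs
   int_const_binop (PLUS_EXPR, 0x7fffffff, 1) produces the value
   -2147483648 with TREE_OVERFLOW set, since OVERFLOWABLE is 1 and
   the signed addition wrapped.  */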

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
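	    /* Editor's worked example: (3 + 2i) / (1 + 1i) gives
	       t = 1*1 + 1*1 = 2, so the result is
	       ((3*1 + 2*1)/2) + i((2*1 - 3*1)/2) = 2.5 - 0.5i.  */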
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as an argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
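
/* For instance (editor's sketch), with this overload

     const_binop (COMPLEX_EXPR, complex_double_type_node,
		  build_real (double_type_node, dconst1),
		  build_real (double_type_node, dconst2))

   yields the COMPLEX_CST 1.0 + 2.0i, a case the typeless worker above
   cannot handle because the result type differs from the operand
   types.  */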

/* Compute CODE ARG0 with resulting type TYPE, ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
1763 
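/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the endianness test in the VEC_UNPACK_* case
   above.  On a little-endian target element 0 lives in the low half of
   the constant vector, so the LO variants read the first NELTS elements
   and the HI variants skip them; on big-endian the halves swap roles.
   All names below are ad hoc.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int elts[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };  /* constant input, 2*nelts */
  int bytes_big_endian = 0;   /* pretend-target byte order */
  int unpack_lo = 1;          /* 1 for *_LO_EXPR, 0 for *_HI_EXPR */
  int nelts = 4;
  int *p = elts;

  /* Same XOR as in const_unop: step past the half we don't want.  */
  if ((!bytes_big_endian) ^ unpack_lo)
    p += nelts;
  for (int i = 0; i < nelts; i++)
    printf ("%d ", p[i]);     /* little-endian + LO prints: 0 1 2 3 */
  printf ("\n");
  return 0;
}
#endif
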
1764 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1765    indicates which particular sizetype to create.  */
1766 
1767 tree
1768 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1769 {
1770   return build_int_cst (sizetype_tab[(int) kind], number);
1771 }
1772 
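/* For orientation: tree.h provides convenience wrappers that pick the
   KIND argument (quoted from memory; verify against tree.h):

     size_int (n)      => size_int_kind (n, stk_sizetype)
     ssize_int (n)     => size_int_kind (n, stk_ssizetype)
     bitsize_int (n)   => size_int_kind (n, stk_bitsizetype)
     sbitsize_int (n)  => size_int_kind (n, stk_sbitsizetype)  */
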
1773 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1774    is a tree code.  The type of the result is taken from the operands.
1775    Both must be equivalent integer types, ala int_binop_types_match_p.
1776    If the operands are constant, so is the result.  */
1777 
1778 tree
1779 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1780 {
1781   tree type = TREE_TYPE (arg0);
1782 
1783   if (arg0 == error_mark_node || arg1 == error_mark_node)
1784     return error_mark_node;
1785 
1786   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1787                                        TREE_TYPE (arg1)));
1788 
1789   /* Handle the special case of two integer constants faster.  */
1790   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1791     {
1792       /* And some specific cases even faster than that.  */
1793       if (code == PLUS_EXPR)
1794 	{
1795 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1796 	    return arg1;
1797 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1798 	    return arg0;
1799 	}
1800       else if (code == MINUS_EXPR)
1801 	{
1802 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1803 	    return arg0;
1804 	}
1805       else if (code == MULT_EXPR)
1806 	{
1807 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1808 	    return arg1;
1809 	}
1810 
1811       /* Handle general case of two integer constants.  For sizetype
1812          constant calculations we always want to know about overflow,
1813 	 even in the unsigned case.  */
1814       return int_const_binop_1 (code, arg0, arg1, -1);
1815     }
1816 
1817   return fold_build2_loc (loc, code, type, arg0, arg1);
1818 }
1819 
1820 /* Given two values, either both of sizetype or both of bitsizetype,
1821    compute the difference between the two values.  Return the value
1822    in the signed type corresponding to the type of the operands.  */
1823 
1824 tree
1825 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1826 {
1827   tree type = TREE_TYPE (arg0);
1828   tree ctype;
1829 
1830   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1831 				       TREE_TYPE (arg1)));
1832 
1833   /* If the type is already signed, just do the simple thing.  */
1834   if (!TYPE_UNSIGNED (type))
1835     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1836 
1837   if (type == sizetype)
1838     ctype = ssizetype;
1839   else if (type == bitsizetype)
1840     ctype = sbitsizetype;
1841   else
1842     ctype = signed_type_for (type);
1843 
1844   /* If either operand is not a constant, do the conversions to the signed
1845      type and subtract.  The hardware will do the right thing with any
1846      overflow in the subtraction.  */
1847   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1848     return size_binop_loc (loc, MINUS_EXPR,
1849 			   fold_convert_loc (loc, ctype, arg0),
1850 			   fold_convert_loc (loc, ctype, arg1));
1851 
1852   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1853      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1854      overflow) and negate (which can't either).  Special-case a result
1855      of zero while we're here.  */
1856   if (tree_int_cst_equal (arg0, arg1))
1857     return build_int_cst (ctype, 0);
1858   else if (tree_int_cst_lt (arg1, arg0))
1859     return fold_convert_loc (loc, ctype,
1860 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1861   else
1862     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1863 			   fold_convert_loc (loc, ctype,
1864 					     size_binop_loc (loc,
1865 							     MINUS_EXPR,
1866 							     arg1, arg0)));
1867 }
1868 
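/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the constant case of size_diffop above.  Subtract
   two unsigned size values in whichever order cannot wrap, then attach
   the sign, giving the correct result in the signed type.  The sketch
   assumes the true difference fits in 'long'.  */
#if 0
#include <stdio.h>

static long
size_diff (unsigned long a, unsigned long b)
{
  if (a == b)
    return 0;                 /* special-cased zero result */
  if (b < a)
    return (long) (a - b);    /* a - b cannot wrap here */
  return -(long) (b - a);     /* b - a cannot wrap; negate it */
}

int
main (void)
{
  printf ("%ld %ld\n", size_diff (7, 10), size_diff (10, 7));  /* -3 3 */
  return 0;
}
#endif
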
1869 /* A subroutine of fold_convert_const handling conversions of an
1870    INTEGER_CST to another integer type.  */
1871 
1872 static tree
1873 fold_convert_const_int_from_int (tree type, const_tree arg1)
1874 {
1875   /* Given an integer constant, make new constant with new type,
1876      appropriately sign-extended or truncated.  Use widest_int
1877      so that any extension is done according ARG1's type.  */
1878   return force_fit_type (type, wi::to_widest (arg1),
1879 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1880 			 TREE_OVERFLOW (arg1));
1881 }
1882 
1883 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1884    to an integer type.  */
1885 
1886 static tree
1887 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1888 {
1889   bool overflow = false;
1890   tree t;
1891 
1892   /* The following code implements the floating point to integer
1893      conversion rules required by the Java Language Specification,
1894      that IEEE NaNs are mapped to zero and values that overflow
1895      the target precision saturate, i.e. values greater than
1896      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1897      are mapped to INT_MIN.  These semantics are allowed by the
1898      C and C++ standards that simply state that the behavior of
1899      FP-to-integer conversion is unspecified upon overflow.  */
1900 
1901   wide_int val;
1902   REAL_VALUE_TYPE r;
1903   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1904 
1905   switch (code)
1906     {
1907     case FIX_TRUNC_EXPR:
1908       real_trunc (&r, VOIDmode, &x);
1909       break;
1910 
1911     default:
1912       gcc_unreachable ();
1913     }
1914 
1915   /* If R is NaN, return zero and show we have an overflow.  */
1916   if (REAL_VALUE_ISNAN (r))
1917     {
1918       overflow = true;
1919       val = wi::zero (TYPE_PRECISION (type));
1920     }
1921 
1922   /* See if R is less than the lower bound or greater than the
1923      upper bound.  */
1924 
1925   if (! overflow)
1926     {
1927       tree lt = TYPE_MIN_VALUE (type);
1928       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1929       if (REAL_VALUES_LESS (r, l))
1930 	{
1931 	  overflow = true;
1932 	  val = lt;
1933 	}
1934     }
1935 
1936   if (! overflow)
1937     {
1938       tree ut = TYPE_MAX_VALUE (type);
1939       if (ut)
1940 	{
1941 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1942 	  if (REAL_VALUES_LESS (u, r))
1943 	    {
1944 	      overflow = true;
1945 	      val = ut;
1946 	    }
1947 	}
1948     }
1949 
1950   if (! overflow)
1951     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1952 
1953   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1954   return t;
1955 }
1956 
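/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the saturating semantics implemented above, for a
   32-bit signed target type.  NaN maps to zero, out-of-range values
   clamp to the type's bounds, and the caller sees an overflow flag.  */
#if 0
#include <limits.h>
#include <math.h>
#include <stdbool.h>
#include <stdio.h>

static int
sat_ftoi (double x, bool *overflow)
{
  *overflow = false;
  if (isnan (x))
    {
      *overflow = true;
      return 0;               /* NaN -> zero, flagged */
    }

  double r = trunc (x);       /* FIX_TRUNC_EXPR: chop toward zero */
  if (r < (double) INT_MIN)
    {
      *overflow = true;
      return INT_MIN;         /* saturate downward */
    }
  if (r > (double) INT_MAX)
    {
      *overflow = true;
      return INT_MAX;         /* saturate upward */
    }
  return (int) r;
}

int
main (void)
{
  bool o;
  printf ("%d\n", sat_ftoi (3.9, &o));    /* 3 */
  printf ("%d\n", sat_ftoi (1e30, &o));   /* 2147483647 */
  printf ("%d\n", sat_ftoi (-1e30, &o));  /* -2147483648 */
  printf ("%d\n", sat_ftoi (NAN, &o));    /* 0 */
  return 0;
}
#endif
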
1957 /* A subroutine of fold_convert_const handling conversions of a
1958    FIXED_CST to an integer type.  */
1959 
1960 static tree
1961 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1962 {
1963   tree t;
1964   double_int temp, temp_trunc;
1965   unsigned int mode;
1966 
1967   /* Right shift FIXED_CST to temp by fbit.  */
1968   temp = TREE_FIXED_CST (arg1).data;
1969   mode = TREE_FIXED_CST (arg1).mode;
1970   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1971     {
1972       temp = temp.rshift (GET_MODE_FBIT (mode),
1973 			  HOST_BITS_PER_DOUBLE_INT,
1974 			  SIGNED_FIXED_POINT_MODE_P (mode));
1975 
1976       /* Left shift temp to temp_trunc by fbit.  */
1977       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1978 				HOST_BITS_PER_DOUBLE_INT,
1979 				SIGNED_FIXED_POINT_MODE_P (mode));
1980     }
1981   else
1982     {
1983       temp = double_int_zero;
1984       temp_trunc = double_int_zero;
1985     }
1986 
1987   /* If FIXED_CST is negative, we need to round the value toward 0:
1988      if any fractional bits were shifted out, add 1 to temp.  */
1989   if (SIGNED_FIXED_POINT_MODE_P (mode)
1990       && temp_trunc.is_negative ()
1991       && TREE_FIXED_CST (arg1).data != temp_trunc)
1992     temp += double_int_one;
1993 
1994   /* Given a fixed-point constant, make new constant with new type,
1995      appropriately sign-extended or truncated.  */
1996   t = force_fit_type (type, temp, -1,
1997 		      (temp.is_negative ()
1998 		       && (TYPE_UNSIGNED (type)
1999 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2000 		      | TREE_OVERFLOW (arg1));
2001 
2002   return t;
2003 }
2004 
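/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the round-toward-zero adjustment above, for a toy
   signed format with 4 fractional bits.  An arithmetic right shift of a
   negative value rounds toward minus infinity, so when fractional bits
   were dropped we add 1 to round toward zero instead.  Assumes '>>' on
   a negative int is an arithmetic shift, as on common targets.  */
#if 0
#include <stdio.h>

#define FBIT 4

static int
fixed_to_int (int fx)         /* FX encodes value * 2^FBIT */
{
  int t = fx >> FBIT;         /* floors negative values */
  if (t < 0 && (fx & ((1 << FBIT) - 1)) != 0)
    t += 1;                   /* fractional bits lost: round to zero */
  return t;
}

int
main (void)
{
  printf ("%d\n", fixed_to_int (-24));  /* -1.5 -> -1 */
  printf ("%d\n", fixed_to_int (-32));  /* -2.0 -> -2 */
  printf ("%d\n", fixed_to_int (24));   /*  1.5 ->  1 */
  return 0;
}
#endif
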
2005 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2006    to another floating point type.  */
2007 
2008 static tree
2009 fold_convert_const_real_from_real (tree type, const_tree arg1)
2010 {
2011   REAL_VALUE_TYPE value;
2012   tree t;
2013 
2014   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2015   t = build_real (type, value);
2016 
2017   /* If converting an infinity or NAN to a representation that doesn't
2018      have one, set the overflow bit so that we can produce some kind of
2019      error message at the appropriate point if necessary.  It's not the
2020      most user-friendly message, but it's better than nothing.  */
2021   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2022       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2023     TREE_OVERFLOW (t) = 1;
2024   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2025 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2026     TREE_OVERFLOW (t) = 1;
2027   /* Regular overflow: the conversion produced an infinity in a mode
2028      that can't represent one.  */
2029   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2030 	   && REAL_VALUE_ISINF (value)
2031 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2032     TREE_OVERFLOW (t) = 1;
2033   else
2034     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2035   return t;
2036 }
2037 
2038 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2039    to a floating point type.  */
2040 
2041 static tree
2042 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2043 {
2044   REAL_VALUE_TYPE value;
2045   tree t;
2046 
2047   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2048   t = build_real (type, value);
2049 
2050   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2051   return t;
2052 }
2053 
2054 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2055    to another fixed-point type.  */
2056 
2057 static tree
2058 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2059 {
2060   FIXED_VALUE_TYPE value;
2061   tree t;
2062   bool overflow_p;
2063 
2064   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2065 			      TYPE_SATURATING (type));
2066   t = build_fixed (type, value);
2067 
2068   /* Propagate overflow flags.  */
2069   if (overflow_p | TREE_OVERFLOW (arg1))
2070     TREE_OVERFLOW (t) = 1;
2071   return t;
2072 }
2073 
2074 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2075    to a fixed-point type.  */
2076 
2077 static tree
2078 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2079 {
2080   FIXED_VALUE_TYPE value;
2081   tree t;
2082   bool overflow_p;
2083   double_int di;
2084 
2085   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2086 
2087   di.low = TREE_INT_CST_ELT (arg1, 0);
2088   if (TREE_INT_CST_NUNITS (arg1) == 1)
2089     di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2090   else
2091     di.high = TREE_INT_CST_ELT (arg1, 1);
2092 
2093   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2094 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2095 				       TYPE_SATURATING (type));
2096   t = build_fixed (type, value);
2097 
2098   /* Propagate overflow flags.  */
2099   if (overflow_p | TREE_OVERFLOW (arg1))
2100     TREE_OVERFLOW (t) = 1;
2101   return t;
2102 }
2103 
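/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the di.high computation above.  Widening a
   one-word two's-complement constant to a two-word value replicates
   the sign bit through the whole high word.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int64_t low = -5;
  int64_t high = low < 0 ? -1 : 0;   /* all-ones high word if negative */
  printf ("%016llx %016llx\n",
          (unsigned long long) high, (unsigned long long) low);
  /* prints: ffffffffffffffff fffffffffffffffb */
  return 0;
}
#endif
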
2104 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2105    to a fixed-point type.  */
2106 
2107 static tree
2108 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2109 {
2110   FIXED_VALUE_TYPE value;
2111   tree t;
2112   bool overflow_p;
2113 
2114   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2115 					&TREE_REAL_CST (arg1),
2116 					TYPE_SATURATING (type));
2117   t = build_fixed (type, value);
2118 
2119   /* Propagate overflow flags.  */
2120   if (overflow_p | TREE_OVERFLOW (arg1))
2121     TREE_OVERFLOW (t) = 1;
2122   return t;
2123 }
2124 
2125 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2126    type TYPE.  If no simplification can be done return NULL_TREE.  */
2127 
2128 static tree
2129 fold_convert_const (enum tree_code code, tree type, tree arg1)
2130 {
2131   if (TREE_TYPE (arg1) == type)
2132     return arg1;
2133 
2134   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2135       || TREE_CODE (type) == OFFSET_TYPE)
2136     {
2137       if (TREE_CODE (arg1) == INTEGER_CST)
2138 	return fold_convert_const_int_from_int (type, arg1);
2139       else if (TREE_CODE (arg1) == REAL_CST)
2140 	return fold_convert_const_int_from_real (code, type, arg1);
2141       else if (TREE_CODE (arg1) == FIXED_CST)
2142 	return fold_convert_const_int_from_fixed (type, arg1);
2143     }
2144   else if (TREE_CODE (type) == REAL_TYPE)
2145     {
2146       if (TREE_CODE (arg1) == INTEGER_CST)
2147 	return build_real_from_int_cst (type, arg1);
2148       else if (TREE_CODE (arg1) == REAL_CST)
2149 	return fold_convert_const_real_from_real (type, arg1);
2150       else if (TREE_CODE (arg1) == FIXED_CST)
2151 	return fold_convert_const_real_from_fixed (type, arg1);
2152     }
2153   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2154     {
2155       if (TREE_CODE (arg1) == FIXED_CST)
2156 	return fold_convert_const_fixed_from_fixed (type, arg1);
2157       else if (TREE_CODE (arg1) == INTEGER_CST)
2158 	return fold_convert_const_fixed_from_int (type, arg1);
2159       else if (TREE_CODE (arg1) == REAL_CST)
2160 	return fold_convert_const_fixed_from_real (type, arg1);
2161     }
2162   return NULL_TREE;
2163 }
2164 
2165 /* Construct a vector of zero elements of vector type TYPE.  */
2166 
2167 static tree
2168 build_zero_vector (tree type)
2169 {
2170   tree t;
2171 
2172   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2173   return build_vector_from_val (type, t);
2174 }
2175 
2176 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2177 
2178 bool
2179 fold_convertible_p (const_tree type, const_tree arg)
2180 {
2181   tree orig = TREE_TYPE (arg);
2182 
2183   if (type == orig)
2184     return true;
2185 
2186   if (TREE_CODE (arg) == ERROR_MARK
2187       || TREE_CODE (type) == ERROR_MARK
2188       || TREE_CODE (orig) == ERROR_MARK)
2189     return false;
2190 
2191   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2192     return true;
2193 
2194   switch (TREE_CODE (type))
2195     {
2196     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2197     case POINTER_TYPE: case REFERENCE_TYPE:
2198     case OFFSET_TYPE:
2199       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2200 	  || TREE_CODE (orig) == OFFSET_TYPE)
2201         return true;
2202       return (TREE_CODE (orig) == VECTOR_TYPE
2203 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2204 
2205     case REAL_TYPE:
2206     case FIXED_POINT_TYPE:
2207     case VECTOR_TYPE:
2208     case VOID_TYPE:
2209       return TREE_CODE (type) == TREE_CODE (orig);
2210 
2211     default:
2212       return false;
2213     }
2214 }
2215 
2216 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2217    simple conversions in preference to calling the front-end's convert.  */
2218 
2219 tree
2220 fold_convert_loc (location_t loc, tree type, tree arg)
2221 {
2222   tree orig = TREE_TYPE (arg);
2223   tree tem;
2224 
2225   if (type == orig)
2226     return arg;
2227 
2228   if (TREE_CODE (arg) == ERROR_MARK
2229       || TREE_CODE (type) == ERROR_MARK
2230       || TREE_CODE (orig) == ERROR_MARK)
2231     return error_mark_node;
2232 
2233   switch (TREE_CODE (type))
2234     {
2235     case POINTER_TYPE:
2236     case REFERENCE_TYPE:
2237       /* Handle conversions between pointers to different address spaces.  */
2238       if (POINTER_TYPE_P (orig)
2239 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2240 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2241 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2242       /* fall through */
2243 
2244     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2245     case OFFSET_TYPE:
2246       if (TREE_CODE (arg) == INTEGER_CST)
2247 	{
2248 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2249 	  if (tem != NULL_TREE)
2250 	    return tem;
2251 	}
2252       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2253 	  || TREE_CODE (orig) == OFFSET_TYPE)
2254 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2255       if (TREE_CODE (orig) == COMPLEX_TYPE)
2256 	return fold_convert_loc (loc, type,
2257 			     fold_build1_loc (loc, REALPART_EXPR,
2258 					  TREE_TYPE (orig), arg));
2259       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2260 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2261       return fold_build1_loc (loc, NOP_EXPR, type, arg);
2262 
2263     case REAL_TYPE:
2264       if (TREE_CODE (arg) == INTEGER_CST)
2265 	{
2266 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2267 	  if (tem != NULL_TREE)
2268 	    return tem;
2269 	}
2270       else if (TREE_CODE (arg) == REAL_CST)
2271 	{
2272 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2273 	  if (tem != NULL_TREE)
2274 	    return tem;
2275 	}
2276       else if (TREE_CODE (arg) == FIXED_CST)
2277 	{
2278 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2279 	  if (tem != NULL_TREE)
2280 	    return tem;
2281 	}
2282 
2283       switch (TREE_CODE (orig))
2284 	{
2285 	case INTEGER_TYPE:
2286 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 	case POINTER_TYPE: case REFERENCE_TYPE:
2288 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2289 
2290 	case REAL_TYPE:
2291 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2292 
2293 	case FIXED_POINT_TYPE:
2294 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2295 
2296 	case COMPLEX_TYPE:
2297 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2298 	  return fold_convert_loc (loc, type, tem);
2299 
2300 	default:
2301 	  gcc_unreachable ();
2302 	}
2303 
2304     case FIXED_POINT_TYPE:
2305       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2306 	  || TREE_CODE (arg) == REAL_CST)
2307 	{
2308 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2309 	  if (tem != NULL_TREE)
2310 	    goto fold_convert_exit;
2311 	}
2312 
2313       switch (TREE_CODE (orig))
2314 	{
2315 	case FIXED_POINT_TYPE:
2316 	case INTEGER_TYPE:
2317 	case ENUMERAL_TYPE:
2318 	case BOOLEAN_TYPE:
2319 	case REAL_TYPE:
2320 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2321 
2322 	case COMPLEX_TYPE:
2323 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2324 	  return fold_convert_loc (loc, type, tem);
2325 
2326 	default:
2327 	  gcc_unreachable ();
2328 	}
2329 
2330     case COMPLEX_TYPE:
2331       switch (TREE_CODE (orig))
2332 	{
2333 	case INTEGER_TYPE:
2334 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2335 	case POINTER_TYPE: case REFERENCE_TYPE:
2336 	case REAL_TYPE:
2337 	case FIXED_POINT_TYPE:
2338 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2339 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2340 			      fold_convert_loc (loc, TREE_TYPE (type),
2341 					    integer_zero_node));
2342 	case COMPLEX_TYPE:
2343 	  {
2344 	    tree rpart, ipart;
2345 
2346 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2347 	      {
2348 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2349 				      TREE_OPERAND (arg, 0));
2350 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2351 				      TREE_OPERAND (arg, 1));
2352 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2353 	      }
2354 
2355 	    arg = save_expr (arg);
2356 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2357 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2358 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2359 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2360 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2361 	  }
2362 
2363 	default:
2364 	  gcc_unreachable ();
2365 	}
2366 
2367     case VECTOR_TYPE:
2368       if (integer_zerop (arg))
2369 	return build_zero_vector (type);
2370       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2371       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2372 		  || TREE_CODE (orig) == VECTOR_TYPE);
2373       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2374 
2375     case VOID_TYPE:
2376       tem = fold_ignored_result (arg);
2377       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2378 
2379     default:
2380       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2381 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2382       gcc_unreachable ();
2383     }
2384  fold_convert_exit:
2385   protected_set_expr_location_unshare (tem, loc);
2386   return tem;
2387 }
2388 
2389 /* Return false if expr can be assumed not to be an lvalue, true
2390    otherwise.  */
2391 
2392 static bool
2393 maybe_lvalue_p (const_tree x)
2394 {
2395   /* We only need to wrap lvalue tree codes.  */
2396   switch (TREE_CODE (x))
2397   {
2398   case VAR_DECL:
2399   case PARM_DECL:
2400   case RESULT_DECL:
2401   case LABEL_DECL:
2402   case FUNCTION_DECL:
2403   case SSA_NAME:
2404 
2405   case COMPONENT_REF:
2406   case MEM_REF:
2407   case INDIRECT_REF:
2408   case ARRAY_REF:
2409   case ARRAY_RANGE_REF:
2410   case BIT_FIELD_REF:
2411   case OBJ_TYPE_REF:
2412 
2413   case REALPART_EXPR:
2414   case IMAGPART_EXPR:
2415   case PREINCREMENT_EXPR:
2416   case PREDECREMENT_EXPR:
2417   case SAVE_EXPR:
2418   case TRY_CATCH_EXPR:
2419   case WITH_CLEANUP_EXPR:
2420   case COMPOUND_EXPR:
2421   case MODIFY_EXPR:
2422   case TARGET_EXPR:
2423   case COND_EXPR:
2424   case BIND_EXPR:
2425     break;
2426 
2427   default:
2428     /* Assume the worst for front-end tree codes.  */
2429     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2430       break;
2431     return false;
2432   }
2433 
2434   return true;
2435 }
2436 
2437 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2438 
2439 tree
2440 non_lvalue_loc (location_t loc, tree x)
2441 {
2442   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2443      us.  */
2444   if (in_gimple_form)
2445     return x;
2446 
2447   if (! maybe_lvalue_p (x))
2448     return x;
2449   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2450 }
2451 
2452 /* Historically this built an expr equal to X but not valid as a pedantic
2453    lvalue; nowadays it just returns X with its location set to LOC.  */
2454 
2455 static tree
2456 pedantic_non_lvalue_loc (location_t loc, tree x)
2457 {
2458   return protected_set_expr_location_unshare (x, loc);
2459 }
2460 
2461 /* Given a tree comparison code, return the code that is the logical inverse.
2462    It is generally not safe to do this for floating-point comparisons, except
2463    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2464    ERROR_MARK in this case.  */
2465 
2466 enum tree_code
2467 invert_tree_comparison (enum tree_code code, bool honor_nans)
2468 {
2469   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2470       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2471     return ERROR_MARK;
2472 
2473   switch (code)
2474     {
2475     case EQ_EXPR:
2476       return NE_EXPR;
2477     case NE_EXPR:
2478       return EQ_EXPR;
2479     case GT_EXPR:
2480       return honor_nans ? UNLE_EXPR : LE_EXPR;
2481     case GE_EXPR:
2482       return honor_nans ? UNLT_EXPR : LT_EXPR;
2483     case LT_EXPR:
2484       return honor_nans ? UNGE_EXPR : GE_EXPR;
2485     case LE_EXPR:
2486       return honor_nans ? UNGT_EXPR : GT_EXPR;
2487     case LTGT_EXPR:
2488       return UNEQ_EXPR;
2489     case UNEQ_EXPR:
2490       return LTGT_EXPR;
2491     case UNGT_EXPR:
2492       return LE_EXPR;
2493     case UNGE_EXPR:
2494       return LT_EXPR;
2495     case UNLT_EXPR:
2496       return GE_EXPR;
2497     case UNLE_EXPR:
2498       return GT_EXPR;
2499     case ORDERED_EXPR:
2500       return UNORDERED_EXPR;
2501     case UNORDERED_EXPR:
2502       return ORDERED_EXPR;
2503     default:
2504       gcc_unreachable ();
2505     }
2506 }
2507 
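/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): why honoring NaNs forces the UN* codes above.
   Once a NaN can appear, the logical inverse of '<' is not '>=' but
   "unordered or >=", i.e. UNGE.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  printf ("%d\n", !(x < y));                       /* 1: !(x < y) holds */
  printf ("%d\n", x >= y);                         /* 0: plain >= differs */
  printf ("%d\n", isunordered (x, y) || x >= y);   /* 1: UNGE agrees */
  return 0;
}
#endif
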
2508 /* Similar, but return the comparison that results if the operands are
2509    swapped.  This is safe for floating-point.  */
2510 
2511 enum tree_code
2512 swap_tree_comparison (enum tree_code code)
2513 {
2514   switch (code)
2515     {
2516     case EQ_EXPR:
2517     case NE_EXPR:
2518     case ORDERED_EXPR:
2519     case UNORDERED_EXPR:
2520     case LTGT_EXPR:
2521     case UNEQ_EXPR:
2522       return code;
2523     case GT_EXPR:
2524       return LT_EXPR;
2525     case GE_EXPR:
2526       return LE_EXPR;
2527     case LT_EXPR:
2528       return GT_EXPR;
2529     case LE_EXPR:
2530       return GE_EXPR;
2531     case UNGT_EXPR:
2532       return UNLT_EXPR;
2533     case UNGE_EXPR:
2534       return UNLE_EXPR;
2535     case UNLT_EXPR:
2536       return UNGT_EXPR;
2537     case UNLE_EXPR:
2538       return UNGE_EXPR;
2539     default:
2540       gcc_unreachable ();
2541     }
2542 }
2543 
2544 
2545 /* Convert a comparison tree code from an enum tree_code representation
2546    into a compcode bit-based encoding.  This function is the inverse of
2547    compcode_to_comparison.  */
2548 
2549 static enum comparison_code
2550 comparison_to_compcode (enum tree_code code)
2551 {
2552   switch (code)
2553     {
2554     case LT_EXPR:
2555       return COMPCODE_LT;
2556     case EQ_EXPR:
2557       return COMPCODE_EQ;
2558     case LE_EXPR:
2559       return COMPCODE_LE;
2560     case GT_EXPR:
2561       return COMPCODE_GT;
2562     case NE_EXPR:
2563       return COMPCODE_NE;
2564     case GE_EXPR:
2565       return COMPCODE_GE;
2566     case ORDERED_EXPR:
2567       return COMPCODE_ORD;
2568     case UNORDERED_EXPR:
2569       return COMPCODE_UNORD;
2570     case UNLT_EXPR:
2571       return COMPCODE_UNLT;
2572     case UNEQ_EXPR:
2573       return COMPCODE_UNEQ;
2574     case UNLE_EXPR:
2575       return COMPCODE_UNLE;
2576     case UNGT_EXPR:
2577       return COMPCODE_UNGT;
2578     case LTGT_EXPR:
2579       return COMPCODE_LTGT;
2580     case UNGE_EXPR:
2581       return COMPCODE_UNGE;
2582     default:
2583       gcc_unreachable ();
2584     }
2585 }
2586 
2587 /* Convert a compcode bit-based encoding of a comparison operator back
2588    to GCC's enum tree_code representation.  This function is the
2589    inverse of comparison_to_compcode.  */
2590 
2591 static enum tree_code
2592 compcode_to_comparison (enum comparison_code code)
2593 {
2594   switch (code)
2595     {
2596     case COMPCODE_LT:
2597       return LT_EXPR;
2598     case COMPCODE_EQ:
2599       return EQ_EXPR;
2600     case COMPCODE_LE:
2601       return LE_EXPR;
2602     case COMPCODE_GT:
2603       return GT_EXPR;
2604     case COMPCODE_NE:
2605       return NE_EXPR;
2606     case COMPCODE_GE:
2607       return GE_EXPR;
2608     case COMPCODE_ORD:
2609       return ORDERED_EXPR;
2610     case COMPCODE_UNORD:
2611       return UNORDERED_EXPR;
2612     case COMPCODE_UNLT:
2613       return UNLT_EXPR;
2614     case COMPCODE_UNEQ:
2615       return UNEQ_EXPR;
2616     case COMPCODE_UNLE:
2617       return UNLE_EXPR;
2618     case COMPCODE_UNGT:
2619       return UNGT_EXPR;
2620     case COMPCODE_LTGT:
2621       return LTGT_EXPR;
2622     case COMPCODE_UNGE:
2623       return UNGE_EXPR;
2624     default:
2625       gcc_unreachable ();
2626     }
2627 }
2628 
2629 /* Return a tree for the comparison which is the combination of
2630    doing the AND or OR (depending on CODE) of the two operations LCODE
2631    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2632    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2633    if this makes the transformation invalid.  */
2634 
2635 tree
2636 combine_comparisons (location_t loc,
2637 		     enum tree_code code, enum tree_code lcode,
2638 		     enum tree_code rcode, tree truth_type,
2639 		     tree ll_arg, tree lr_arg)
2640 {
2641   bool honor_nans = HONOR_NANS (ll_arg);
2642   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2643   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2644   int compcode;
2645 
2646   switch (code)
2647     {
2648     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2649       compcode = lcompcode & rcompcode;
2650       break;
2651 
2652     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2653       compcode = lcompcode | rcompcode;
2654       break;
2655 
2656     default:
2657       return NULL_TREE;
2658     }
2659 
2660   if (!honor_nans)
2661     {
2662       /* Eliminate unordered comparisons, as well as LTGT and ORD
2663 	 which are not used unless the mode has NaNs.  */
2664       compcode &= ~COMPCODE_UNORD;
2665       if (compcode == COMPCODE_LTGT)
2666 	compcode = COMPCODE_NE;
2667       else if (compcode == COMPCODE_ORD)
2668 	compcode = COMPCODE_TRUE;
2669     }
2670    else if (flag_trapping_math)
2671      {
2672 	/* Check that the original operation and the optimized ones will trap
2673 	   under the same condition.  */
2674 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2675 		     && (lcompcode != COMPCODE_EQ)
2676 		     && (lcompcode != COMPCODE_ORD);
2677 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2678 		     && (rcompcode != COMPCODE_EQ)
2679 		     && (rcompcode != COMPCODE_ORD);
2680 	bool trap = (compcode & COMPCODE_UNORD) == 0
2681 		    && (compcode != COMPCODE_EQ)
2682 		    && (compcode != COMPCODE_ORD);
2683 
2684         /* In a short-circuited boolean expression the LHS might be
2685 	   such that the RHS, if evaluated, will never trap.  For
2686 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2687 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2688 	   example, the expression above will never trap, hence
2689 	   optimizing it to x < y would be invalid).  */
2690         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2691             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2692           rtrap = false;
2693 
2694         /* If the comparison was short-circuited, and only the RHS
2695 	   trapped, we may now generate a spurious trap.  */
2696 	if (rtrap && !ltrap
2697 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2698 	  return NULL_TREE;
2699 
2700 	/* If we changed the conditions that cause a trap, we lose.  */
2701 	if ((ltrap || rtrap) != trap)
2702 	  return NULL_TREE;
2703       }
2704 
2705   if (compcode == COMPCODE_TRUE)
2706     return constant_boolean_node (true, truth_type);
2707   else if (compcode == COMPCODE_FALSE)
2708     return constant_boolean_node (false, truth_type);
2709   else
2710     {
2711       enum tree_code tcode;
2712 
2713       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2714       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2715     }
2716 }
2717 
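/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): the bit algebra combine_comparisons relies on.
   Each comparison is a mask over the four possible outcomes LT, EQ, GT
   and UNORDERED (bit values mirror enum comparison_code as I read it;
   treat them as a hedged assumption), so ANDing or ORing two
   comparisons of the same operands is plain bitwise & or |.  */
#if 0
#include <stdio.h>

enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };

int
main (void)
{
  int le = LT | EQ;             /* x <= y */
  int ge = GT | EQ;             /* x >= y */
  printf ("%d\n", le & ge);     /* 2 = EQ: (x<=y) && (x>=y) */
  printf ("%d\n", le | ge);     /* 7 = LT|EQ|GT = ORD: (x<=y) || (x>=y) */
  return 0;
}
#endif
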
2718 /* Return nonzero if two operands (typically of the same tree node)
2719    are necessarily equal.  If either argument has side-effects this
2720    function returns zero.  FLAGS modifies behavior as follows:
2721 
2722    If OEP_ONLY_CONST is set, only return nonzero for constants.
2723    This function tests whether the operands are indistinguishable;
2724    it does not test whether they are equal using C's == operation.
2725    The distinction is important for IEEE floating point, because
2726    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2727    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2728 
2729    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2730    even though it may hold multiple values during a function.
2731    This is because a GCC tree node guarantees that nothing else is
2732    executed between the evaluation of its "operands" (which may often
2733    be evaluated in arbitrary order).  Hence if the operands themselves
2734    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2735    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2736    unset means assuming isochronic (or instantaneous) tree equivalence.
2737    Unless comparing arbitrary expression trees, such as from different
2738    statements, this flag can usually be left unset.
2739 
2740    If OEP_PURE_SAME is set, then pure functions with identical arguments
2741    are considered the same.  It is used when the caller has other ways
2742    to ensure that global memory is unchanged in between.  */
2743 
2744 int
2745 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2746 {
2747   /* If either is ERROR_MARK, they aren't equal.  */
2748   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2749       || TREE_TYPE (arg0) == error_mark_node
2750       || TREE_TYPE (arg1) == error_mark_node)
2751     return 0;
2752 
2753   /* Similar, if either does not have a type (like a released SSA name),
2754      they aren't equal.  */
2755   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2756     return 0;
2757 
2758   /* Check equality of integer constants before bailing out due to
2759      precision differences.  */
2760   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2761     return tree_int_cst_equal (arg0, arg1);
2762 
2763   /* If both types don't have the same signedness, then we can't consider
2764      them equal.  We must check this before the STRIP_NOPS calls
2765      because they may change the signedness of the arguments.  As pointers
2766      strictly don't have a signedness, require either two pointers or
2767      two non-pointers as well.  */
2768   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2769       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2770     return 0;
2771 
2772   /* We cannot consider pointers to different address space equal.  */
2773   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2774       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2775 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2776     return 0;
2777 
2778   /* If both types don't have the same precision, then it is not safe
2779      to strip NOPs.  */
2780   if (element_precision (TREE_TYPE (arg0))
2781       != element_precision (TREE_TYPE (arg1)))
2782     return 0;
2783 
2784   STRIP_NOPS (arg0);
2785   STRIP_NOPS (arg1);
2786 
2787   /* In case both args are comparisons but with different comparison
2788      code, try to swap the comparison operands of one arg to produce
2789      a match and compare that variant.  */
2790   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2791       && COMPARISON_CLASS_P (arg0)
2792       && COMPARISON_CLASS_P (arg1))
2793     {
2794       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2795 
2796       if (TREE_CODE (arg0) == swap_code)
2797 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2798 			        TREE_OPERAND (arg1, 1), flags)
2799 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2800 				   TREE_OPERAND (arg1, 0), flags);
2801     }
2802 
2803   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2804       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
2805       && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2806     return 0;
2807 
2808   /* This is needed for conversions and for COMPONENT_REF.
2809      Might as well play it safe and always test this.  */
2810   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2811       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2812       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2813     return 0;
2814 
2815   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2816      We don't care about side effects in that case because the SAVE_EXPR
2817      takes care of that for us. In all other cases, two expressions are
2818      equal if they have no side effects.  If we have two identical
2819      expressions with side effects that should be treated the same due
2820      to the only side effects being identical SAVE_EXPR's, that will
2821      be detected in the recursive calls below.
2822      If we are taking an invariant address of two identical objects
2823      they are necessarily equal as well.  */
2824   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2825       && (TREE_CODE (arg0) == SAVE_EXPR
2826 	  || (flags & OEP_CONSTANT_ADDRESS_OF)
2827 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2828     return 1;
2829 
2830   /* Next handle constant cases, those for which we can return 1 even
2831      if ONLY_CONST is set.  */
2832   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2833     switch (TREE_CODE (arg0))
2834       {
2835       case INTEGER_CST:
2836 	return tree_int_cst_equal (arg0, arg1);
2837 
2838       case FIXED_CST:
2839 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2840 				       TREE_FIXED_CST (arg1));
2841 
2842       case REAL_CST:
2843 	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2844 				   TREE_REAL_CST (arg1)))
2845 	  return 1;
2846 
2847 
2848 	if (!HONOR_SIGNED_ZEROS (arg0))
2849 	  {
2850 	    /* If we do not distinguish between signed and unsigned zero,
2851 	       consider them equal.  */
2852 	    if (real_zerop (arg0) && real_zerop (arg1))
2853 	      return 1;
2854 	  }
2855 	return 0;
2856 
2857       case VECTOR_CST:
2858 	{
2859 	  unsigned i;
2860 
2861 	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2862 	    return 0;
2863 
2864 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2865 	    {
2866 	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2867 				    VECTOR_CST_ELT (arg1, i), flags))
2868 		return 0;
2869 	    }
2870 	  return 1;
2871 	}
2872 
2873       case COMPLEX_CST:
2874 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2875 				 flags)
2876 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2877 				    flags));
2878 
2879       case STRING_CST:
2880 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2881 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2882 			      TREE_STRING_POINTER (arg1),
2883 			      TREE_STRING_LENGTH (arg0)));
2884 
2885       case ADDR_EXPR:
2886 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2887 				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2888 				? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2889       default:
2890 	break;
2891       }
2892 
2893   if (flags & OEP_ONLY_CONST)
2894     return 0;
2895 
2896 /* Define macros to test an operand from arg0 and arg1 for equality and a
2897    variant that allows null and views null as being different from any
2898    non-null value.  In the latter case, if either is null, then both
2899    must be; otherwise, do the normal comparison.  */
2900 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2901 				    TREE_OPERAND (arg1, N), flags)
2902 
2903 #define OP_SAME_WITH_NULL(N)				\
2904   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2905    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2906 
2907   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2908     {
2909     case tcc_unary:
2910       /* Two conversions are equal only if signedness and modes match.  */
2911       switch (TREE_CODE (arg0))
2912         {
2913 	CASE_CONVERT:
2914         case FIX_TRUNC_EXPR:
2915 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2916 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2917 	    return 0;
2918 	  break;
2919 	default:
2920 	  break;
2921 	}
2922 
2923       return OP_SAME (0);
2924 
2925 
2926     case tcc_comparison:
2927     case tcc_binary:
2928       if (OP_SAME (0) && OP_SAME (1))
2929 	return 1;
2930 
2931       /* For commutative ops, allow the other order.  */
2932       return (commutative_tree_code (TREE_CODE (arg0))
2933 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2934 				  TREE_OPERAND (arg1, 1), flags)
2935 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2936 				  TREE_OPERAND (arg1, 0), flags));
2937 
2938     case tcc_reference:
2939       /* If either of the pointer (or reference) expressions we are
2940 	 dereferencing contain a side effect, these cannot be equal,
2941 	 but their addresses can be.  */
2942       if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2943 	  && (TREE_SIDE_EFFECTS (arg0)
2944 	      || TREE_SIDE_EFFECTS (arg1)))
2945 	return 0;
2946 
2947       switch (TREE_CODE (arg0))
2948 	{
2949 	case INDIRECT_REF:
2950 	  if (!(flags & OEP_ADDRESS_OF)
2951 	      && (TYPE_ALIGN (TREE_TYPE (arg0))
2952 		  != TYPE_ALIGN (TREE_TYPE (arg1))))
2953 	    return 0;
2954 	  flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2955 	  return OP_SAME (0);
2956 
2957 	case REALPART_EXPR:
2958 	case IMAGPART_EXPR:
2959 	  return OP_SAME (0);
2960 
2961 	case TARGET_MEM_REF:
2962 	case MEM_REF:
2963 	  /* Require equal access sizes, and similar pointer types.
2964 	     We can have incomplete types for array references of
2965 	     variable-sized arrays from the Fortran frontend
2966 	     though.  Also verify the types are compatible.  */
2967 	  if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2968 		   || (TYPE_SIZE (TREE_TYPE (arg0))
2969 		       && TYPE_SIZE (TREE_TYPE (arg1))
2970 		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2971 					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2972 		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2973 		  && ((flags & OEP_ADDRESS_OF)
2974 		      || (alias_ptr_types_compatible_p
2975 			    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2976 			     TREE_TYPE (TREE_OPERAND (arg1, 1)))
2977 			  && (MR_DEPENDENCE_CLIQUE (arg0)
2978 			      == MR_DEPENDENCE_CLIQUE (arg1))
2979 			  && (MR_DEPENDENCE_BASE (arg0)
2980 			      == MR_DEPENDENCE_BASE (arg1))
2981 			  && (TYPE_ALIGN (TREE_TYPE (arg0))
2982 			    == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2983 	    return 0;
2984 	  flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2985 	  return (OP_SAME (0) && OP_SAME (1)
2986 		  /* TARGET_MEM_REF require equal extra operands.  */
2987 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
2988 		      || (OP_SAME_WITH_NULL (2)
2989 			  && OP_SAME_WITH_NULL (3)
2990 			  && OP_SAME_WITH_NULL (4))));
2991 
2992 	case ARRAY_REF:
2993 	case ARRAY_RANGE_REF:
2994 	  /* Operands 2 and 3 may be null.
2995 	     Compare the array index by value first if it is constant, as we
2996 	     may have different types but the same value here.  */
2997 	  if (!OP_SAME (0))
2998 	    return 0;
2999 	  flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3000 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3001 				       TREE_OPERAND (arg1, 1))
3002 		   || OP_SAME (1))
3003 		  && OP_SAME_WITH_NULL (2)
3004 		  && OP_SAME_WITH_NULL (3));
3005 
3006 	case COMPONENT_REF:
3007 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3008 	     may be NULL when we're called to compare MEM_EXPRs.  */
3009 	  if (!OP_SAME_WITH_NULL (0)
3010 	      || !OP_SAME (1))
3011 	    return 0;
3012 	  flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3013 	  return OP_SAME_WITH_NULL (2);
3014 
3015 	case BIT_FIELD_REF:
3016 	  if (!OP_SAME (0))
3017 	    return 0;
3018 	  flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3019 	  return OP_SAME (1) && OP_SAME (2);
3020 
3021 	default:
3022 	  return 0;
3023 	}
3024 
3025     case tcc_expression:
3026       switch (TREE_CODE (arg0))
3027 	{
3028 	case ADDR_EXPR:
3029 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3030 				  TREE_OPERAND (arg1, 0),
3031 				  flags | OEP_ADDRESS_OF);
3032 
3033 	case TRUTH_NOT_EXPR:
3034 	  return OP_SAME (0);
3035 
3036 	case TRUTH_ANDIF_EXPR:
3037 	case TRUTH_ORIF_EXPR:
3038 	  return OP_SAME (0) && OP_SAME (1);
3039 
3040 	case FMA_EXPR:
3041 	case WIDEN_MULT_PLUS_EXPR:
3042 	case WIDEN_MULT_MINUS_EXPR:
3043 	  if (!OP_SAME (2))
3044 	    return 0;
3045 	  /* The multiplication operands are commutative.  */
3046 	  /* FALLTHRU */
3047 
3048 	case TRUTH_AND_EXPR:
3049 	case TRUTH_OR_EXPR:
3050 	case TRUTH_XOR_EXPR:
3051 	  if (OP_SAME (0) && OP_SAME (1))
3052 	    return 1;
3053 
3054 	  /* Otherwise take into account this is a commutative operation.  */
3055 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3056 				   TREE_OPERAND (arg1, 1), flags)
3057 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3058 				      TREE_OPERAND (arg1, 0), flags));
3059 
3060 	case COND_EXPR:
3061 	case VEC_COND_EXPR:
3062 	case DOT_PROD_EXPR:
3063 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3064 
3065 	default:
3066 	  return 0;
3067 	}
3068 
3069     case tcc_vl_exp:
3070       switch (TREE_CODE (arg0))
3071 	{
3072 	case CALL_EXPR:
3073 	  /* If the CALL_EXPRs call different functions, then they
3074 	     clearly cannot be equal.  */
3075 	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3076 				 flags))
3077 	    return 0;
3078 
3079 	  {
3080 	    unsigned int cef = call_expr_flags (arg0);
3081 	    if (flags & OEP_PURE_SAME)
3082 	      cef &= ECF_CONST | ECF_PURE;
3083 	    else
3084 	      cef &= ECF_CONST;
3085 	    if (!cef)
3086 	      return 0;
3087 	  }
3088 
3089 	  /* Now see if all the arguments are the same.  */
3090 	  {
3091 	    const_call_expr_arg_iterator iter0, iter1;
3092 	    const_tree a0, a1;
3093 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3094 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3095 		 a0 && a1;
3096 		 a0 = next_const_call_expr_arg (&iter0),
3097 		   a1 = next_const_call_expr_arg (&iter1))
3098 	      if (! operand_equal_p (a0, a1, flags))
3099 		return 0;
3100 
3101 	    /* If we get here and both argument lists are exhausted
3102 	       then the CALL_EXPRs are equal.  */
3103 	    return ! (a0 || a1);
3104 	  }
3105 	default:
3106 	  return 0;
3107 	}
3108 
3109     case tcc_declaration:
3110       /* Consider __builtin_sqrt equal to sqrt.  */
3111       return (TREE_CODE (arg0) == FUNCTION_DECL
3112 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3113 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3114 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3115 
3116     default:
3117       return 0;
3118     }
3119 
3120 #undef OP_SAME
3121 #undef OP_SAME_WITH_NULL
3122 }
3123 
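/* Illustrative sketch (standalone C, not part of GCC; kept out of the
   build with #if 0): why operand_equal_p tests indistinguishability
   rather than C's ==, per the comment above.  IEEE -0.0 and 0.0 compare
   equal yet are distinguishable, and a NaN never compares equal to
   itself.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  printf ("%d %d\n", pz == nz,
          !signbit (pz) == !signbit (nz));  /* 1 0 */

  double n = NAN;
  printf ("%d\n", n == n);                  /* 0 */
  return 0;
}
#endif
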
3124 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3125    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3126 
3127    When in doubt, return 0.  */
3128 
3129 static int
3130 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3131 {
3132   int unsignedp1, unsignedpo;
3133   tree primarg0, primarg1, primother;
3134   unsigned int correct_width;
3135 
3136   if (operand_equal_p (arg0, arg1, 0))
3137     return 1;
3138 
3139   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3140       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3141     return 0;
3142 
3143   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3144      and see if the inner values are the same.  This removes any
3145      signedness comparison, which doesn't matter here.  */
3146   primarg0 = arg0, primarg1 = arg1;
3147   STRIP_NOPS (primarg0);
3148   STRIP_NOPS (primarg1);
3149   if (operand_equal_p (primarg0, primarg1, 0))
3150     return 1;
3151 
3152   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3153      actual comparison operand, ARG0.
3154 
3155      First throw away any conversions to wider types
3156      already present in the operands.  */
3157 
3158   primarg1 = get_narrower (arg1, &unsignedp1);
3159   primother = get_narrower (other, &unsignedpo);
3160 
3161   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3162   if (unsignedp1 == unsignedpo
3163       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3164       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3165     {
3166       tree type = TREE_TYPE (arg0);
3167 
3168       /* Make sure shorter operand is extended the right way
3169 	 to match the longer operand.  */
3170       primarg1 = fold_convert (signed_or_unsigned_type_for
3171 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3172 
3173       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3174 	return 1;
3175     }
3176 
3177   return 0;
3178 }
3179 
3180 /* See if ARG is an expression that is either a comparison or is performing
3181    arithmetic on comparisons.  The comparisons must only be comparing
3182    two different values, which will be stored in *CVAL1 and *CVAL2; if
3183    they are nonzero it means that some operands have already been found.
3184    No variables may be used anywhere else in the expression except in the
3185    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3186    the expression and save_expr needs to be called with CVAL1 and CVAL2.
3187 
3188    If this is true, return 1.  Otherwise, return zero.  */
3189 
3190 static int
3191 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3192 {
3193   enum tree_code code = TREE_CODE (arg);
3194   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3195 
3196   /* We can handle some of the tcc_expression cases here.  */
3197   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3198     tclass = tcc_unary;
3199   else if (tclass == tcc_expression
3200 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3201 	       || code == COMPOUND_EXPR))
3202     tclass = tcc_binary;
3203 
3204   else if (tclass == tcc_expression && code == SAVE_EXPR
3205 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3206     {
3207       /* If we've already found a CVAL1 or CVAL2, this expression is
3208 	 too complex to handle.  */
3209       if (*cval1 || *cval2)
3210 	return 0;
3211 
3212       tclass = tcc_unary;
3213       *save_p = 1;
3214     }
3215 
3216   switch (tclass)
3217     {
3218     case tcc_unary:
3219       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3220 
3221     case tcc_binary:
3222       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3223 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3224 				      cval1, cval2, save_p));
3225 
3226     case tcc_constant:
3227       return 1;
3228 
3229     case tcc_expression:
3230       if (code == COND_EXPR)
3231 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3232 				     cval1, cval2, save_p)
3233 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3234 					cval1, cval2, save_p)
3235 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3236 					cval1, cval2, save_p));
3237       return 0;
3238 
3239     case tcc_comparison:
3240       /* First see if we can handle the first operand, then the second.  For
3241 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3242 	 one side of the comparison is each of the values; test for the
3243 	 case where this isn't true by failing if the two operands
3244 	 are the same.  */
3245 
3246       if (operand_equal_p (TREE_OPERAND (arg, 0),
3247 			   TREE_OPERAND (arg, 1), 0))
3248 	return 0;
3249 
3250       if (*cval1 == 0)
3251 	*cval1 = TREE_OPERAND (arg, 0);
3252       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3253 	;
3254       else if (*cval2 == 0)
3255 	*cval2 = TREE_OPERAND (arg, 0);
3256       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3257 	;
3258       else
3259 	return 0;
3260 
3261       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3262 	;
3263       else if (*cval2 == 0)
3264 	*cval2 = TREE_OPERAND (arg, 1);
3265       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3266 	;
3267       else
3268 	return 0;
3269 
3270       return 1;
3271 
3272     default:
3273       return 0;
3274     }
3275 }
3276 
3277 /* ARG is a tree that is known to contain just arithmetic operations and
3278    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3279    any occurrence of OLD0 as an operand of a comparison and likewise for
3280    NEW1 and OLD1.  */
3281 
3282 static tree
3283 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3284 	    tree old1, tree new1)
3285 {
3286   tree type = TREE_TYPE (arg);
3287   enum tree_code code = TREE_CODE (arg);
3288   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3289 
3290   /* We can handle some of the tcc_expression cases here.  */
3291   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3292     tclass = tcc_unary;
3293   else if (tclass == tcc_expression
3294 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3295     tclass = tcc_binary;
3296 
3297   switch (tclass)
3298     {
3299     case tcc_unary:
3300       return fold_build1_loc (loc, code, type,
3301 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3302 				      old0, new0, old1, new1));
3303 
3304     case tcc_binary:
3305       return fold_build2_loc (loc, code, type,
3306 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3307 				      old0, new0, old1, new1),
3308 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3309 				      old0, new0, old1, new1));
3310 
3311     case tcc_expression:
3312       switch (code)
3313 	{
3314 	case SAVE_EXPR:
3315 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3316 			     old1, new1);
3317 
3318 	case COMPOUND_EXPR:
3319 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3320 			     old1, new1);
3321 
3322 	case COND_EXPR:
3323 	  return fold_build3_loc (loc, code, type,
3324 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3325 					  old0, new0, old1, new1),
3326 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3327 					  old0, new0, old1, new1),
3328 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3329 					  old0, new0, old1, new1));
3330 	default:
3331 	  break;
3332 	}
3333       /* Fall through - ???  */
3334 
3335     case tcc_comparison:
3336       {
3337 	tree arg0 = TREE_OPERAND (arg, 0);
3338 	tree arg1 = TREE_OPERAND (arg, 1);
3339 
3340 	/* We need to check both for exact equality and tree equality.  The
3341 	   former will be true if the operand has a side-effect.  In that
3342 	   case, we know the operand occurred exactly once.  */
3343 
3344 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3345 	  arg0 = new0;
3346 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3347 	  arg0 = new1;
3348 
3349 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3350 	  arg1 = new0;
3351 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3352 	  arg1 = new1;
3353 
3354 	return fold_build2_loc (loc, code, type, arg0, arg1);
3355       }
3356 
3357     default:
3358       return arg;
3359     }
3360 }
3361 
3362 /* Return a tree for the case when the result of an expression is RESULT
3363    converted to TYPE and OMITTED was previously an operand of the expression
3364    but is now not needed (e.g., we folded OMITTED * 0).
3365 
3366    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3367    the conversion of RESULT to TYPE.  */
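
/* Illustrative example (editor's sketch): when folding "f () * 0" to 0,
   the call still has to be evaluated, so a caller would write

     omit_one_operand_loc (loc, type, integer_zero_node, call_expr);

   (call_expr being a hypothetical tree for the call to f) and obtain
   the COMPOUND_EXPR (f (), 0).  Omitting a side-effect-free operand
   instead yields just 0 converted to TYPE.  */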
3368 
3369 tree
3370 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3371 {
3372   tree t = fold_convert_loc (loc, type, result);
3373 
3374   /* If the resulting operand is an empty statement, just return the omitted
3375      statement cast to void.  */
3376   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3377     return build1_loc (loc, NOP_EXPR, void_type_node,
3378 		       fold_ignored_result (omitted));
3379 
3380   if (TREE_SIDE_EFFECTS (omitted))
3381     return build2_loc (loc, COMPOUND_EXPR, type,
3382 		       fold_ignored_result (omitted), t);
3383 
3384   return non_lvalue_loc (loc, t);
3385 }
3386 
3387 /* Return a tree for the case when the result of an expression is RESULT
3388    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3389    of the expression but are now not needed.
3390 
3391    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3392    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3393    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3394    just do the conversion of RESULT to TYPE.  */
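
/* Illustrative example (editor's sketch): if both omitted operands have
   side effects, e.g. when "g () * h () * 0" folds to 0, the result is
   the nested COMPOUND_EXPR (g (), (h (), 0)), which preserves the
   required evaluation of OMITTED1 before OMITTED2.  */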
3395 
3396 tree
3397 omit_two_operands_loc (location_t loc, tree type, tree result,
3398 		       tree omitted1, tree omitted2)
3399 {
3400   tree t = fold_convert_loc (loc, type, result);
3401 
3402   if (TREE_SIDE_EFFECTS (omitted2))
3403     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3404   if (TREE_SIDE_EFFECTS (omitted1))
3405     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3406 
3407   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3408 }
3409 
3410 
3411 /* Return a simplified tree node for the truth-negation of ARG.  This
3412    never alters ARG itself.  We assume that ARG is an operation that
3413    returns a truth value (0 or 1).
3414 
3415    FIXME: one would think we would fold the result, but it causes
3416    problems with the dominator optimizer.  */
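
/* Illustrative rewrites performed below (editor's sketch):

     !(a && b)  ->  !a || !b		(De Morgan)
     !(a < b)   ->  a >= b		(integral operands)

   whereas for floating-point operands with -ftrapping-math the inverse
   of an ordered comparison such as "<" is not emitted, since it would
   change behavior on NaNs; NULL_TREE is returned instead and the
   caller keeps the explicit negation.  */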
3417 
3418 static tree
3419 fold_truth_not_expr (location_t loc, tree arg)
3420 {
3421   tree type = TREE_TYPE (arg);
3422   enum tree_code code = TREE_CODE (arg);
3423   location_t loc1, loc2;
3424 
3425   /* If this is a comparison, we can simply invert it, except for
3426      floating-point non-equality comparisons, in which case we just
3427      enclose a TRUTH_NOT_EXPR around what we have.  */
3428 
3429   if (TREE_CODE_CLASS (code) == tcc_comparison)
3430     {
3431       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3432       if (FLOAT_TYPE_P (op_type)
3433 	  && flag_trapping_math
3434 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3435 	  && code != NE_EXPR && code != EQ_EXPR)
3436 	return NULL_TREE;
3437 
3438       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3439       if (code == ERROR_MARK)
3440 	return NULL_TREE;
3441 
3442       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3443 			 TREE_OPERAND (arg, 1));
3444     }
3445 
3446   switch (code)
3447     {
3448     case INTEGER_CST:
3449       return constant_boolean_node (integer_zerop (arg), type);
3450 
3451     case TRUTH_AND_EXPR:
3452       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3453       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3454       return build2_loc (loc, TRUTH_OR_EXPR, type,
3455 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3456 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3457 
3458     case TRUTH_OR_EXPR:
3459       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3460       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3461       return build2_loc (loc, TRUTH_AND_EXPR, type,
3462 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3463 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3464 
3465     case TRUTH_XOR_EXPR:
3466       /* Here we can invert either operand.  We invert the first operand
3467 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3468 	 result is the XOR of the first operand with the inside of the
3469 	 negation of the second operand.  */
3470 
3471       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3472 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3473 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3474       else
3475 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3476 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3477 			   TREE_OPERAND (arg, 1));
3478 
3479     case TRUTH_ANDIF_EXPR:
3480       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3481       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3482       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3483 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3484 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3485 
3486     case TRUTH_ORIF_EXPR:
3487       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3488       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3489       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3490 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3491 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3492 
3493     case TRUTH_NOT_EXPR:
3494       return TREE_OPERAND (arg, 0);
3495 
3496     case COND_EXPR:
3497       {
3498 	tree arg1 = TREE_OPERAND (arg, 1);
3499 	tree arg2 = TREE_OPERAND (arg, 2);
3500 
3501 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3502 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3503 
3504 	/* A COND_EXPR may have a throw as one operand, which
3505 	   then has void type.  Just leave void operands
3506 	   as they are.  */
3507 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3508 			   VOID_TYPE_P (TREE_TYPE (arg1))
3509 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3510 			   VOID_TYPE_P (TREE_TYPE (arg2))
3511 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3512       }
3513 
3514     case COMPOUND_EXPR:
3515       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3516       return build2_loc (loc, COMPOUND_EXPR, type,
3517 			 TREE_OPERAND (arg, 0),
3518 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3519 
3520     case NON_LVALUE_EXPR:
3521       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3522       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3523 
3524     CASE_CONVERT:
3525       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3526 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3527 
3528       /* ... fall through ...  */
3529 
3530     case FLOAT_EXPR:
3531       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3532       return build1_loc (loc, TREE_CODE (arg), type,
3533 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3534 
3535     case BIT_AND_EXPR:
3536       if (!integer_onep (TREE_OPERAND (arg, 1)))
3537 	return NULL_TREE;
3538       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3539 
3540     case SAVE_EXPR:
3541       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3542 
3543     case CLEANUP_POINT_EXPR:
3544       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3545       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3546 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3547 
3548     default:
3549       return NULL_TREE;
3550     }
3551 }
3552 
3553 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3554    assume that ARG is an operation that returns a truth value (0 or 1
3555    for scalars, 0 or -1 for vectors).  Return the folded expression if
3556    folding is successful.  Otherwise, return NULL_TREE.  */
3557 
3558 static tree
3559 fold_invert_truthvalue (location_t loc, tree arg)
3560 {
3561   tree type = TREE_TYPE (arg);
3562   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3563 			      ? BIT_NOT_EXPR
3564 			      : TRUTH_NOT_EXPR,
3565 			 type, arg);
3566 }
3567 
3568 /* Return a simplified tree node for the truth-negation of ARG.  This
3569    never alters ARG itself.  We assume that ARG is an operation that
3570    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3571 
3572 tree
3573 invert_truthvalue_loc (location_t loc, tree arg)
3574 {
3575   if (TREE_CODE (arg) == ERROR_MARK)
3576     return arg;
3577 
3578   tree type = TREE_TYPE (arg);
3579   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3580 			       ? BIT_NOT_EXPR
3581 			       : TRUTH_NOT_EXPR,
3582 			  type, arg);
3583 }
3584 
3585 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3586    operands are another bit-wise operation with a common input.  If so,
3587    distribute the bit operations to save an operation and possibly two if
3588    constants are involved.  For example, convert
3589 	(A | B) & (A | C) into A | (B & C)
3590    Further simplification will occur if B and C are constants.
3591 
3592    If this optimization cannot be done, 0 will be returned.  */
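
/* Worked example (editor's sketch): with constant operands,

     (A | 4) & (A | 1)  ->  A | (4 & 1)  ->  A | 0  ->  A

   where the last two steps happen in the recursive folds triggered by
   the fold_build2_loc calls below.  */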
3593 
3594 static tree
3595 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3596 		     tree arg0, tree arg1)
3597 {
3598   tree common;
3599   tree left, right;
3600 
3601   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3602       || TREE_CODE (arg0) == code
3603       || (TREE_CODE (arg0) != BIT_AND_EXPR
3604 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3605     return 0;
3606 
3607   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3608     {
3609       common = TREE_OPERAND (arg0, 0);
3610       left = TREE_OPERAND (arg0, 1);
3611       right = TREE_OPERAND (arg1, 1);
3612     }
3613   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3614     {
3615       common = TREE_OPERAND (arg0, 0);
3616       left = TREE_OPERAND (arg0, 1);
3617       right = TREE_OPERAND (arg1, 0);
3618     }
3619   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3620     {
3621       common = TREE_OPERAND (arg0, 1);
3622       left = TREE_OPERAND (arg0, 0);
3623       right = TREE_OPERAND (arg1, 1);
3624     }
3625   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3626     {
3627       common = TREE_OPERAND (arg0, 1);
3628       left = TREE_OPERAND (arg0, 0);
3629       right = TREE_OPERAND (arg1, 0);
3630     }
3631   else
3632     return 0;
3633 
3634   common = fold_convert_loc (loc, type, common);
3635   left = fold_convert_loc (loc, type, left);
3636   right = fold_convert_loc (loc, type, right);
3637   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3638 		      fold_build2_loc (loc, code, type, left, right));
3639 }
3640 
3641 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3642    with code CODE.  This optimization is unsafe.  */
3643 static tree
3644 distribute_real_division (location_t loc, enum tree_code code, tree type,
3645 			  tree arg0, tree arg1)
3646 {
3647   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3648   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3649 
3650   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3651   if (mul0 == mul1
3652       && operand_equal_p (TREE_OPERAND (arg0, 1),
3653 		       TREE_OPERAND (arg1, 1), 0))
3654     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3655 			fold_build2_loc (loc, code, type,
3656 				     TREE_OPERAND (arg0, 0),
3657 				     TREE_OPERAND (arg1, 0)),
3658 			TREE_OPERAND (arg0, 1));
3659 
3660   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3661   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3662 		       TREE_OPERAND (arg1, 0), 0)
3663       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3664       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3665     {
3666       REAL_VALUE_TYPE r0, r1;
3667       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3668       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3669       if (!mul0)
3670 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3671       if (!mul1)
3672         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3673       real_arithmetic (&r0, code, &r0, &r1);
3674       return fold_build2_loc (loc, MULT_EXPR, type,
3675 			  TREE_OPERAND (arg0, 0),
3676 			  build_real (type, r0));
3677     }
3678 
3679   return NULL_TREE;
3680 }
3681 
3682 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3683    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
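
/* Illustrative example (editor's sketch): a request for 8 bits at bit
   position 16 of INNER yields BIT_FIELD_REF <inner, 8, 16>; if TYPE
   does not have precision 8, the reference is built in an 8-bit
   integer type and converted to TYPE afterwards.  When BITPOS is 0 and
   the reference covers all of INNER, a plain conversion suffices.  */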
3684 
3685 static tree
3686 make_bit_field_ref (location_t loc, tree inner, tree type,
3687 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3688 {
3689   tree result, bftype;
3690 
3691   if (bitpos == 0)
3692     {
3693       tree size = TYPE_SIZE (TREE_TYPE (inner));
3694       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3695 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3696 	  && tree_fits_shwi_p (size)
3697 	  && tree_to_shwi (size) == bitsize)
3698 	return fold_convert_loc (loc, type, inner);
3699     }
3700 
3701   bftype = type;
3702   if (TYPE_PRECISION (bftype) != bitsize
3703       || TYPE_UNSIGNED (bftype) == !unsignedp)
3704     bftype = build_nonstandard_integer_type (bitsize, 0);
3705 
3706   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3707 		       size_int (bitsize), bitsize_int (bitpos));
3708 
3709   if (bftype != type)
3710     result = fold_convert_loc (loc, type, result);
3711 
3712   return result;
3713 }
3714 
3715 /* Optimize a bit-field compare.
3716 
3717    There are two cases:  First is a compare against a constant and the
3718    second is a comparison of two items where the fields are at the same
3719    bit position relative to the start of a chunk (byte, halfword, word)
3720    large enough to contain it.  In these cases we can avoid the shift
3721    implicit in bitfield extractions.
3722 
3723    For constants, we emit a compare of the shifted constant with the
3724    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3725    compared.  For two fields at the same position, we do the ANDs with the
3726    similar mask and compare the result of the ANDs.
3727 
3728    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3729    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3730    are the left and right operands of the comparison, respectively.
3731 
3732    If the optimization described above can be done, we return the resulting
3733    tree.  Otherwise we return zero.  */
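
/* Illustrative example (editor's sketch, assuming a little-endian
   target and "struct { unsigned f : 3; } x" with f at bit 0): the
   constant case rewrites

     x.f == 5

   into roughly

     (x_word & 7) == 5

   where x_word is an unsigned word-sized load of the chunk containing
   the bit-field, saving the shift implicit in extracting x.f.  */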
3734 
3735 static tree
3736 optimize_bit_field_compare (location_t loc, enum tree_code code,
3737 			    tree compare_type, tree lhs, tree rhs)
3738 {
3739   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3740   tree type = TREE_TYPE (lhs);
3741   tree unsigned_type;
3742   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3743   machine_mode lmode, rmode, nmode;
3744   int lunsignedp, runsignedp;
3745   int lvolatilep = 0, rvolatilep = 0;
3746   tree linner, rinner = NULL_TREE;
3747   tree mask;
3748   tree offset;
3749 
3750   /* Get all the information about the extractions being done.  If the bit size
3751      if the same as the size of the underlying object, we aren't doing an
3752      is the same as the size of the underlying object, we aren't doing an
3753      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3754      then will no longer be able to replace it.  */
3755   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3756 				&lunsignedp, &lvolatilep, false);
3757   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3758       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3759     return 0;
3760 
3761  if (!const_p)
3762    {
3763      /* If this is not a constant, we can only do something if bit positions,
3764 	sizes, and signedness are the same.  */
3765      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3766 				   &runsignedp, &rvolatilep, false);
3767 
3768      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3769 	 || lunsignedp != runsignedp || offset != 0
3770 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3771        return 0;
3772    }
3773 
3774   /* See if we can find a mode to refer to this field.  We should be able to,
3775      but fail if we can't.  */
3776   nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3777 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3778 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3779 				TYPE_ALIGN (TREE_TYPE (rinner))),
3780 			 word_mode, false);
3781   if (nmode == VOIDmode)
3782     return 0;
3783 
3784   /* Get an unsigned type of the precision of this mode for the mask
3785      and shifts below.  */
3786   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3787 
3788   /* Compute the bit position and size for the new reference and our offset
3789      within it. If the new reference is the same size as the original, we
3790      won't optimize anything, so return zero.  */
3791   nbitsize = GET_MODE_BITSIZE (nmode);
3792   nbitpos = lbitpos & ~ (nbitsize - 1);
3793   lbitpos -= nbitpos;
3794   if (nbitsize == lbitsize)
3795     return 0;
3796 
3797   if (BYTES_BIG_ENDIAN)
3798     lbitpos = nbitsize - lbitsize - lbitpos;
3799 
3800   /* Make the mask to be used against the extracted field.  */
3801   mask = build_int_cst_type (unsigned_type, -1);
3802   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3803   mask = const_binop (RSHIFT_EXPR, mask,
3804 		      size_int (nbitsize - lbitsize - lbitpos));
3805 
3806   if (! const_p)
3807     /* If not comparing with constant, just rework the comparison
3808        and return.  */
3809     return fold_build2_loc (loc, code, compare_type,
3810 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3811 				     make_bit_field_ref (loc, linner,
3812 							 unsigned_type,
3813 							 nbitsize, nbitpos,
3814 							 1),
3815 				     mask),
3816 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3817 				     make_bit_field_ref (loc, rinner,
3818 							 unsigned_type,
3819 							 nbitsize, nbitpos,
3820 							 1),
3821 				     mask));
3822 
3823   /* Otherwise, we are handling the constant case. See if the constant is too
3824      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3825      this not only for its own sake, but to avoid having to test for this
3826      error case below.  If we didn't, we might generate wrong code.
3827 
3828      For unsigned fields, the constant shifted right by the field length should
3829      be all zero.  For signed fields, the high-order bits should agree with
3830      the sign bit.  */
3831 
3832   if (lunsignedp)
3833     {
3834       if (wi::lrshift (rhs, lbitsize) != 0)
3835 	{
3836 	  warning (0, "comparison is always %d due to width of bit-field",
3837 		   code == NE_EXPR);
3838 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3839 	}
3840     }
3841   else
3842     {
3843       wide_int tem = wi::arshift (rhs, lbitsize - 1);
3844       if (tem != 0 && tem != -1)
3845 	{
3846 	  warning (0, "comparison is always %d due to width of bit-field",
3847 		   code == NE_EXPR);
3848 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3849 	}
3850     }
3851 
3852   /* Single-bit compares should always be against zero.  */
3853   if (lbitsize == 1 && ! integer_zerop (rhs))
3854     {
3855       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3856       rhs = build_int_cst (type, 0);
3857     }
3858 
3859   /* Make a new bitfield reference, shift the constant over the
3860      appropriate number of bits and mask it with the computed mask
3861      (in case this was a signed field).  If we changed it, make a new one.  */
3862   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3863 
3864   rhs = const_binop (BIT_AND_EXPR,
3865 		     const_binop (LSHIFT_EXPR,
3866 				  fold_convert_loc (loc, unsigned_type, rhs),
3867 				  size_int (lbitpos)),
3868 		     mask);
3869 
3870   lhs = build2_loc (loc, code, compare_type,
3871 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3872   return lhs;
3873 }
3874 
3875 /* Subroutine for fold_truth_andor_1: decode a field reference.
3876 
3877    If EXP is a comparison reference, we return the innermost reference.
3878 
3879    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3880    set to the starting bit number.
3881 
3882    If the innermost field can be completely contained in a mode-sized
3883    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3884 
3885    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3886    otherwise it is not changed.
3887 
3888    *PUNSIGNEDP is set to the signedness of the field.
3889 
3890    *PMASK is set to the mask used.  This is either contained in a
3891    BIT_AND_EXPR or derived from the width of the field.
3892 
3893    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3894 
3895    Return 0 if this is not a component reference or is one that we can't
3896    do anything with.  */
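
/* Illustrative example (editor's sketch): for EXP = (s.f & 3), where
   "f" is a 5-bit unsigned bit-field at bit position 8 of "s", the
   function returns the reference to "s" with *PBITSIZE = 5,
   *PBITPOS = 8, *PAND_MASK = 3 and *PMASK = 3 (the BIT_AND mask 3
   merged with the 5-bit field mask 0x1f).  */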
3897 
3898 static tree
3899 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3900 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3901 			int *punsignedp, int *pvolatilep,
3902 			tree *pmask, tree *pand_mask)
3903 {
3904   tree outer_type = 0;
3905   tree and_mask = 0;
3906   tree mask, inner, offset;
3907   tree unsigned_type;
3908   unsigned int precision;
3909 
3910   /* All the optimizations using this function assume integer fields.
3911      There are problems with FP fields since the type_for_size call
3912      below can fail for, e.g., XFmode.  */
3913   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3914     return 0;
3915 
3916   /* We are interested in the bare arrangement of bits, so strip everything
3917      that doesn't affect the machine mode.  However, record the type of the
3918      outermost expression if it may matter below.  */
3919   if (CONVERT_EXPR_P (exp)
3920       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3921     outer_type = TREE_TYPE (exp);
3922   STRIP_NOPS (exp);
3923 
3924   if (TREE_CODE (exp) == BIT_AND_EXPR)
3925     {
3926       and_mask = TREE_OPERAND (exp, 1);
3927       exp = TREE_OPERAND (exp, 0);
3928       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3929       if (TREE_CODE (and_mask) != INTEGER_CST)
3930 	return 0;
3931     }
3932 
3933   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3934 			       punsignedp, pvolatilep, false);
3935   if ((inner == exp && and_mask == 0)
3936       || *pbitsize < 0 || offset != 0
3937       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3938     return 0;
3939 
3940   /* If the number of bits in the reference is the same as the bitsize of
3941      the outer type, then the outer type gives the signedness. Otherwise
3942      (in case of a small bitfield) the signedness is unchanged.  */
3943   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3944     *punsignedp = TYPE_UNSIGNED (outer_type);
3945 
3946   /* Compute the mask to access the bitfield.  */
3947   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3948   precision = TYPE_PRECISION (unsigned_type);
3949 
3950   mask = build_int_cst_type (unsigned_type, -1);
3951 
3952   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3953   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3954 
3955   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3956   if (and_mask != 0)
3957     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3958 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
3959 
3960   *pmask = mask;
3961   *pand_mask = and_mask;
3962   return inner;
3963 }
3964 
3965 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3966    bit positions and the type of MASK is signed.  */
3967 
3968 static int
3969 all_ones_mask_p (const_tree mask, unsigned int size)
3970 {
3971   tree type = TREE_TYPE (mask);
3972   unsigned int precision = TYPE_PRECISION (type);
3973 
3974   /* If this function returns true when the type of the mask is
3975      UNSIGNED, then there will be errors.  In particular see
3976      gcc.c-torture/execute/990326-1.c.  There does not appear to be
3977      any documentation paper trail as to why this is so.  But the pre
3978      wide-int worked with that restriction and it has been preserved
3979      here.  */
3980   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3981     return false;
3982 
3983   return wi::mask (size, false, precision) == mask;
3984 }
3985 
3986 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3987    represents the sign bit of EXP's type.  If EXP represents a sign
3988    or zero extension, also test VAL against the unextended type.
3989    The return value is the (sub)expression whose sign bit is VAL,
3990    or NULL_TREE otherwise.  */
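
/* Illustrative example (editor's sketch): for a 32-bit int EXP, VAL =
   0x80000000 (only the sign bit set) makes sign_bit_p return EXP; for
   EXP = (int) c with an 8-bit signed "c", VAL = 0x80 is also accepted
   -- it is the sign bit of the unextended type -- and the unextended
   operand "c" is returned.  */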
3991 
3992 tree
3993 sign_bit_p (tree exp, const_tree val)
3994 {
3995   int width;
3996   tree t;
3997 
3998   /* Tree EXP must have an integral type.  */
3999   t = TREE_TYPE (exp);
4000   if (! INTEGRAL_TYPE_P (t))
4001     return NULL_TREE;
4002 
4003   /* Tree VAL must be an integer constant.  */
4004   if (TREE_CODE (val) != INTEGER_CST
4005       || TREE_OVERFLOW (val))
4006     return NULL_TREE;
4007 
4008   width = TYPE_PRECISION (t);
4009   if (wi::only_sign_bit_p (val, width))
4010     return exp;
4011 
4012   /* Handle extension from a narrower type.  */
4013   if (TREE_CODE (exp) == NOP_EXPR
4014       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4015     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4016 
4017   return NULL_TREE;
4018 }
4019 
4020 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4021    to be evaluated unconditionally.  */
4022 
4023 static int
4024 simple_operand_p (const_tree exp)
4025 {
4026   /* Strip any conversions that don't change the machine mode.  */
4027   STRIP_NOPS (exp);
4028 
4029   return (CONSTANT_CLASS_P (exp)
4030   	  || TREE_CODE (exp) == SSA_NAME
4031 	  || (DECL_P (exp)
4032 	      && ! TREE_ADDRESSABLE (exp)
4033 	      && ! TREE_THIS_VOLATILE (exp)
4034 	      && ! DECL_NONLOCAL (exp)
4035 	      /* Don't regard global variables as simple.  They may be
4036 		 allocated in ways unknown to the compiler (shared memory,
4037 		 #pragma weak, etc.).  */
4038 	      && ! TREE_PUBLIC (exp)
4039 	      && ! DECL_EXTERNAL (exp)
4040 	      /* Weakrefs are not safe to be read, since they can be NULL.
4041  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4042 		 have DECL_WEAK flag set.  */
4043 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4044 	      /* Loading a static variable is unduly expensive, but global
4045 		 registers aren't expensive.  */
4046 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4047 }
4048 
4049 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4050    to be evaluated unconditionally.
4051    In addition to simple_operand_p, we assume that comparisons, conversions,
4052    and logic-not operations are simple, if their operands are simple, too.  */
4053 
4054 static bool
4055 simple_operand_p_2 (tree exp)
4056 {
4057   enum tree_code code;
4058 
4059   if (TREE_SIDE_EFFECTS (exp)
4060       || tree_could_trap_p (exp))
4061     return false;
4062 
4063   while (CONVERT_EXPR_P (exp))
4064     exp = TREE_OPERAND (exp, 0);
4065 
4066   code = TREE_CODE (exp);
4067 
4068   if (TREE_CODE_CLASS (code) == tcc_comparison)
4069     return (simple_operand_p (TREE_OPERAND (exp, 0))
4070 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4071 
4072   if (code == TRUTH_NOT_EXPR)
4073       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4074 
4075   return simple_operand_p (exp);
4076 }
4077 
4078 
4079 /* The following functions are subroutines to fold_range_test and allow it to
4080    try to change a logical combination of comparisons into a range test.
4081 
4082    For example, both
4083 	X == 2 || X == 3 || X == 4 || X == 5
4084    and
4085 	X >= 2 && X <= 5
4086    are converted to
4087 	(unsigned) (X - 2) <= 3
4088 
4089    We describe each set of comparisons as being either inside or outside
4090    a range, using a variable named like IN_P, and then describe the
4091    range with a lower and upper bound.  If one of the bounds is omitted,
4092    it represents either the highest or lowest value of the type.
4093 
4094    In the comments below, we represent a range by two numbers in brackets
4095    preceded by a "+" to designate being inside that range, or a "-" to
4096    designate being outside that range, so the condition can be inverted by
4097    flipping the prefix.  An omitted bound is represented by a "-".  For
4098    example, "- [-, 10]" means being outside the range starting at the lowest
4099    possible value and ending at 10, in other words, being greater than 10.
4100    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4101    always false.
4102 
4103    We set up things so that the missing bounds are handled in a consistent
4104    manner so neither a missing bound nor "true" and "false" need to be
4105    handled using a special case.  */
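
/* Worked example (editor's sketch): both source-level forms

     static int f_or  (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
     static int f_and (int x) { return x >= 2 && x <= 5; }

   describe the range "+ [2, 5]" and fold to the single branch-free
   test

     (unsigned) (x - 2) <= 3

   costing one subtraction and one comparison.  */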
4106 
4107 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4108    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4109    and UPPER1_P are nonzero if the respective argument is an upper bound
4110    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4111    must be specified for a comparison.  ARG1 will be converted to ARG0's
4112    type if both are specified.  */
4113 
4114 static tree
4115 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4116 	     tree arg1, int upper1_p)
4117 {
4118   tree tem;
4119   int result;
4120   int sgn0, sgn1;
4121 
4122   /* If neither arg represents infinity, do the normal operation.
4123      Else, if not a comparison, return infinity.  Else handle the special
4124      comparison rules. Note that most of the cases below won't occur, but
4125      are handled for consistency.  */
4126 
4127   if (arg0 != 0 && arg1 != 0)
4128     {
4129       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4130 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4131       STRIP_NOPS (tem);
4132       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4133     }
4134 
4135   if (TREE_CODE_CLASS (code) != tcc_comparison)
4136     return 0;
4137 
4138   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4139      for neither.  In real maths, we cannot assume open ended ranges are
4140      the same. But, this is computer arithmetic, where numbers are finite.
4141      We can therefore make the transformation of any unbounded range with
4142      the value Z, Z being greater than any representable number. This permits
4143      us to treat unbounded ranges as equal.  */
4144   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4145   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4146   switch (code)
4147     {
4148     case EQ_EXPR:
4149       result = sgn0 == sgn1;
4150       break;
4151     case NE_EXPR:
4152       result = sgn0 != sgn1;
4153       break;
4154     case LT_EXPR:
4155       result = sgn0 < sgn1;
4156       break;
4157     case LE_EXPR:
4158       result = sgn0 <= sgn1;
4159       break;
4160     case GT_EXPR:
4161       result = sgn0 > sgn1;
4162       break;
4163     case GE_EXPR:
4164       result = sgn0 >= sgn1;
4165       break;
4166     default:
4167       gcc_unreachable ();
4168     }
4169 
4170   return constant_boolean_node (result, type);
4171 }
4172 
4173 /* Helper routine for make_range.  Perform one step for it, return
4174    new expression if the loop should continue or NULL_TREE if it should
4175    stop.  */
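
/* Illustrative step (editor's sketch): starting from the initial range
   "- [0, 0]" (i.e. EXP != 0), a step with CODE = LT_EXPR and ARG1 = 10
   on an int operand produces "- [10, -]", which is exactly "EXP < 10";
   a subsequent step for NEGATE_EXPR would map a range "+ [a, b]" to
   "+ [-b, -a]".  */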
4176 
4177 tree
4178 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4179 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4180 		 bool *strict_overflow_p)
4181 {
4182   tree arg0_type = TREE_TYPE (arg0);
4183   tree n_low, n_high, low = *p_low, high = *p_high;
4184   int in_p = *p_in_p, n_in_p;
4185 
4186   switch (code)
4187     {
4188     case TRUTH_NOT_EXPR:
4189       /* We can only do something if the range is testing for zero.  */
4190       if (low == NULL_TREE || high == NULL_TREE
4191 	  || ! integer_zerop (low) || ! integer_zerop (high))
4192 	return NULL_TREE;
4193       *p_in_p = ! in_p;
4194       return arg0;
4195 
4196     case EQ_EXPR: case NE_EXPR:
4197     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4198       /* We can only do something if the range is testing for zero
4199 	 and if the second operand is an integer constant.  Note that
4200 	 saying something is "in" the range we make is done by
4201 	 complementing IN_P, since on entry it is set for the initial
4202 	 case of being not equal to zero; "out" is leaving it alone.  */
4203       if (low == NULL_TREE || high == NULL_TREE
4204 	  || ! integer_zerop (low) || ! integer_zerop (high)
4205 	  || TREE_CODE (arg1) != INTEGER_CST)
4206 	return NULL_TREE;
4207 
4208       switch (code)
4209 	{
4210 	case NE_EXPR:  /* - [c, c]  */
4211 	  low = high = arg1;
4212 	  break;
4213 	case EQ_EXPR:  /* + [c, c]  */
4214 	  in_p = ! in_p, low = high = arg1;
4215 	  break;
4216 	case GT_EXPR:  /* - [-, c] */
4217 	  low = 0, high = arg1;
4218 	  break;
4219 	case GE_EXPR:  /* + [c, -] */
4220 	  in_p = ! in_p, low = arg1, high = 0;
4221 	  break;
4222 	case LT_EXPR:  /* - [c, -] */
4223 	  low = arg1, high = 0;
4224 	  break;
4225 	case LE_EXPR:  /* + [-, c] */
4226 	  in_p = ! in_p, low = 0, high = arg1;
4227 	  break;
4228 	default:
4229 	  gcc_unreachable ();
4230 	}
4231 
4232       /* If this is an unsigned comparison, we also know that EXP is
4233 	 greater than or equal to zero.  We base the range tests we make
4234 	 on that fact, so we record it here so we can parse existing
4235 	 range tests.  We test arg0_type since often the return type
4236 	 of, e.g. EQ_EXPR, is boolean.  */
4237       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4238 	{
4239 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4240 			      in_p, low, high, 1,
4241 			      build_int_cst (arg0_type, 0),
4242 			      NULL_TREE))
4243 	    return NULL_TREE;
4244 
4245 	  in_p = n_in_p, low = n_low, high = n_high;
4246 
4247 	  /* If the high bound is missing, but we have a nonzero low
4248 	     bound, reverse the range so it goes from zero to the low bound
4249 	     minus 1.  */
4250 	  if (high == 0 && low && ! integer_zerop (low))
4251 	    {
4252 	      in_p = ! in_p;
4253 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4254 				  build_int_cst (TREE_TYPE (low), 1), 0);
4255 	      low = build_int_cst (arg0_type, 0);
4256 	    }
4257 	}
4258 
4259       *p_low = low;
4260       *p_high = high;
4261       *p_in_p = in_p;
4262       return arg0;
4263 
4264     case NEGATE_EXPR:
4265       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4266 	 low and high are non-NULL, then normalize will DTRT.  */
4267       if (!TYPE_UNSIGNED (arg0_type)
4268 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4269 	{
4270 	  if (low == NULL_TREE)
4271 	    low = TYPE_MIN_VALUE (arg0_type);
4272 	  if (high == NULL_TREE)
4273 	    high = TYPE_MAX_VALUE (arg0_type);
4274 	}
4275 
4276       /* (-x) IN [a,b] -> x in [-b, -a]  */
4277       n_low = range_binop (MINUS_EXPR, exp_type,
4278 			   build_int_cst (exp_type, 0),
4279 			   0, high, 1);
4280       n_high = range_binop (MINUS_EXPR, exp_type,
4281 			    build_int_cst (exp_type, 0),
4282 			    0, low, 0);
4283       if (n_high != 0 && TREE_OVERFLOW (n_high))
4284 	return NULL_TREE;
4285       goto normalize;
4286 
4287     case BIT_NOT_EXPR:
4288       /* ~ X -> -X - 1  */
4289       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4290 			 build_int_cst (exp_type, 1));
4291 
4292     case PLUS_EXPR:
4293     case MINUS_EXPR:
4294       if (TREE_CODE (arg1) != INTEGER_CST)
4295 	return NULL_TREE;
4296 
4297       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4298 	 move a constant to the other side.  */
4299       if (!TYPE_UNSIGNED (arg0_type)
4300 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4301 	return NULL_TREE;
4302 
4303       /* If EXP is signed, any overflow in the computation is undefined,
4304 	 so we don't worry about it so long as our computations on
4305 	 the bounds don't overflow.  For unsigned, overflow is defined
4306 	 and this is exactly the right thing.  */
4307       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4308 			   arg0_type, low, 0, arg1, 0);
4309       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4310 			    arg0_type, high, 1, arg1, 0);
4311       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4312 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4313 	return NULL_TREE;
4314 
4315       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4316 	*strict_overflow_p = true;
4317 
4318       normalize:
4319 	/* Check for an unsigned range which has wrapped around the maximum
4320 	   value thus making n_high < n_low, and normalize it.  */
4321 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4322 	  {
4323 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4324 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4325 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4326 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4327 
4328 	    /* If the range is of the form +/- [ x+1, x ], we won't
4329 	       be able to normalize it.  But then, it represents the
4330 	       whole range or the empty set, so make it
4331 	       +/- [ -, - ].  */
4332 	    if (tree_int_cst_equal (n_low, low)
4333 		&& tree_int_cst_equal (n_high, high))
4334 	      low = high = 0;
4335 	    else
4336 	      in_p = ! in_p;
4337 	  }
4338 	else
4339 	  low = n_low, high = n_high;
4340 
4341 	*p_low = low;
4342 	*p_high = high;
4343 	*p_in_p = in_p;
4344 	return arg0;
4345 
4346     CASE_CONVERT:
4347     case NON_LVALUE_EXPR:
4348       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4349 	return NULL_TREE;
4350 
4351       if (! INTEGRAL_TYPE_P (arg0_type)
4352 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4353 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4354 	return NULL_TREE;
4355 
4356       n_low = low, n_high = high;
4357 
4358       if (n_low != 0)
4359 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4360 
4361       if (n_high != 0)
4362 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4363 
4364       /* If we're converting arg0 from an unsigned type to exp's
4365 	 signed type, we will be doing the comparison as unsigned.
4366 	 The tests above have already verified that LOW and HIGH
4367 	 are both positive.
4368 
4369 	 So we have to ensure that we will handle large unsigned
4370 	 values the same way that the current signed bounds treat
4371 	 negative values.  */
4372 
4373       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4374 	{
4375 	  tree high_positive;
4376 	  tree equiv_type;
4377 	  /* For fixed-point modes, we need to pass the saturating flag
4378 	     as the 2nd parameter.  */
4379 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4380 	    equiv_type
4381 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4382 						TYPE_SATURATING (arg0_type));
4383 	  else
4384 	    equiv_type
4385 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4386 
4387 	  /* A range without an upper bound is, naturally, unbounded.
4388 	     Since convert would have cropped a very large value, use
4389 	     the max value for the destination type.  */
4390 	  high_positive
4391 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4392 	      : TYPE_MAX_VALUE (arg0_type);
4393 
4394 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4395 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4396 					     fold_convert_loc (loc, arg0_type,
4397 							       high_positive),
4398 					     build_int_cst (arg0_type, 1));
4399 
4400 	  /* If the low bound is specified, "and" the range with the
4401 	     range for which the original unsigned value will be
4402 	     positive.  */
4403 	  if (low != 0)
4404 	    {
4405 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4406 				  1, fold_convert_loc (loc, arg0_type,
4407 						       integer_zero_node),
4408 				  high_positive))
4409 		return NULL_TREE;
4410 
4411 	      in_p = (n_in_p == in_p);
4412 	    }
4413 	  else
4414 	    {
4415 	      /* Otherwise, "or" the range with the range of the input
4416 		 that will be interpreted as negative.  */
4417 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4418 				  1, fold_convert_loc (loc, arg0_type,
4419 						       integer_zero_node),
4420 				  high_positive))
4421 		return NULL_TREE;
4422 
4423 	      in_p = (in_p != n_in_p);
4424 	    }
4425 	}
4426 
4427       *p_low = n_low;
4428       *p_high = n_high;
4429       *p_in_p = in_p;
4430       return arg0;
4431 
4432     default:
4433       return NULL_TREE;
4434     }
4435 }
4436 
4437 /* Given EXP, a logical expression, set the range it is testing into
4438    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4439    actually being tested.  *PLOW and *PHIGH will be made of the same
4440    type as the returned expression.  If EXP is not a comparison, we
4441    will most likely not be returning a useful value and range.  Set
4442    *STRICT_OVERFLOW_P to true if the return value is only valid
4443    because signed overflow is undefined; otherwise, do not change
4444    *STRICT_OVERFLOW_P.  */
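
/* Illustrative example (editor's sketch): for EXP = "x > 4" with int x,
   make_range returns x with *PIN_P = 0, *PLOW = NULL_TREE and
   *PHIGH = 4, i.e. "x outside [-, 4]".  For "x <= 9" with unsigned x
   it returns x with *PIN_P = 1, *PLOW = 0 and *PHIGH = 9, the
   unsignedness supplying the implicit lower bound of zero.  */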
4445 
4446 tree
4447 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4448 	    bool *strict_overflow_p)
4449 {
4450   enum tree_code code;
4451   tree arg0, arg1 = NULL_TREE;
4452   tree exp_type, nexp;
4453   int in_p;
4454   tree low, high;
4455   location_t loc = EXPR_LOCATION (exp);
4456 
4457   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4458      and see if we can refine the range.  Some of the cases below may not
4459      happen, but it doesn't seem worth worrying about this.  We keep
4460      looping as long as make_range_step can refine the range; once it
4461      returns NULL_TREE we stop.  */
4462 
4463   in_p = 0;
4464   low = high = build_int_cst (TREE_TYPE (exp), 0);
4465 
4466   while (1)
4467     {
4468       code = TREE_CODE (exp);
4469       exp_type = TREE_TYPE (exp);
4470       arg0 = NULL_TREE;
4471 
4472       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4473 	{
4474 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4475 	    arg0 = TREE_OPERAND (exp, 0);
4476 	  if (TREE_CODE_CLASS (code) == tcc_binary
4477 	      || TREE_CODE_CLASS (code) == tcc_comparison
4478 	      || (TREE_CODE_CLASS (code) == tcc_expression
4479 		  && TREE_OPERAND_LENGTH (exp) > 1))
4480 	    arg1 = TREE_OPERAND (exp, 1);
4481 	}
4482       if (arg0 == NULL_TREE)
4483 	break;
4484 
4485       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4486 			      &high, &in_p, strict_overflow_p);
4487       if (nexp == NULL_TREE)
4488 	break;
4489       exp = nexp;
4490     }
4491 
4492   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4493   if (TREE_CODE (exp) == INTEGER_CST)
4494     {
4495       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4496 						 exp, 0, low, 0))
4497 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4498 						    exp, 1, high, 1)));
4499       low = high = 0;
4500       exp = 0;
4501     }
4502 
4503   *pin_p = in_p, *plow = low, *phigh = high;
4504   return exp;
4505 }
4506 
4507 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4508    type, TYPE, return an expression to test if EXP is in (or out of, depending
4509    on IN_P) the range.  Return 0 if the test couldn't be created.  */
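
/* Illustrative example (editor's sketch): this is the inverse direction
   of make_range.  For IN_P = 1, LOW = 2, HIGH = 5 and an int EXP, the
   code below switches to the unsigned equivalent type and reduces the
   test to roughly

     (unsigned) EXP - 2 <= 3

   i.e. the "(unsigned) (X - 2) <= 3" form of the overview comment
   further above.  */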
4510 
4511 tree
4512 build_range_check (location_t loc, tree type, tree exp, int in_p,
4513 		   tree low, tree high)
4514 {
4515   tree etype = TREE_TYPE (exp), value;
4516 
4517 #ifdef HAVE_canonicalize_funcptr_for_compare
4518   /* Disable this optimization for function pointer expressions
4519      on targets that require function pointer canonicalization.  */
4520   if (HAVE_canonicalize_funcptr_for_compare
4521       && TREE_CODE (etype) == POINTER_TYPE
4522       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4523     return NULL_TREE;
4524 #endif
4525 
4526   if (! in_p)
4527     {
4528       value = build_range_check (loc, type, exp, 1, low, high);
4529       if (value != 0)
4530         return invert_truthvalue_loc (loc, value);
4531 
4532       return 0;
4533     }
4534 
4535   if (low == 0 && high == 0)
4536     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4537 
4538   if (low == 0)
4539     return fold_build2_loc (loc, LE_EXPR, type, exp,
4540 			fold_convert_loc (loc, etype, high));
4541 
4542   if (high == 0)
4543     return fold_build2_loc (loc, GE_EXPR, type, exp,
4544 			fold_convert_loc (loc, etype, low));
4545 
4546   if (operand_equal_p (low, high, 0))
4547     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4548 			fold_convert_loc (loc, etype, low));
4549 
4550   if (integer_zerop (low))
4551     {
4552       if (! TYPE_UNSIGNED (etype))
4553 	{
4554 	  etype = unsigned_type_for (etype);
4555 	  high = fold_convert_loc (loc, etype, high);
4556 	  exp = fold_convert_loc (loc, etype, exp);
4557 	}
4558       return build_range_check (loc, type, exp, 1, 0, high);
4559     }
4560 
4561   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4562   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4563     {
4564       int prec = TYPE_PRECISION (etype);
4565 
4566       if (wi::mask (prec - 1, false, prec) == high)
4567 	{
4568 	  if (TYPE_UNSIGNED (etype))
4569 	    {
4570 	      tree signed_etype = signed_type_for (etype);
4571 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4572 		etype
4573 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4574 	      else
4575 		etype = signed_etype;
4576 	      exp = fold_convert_loc (loc, etype, exp);
4577 	    }
4578 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4579 			      build_int_cst (etype, 0));
4580 	}
4581     }
4582 
4583   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4584      This requires wrap-around arithmetics for the type of the expression.
4585      This requires wrap-around arithmetic for the type of the expression.
4586      First make sure that arithmetic in this type is valid, then make sure
4587   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4588     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4589 					    TYPE_UNSIGNED (etype));
4590 
4591   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4592     {
4593       tree utype, minv, maxv;
4594 
4595       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4596 	 for the type in question, as we rely on this here.  */
4597       utype = unsigned_type_for (etype);
4598       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4599       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4600 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4601       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4602 
4603       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4604 				      minv, 1, maxv, 1)))
4605 	etype = utype;
4606       else
4607 	return 0;
4608     }
4609 
4610   high = fold_convert_loc (loc, etype, high);
4611   low = fold_convert_loc (loc, etype, low);
4612   exp = fold_convert_loc (loc, etype, exp);
4613 
4614   value = const_binop (MINUS_EXPR, high, low);
4615 
4616 
4617   if (POINTER_TYPE_P (etype))
4618     {
4619       if (value != 0 && !TREE_OVERFLOW (value))
4620 	{
4621 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4622           return build_range_check (loc, type,
4623 			     	    fold_build_pointer_plus_loc (loc, exp, low),
4624 			            1, build_int_cst (etype, 0), value);
4625 	}
4626       return 0;
4627     }
4628 
4629   if (value != 0 && !TREE_OVERFLOW (value))
4630     return build_range_check (loc, type,
4631 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4632 			      1, build_int_cst (etype, 0), value);
4633 
4634   return 0;
4635 }
4636 
4637 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4638 
4639 static tree
4640 range_predecessor (tree val)
4641 {
4642   tree type = TREE_TYPE (val);
4643 
4644   if (INTEGRAL_TYPE_P (type)
4645       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4646     return 0;
4647   else
4648     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4649 			build_int_cst (TREE_TYPE (val), 1), 0);
4650 }
4651 
4652 /* Return the successor of VAL in its type, handling the infinite case.  */
4653 
4654 static tree
4655 range_successor (tree val)
4656 {
4657   tree type = TREE_TYPE (val);
4658 
4659   if (INTEGRAL_TYPE_P (type)
4660       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4661     return 0;
4662   else
4663     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4664 			build_int_cst (TREE_TYPE (val), 1), 0);
4665 }
4666 
4667 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4668    can, 0 if we can't.  Set the output range into the specified parameters.  */
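
/* Illustrative examples (editor's sketch):

     + [2, 5] with + [4, 9]  ->  + [4, 5]	(the intersection)
     + [2, 5] with + [7, 9]  ->  - [-, -]	(disjoint: always false)

   whereas + [2, 9] with - [4, 5] cannot be expressed as one range (it
   would be [2, 3] union [6, 9]), so the function returns 0.  */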
4669 
4670 bool
4671 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4672 	      tree high0, int in1_p, tree low1, tree high1)
4673 {
4674   int no_overlap;
4675   int subset;
4676   int temp;
4677   tree tem;
4678   int in_p;
4679   tree low, high;
4680   int lowequal = ((low0 == 0 && low1 == 0)
4681 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4682 						low0, 0, low1, 0)));
4683   int highequal = ((high0 == 0 && high1 == 0)
4684 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4685 						 high0, 1, high1, 1)));
4686 
4687   /* Make range 0 be the range that starts first, or ends last if they
4688      start at the same value.  Swap them if it isn't.  */
4689   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4690 				 low0, 0, low1, 0))
4691       || (lowequal
4692 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4693 					high1, 1, high0, 1))))
4694     {
4695       temp = in0_p, in0_p = in1_p, in1_p = temp;
4696       tem = low0, low0 = low1, low1 = tem;
4697       tem = high0, high0 = high1, high1 = tem;
4698     }
4699 
4700   /* Now flag two cases, whether the ranges are disjoint or whether the
4701      second range is totally subsumed in the first.  Note that the tests
4702      below are simplified by the ones above.  */
4703   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4704 					  high0, 1, low1, 0));
4705   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4706 				      high1, 1, high0, 1));
4707 
4708   /* We now have four cases, depending on whether we are including or
4709      excluding the two ranges.  */
4710   if (in0_p && in1_p)
4711     {
4712       /* If they don't overlap, the result is false.  If the second range
4713 	 is a subset it is the result.  Otherwise, the range is from the start
4714 	 of the second to the end of the first.  */
4715       if (no_overlap)
4716 	in_p = 0, low = high = 0;
4717       else if (subset)
4718 	in_p = 1, low = low1, high = high1;
4719       else
4720 	in_p = 1, low = low1, high = high0;
4721     }
4722 
4723   else if (in0_p && ! in1_p)
4724     {
4725       /* If they don't overlap, the result is the first range.  If they are
4726 	 equal, the result is false.  If the second range is a subset of the
4727 	 first, and the ranges begin at the same place, we go from just after
4728 	 the end of the second range to the end of the first.  If the second
4729 	 range is not a subset of the first, or if it is a subset and both
4730 	 ranges end at the same place, the range starts at the start of the
4731 	 first range and ends just before the second range.
4732 	 Otherwise, we can't describe this as a single range.  */
4733       if (no_overlap)
4734 	in_p = 1, low = low0, high = high0;
4735       else if (lowequal && highequal)
4736 	in_p = 0, low = high = 0;
4737       else if (subset && lowequal)
4738 	{
4739 	  low = range_successor (high1);
4740 	  high = high0;
4741 	  in_p = 1;
4742 	  if (low == 0)
4743 	    {
4744 	      /* We are in the weird situation where high0 > high1 but
4745 		 high1 has no successor.  Punt.  */
4746 	      return 0;
4747 	    }
4748 	}
4749       else if (! subset || highequal)
4750 	{
4751 	  low = low0;
4752 	  high = range_predecessor (low1);
4753 	  in_p = 1;
4754 	  if (high == 0)
4755 	    {
4756 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4757 	      return 0;
4758 	    }
4759 	}
4760       else
4761 	return 0;
4762     }
4763 
4764   else if (! in0_p && in1_p)
4765     {
4766       /* If they don't overlap, the result is the second range.  If the second
4767 	 is a subset of the first, the result is false.  Otherwise,
4768 	 the range starts just after the first range and ends at the
4769 	 end of the second.  */
4770       if (no_overlap)
4771 	in_p = 1, low = low1, high = high1;
4772       else if (subset || highequal)
4773 	in_p = 0, low = high = 0;
4774       else
4775 	{
4776 	  low = range_successor (high0);
4777 	  high = high1;
4778 	  in_p = 1;
4779 	  if (low == 0)
4780 	    {
4781 	      /* high1 > high0 but high0 has no successor.  Punt.  */
4782 	      return 0;
4783 	    }
4784 	}
4785     }
4786 
4787   else
4788     {
4789       /* The case where we are excluding both ranges.  Here the complex case
4790 	 is if they don't overlap.  In that case, the only time we have a
4791 	 range is if they are adjacent.  If the second is a subset of the
4792 	 first, the result is the first.  Otherwise, the range to exclude
4793 	 starts at the beginning of the first range and ends at the end of the
4794 	 second.  */
4795       if (no_overlap)
4796 	{
4797 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4798 					 range_successor (high0),
4799 					 1, low1, 0)))
4800 	    in_p = 0, low = low0, high = high1;
4801 	  else
4802 	    {
4803 	      /* Canonicalize - [min, x] into - [-, x].  */
4804 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4805 		switch (TREE_CODE (TREE_TYPE (low0)))
4806 		  {
4807 		  case ENUMERAL_TYPE:
4808 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4809 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4810 		      break;
4811 		    /* FALLTHROUGH */
4812 		  case INTEGER_TYPE:
4813 		    if (tree_int_cst_equal (low0,
4814 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4815 		      low0 = 0;
4816 		    break;
4817 		  case POINTER_TYPE:
4818 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4819 			&& integer_zerop (low0))
4820 		      low0 = 0;
4821 		    break;
4822 		  default:
4823 		    break;
4824 		  }
4825 
4826 	      /* Canonicalize - [x, max] into - [x, -].  */
4827 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4828 		switch (TREE_CODE (TREE_TYPE (high1)))
4829 		  {
4830 		  case ENUMERAL_TYPE:
4831 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4832 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4833 		      break;
4834 		    /* FALLTHROUGH */
4835 		  case INTEGER_TYPE:
4836 		    if (tree_int_cst_equal (high1,
4837 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4838 		      high1 = 0;
4839 		    break;
4840 		  case POINTER_TYPE:
4841 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4842 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4843 						       high1, 1,
4844 						       build_int_cst (TREE_TYPE (high1), 1),
4845 						       1)))
4846 		      high1 = 0;
4847 		    break;
4848 		  default:
4849 		    break;
4850 		  }
4851 
4852 	      /* The ranges might also be adjacent between the maximum and
4853 	         minimum values of the given type.  For
4854 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4855 	         return + [x + 1, y - 1].  */
4856 	      if (low0 == 0 && high1 == 0)
4857 	        {
4858 		  low = range_successor (high0);
4859 		  high = range_predecessor (low1);
4860 		  if (low == 0 || high == 0)
4861 		    return 0;
4862 
4863 		  in_p = 1;
4864 		}
4865 	      else
4866 		return 0;
4867 	    }
4868 	}
4869       else if (subset)
4870 	in_p = 0, low = low0, high = high0;
4871       else
4872 	in_p = 0, low = low0, high = high1;
4873     }
4874 
4875   *pin_p = in_p, *plow = low, *phigh = high;
4876   return 1;
4877 }
4878 
4879 
4880 /* Subroutine of fold, looking inside expressions of the form
4881    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4882    of the COND_EXPR.  This function is also used to optimize
4883    A op B ? C : A by reversing the comparison first.
4884 
4885    Return a folded expression whose code is not a COND_EXPR
4886    anymore, or NULL_TREE if no folding opportunity is found.  */
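/* For example, when signed zeros need not be honored, "A > 0 ? A : -A"
   is folded here to ABS_EXPR <A>; the full set of transformations is
   spelled out case by case in the body below.  */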
4887 
4888 static tree
4889 fold_cond_expr_with_comparison (location_t loc, tree type,
4890 				tree arg0, tree arg1, tree arg2)
4891 {
4892   enum tree_code comp_code = TREE_CODE (arg0);
4893   tree arg00 = TREE_OPERAND (arg0, 0);
4894   tree arg01 = TREE_OPERAND (arg0, 1);
4895   tree arg1_type = TREE_TYPE (arg1);
4896   tree tem;
4897 
4898   STRIP_NOPS (arg1);
4899   STRIP_NOPS (arg2);
4900 
4901   /* If we have A op 0 ? A : -A, consider applying the following
4902      transformations:
4903 
4904      A == 0? A : -A    same as -A
4905      A != 0? A : -A    same as A
4906      A >= 0? A : -A    same as abs (A)
4907      A > 0?  A : -A    same as abs (A)
4908      A <= 0? A : -A    same as -abs (A)
4909      A < 0?  A : -A    same as -abs (A)
4910 
4911      None of these transformations work for modes with signed
4912      zeros.  If A is +/-0, the first two transformations will
4913      change the sign of the result (from +0 to -0, or vice
4914      versa).  The last four will fix the sign of the result,
4915      even though the original expressions could be positive or
4916      negative, depending on the sign of A.
4917 
4918      Note that all these transformations are correct if A is
4919      NaN, since the two alternatives (A and -A) are also NaNs.  */
4920   if (!HONOR_SIGNED_ZEROS (element_mode (type))
4921       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4922 	  ? real_zerop (arg01)
4923 	  : integer_zerop (arg01))
4924       && ((TREE_CODE (arg2) == NEGATE_EXPR
4925 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4926 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4927 	        have already been folded to Y-X; check for that.  */
4928 	  || (TREE_CODE (arg1) == MINUS_EXPR
4929 	      && TREE_CODE (arg2) == MINUS_EXPR
4930 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4931 				  TREE_OPERAND (arg2, 1), 0)
4932 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4933 				  TREE_OPERAND (arg2, 0), 0))))
4934     switch (comp_code)
4935       {
4936       case EQ_EXPR:
4937       case UNEQ_EXPR:
4938 	tem = fold_convert_loc (loc, arg1_type, arg1);
4939 	return pedantic_non_lvalue_loc (loc,
4940 				    fold_convert_loc (loc, type,
4941 						  negate_expr (tem)));
4942       case NE_EXPR:
4943       case LTGT_EXPR:
4944 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4945       case UNGE_EXPR:
4946       case UNGT_EXPR:
4947 	if (flag_trapping_math)
4948 	  break;
4949 	/* Fall through.  */
4950       case GE_EXPR:
4951       case GT_EXPR:
4952 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4953 	  break;
4954 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4955 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4956       case UNLE_EXPR:
4957       case UNLT_EXPR:
4958 	if (flag_trapping_math)
4959 	  break;
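	/* Fall through.  */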
4960       case LE_EXPR:
4961       case LT_EXPR:
4962 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4963 	  break;
4964 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4965 	return negate_expr (fold_convert_loc (loc, type, tem));
4966       default:
4967 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4968 	break;
4969       }
4970 
4971   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4972      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4973      both transformations are correct when A is NaN: A != 0
4974      is then true, and A == 0 is false.  */
4975 
4976   if (!HONOR_SIGNED_ZEROS (element_mode (type))
4977       && integer_zerop (arg01) && integer_zerop (arg2))
4978     {
4979       if (comp_code == NE_EXPR)
4980 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4981       else if (comp_code == EQ_EXPR)
4982 	return build_zero_cst (type);
4983     }
4984 
4985   /* Try some transformations of A op B ? A : B.
4986 
4987      A == B? A : B    same as B
4988      A != B? A : B    same as A
4989      A >= B? A : B    same as max (A, B)
4990      A > B?  A : B    same as max (B, A)
4991      A <= B? A : B    same as min (A, B)
4992      A < B?  A : B    same as min (B, A)
4993 
4994      As above, these transformations don't work in the presence
4995      of signed zeros.  For example, if A and B are zeros of
4996      opposite sign, the first two transformations will change
4997      the sign of the result.  In the last four, the original
4998      expressions give different results for (A=+0, B=-0) and
4999      (A=-0, B=+0), but the transformed expressions do not.
5000 
5001      The first two transformations are correct if either A or B
5002      is a NaN.  In the first transformation, the condition will
5003      be false, and B will indeed be chosen.  In the case of the
5004      second transformation, the condition A != B will be true,
5005      and A will be chosen.
5006 
5007      The conversions to max() and min() are not correct if B is
5008      a number and A is not.  The conditions in the original
5009      expressions will be false, so all four give B.  The min()
5010      and max() versions would give a NaN instead.  */
5011   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5012       && operand_equal_for_comparison_p (arg01, arg2, arg00)
5013       /* Avoid these transformations if the COND_EXPR may be used
5014 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5015       && (in_gimple_form
5016 	  || VECTOR_TYPE_P (type)
5017 	  || (! lang_GNU_CXX ()
5018 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5019 	  || ! maybe_lvalue_p (arg1)
5020 	  || ! maybe_lvalue_p (arg2)))
5021     {
5022       tree comp_op0 = arg00;
5023       tree comp_op1 = arg01;
5024       tree comp_type = TREE_TYPE (comp_op0);
5025 
5026       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
5027       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5028 	{
5029 	  comp_type = type;
5030 	  comp_op0 = arg1;
5031 	  comp_op1 = arg2;
5032 	}
5033 
5034       switch (comp_code)
5035 	{
5036 	case EQ_EXPR:
5037 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5038 	case NE_EXPR:
5039 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5040 	case LE_EXPR:
5041 	case LT_EXPR:
5042 	case UNLE_EXPR:
5043 	case UNLT_EXPR:
5044 	  /* In C++ a ?: expression can be an lvalue, so put the
5045 	     operand which will be used if they are equal first
5046 	     so that we can convert this back to the
5047 	     corresponding COND_EXPR.  */
5048 	  if (!HONOR_NANS (arg1))
5049 	    {
5050 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5051 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5052 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5053 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5054 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5055 				   comp_op1, comp_op0);
5056 	      return pedantic_non_lvalue_loc (loc,
5057 					  fold_convert_loc (loc, type, tem));
5058 	    }
5059 	  break;
5060 	case GE_EXPR:
5061 	case GT_EXPR:
5062 	case UNGE_EXPR:
5063 	case UNGT_EXPR:
5064 	  if (!HONOR_NANS (arg1))
5065 	    {
5066 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5067 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5068 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5069 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5070 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5071 				   comp_op1, comp_op0);
5072 	      return pedantic_non_lvalue_loc (loc,
5073 					  fold_convert_loc (loc, type, tem));
5074 	    }
5075 	  break;
5076 	case UNEQ_EXPR:
5077 	  if (!HONOR_NANS (arg1))
5078 	    return pedantic_non_lvalue_loc (loc,
5079 					fold_convert_loc (loc, type, arg2));
5080 	  break;
5081 	case LTGT_EXPR:
5082 	  if (!HONOR_NANS (arg1))
5083 	    return pedantic_non_lvalue_loc (loc,
5084 					fold_convert_loc (loc, type, arg1));
5085 	  break;
5086 	default:
5087 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5088 	  break;
5089 	}
5090     }
5091 
5092   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5093      we might still be able to simplify this.  For example,
5094      if C1 is one less or one more than C2, this might have started
5095      out as a MIN or MAX and been transformed by this function.
5096      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
5097 
5098   if (INTEGRAL_TYPE_P (type)
5099       && TREE_CODE (arg01) == INTEGER_CST
5100       && TREE_CODE (arg2) == INTEGER_CST)
5101     switch (comp_code)
5102       {
5103       case EQ_EXPR:
5104 	if (TREE_CODE (arg1) == INTEGER_CST)
5105 	  break;
5106 	/* We can replace A with C1 in this case.  */
5107 	arg1 = fold_convert_loc (loc, type, arg01);
5108 	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5109 
5110       case LT_EXPR:
5111 	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5112 	   MIN_EXPR, to preserve the signedness of the comparison.  */
5113 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5114 			       OEP_ONLY_CONST)
5115 	    && operand_equal_p (arg01,
5116 				const_binop (PLUS_EXPR, arg2,
5117 					     build_int_cst (type, 1)),
5118 				OEP_ONLY_CONST))
5119 	  {
5120 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5121 				   fold_convert_loc (loc, TREE_TYPE (arg00),
5122 						     arg2));
5123 	    return pedantic_non_lvalue_loc (loc,
5124 					    fold_convert_loc (loc, type, tem));
5125 	  }
5126 	break;
5127 
5128       case LE_EXPR:
5129 	/* If C1 is C2 - 1, this is min(A, C2), with the same care
5130 	   as above.  */
5131 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5132 			       OEP_ONLY_CONST)
5133 	    && operand_equal_p (arg01,
5134 				const_binop (MINUS_EXPR, arg2,
5135 					     build_int_cst (type, 1)),
5136 				OEP_ONLY_CONST))
5137 	  {
5138 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5139 				   fold_convert_loc (loc, TREE_TYPE (arg00),
5140 						     arg2));
5141 	    return pedantic_non_lvalue_loc (loc,
5142 					    fold_convert_loc (loc, type, tem));
5143 	  }
5144 	break;
5145 
5146       case GT_EXPR:
5147 	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5148 	   MAX_EXPR, to preserve the signedness of the comparison.  */
5149 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5150 			       OEP_ONLY_CONST)
5151 	    && operand_equal_p (arg01,
5152 				const_binop (MINUS_EXPR, arg2,
5153 					     build_int_cst (type, 1)),
5154 				OEP_ONLY_CONST))
5155 	  {
5156 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5157 				   fold_convert_loc (loc, TREE_TYPE (arg00),
5158 						     arg2));
5159 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5160 	  }
5161 	break;
5162 
5163       case GE_EXPR:
5164 	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
5165 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5166 			       OEP_ONLY_CONST)
5167 	    && operand_equal_p (arg01,
5168 				const_binop (PLUS_EXPR, arg2,
5169 					     build_int_cst (type, 1)),
5170 				OEP_ONLY_CONST))
5171 	  {
5172 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5173 				   fold_convert_loc (loc, TREE_TYPE (arg00),
5174 						     arg2));
5175 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5176 	  }
5177 	break;
5178       case NE_EXPR:
5179 	break;
5180       default:
5181 	gcc_unreachable ();
5182       }
5183 
5184   return NULL_TREE;
5185 }
5186 
5187 
5188 
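/* Nonzero means branches are expensive enough that it pays to evaluate
   both operands of a short-circuit truth operation unconditionally;
   see the use in fold_range_test below.  */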
5189 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5190 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5191   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5192 		false) >= 2)
5193 #endif
5194 
5195 /* See if OP0 and OP1, combined by logical operation CODE, can be
5196    merged into a single range test.  Return the new tree if so, else 0.  */
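/* For instance, "ch >= '0' && ch <= '9'" denotes the single range
   ['0', '9'], which build_range_check can usually test with a single
   unsigned comparison of ch - '0' against '9' - '0'.  */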
5197 
5198 static tree
5199 fold_range_test (location_t loc, enum tree_code code, tree type,
5200 		 tree op0, tree op1)
5201 {
5202   int or_op = (code == TRUTH_ORIF_EXPR
5203 	       || code == TRUTH_OR_EXPR);
5204   int in0_p, in1_p, in_p;
5205   tree low0, low1, low, high0, high1, high;
5206   bool strict_overflow_p = false;
5207   tree tem, lhs, rhs;
5208   const char * const warnmsg = G_("assuming signed overflow does not occur "
5209 				  "when simplifying range test");
5210 
5211   if (!INTEGRAL_TYPE_P (type))
5212     return 0;
5213 
5214   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5215   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5216 
5217   /* If this is an OR operation, invert both sides; we will invert
5218      again at the end.  */
5219   if (or_op)
5220     in0_p = ! in0_p, in1_p = ! in1_p;
5221 
5222   /* If both expressions are the same, and we can merge the ranges and
5223      build the range test, return it, inverted if this was an OR.  If one
5224      of the ranges is always true or always false, consider it to be the
5225      same expression as the other.  */
5226   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5227       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5228 		       in1_p, low1, high1)
5229       && 0 != (tem = (build_range_check (loc, type,
5230 					 lhs != 0 ? lhs
5231 					 : rhs != 0 ? rhs : integer_zero_node,
5232 					 in_p, low, high))))
5233     {
5234       if (strict_overflow_p)
5235 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5236       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5237     }
5238 
5239   /* On machines where the branch cost is expensive, if this is a
5240      short-circuited branch and the underlying object on both sides
5241      is the same, make a non-short-circuit operation.  */
5242   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5243 	   && lhs != 0 && rhs != 0
5244 	   && (code == TRUTH_ANDIF_EXPR
5245 	       || code == TRUTH_ORIF_EXPR)
5246 	   && operand_equal_p (lhs, rhs, 0))
5247     {
5248       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5249 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5250 	 which cases we can't do this.  */
5251       if (simple_operand_p (lhs))
5252 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5253 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5254 			   type, op0, op1);
5255 
5256       else if (!lang_hooks.decls.global_bindings_p ()
5257 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5258 	{
5259 	  tree common = save_expr (lhs);
5260 
5261 	  if (0 != (lhs = build_range_check (loc, type, common,
5262 					     or_op ? ! in0_p : in0_p,
5263 					     low0, high0))
5264 	      && (0 != (rhs = build_range_check (loc, type, common,
5265 						 or_op ? ! in1_p : in1_p,
5266 						 low1, high1))))
5267 	    {
5268 	      if (strict_overflow_p)
5269 		fold_overflow_warning (warnmsg,
5270 				       WARN_STRICT_OVERFLOW_COMPARISON);
5271 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5272 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5273 				 type, lhs, rhs);
5274 	    }
5275 	}
5276     }
5277 
5278   return 0;
5279 }
5280 
5281 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5282    P-bit value.  Arrange things so the extra bits will be set to zero if
5283    and only if C is sign-extended to its full width.  If MASK is nonzero,
5284    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
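/* For illustration, assume a 32-bit mode with P == 8, UNSIGNEDP == 0
   and MASK == 0.  The sign-extended constant 0xffffff80 gives TEMP ==
   0xffffff00 and a result of 0x80, whose extra bits are all zero; the
   constant 0x80, which is not sign-extended, gives back 0xffffff80,
   whose extra bits are nonzero.  */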
5285 
5286 static tree
5287 unextend (tree c, int p, int unsignedp, tree mask)
5288 {
5289   tree type = TREE_TYPE (c);
5290   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5291   tree temp;
5292 
5293   if (p == modesize || unsignedp)
5294     return c;
5295 
5296   /* We work by getting just the sign bit into the low-order bit, then
5297      into the high-order bit, then sign-extend.  We then XOR that value
5298      with C.  */
5299   temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5300 
5301   /* We must use a signed type in order to get an arithmetic right shift.
5302      However, we must also avoid introducing accidental overflows, so that
5303      a subsequent call to integer_zerop will work.  Hence we must
5304      do the type conversion here.  At this point, the constant is either
5305      zero or one, and the conversion to a signed type can never overflow.
5306      We could get an overflow if this conversion is done anywhere else.  */
5307   if (TYPE_UNSIGNED (type))
5308     temp = fold_convert (signed_type_for (type), temp);
5309 
5310   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5311   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5312   if (mask != 0)
5313     temp = const_binop (BIT_AND_EXPR, temp,
5314 			fold_convert (TREE_TYPE (c), mask));
5315   /* If necessary, convert the type back to match the type of C.  */
5316   if (TYPE_UNSIGNED (type))
5317     temp = fold_convert (type, temp);
5318 
5319   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5320 }
5321 
5322 /* For an expression that has the form
5323      (A && B) || ~B
5324    or
5325      (A || B) && ~B,
5326    we can drop one of the inner expressions and simplify to
5327      A || ~B
5328    or
5329      A && ~B
5330    LOC is the location of the resulting expression.  OP is the inner
5331    logical operation; the left-hand side in the examples above, while CMPOP
5332    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5333    removing a condition that guards another, as in
5334      (A != NULL && A->...) || A == NULL
5335    which we must not transform.  If RHS_ONLY is true, only eliminate the
5336    right-most operand of the inner logical operation.  */
5337 
5338 static tree
5339 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5340 				 bool rhs_only)
5341 {
5342   tree type = TREE_TYPE (cmpop);
5343   enum tree_code code = TREE_CODE (cmpop);
5344   enum tree_code truthop_code = TREE_CODE (op);
5345   tree lhs = TREE_OPERAND (op, 0);
5346   tree rhs = TREE_OPERAND (op, 1);
5347   tree orig_lhs = lhs, orig_rhs = rhs;
5348   enum tree_code rhs_code = TREE_CODE (rhs);
5349   enum tree_code lhs_code = TREE_CODE (lhs);
5350   enum tree_code inv_code;
5351 
5352   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5353     return NULL_TREE;
5354 
5355   if (TREE_CODE_CLASS (code) != tcc_comparison)
5356     return NULL_TREE;
5357 
5358   if (rhs_code == truthop_code)
5359     {
5360       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5361       if (newrhs != NULL_TREE)
5362 	{
5363 	  rhs = newrhs;
5364 	  rhs_code = TREE_CODE (rhs);
5365 	}
5366     }
5367   if (lhs_code == truthop_code && !rhs_only)
5368     {
5369       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5370       if (newlhs != NULL_TREE)
5371 	{
5372 	  lhs = newlhs;
5373 	  lhs_code = TREE_CODE (lhs);
5374 	}
5375     }
5376 
5377   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5378   if (inv_code == rhs_code
5379       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5380       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5381     return lhs;
5382   if (!rhs_only && inv_code == lhs_code
5383       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5384       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5385     return rhs;
5386   if (rhs != orig_rhs || lhs != orig_lhs)
5387     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5388 			    lhs, rhs);
5389   return NULL_TREE;
5390 }
5391 
5392 /* Find ways of folding logical expressions of LHS and RHS:
5393    Try to merge two comparisons to the same innermost item.
5394    Look for range tests like "ch >= '0' && ch <= '9'".
5395    Look for combinations of simple terms on machines with expensive branches
5396    and evaluate the RHS unconditionally.
5397 
5398    For example, if we have p->a == 2 && p->b == 4 and we can make an
5399    object large enough to span both A and B, we can do this with a comparison
5400    against the object ANDed with a mask.
5401 
5402    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5403    operations to do this with one comparison.
5404 
5405    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5406    function and the one above.
5407 
5408    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5409    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5410 
5411    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5412    two operands.
5413 
5414    We return the simplified tree or 0 if no optimization is possible.  */
5415 
5416 static tree
5417 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5418 		    tree lhs, tree rhs)
5419 {
5420   /* If this is the "or" of two comparisons, we can do something if
5421      the comparisons are NE_EXPR.  If this is the "and", we can do something
5422      if the comparisons are EQ_EXPR.  I.e.,
5423 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5424 
5425      WANTED_CODE is this operation code.  For single bit fields, we can
5426      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5427      comparison for one-bit fields.  */
5428 
5429   enum tree_code wanted_code;
5430   enum tree_code lcode, rcode;
5431   tree ll_arg, lr_arg, rl_arg, rr_arg;
5432   tree ll_inner, lr_inner, rl_inner, rr_inner;
5433   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5434   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5435   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5436   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5437   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5438   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5439   machine_mode lnmode, rnmode;
5440   tree ll_mask, lr_mask, rl_mask, rr_mask;
5441   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5442   tree l_const, r_const;
5443   tree lntype, rntype, result;
5444   HOST_WIDE_INT first_bit, end_bit;
5445   int volatilep;
5446 
5447   /* Start by getting the comparison codes.  Fail if anything is volatile.
5448      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5449      it were surrounded with a NE_EXPR.  */
5450 
5451   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5452     return 0;
5453 
5454   lcode = TREE_CODE (lhs);
5455   rcode = TREE_CODE (rhs);
5456 
5457   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5458     {
5459       lhs = build2 (NE_EXPR, truth_type, lhs,
5460 		    build_int_cst (TREE_TYPE (lhs), 0));
5461       lcode = NE_EXPR;
5462     }
5463 
5464   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5465     {
5466       rhs = build2 (NE_EXPR, truth_type, rhs,
5467 		    build_int_cst (TREE_TYPE (rhs), 0));
5468       rcode = NE_EXPR;
5469     }
5470 
5471   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5472       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5473     return 0;
5474 
5475   ll_arg = TREE_OPERAND (lhs, 0);
5476   lr_arg = TREE_OPERAND (lhs, 1);
5477   rl_arg = TREE_OPERAND (rhs, 0);
5478   rr_arg = TREE_OPERAND (rhs, 1);
5479 
5480   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5481   if (simple_operand_p (ll_arg)
5482       && simple_operand_p (lr_arg))
5483     {
5484       if (operand_equal_p (ll_arg, rl_arg, 0)
5485           && operand_equal_p (lr_arg, rr_arg, 0))
5486 	{
5487           result = combine_comparisons (loc, code, lcode, rcode,
5488 					truth_type, ll_arg, lr_arg);
5489 	  if (result)
5490 	    return result;
5491 	}
5492       else if (operand_equal_p (ll_arg, rr_arg, 0)
5493                && operand_equal_p (lr_arg, rl_arg, 0))
5494 	{
5495           result = combine_comparisons (loc, code, lcode,
5496 					swap_tree_comparison (rcode),
5497 					truth_type, ll_arg, lr_arg);
5498 	  if (result)
5499 	    return result;
5500 	}
5501     }
5502 
5503   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5504 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5505 
5506   /* If the RHS can be evaluated unconditionally and its operands are
5507      simple, it wins to evaluate the RHS unconditionally on machines
5508      with expensive branches.  In this case, this isn't a comparison
5509      that can be merged.  */
5510 
5511   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5512 		   false) >= 2
5513       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5514       && simple_operand_p (rl_arg)
5515       && simple_operand_p (rr_arg))
5516     {
5517       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5518       if (code == TRUTH_OR_EXPR
5519 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5520 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5521 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5522 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5523 	return build2_loc (loc, NE_EXPR, truth_type,
5524 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5525 				   ll_arg, rl_arg),
5526 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5527 
5528       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5529       if (code == TRUTH_AND_EXPR
5530 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5531 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5532 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5533 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5534 	return build2_loc (loc, EQ_EXPR, truth_type,
5535 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5536 				   ll_arg, rl_arg),
5537 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5538     }
5539 
5540   /* See if the comparisons can be merged.  Then get all the parameters for
5541      each side.  */
5542 
5543   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5544       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5545     return 0;
5546 
5547   volatilep = 0;
5548   ll_inner = decode_field_reference (loc, ll_arg,
5549 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5550 				     &ll_unsignedp, &volatilep, &ll_mask,
5551 				     &ll_and_mask);
5552   lr_inner = decode_field_reference (loc, lr_arg,
5553 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5554 				     &lr_unsignedp, &volatilep, &lr_mask,
5555 				     &lr_and_mask);
5556   rl_inner = decode_field_reference (loc, rl_arg,
5557 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5558 				     &rl_unsignedp, &volatilep, &rl_mask,
5559 				     &rl_and_mask);
5560   rr_inner = decode_field_reference (loc, rr_arg,
5561 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5562 				     &rr_unsignedp, &volatilep, &rr_mask,
5563 				     &rr_and_mask);
5564 
5565   /* The inner operation on the lhs of each comparison must be the
5566      same if we are to be able to do anything.
5567      Then see if we have constants.  If not, the same must be true for
5568      the rhs's.  */
5569   if (volatilep || ll_inner == 0 || rl_inner == 0
5570       || ! operand_equal_p (ll_inner, rl_inner, 0))
5571     return 0;
5572 
5573   if (TREE_CODE (lr_arg) == INTEGER_CST
5574       && TREE_CODE (rr_arg) == INTEGER_CST)
5575     l_const = lr_arg, r_const = rr_arg;
5576   else if (lr_inner == 0 || rr_inner == 0
5577 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5578     return 0;
5579   else
5580     l_const = r_const = 0;
5581 
5582   /* If either comparison code is not correct for our logical operation,
5583      fail.  However, we can convert a one-bit comparison against zero into
5584      the opposite comparison against that bit being set in the field.  */
5585 
5586   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5587   if (lcode != wanted_code)
5588     {
5589       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5590 	{
5591 	  /* Make the left operand unsigned, since we are only interested
5592 	     in the value of one bit.  Otherwise we are doing the wrong
5593 	     thing below.  */
5594 	  ll_unsignedp = 1;
5595 	  l_const = ll_mask;
5596 	}
5597       else
5598 	return 0;
5599     }
5600 
5601   /* This is analogous to the code for l_const above.  */
5602   if (rcode != wanted_code)
5603     {
5604       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5605 	{
5606 	  rl_unsignedp = 1;
5607 	  r_const = rl_mask;
5608 	}
5609       else
5610 	return 0;
5611     }
5612 
5613   /* See if we can find a mode that contains both fields being compared on
5614      the left.  If we can't, fail.  Otherwise, update all constants and masks
5615      to be relative to a field of that size.  */
5616   first_bit = MIN (ll_bitpos, rl_bitpos);
5617   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5618   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5619 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5620 			  volatilep);
5621   if (lnmode == VOIDmode)
5622     return 0;
5623 
5624   lnbitsize = GET_MODE_BITSIZE (lnmode);
5625   lnbitpos = first_bit & ~ (lnbitsize - 1);
5626   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5627   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5628 
5629   if (BYTES_BIG_ENDIAN)
5630     {
5631       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5632       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5633     }
5634 
5635   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5636 			 size_int (xll_bitpos));
5637   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5638 			 size_int (xrl_bitpos));
5639 
5640   if (l_const)
5641     {
5642       l_const = fold_convert_loc (loc, lntype, l_const);
5643       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5644       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5645       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5646 					fold_build1_loc (loc, BIT_NOT_EXPR,
5647 						     lntype, ll_mask))))
5648 	{
5649 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5650 
5651 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5652 	}
5653     }
5654   if (r_const)
5655     {
5656       r_const = fold_convert_loc (loc, lntype, r_const);
5657       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5658       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5659       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5660 					fold_build1_loc (loc, BIT_NOT_EXPR,
5661 						     lntype, rl_mask))))
5662 	{
5663 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5664 
5665 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5666 	}
5667     }
5668 
5669   /* If the right sides are not constant, do the same for them.  Also,
5670      disallow this optimization if a size or signedness mismatch occurs
5671      between the left and right sides.  */
5672   if (l_const == 0)
5673     {
5674       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5675 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5676 	  /* Make sure the two fields on the right
5677 	     correspond to the left without being swapped.  */
5678 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5679 	return 0;
5680 
5681       first_bit = MIN (lr_bitpos, rr_bitpos);
5682       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5683       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5684 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5685 			      volatilep);
5686       if (rnmode == VOIDmode)
5687 	return 0;
5688 
5689       rnbitsize = GET_MODE_BITSIZE (rnmode);
5690       rnbitpos = first_bit & ~ (rnbitsize - 1);
5691       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5692       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5693 
5694       if (BYTES_BIG_ENDIAN)
5695 	{
5696 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5697 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5698 	}
5699 
5700       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5701 							    rntype, lr_mask),
5702 			     size_int (xlr_bitpos));
5703       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5704 							    rntype, rr_mask),
5705 			     size_int (xrr_bitpos));
5706 
5707       /* Make a mask that corresponds to both fields being compared.
5708 	 Do this for both items being compared.  If the operands are the
5709 	 same size and the bits being compared are in the same position
5710 	 then we can do this by masking both and comparing the masked
5711 	 results.  */
5712       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5713       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5714       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5715 	{
5716 	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5717 				    ll_unsignedp || rl_unsignedp);
5718 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5719 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5720 
5721 	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5722 				    lr_unsignedp || rr_unsignedp);
5723 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5724 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5725 
5726 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5727 	}
5728 
5729       /* There is still another way we can do something:  If both pairs of
5730 	 fields being compared are adjacent, we may be able to make a wider
5731 	 field containing them both.
5732 
5733 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5734 	 the mask must be shifted to account for the shift done by
5735 	 make_bit_field_ref.  */
5736       if ((ll_bitsize + ll_bitpos == rl_bitpos
5737 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5738 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5739 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5740 	{
5741 	  tree type;
5742 
5743 	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
5744 				    ll_bitsize + rl_bitsize,
5745 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5746 	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
5747 				    lr_bitsize + rr_bitsize,
5748 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5749 
5750 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5751 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5752 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5753 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5754 
5755 	  /* Convert to the smaller type before masking out unwanted bits.  */
5756 	  type = lntype;
5757 	  if (lntype != rntype)
5758 	    {
5759 	      if (lnbitsize > rnbitsize)
5760 		{
5761 		  lhs = fold_convert_loc (loc, rntype, lhs);
5762 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5763 		  type = rntype;
5764 		}
5765 	      else if (lnbitsize < rnbitsize)
5766 		{
5767 		  rhs = fold_convert_loc (loc, lntype, rhs);
5768 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5769 		  type = lntype;
5770 		}
5771 	    }
5772 
5773 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5774 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5775 
5776 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5777 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5778 
5779 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5780 	}
5781 
5782       return 0;
5783     }
5784 
5785   /* Handle the case of comparisons with constants.  If there is something in
5786      common between the masks, those bits of the constants must be the same.
5787      If not, the condition is always false.  Test for this to avoid generating
5788      incorrect code below.  */
5789   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5790   if (! integer_zerop (result)
5791       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5792 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5793     {
5794       if (wanted_code == NE_EXPR)
5795 	{
5796 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5797 	  return constant_boolean_node (true, truth_type);
5798 	}
5799       else
5800 	{
5801 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5802 	  return constant_boolean_node (false, truth_type);
5803 	}
5804     }
5805 
5806   /* Construct the expression we will return.  First get the component
5807      reference we will make.  Unless the mask is all ones the width of
5808      that field, perform the mask operation.  Then compare with the
5809      merged constant.  */
5810   result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5811 			       ll_unsignedp || rl_unsignedp);
5812 
5813   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5814   if (! all_ones_mask_p (ll_mask, lnbitsize))
5815     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5816 
5817   return build2_loc (loc, wanted_code, truth_type, result,
5818 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5819 }
5820 
5821 /* Optimize the comparison CODE of a MIN_EXPR or MAX_EXPR, OP0, with a
5822    constant OP1.  */
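/* For example, MAX_EXPR <X, 0> == 0 becomes X <= 0 and
   MIN_EXPR <X, 0> > -1 becomes X > -1, as worked out case by case
   below.  */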
5823 
5824 static tree
5825 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5826 			    tree op0, tree op1)
5827 {
5828   tree arg0 = op0;
5829   enum tree_code op_code;
5830   tree comp_const;
5831   tree minmax_const;
5832   int consts_equal, consts_lt;
5833   tree inner;
5834 
5835   STRIP_SIGN_NOPS (arg0);
5836 
5837   op_code = TREE_CODE (arg0);
5838   minmax_const = TREE_OPERAND (arg0, 1);
5839   comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5840   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5841   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5842   inner = TREE_OPERAND (arg0, 0);
5843 
5844   /* If something does not permit us to optimize, return NULL_TREE.  */
5845   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5846       || TREE_CODE (comp_const) != INTEGER_CST
5847       || TREE_OVERFLOW (comp_const)
5848       || TREE_CODE (minmax_const) != INTEGER_CST
5849       || TREE_OVERFLOW (minmax_const))
5850     return NULL_TREE;
5851 
5852   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5853      and GT_EXPR, doing the rest with recursive calls using logical
5854      simplifications.  */
5855   switch (code)
5856     {
5857     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5858       {
5859 	tree tem
5860 	  = optimize_minmax_comparison (loc,
5861 					invert_tree_comparison (code, false),
5862 					type, op0, op1);
5863 	if (tem)
5864 	  return invert_truthvalue_loc (loc, tem);
5865 	return NULL_TREE;
5866       }
5867 
5868     case GE_EXPR:
5869       return
5870 	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5871 		     optimize_minmax_comparison
5872 		     (loc, EQ_EXPR, type, arg0, comp_const),
5873 		     optimize_minmax_comparison
5874 		     (loc, GT_EXPR, type, arg0, comp_const));
5875 
5876     case EQ_EXPR:
5877       if (op_code == MAX_EXPR && consts_equal)
5878 	/* MAX (X, 0) == 0  ->  X <= 0  */
5879 	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5880 
5881       else if (op_code == MAX_EXPR && consts_lt)
5882 	/* MAX (X, 0) == 5  ->  X == 5   */
5883 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5884 
5885       else if (op_code == MAX_EXPR)
5886 	/* MAX (X, 0) == -1  ->  false  */
5887 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5888 
5889       else if (consts_equal)
5890 	/* MIN (X, 0) == 0  ->  X >= 0  */
5891 	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5892 
5893       else if (consts_lt)
5894 	/* MIN (X, 0) == 5  ->  false  */
5895 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5896 
5897       else
5898 	/* MIN (X, 0) == -1  ->  X == -1  */
5899 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5900 
5901     case GT_EXPR:
5902       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5903 	/* MAX (X, 0) > 0  ->  X > 0
5904 	   MAX (X, 0) > 5  ->  X > 5  */
5905 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5906 
5907       else if (op_code == MAX_EXPR)
5908 	/* MAX (X, 0) > -1  ->  true  */
5909 	return omit_one_operand_loc (loc, type, integer_one_node, inner);
5910 
5911       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5912 	/* MIN (X, 0) > 0  ->  false
5913 	   MIN (X, 0) > 5  ->  false  */
5914 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5915 
5916       else
5917 	/* MIN (X, 0) > -1  ->  X > -1  */
5918 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5919 
5920     default:
5921       return NULL_TREE;
5922     }
5923 }
5924 
5925 /* T is an integer expression that is being multiplied by, divided by,
5926    or taken modulo a constant C (CODE says which operation and what kind
5927    of divide or modulus).  See if we can eliminate that operation by folding it with
5928    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5929    should be used for the computation if wider than our type.
5930 
5931    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5932    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5933    expression would not overflow or that overflow is undefined for the type
5934    in the language in question.
5935 
5936    If we return a non-null expression, it is an equivalent form of the
5937    original computation, but need not be in the original type.
5938 
5939    We set *STRICT_OVERFLOW_P to true if the return value depends on
5940    signed overflow being undefined.  Otherwise we do not change
5941    *STRICT_OVERFLOW_P.  */
5942 
5943 static tree
5944 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5945 		bool *strict_overflow_p)
5946 {
5947   /* To avoid exponential search depth, refuse to allow recursion past
5948      three levels.  Beyond that (1) it's highly unlikely that we'll find
5949      something interesting and (2) we've probably processed it before
5950      when we built the inner expression.  */
5951 
5952   static int depth;
5953   tree ret;
5954 
5955   if (depth > 3)
5956     return NULL;
5957 
5958   depth++;
5959   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5960   depth--;
5961 
5962   return ret;
5963 }
5964 
5965 static tree
5966 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5967 		  bool *strict_overflow_p)
5968 {
5969   tree type = TREE_TYPE (t);
5970   enum tree_code tcode = TREE_CODE (t);
5971   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5972 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5973 		? wide_type : type);
5974   tree t1, t2;
5975   int same_p = tcode == code;
5976   tree op0 = NULL_TREE, op1 = NULL_TREE;
5977   bool sub_strict_overflow_p;
5978 
5979   /* Don't deal with constants of zero here; they confuse the code below.  */
5980   if (integer_zerop (c))
5981     return NULL_TREE;
5982 
5983   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5984     op0 = TREE_OPERAND (t, 0);
5985 
5986   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5987     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5988 
5989   /* Note that we need not handle conditional operations here since fold
5990      already handles those cases.  So just do arithmetic here.  */
5991   switch (tcode)
5992     {
5993     case INTEGER_CST:
5994       /* For a constant, we can always simplify if we are a multiply
5995 	 or (for divide and modulus) if it is a multiple of our constant.  */
5996       if (code == MULT_EXPR
5997 	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5998 	{
5999 	  tree tem = const_binop (code, fold_convert (ctype, t),
6000 				  fold_convert (ctype, c));
6001 	  /* If the multiplication overflowed, we lost information on it.
6002 	     See PR68142 and PR69845.  */
6003 	  if (TREE_OVERFLOW (tem))
6004 	    return NULL_TREE;
6005 	  return tem;
6006 	}
6007       break;
6008 
6009     CASE_CONVERT: case NON_LVALUE_EXPR:
6010       /* If op0 is an expression ...  */
6011       if ((COMPARISON_CLASS_P (op0)
6012 	   || UNARY_CLASS_P (op0)
6013 	   || BINARY_CLASS_P (op0)
6014 	   || VL_EXP_CLASS_P (op0)
6015 	   || EXPRESSION_CLASS_P (op0))
6016 	  /* ... and has wrapping overflow, and its type is smaller
6017 	     than ctype, then we cannot pass through as widening.  */
6018 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6019 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6020 	       && (TYPE_PRECISION (ctype)
6021 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6022 	      /* ... or this is a truncation (t is narrower than op0),
6023 		 then we cannot pass through this narrowing.  */
6024 	      || (TYPE_PRECISION (type)
6025 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6026 	      /* ... or signedness changes for division or modulus,
6027 		 then we cannot pass through this conversion.  */
6028 	      || (code != MULT_EXPR
6029 		  && (TYPE_UNSIGNED (ctype)
6030 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6031 	      /* ... or has undefined overflow while the converted to
6032 		 type has not, we cannot do the operation in the inner type
6033 		 as that would introduce undefined overflow.  */
6034 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6035 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6036 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6037 	break;
6038 
6039       /* Pass the constant down and see if we can make a simplification.  If
6040 	 we can, replace this expression with the inner simplification for
6041 	 possible later conversion to our or some other type.  */
6042       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6043 	  && TREE_CODE (t2) == INTEGER_CST
6044 	  && !TREE_OVERFLOW (t2)
6045 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
6046 					 code == MULT_EXPR
6047 					 ? ctype : NULL_TREE,
6048 					 strict_overflow_p))))
6049 	return t1;
6050       break;
6051 
6052     case ABS_EXPR:
6053       /* If widening the type changes it from signed to unsigned, then we
6054          must avoid building ABS_EXPR itself as unsigned.  */
6055       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6056         {
6057           tree cstype = (*signed_type_for) (ctype);
6058           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6059 	      != 0)
6060             {
6061               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6062               return fold_convert (ctype, t1);
6063             }
6064           break;
6065         }
6066       /* If the constant is negative, we cannot simplify this.  */
6067       if (tree_int_cst_sgn (c) == -1)
6068         break;
6069       /* FALLTHROUGH */
6070     case NEGATE_EXPR:
6071       /* For division and modulus, type can't be unsigned, as e.g.
6072 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6073 	 For signed types, even with wrapping overflow, this is fine.  */
6074       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6075 	break;
6076       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6077 	  != 0)
6078 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6079       break;
6080 
6081     case MIN_EXPR:  case MAX_EXPR:
6082       /* If widening the type changes the signedness, then we can't perform
6083 	 this optimization as that changes the result.  */
6084       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6085 	break;
6086 
6087       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6088       sub_strict_overflow_p = false;
6089       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6090 				&sub_strict_overflow_p)) != 0
6091 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6092 				   &sub_strict_overflow_p)) != 0)
6093 	{
6094 	  if (tree_int_cst_sgn (c) < 0)
6095 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6096 	  if (sub_strict_overflow_p)
6097 	    *strict_overflow_p = true;
6098 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6099 			      fold_convert (ctype, t2));
6100 	}
6101       break;
6102 
6103     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6104       /* If the second operand is constant, this is a multiplication
6105 	 or floor division by a power of two, so we can treat it that
6106 	 way unless the multiplier or divisor overflows.  Signed
6107 	 left-shift overflow is implementation-defined rather than
6108 	 undefined in C90, so do not convert signed left shift into
6109 	 multiplication.  */
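      /* For instance, an unsigned X << 3 is rewritten here as X * 8 and
	 X >> 2 as X / 4 (a FLOOR_DIV_EXPR), which the recursive
	 extract_muldiv call below then folds with C.  */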
6110       if (TREE_CODE (op1) == INTEGER_CST
6111 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6112 	  /* const_binop may not detect overflow correctly,
6113 	     so check for it explicitly here.  */
6114 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6115 	  && 0 != (t1 = fold_convert (ctype,
6116 				      const_binop (LSHIFT_EXPR,
6117 						   size_one_node,
6118 						   op1)))
6119 	  && !TREE_OVERFLOW (t1))
6120 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6121 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6122 				       ctype,
6123 				       fold_convert (ctype, op0),
6124 				       t1),
6125 			       c, code, wide_type, strict_overflow_p);
6126       break;
6127 
6128     case PLUS_EXPR:  case MINUS_EXPR:
6129       /* See if we can eliminate the operation on both sides.  If we can, we
6130 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6131 	 cases where we can do anything are if the second operand is a
6132 	 constant.  */
6133       sub_strict_overflow_p = false;
6134       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6135       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6136       if (t1 != 0 && t2 != 0
6137 	  && (code == MULT_EXPR
6138 	      /* If not multiplication, we can only do this if both operands
6139 		 are divisible by c.  */
6140 	      || (multiple_of_p (ctype, op0, c)
6141 	          && multiple_of_p (ctype, op1, c))))
6142 	{
6143 	  if (sub_strict_overflow_p)
6144 	    *strict_overflow_p = true;
6145 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6146 			      fold_convert (ctype, t2));
6147 	}
6148 
6149       /* If this was a subtraction, negate OP1 and set it to be an addition.
6150 	 This simplifies the logic below.  */
6151       if (tcode == MINUS_EXPR)
6152 	{
6153 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6154 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6155 	  if (TREE_CODE (op0) == INTEGER_CST)
6156 	    {
6157 	      tree tem = op0;
6158 	      op0 = op1;
6159 	      op1 = tem;
6160 	      tem = t1;
6161 	      t1 = t2;
6162 	      t2 = tem;
6163 	    }
6164 	}
6165 
6166       if (TREE_CODE (op1) != INTEGER_CST)
6167 	break;
6168 
6169       /* If either OP1 or C are negative, this optimization is not safe for
6170 	 some of the division and remainder types, while for others we need
6171 	 to change the code.  */
6172       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6173 	{
6174 	  if (code == CEIL_DIV_EXPR)
6175 	    code = FLOOR_DIV_EXPR;
6176 	  else if (code == FLOOR_DIV_EXPR)
6177 	    code = CEIL_DIV_EXPR;
6178 	  else if (code != MULT_EXPR
6179 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6180 	    break;
6181 	}
6182 
6183       /* If it's a multiply or a division/modulus operation of a multiple
6184          of our constant, do the operation and verify it doesn't overflow.  */
6185       if (code == MULT_EXPR
6186 	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6187 	{
6188 	  op1 = const_binop (code, fold_convert (ctype, op1),
6189 			     fold_convert (ctype, c));
6190 	  /* We allow the constant to overflow with wrapping semantics.  */
6191 	  if (op1 == 0
6192 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6193 	    break;
6194 	}
6195       else
6196 	break;
6197 
6198       /* If we have an unsigned type, we cannot widen the operation since it
6199 	 will change the result if the original computation overflowed.  */
6200       if (TYPE_UNSIGNED (ctype) && ctype != type)
6201 	break;
6202 
6203       /* If we were able to eliminate our operation from the first side,
6204 	 apply our operation to the second side and reform the PLUS.  */
6205       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6206 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6207 
6208       /* The last case is if we are a multiply.  In that case, we can
6209 	 apply the distributive law to commute the multiply and addition
6210 	 if the multiplication of the constants doesn't overflow
6211 	 and overflow is defined.  With undefined overflow
6212 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6213       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6214 	return fold_build2 (tcode, ctype,
6215 			    fold_build2 (code, ctype,
6216 					 fold_convert (ctype, op0),
6217 					 fold_convert (ctype, c)),
6218 			    op1);
6219 
6220       break;
6221 
6222     case MULT_EXPR:
6223       /* We have a special case here if we are doing something like
6224 	 (C * 8) % 4 since we know that's zero.  */
6225       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6226 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6227 	  /* If the multiplication can overflow we cannot optimize this.  */
6228 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6229 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6230 	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6231 	{
6232 	  *strict_overflow_p = true;
6233 	  return omit_one_operand (type, integer_zero_node, op0);
6234 	}
6235 
6236       /* ... fall through ...  */
6237 
6238     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6239     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6240       /* If we can extract our operation from the LHS, do so and return a
6241 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6242 	 do something only if the second operand is a constant.  */
6243       if (same_p
6244 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6245 				   strict_overflow_p)) != 0)
6246 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6247 			    fold_convert (ctype, op1));
6248       else if (tcode == MULT_EXPR && code == MULT_EXPR
6249 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6250 					strict_overflow_p)) != 0)
6251 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6252 			    fold_convert (ctype, t1));
6253       else if (TREE_CODE (op1) != INTEGER_CST)
6254 	return 0;
6255 
6256       /* If these are the same operation types, we can associate them
6257 	 assuming no overflow.  */
6258       if (tcode == code)
6259 	{
6260 	  bool overflow_p = false;
6261 	  bool overflow_mul_p;
6262 	  signop sign = TYPE_SIGN (ctype);
6263 	  unsigned prec = TYPE_PRECISION (ctype);
6264 	  wide_int mul = wi::mul (wide_int::from (op1, prec,
6265 						  TYPE_SIGN (TREE_TYPE (op1))),
6266 				  wide_int::from (c, prec,
6267 						  TYPE_SIGN (TREE_TYPE (c))),
6268 				  sign, &overflow_mul_p);
6269 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6270 	  if (overflow_mul_p
6271 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6272 	    overflow_p = true;
6273 	  if (!overflow_p)
6274 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6275 				wide_int_to_tree (ctype, mul));
6276 	}
6277 
6278       /* If these operations "cancel" each other, we have the main
6279 	 optimizations of this pass, which occur when either constant is a
6280 	 multiple of the other, in which case we replace this with either an
6281 	 operation of CODE or TCODE.
6282 
6283 	 If we have an unsigned type, we cannot do this since it will change
6284 	 the result if the original computation overflowed.  */
6285       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6286 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6287 	      || (tcode == MULT_EXPR
6288 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6289 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6290 		  && code != MULT_EXPR)))
6291 	{
6292 	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6293 	    {
6294 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6295 		*strict_overflow_p = true;
6296 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6297 				  fold_convert (ctype,
6298 						const_binop (TRUNC_DIV_EXPR,
6299 							     op1, c)));
6300 	    }
6301 	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6302 	    {
6303 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6304 		*strict_overflow_p = true;
6305 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6306 				  fold_convert (ctype,
6307 						const_binop (TRUNC_DIV_EXPR,
6308 							     c, op1)));
6309 	    }
6310 	}
6311       break;
6312 
6313     default:
6314       break;
6315     }
6316 
6317   return 0;
6318 }
6319 
6320 /* Return a node which has the indicated constant VALUE (either 0 or
6321    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6322    and is of the indicated TYPE.  */
6323 
6324 tree
6325 constant_boolean_node (bool value, tree type)
6326 {
6327   if (type == integer_type_node)
6328     return value ? integer_one_node : integer_zero_node;
6329   else if (type == boolean_type_node)
6330     return value ? boolean_true_node : boolean_false_node;
6331   else if (TREE_CODE (type) == VECTOR_TYPE)
6332     return build_vector_from_val (type,
6333 				  build_int_cst (TREE_TYPE (type),
6334 						 value ? -1 : 0));
6335   else
6336     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6337 }
6338 
6339 
6340 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6341    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6342    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6343    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6344    COND is the first argument to CODE; otherwise (as in the example
6345    given here), it is the second argument.  TYPE is the type of the
6346    original expression.  Return NULL_TREE if no simplification is
6347    possible.  */
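/* For example, with CODE PLUS_EXPR, `a + (b ? 4 : 8)' becomes
   `b ? (a + 4) : (a + 8)'; if `a' is the constant 1, this folds
   further to `b ? 5 : 9'.  */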
6348 
6349 static tree
6350 fold_binary_op_with_conditional_arg (location_t loc,
6351 				     enum tree_code code,
6352 				     tree type, tree op0, tree op1,
6353 				     tree cond, tree arg, int cond_first_p)
6354 {
6355   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6356   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6357   tree test, true_value, false_value;
6358   tree lhs = NULL_TREE;
6359   tree rhs = NULL_TREE;
6360   enum tree_code cond_code = COND_EXPR;
6361 
6362   if (TREE_CODE (cond) == COND_EXPR
6363       || TREE_CODE (cond) == VEC_COND_EXPR)
6364     {
6365       test = TREE_OPERAND (cond, 0);
6366       true_value = TREE_OPERAND (cond, 1);
6367       false_value = TREE_OPERAND (cond, 2);
6368 	      /* If this operand is a void-typed expression (such as a throw
6369 		 expression), it does not make sense to try to perform a
6370 		 logical or arithmetic operation involving it.  */
6371       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6372 	lhs = true_value;
6373       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6374 	rhs = false_value;
6375     }
6376   else
6377     {
6378       tree testtype = TREE_TYPE (cond);
6379       test = cond;
6380       true_value = constant_boolean_node (true, testtype);
6381       false_value = constant_boolean_node (false, testtype);
6382     }
6383 
6384   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6385     cond_code = VEC_COND_EXPR;
6386 
6387   /* This transformation is only worthwhile if we don't have to wrap ARG
6388      in a SAVE_EXPR and the operation can be simplified without recursing
6389      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6390   if (!TREE_CONSTANT (arg)
6391       && (TREE_SIDE_EFFECTS (arg)
6392 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6393 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6394     return NULL_TREE;
6395 
6396   arg = fold_convert_loc (loc, arg_type, arg);
6397   if (lhs == 0)
6398     {
6399       true_value = fold_convert_loc (loc, cond_type, true_value);
6400       if (cond_first_p)
6401 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6402       else
6403 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6404     }
6405   if (rhs == 0)
6406     {
6407       false_value = fold_convert_loc (loc, cond_type, false_value);
6408       if (cond_first_p)
6409 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6410       else
6411 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6412     }
6413 
6414   /* Check that we have simplified at least one of the branches.  */
6415   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6416     return NULL_TREE;
6417 
6418   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6419 }
6420 
6421 
6422 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6423 
6424    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6425    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6426    ADDEND is the same as X.
6427 
6428    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6429    and finite.  The problematic cases are when X is zero, and its mode
6430    has signed zeros.  In the case of rounding towards -infinity,
6431    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6432    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
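/* For example, in the default round-to-nearest mode, -0.0 + 0.0
   yields +0.0, so `x + 0.0' cannot be folded to `x' when signed
   zeros are honored, whereas `x - 0.0' preserves -0.0 and can be
   folded unless sign-dependent rounding must be honored.  */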
6433 
6434 bool
6435 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6436 {
6437   if (!real_zerop (addend))
6438     return false;
6439 
6440   /* Don't allow the fold with -fsignaling-nans.  */
6441   if (HONOR_SNANS (element_mode (type)))
6442     return false;
6443 
6444   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6445   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6446     return true;
6447 
6448   /* In a vector or complex, we would need to check the sign of all zeros.  */
6449   if (TREE_CODE (addend) != REAL_CST)
6450     return false;
6451 
6452   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6453   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6454     negate = !negate;
6455 
6456   /* The mode has signed zeros, and we have to honor their sign.
6457      In this situation, there is only one case we can return true for.
6458      X - 0 is the same as X unless rounding towards -infinity must
6459      be honored.  */
6460   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6461 }
6462 
6463 /* Subroutine of fold() that checks comparisons of built-in math
6464    functions against real constants.
6465 
6466    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6467    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6468    is the type of the result and ARG0 and ARG1 are the operands of the
6469    comparison.  ARG1 must be a TREE_REAL_CST.
6470 
6471    The function returns the constant folded tree if a simplification
6472    can be made, and NULL_TREE otherwise.  */
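/* For example, `sqrt (x) > 2.0' becomes `x > 4.0': squaring the
   nonnegative bound preserves the ordering, and a NaN operand makes
   both forms compare false.  */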
6473 
6474 static tree
6475 fold_mathfn_compare (location_t loc,
6476 		     enum built_in_function fcode, enum tree_code code,
6477 		     tree type, tree arg0, tree arg1)
6478 {
6479   REAL_VALUE_TYPE c;
6480 
6481   if (BUILTIN_SQRT_P (fcode))
6482     {
6483       tree arg = CALL_EXPR_ARG (arg0, 0);
6484       machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6485 
6486       c = TREE_REAL_CST (arg1);
6487       if (REAL_VALUE_NEGATIVE (c))
6488 	{
6489 	  /* sqrt(x) < y (likewise <= and ==) is always false, if y is negative.  */
6490 	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6491 	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6492 
6493 	  /* sqrt(x) > y is always true, if y is negative and we
6494 	     don't care about NaNs, i.e. negative values of x.  */
6495 	  if (code == NE_EXPR || !HONOR_NANS (mode))
6496 	    return omit_one_operand_loc (loc, type, integer_one_node, arg);
6497 
6498 	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6499 	  return fold_build2_loc (loc, GE_EXPR, type, arg,
6500 			      build_real (TREE_TYPE (arg), dconst0));
6501 	}
6502       else if (code == GT_EXPR || code == GE_EXPR)
6503 	{
6504 	  REAL_VALUE_TYPE c2;
6505 
6506 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6507 	  real_convert (&c2, mode, &c2);
6508 
6509 	  if (REAL_VALUE_ISINF (c2))
6510 	    {
6511 	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6512 	      if (HONOR_INFINITIES (mode))
6513 		return fold_build2_loc (loc, EQ_EXPR, type, arg,
6514 				    build_real (TREE_TYPE (arg), c2));
6515 
6516 	      /* sqrt(x) > y is always false, when y is very large
6517 		 and we don't care about infinities.  */
6518 	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6519 	    }
6520 
6521 	  /* sqrt(x) > c is the same as x > c*c.  */
6522 	  return fold_build2_loc (loc, code, type, arg,
6523 			      build_real (TREE_TYPE (arg), c2));
6524 	}
6525       else if (code == LT_EXPR || code == LE_EXPR)
6526 	{
6527 	  REAL_VALUE_TYPE c2;
6528 
6529 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6530 	  real_convert (&c2, mode, &c2);
6531 
6532 	  if (REAL_VALUE_ISINF (c2))
6533 	    {
6534 	      /* sqrt(x) < y is always true, when y is a very large
6535 		 value and we don't care about NaNs or Infinities.  */
6536 	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6537 		return omit_one_operand_loc (loc, type, integer_one_node, arg);
6538 
6539 	      /* sqrt(x) < y is x != +Inf when y is very large and we
6540 		 don't care about NaNs.  */
6541 	      if (! HONOR_NANS (mode))
6542 		return fold_build2_loc (loc, NE_EXPR, type, arg,
6543 				    build_real (TREE_TYPE (arg), c2));
6544 
6545 	      /* sqrt(x) < y is x >= 0 when y is very large and we
6546 		 don't care about Infinities.  */
6547 	      if (! HONOR_INFINITIES (mode))
6548 		return fold_build2_loc (loc, GE_EXPR, type, arg,
6549 				    build_real (TREE_TYPE (arg), dconst0));
6550 
6551 	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6552 	      arg = save_expr (arg);
6553 	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6554 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6555 					       build_real (TREE_TYPE (arg),
6556 							   dconst0)),
6557 				  fold_build2_loc (loc, NE_EXPR, type, arg,
6558 					       build_real (TREE_TYPE (arg),
6559 							   c2)));
6560 	    }
6561 
6562 	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6563 	  if (! HONOR_NANS (mode))
6564 	    return fold_build2_loc (loc, code, type, arg,
6565 				build_real (TREE_TYPE (arg), c2));
6566 
6567 	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6568 	  arg = save_expr (arg);
6569 	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6570 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6571 					       build_real (TREE_TYPE (arg),
6572 							   dconst0)),
6573 				  fold_build2_loc (loc, code, type, arg,
6574 					       build_real (TREE_TYPE (arg),
6575 							   c2)));
6576 	}
6577     }
6578 
6579   return NULL_TREE;
6580 }
6581 
6582 /* Subroutine of fold() that optimizes comparisons against Infinities,
6583    either +Inf or -Inf.
6584 
6585    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6586    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6587    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6588 
6589    The function returns the constant folded tree if a simplification
6590    can be made, and NULL_TREE otherwise.  */
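/* For example, `x < +Inf' for a double X becomes `x <= DBL_MAX':
   both are true for every finite X and false for +Inf and NaN.  */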
6591 
6592 static tree
6593 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6594 		  tree arg0, tree arg1)
6595 {
6596   machine_mode mode;
6597   REAL_VALUE_TYPE max;
6598   tree temp;
6599   bool neg;
6600 
6601   mode = TYPE_MODE (TREE_TYPE (arg0));
6602 
6603   /* For negative infinity swap the sense of the comparison.  */
6604   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6605   if (neg)
6606     code = swap_tree_comparison (code);
6607 
6608   switch (code)
6609     {
6610     case GT_EXPR:
6611       /* x > +Inf is always false, if we ignore sNaNs.  */
6612       if (HONOR_SNANS (mode))
6613         return NULL_TREE;
6614       return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6615 
6616     case LE_EXPR:
6617       /* x <= +Inf is always true, if we don't care about NaNs.  */
6618       if (! HONOR_NANS (mode))
6619 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6620 
6621       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6622       arg0 = save_expr (arg0);
6623       return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6624 
6625     case EQ_EXPR:
6626     case GE_EXPR:
6627       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6628       real_maxval (&max, neg, mode);
6629       return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6630 			  arg0, build_real (TREE_TYPE (arg0), max));
6631 
6632     case LT_EXPR:
6633       /* x < +Inf is always equal to x <= DBL_MAX.  */
6634       real_maxval (&max, neg, mode);
6635       return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6636 			  arg0, build_real (TREE_TYPE (arg0), max));
6637 
6638     case NE_EXPR:
6639       /* x != +Inf is always equal to !(x > DBL_MAX).  */
6640       real_maxval (&max, neg, mode);
6641       if (! HONOR_NANS (mode))
6642 	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6643 			    arg0, build_real (TREE_TYPE (arg0), max));
6644 
6645       temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6646 			  arg0, build_real (TREE_TYPE (arg0), max));
6647       return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6648 
6649     default:
6650       break;
6651     }
6652 
6653   return NULL_TREE;
6654 }
6655 
6656 /* Subroutine of fold() that optimizes comparisons of a division by
6657    a nonzero integer constant against an integer constant, i.e.
6658    X/C1 op C2.
6659 
6660    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6661    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6662    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6663 
6664    The function returns the constant folded tree if a simplification
6665    can be made, and NULL_TREE otherwise.  */
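/* For example, for signed X, `X / 4 == 3' becomes the range check
   `X >= 12 && X <= 15', since truncating division yields 3 exactly
   for that interval.  */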
6666 
6667 static tree
6668 fold_div_compare (location_t loc,
6669 		  enum tree_code code, tree type, tree arg0, tree arg1)
6670 {
6671   tree prod, tmp, hi, lo;
6672   tree arg00 = TREE_OPERAND (arg0, 0);
6673   tree arg01 = TREE_OPERAND (arg0, 1);
6674   signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6675   bool neg_overflow = false;
6676   bool overflow;
6677 
6678   /* We have to do this the hard way to detect unsigned overflow.
6679      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6680   wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6681   prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6682   neg_overflow = false;
6683 
6684   if (sign == UNSIGNED)
6685     {
6686       tmp = int_const_binop (MINUS_EXPR, arg01,
6687                              build_int_cst (TREE_TYPE (arg01), 1));
6688       lo = prod;
6689 
6690       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6691       val = wi::add (prod, tmp, sign, &overflow);
6692       hi = force_fit_type (TREE_TYPE (arg00), val,
6693 			   -1, overflow | TREE_OVERFLOW (prod));
6694     }
6695   else if (tree_int_cst_sgn (arg01) >= 0)
6696     {
6697       tmp = int_const_binop (MINUS_EXPR, arg01,
6698 			     build_int_cst (TREE_TYPE (arg01), 1));
6699       switch (tree_int_cst_sgn (arg1))
6700 	{
6701 	case -1:
6702 	  neg_overflow = true;
6703 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6704 	  hi = prod;
6705 	  break;
6706 
6707 	case  0:
6708 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6709 	  hi = tmp;
6710 	  break;
6711 
6712 	case  1:
6713           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6714 	  lo = prod;
6715 	  break;
6716 
6717 	default:
6718 	  gcc_unreachable ();
6719 	}
6720     }
6721   else
6722     {
6723       /* A negative divisor reverses the relational operators.  */
6724       code = swap_tree_comparison (code);
6725 
6726       tmp = int_const_binop (PLUS_EXPR, arg01,
6727 			     build_int_cst (TREE_TYPE (arg01), 1));
6728       switch (tree_int_cst_sgn (arg1))
6729 	{
6730 	case -1:
6731 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6732 	  lo = prod;
6733 	  break;
6734 
6735 	case  0:
6736 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6737 	  lo = tmp;
6738 	  break;
6739 
6740 	case  1:
6741 	  neg_overflow = true;
6742 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6743 	  hi = prod;
6744 	  break;
6745 
6746 	default:
6747 	  gcc_unreachable ();
6748 	}
6749     }
6750 
6751   switch (code)
6752     {
6753     case EQ_EXPR:
6754       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6755 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6756       if (TREE_OVERFLOW (hi))
6757 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6758       if (TREE_OVERFLOW (lo))
6759 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6760       return build_range_check (loc, type, arg00, 1, lo, hi);
6761 
6762     case NE_EXPR:
6763       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6764 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6765       if (TREE_OVERFLOW (hi))
6766 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6767       if (TREE_OVERFLOW (lo))
6768 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6769       return build_range_check (loc, type, arg00, 0, lo, hi);
6770 
6771     case LT_EXPR:
6772       if (TREE_OVERFLOW (lo))
6773 	{
6774 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6775 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6776 	}
6777       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6778 
6779     case LE_EXPR:
6780       if (TREE_OVERFLOW (hi))
6781 	{
6782 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6783 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6784 	}
6785       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6786 
6787     case GT_EXPR:
6788       if (TREE_OVERFLOW (hi))
6789 	{
6790 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6791 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6792 	}
6793       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6794 
6795     case GE_EXPR:
6796       if (TREE_OVERFLOW (lo))
6797 	{
6798 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6799 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6800 	}
6801       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6802 
6803     default:
6804       break;
6805     }
6806 
6807   return NULL_TREE;
6808 }
6809 
6810 
6811 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6812    equality/inequality test, then return a simplified form of the test
6813    using a sign test.  Otherwise return NULL.  TYPE is the desired
6814    result type.  */
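/* For example, for a 32-bit signed A, `(A & 0x80000000) != 0' tests
   only the sign bit and becomes `A < 0'.  */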
6815 
6816 static tree
6817 fold_single_bit_test_into_sign_test (location_t loc,
6818 				     enum tree_code code, tree arg0, tree arg1,
6819 				     tree result_type)
6820 {
6821   /* If this is testing a single bit, we can optimize the test.  */
6822   if ((code == NE_EXPR || code == EQ_EXPR)
6823       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6824       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6825     {
6826       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6827 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6828       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6829 
6830       if (arg00 != NULL_TREE
6831 	  /* This is only a win if casting to a signed type is cheap,
6832 	     i.e. when arg00's type is not a partial mode.  */
6833 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6834 	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6835 	{
6836 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6837 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6838 			      result_type,
6839 			      fold_convert_loc (loc, stype, arg00),
6840 			      build_int_cst (stype, 0));
6841 	}
6842     }
6843 
6844   return NULL_TREE;
6845 }
6846 
6847 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6848    equality/inequality test, then return a simplified form of
6849    the test using shifts and logical operations.  Otherwise return
6850    NULL.  TYPE is the desired result type.  */
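/* For example, `(A & 8) != 0' becomes `(A >> 3) & 1', and
   `(A & 8) == 0' becomes `((A >> 3) ^ 1) & 1'.  */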
6851 
6852 tree
6853 fold_single_bit_test (location_t loc, enum tree_code code,
6854 		      tree arg0, tree arg1, tree result_type)
6855 {
6856   /* If this is testing a single bit, we can optimize the test.  */
6857   if ((code == NE_EXPR || code == EQ_EXPR)
6858       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6859       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6860     {
6861       tree inner = TREE_OPERAND (arg0, 0);
6862       tree type = TREE_TYPE (arg0);
6863       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6864       machine_mode operand_mode = TYPE_MODE (type);
6865       int ops_unsigned;
6866       tree signed_type, unsigned_type, intermediate_type;
6867       tree tem, one;
6868 
6869       /* First, see if we can fold the single bit test into a sign-bit
6870 	 test.  */
6871       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6872 						 result_type);
6873       if (tem)
6874 	return tem;
6875 
6876       /* Otherwise we have (A & C) != 0 where C is a single bit,
6877 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6878 	 Similarly for (A & C) == 0.  */
6879 
6880       /* If INNER is a right shift of a constant and it plus BITNUM does
6881 	 not overflow, adjust BITNUM and INNER.  */
6882       if (TREE_CODE (inner) == RSHIFT_EXPR
6883 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6884 	  && bitnum < TYPE_PRECISION (type)
6885 	  && wi::ltu_p (TREE_OPERAND (inner, 1),
6886 			TYPE_PRECISION (type) - bitnum))
6887 	{
6888 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6889 	  inner = TREE_OPERAND (inner, 0);
6890 	}
6891 
6892       /* If we are going to be able to omit the AND below, we must do our
6893 	 operations as unsigned.  If we must use the AND, we have a choice.
6894 	 Normally unsigned is faster, but for some machines signed is.  */
6895 #ifdef LOAD_EXTEND_OP
6896       ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6897 		      && !flag_syntax_only) ? 0 : 1;
6898 #else
6899       ops_unsigned = 1;
6900 #endif
6901 
6902       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6903       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6904       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6905       inner = fold_convert_loc (loc, intermediate_type, inner);
6906 
6907       if (bitnum != 0)
6908 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6909 			inner, size_int (bitnum));
6910 
6911       one = build_int_cst (intermediate_type, 1);
6912 
6913       if (code == EQ_EXPR)
6914 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6915 
6916       /* Put the AND last so it can combine with more things.  */
6917       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6918 
6919       /* Make sure to return the proper type.  */
6920       inner = fold_convert_loc (loc, result_type, inner);
6921 
6922       return inner;
6923     }
6924   return NULL_TREE;
6925 }
6926 
6927 /* Check whether we are allowed to reorder operands arg0 and arg1,
6928    such that the evaluation of arg1 occurs before arg0.  */
6929 
6930 static bool
6931 reorder_operands_p (const_tree arg0, const_tree arg1)
6932 {
6933   if (! flag_evaluation_order)
6934     return true;
6935   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6936     return true;
6937   return ! TREE_SIDE_EFFECTS (arg0)
6938 	 && ! TREE_SIDE_EFFECTS (arg1);
6939 }
6940 
6941 /* Test whether it is preferable to swap two operands, ARG0 and
6942    ARG1, for example because ARG0 is an integer constant and ARG1
6943    isn't.  If REORDER is true, only recommend swapping if we can
6944    evaluate the operands in reverse order.  */
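/* For example, a caller folding the commutative `5 + x' swaps the
   operands to produce `x + 5', so constants consistently appear as
   the second operand.  */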
6945 
6946 bool
6947 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6948 {
6949   if (CONSTANT_CLASS_P (arg1))
6950     return 0;
6951   if (CONSTANT_CLASS_P (arg0))
6952     return 1;
6953 
6954   STRIP_NOPS (arg0);
6955   STRIP_NOPS (arg1);
6956 
6957   if (TREE_CONSTANT (arg1))
6958     return 0;
6959   if (TREE_CONSTANT (arg0))
6960     return 1;
6961 
6962   if (reorder && flag_evaluation_order
6963       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6964     return 0;
6965 
6966   /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6967      for commutative and comparison operators.  Ensuring a canonical
6968      form allows the optimizers to find additional redundancies without
6969      having to explicitly check for both orderings.  */
6970   if (TREE_CODE (arg0) == SSA_NAME
6971       && TREE_CODE (arg1) == SSA_NAME
6972       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6973     return 1;
6974 
6975   /* Put SSA_NAMEs last.  */
6976   if (TREE_CODE (arg1) == SSA_NAME)
6977     return 0;
6978   if (TREE_CODE (arg0) == SSA_NAME)
6979     return 1;
6980 
6981   /* Put variables last.  */
6982   if (DECL_P (arg1))
6983     return 0;
6984   if (DECL_P (arg0))
6985     return 1;
6986 
6987   return 0;
6988 }
6989 
6990 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6991    ARG0 is extended to a wider type.  */
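/* For example, if C has type unsigned char, `(int) c > 300' is
   always false: every value of C widens into [0, 255], which lies
   entirely below 300.  */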
6992 
6993 static tree
6994 fold_widened_comparison (location_t loc, enum tree_code code,
6995 			 tree type, tree arg0, tree arg1)
6996 {
6997   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6998   tree arg1_unw;
6999   tree shorter_type, outer_type;
7000   tree min, max;
7001   bool above, below;
7002 
7003   if (arg0_unw == arg0)
7004     return NULL_TREE;
7005   shorter_type = TREE_TYPE (arg0_unw);
7006 
7007 #ifdef HAVE_canonicalize_funcptr_for_compare
7008   /* Disable this optimization if we're casting a function pointer
7009      type on targets that require function pointer canonicalization.  */
7010   if (HAVE_canonicalize_funcptr_for_compare
7011       && TREE_CODE (shorter_type) == POINTER_TYPE
7012       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7013     return NULL_TREE;
7014 #endif
7015 
7016   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7017     return NULL_TREE;
7018 
7019   arg1_unw = get_unwidened (arg1, NULL_TREE);
7020 
7021   /* If possible, express the comparison in the shorter mode.  */
7022   if ((code == EQ_EXPR || code == NE_EXPR
7023        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7024       && (TREE_TYPE (arg1_unw) == shorter_type
7025 	  || ((TYPE_PRECISION (shorter_type)
7026 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7027 	      && (TYPE_UNSIGNED (shorter_type)
7028 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7029 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
7030 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
7031 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7032 	      && int_fits_type_p (arg1_unw, shorter_type))))
7033     return fold_build2_loc (loc, code, type, arg0_unw,
7034 			fold_convert_loc (loc, shorter_type, arg1_unw));
7035 
7036   if (TREE_CODE (arg1_unw) != INTEGER_CST
7037       || TREE_CODE (shorter_type) != INTEGER_TYPE
7038       || !int_fits_type_p (arg1_unw, shorter_type))
7039     return NULL_TREE;
7040 
7041   /* If we are comparing with an integer that does not fit into the range
7042      of the shorter type, the result is known.  */
7043   outer_type = TREE_TYPE (arg1_unw);
7044   min = lower_bound_in_type (outer_type, shorter_type);
7045   max = upper_bound_in_type (outer_type, shorter_type);
7046 
7047   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7048 						   max, arg1_unw));
7049   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7050 						   arg1_unw, min));
7051 
7052   switch (code)
7053     {
7054     case EQ_EXPR:
7055       if (above || below)
7056 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7057       break;
7058 
7059     case NE_EXPR:
7060       if (above || below)
7061 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7062       break;
7063 
7064     case LT_EXPR:
7065     case LE_EXPR:
7066       if (above)
7067 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7068       else if (below)
7069 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;
7070 
7071     case GT_EXPR:
7072     case GE_EXPR:
7073       if (above)
7074 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7075       else if (below)
7076 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;
7077 
7078     default:
7079       break;
7080     }
7081 
7082   return NULL_TREE;
7083 }
7084 
7085 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7086    ARG0 just the signedness is changed.  */
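/* For example, for a signed int I, `(unsigned int) i == 5u' becomes
   `i == 5': equality is unaffected by a cast that only changes the
   signedness.  */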
7087 
7088 static tree
7089 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7090 			      tree arg0, tree arg1)
7091 {
7092   tree arg0_inner;
7093   tree inner_type, outer_type;
7094 
7095   if (!CONVERT_EXPR_P (arg0))
7096     return NULL_TREE;
7097 
7098   outer_type = TREE_TYPE (arg0);
7099   arg0_inner = TREE_OPERAND (arg0, 0);
7100   inner_type = TREE_TYPE (arg0_inner);
7101 
7102 #ifdef HAVE_canonicalize_funcptr_for_compare
7103   /* Disable this optimization if we're casting a function pointer
7104      type on targets that require function pointer canonicalization.  */
7105   if (HAVE_canonicalize_funcptr_for_compare
7106       && TREE_CODE (inner_type) == POINTER_TYPE
7107       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7108     return NULL_TREE;
7109 #endif
7110 
7111   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7112     return NULL_TREE;
7113 
7114   if (TREE_CODE (arg1) != INTEGER_CST
7115       && !(CONVERT_EXPR_P (arg1)
7116 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7117     return NULL_TREE;
7118 
7119   if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7120       && code != NE_EXPR
7121       && code != EQ_EXPR)
7122     return NULL_TREE;
7123 
7124   if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7125     return NULL_TREE;
7126 
7127   if (TREE_CODE (arg1) == INTEGER_CST)
7128     arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7129 			   TREE_OVERFLOW (arg1));
7130   else
7131     arg1 = fold_convert_loc (loc, inner_type, arg1);
7132 
7133   return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7134 }
7135 
7136 
7137 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7138    means A >= Y && A != MAX, but in this case we know that
7139    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7140 
7141 static tree
7142 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7143 {
7144   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7145 
7146   if (TREE_CODE (bound) == LT_EXPR)
7147     a = TREE_OPERAND (bound, 0);
7148   else if (TREE_CODE (bound) == GT_EXPR)
7149     a = TREE_OPERAND (bound, 1);
7150   else
7151     return NULL_TREE;
7152 
7153   typea = TREE_TYPE (a);
7154   if (!INTEGRAL_TYPE_P (typea)
7155       && !POINTER_TYPE_P (typea))
7156     return NULL_TREE;
7157 
7158   if (TREE_CODE (ineq) == LT_EXPR)
7159     {
7160       a1 = TREE_OPERAND (ineq, 1);
7161       y = TREE_OPERAND (ineq, 0);
7162     }
7163   else if (TREE_CODE (ineq) == GT_EXPR)
7164     {
7165       a1 = TREE_OPERAND (ineq, 0);
7166       y = TREE_OPERAND (ineq, 1);
7167     }
7168   else
7169     return NULL_TREE;
7170 
7171   if (TREE_TYPE (a1) != typea)
7172     return NULL_TREE;
7173 
7174   if (POINTER_TYPE_P (typea))
7175     {
7176       /* Convert the pointers into integers before taking the difference.  */
7177       tree ta = fold_convert_loc (loc, ssizetype, a);
7178       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7179       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7180     }
7181   else
7182     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7183 
7184   if (!diff || !integer_onep (diff))
7185    return NULL_TREE;
7186 
7187   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7188 }
7189 
7190 /* Fold a sum or difference of at least one multiplication.
7191    Returns the folded tree or NULL if no simplification could be made.  */
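/* For example, `x * 3 + x * 5' becomes `(3 + 5) * x', and
   `x * 7 - x' becomes `(7 - 1) * x' via the (A * C) +- A form.  */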
7192 
7193 static tree
7194 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7195 			  tree arg0, tree arg1)
7196 {
7197   tree arg00, arg01, arg10, arg11;
7198   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7199 
7200   /* (A * C) +- (B * C) -> (A+-B) * C.
7201      (A * C) +- A -> A * (C+-1).
7202      We are most concerned about the case where C is a constant,
7203      but other combinations show up during loop reduction.  Since
7204      it is not difficult, try all four possibilities.  */
7205 
7206   if (TREE_CODE (arg0) == MULT_EXPR)
7207     {
7208       arg00 = TREE_OPERAND (arg0, 0);
7209       arg01 = TREE_OPERAND (arg0, 1);
7210     }
7211   else if (TREE_CODE (arg0) == INTEGER_CST)
7212     {
7213       arg00 = build_one_cst (type);
7214       arg01 = arg0;
7215     }
7216   else
7217     {
7218       /* We cannot generate constant 1 for fract.  */
7219       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7220 	return NULL_TREE;
7221       arg00 = arg0;
7222       arg01 = build_one_cst (type);
7223     }
7224   if (TREE_CODE (arg1) == MULT_EXPR)
7225     {
7226       arg10 = TREE_OPERAND (arg1, 0);
7227       arg11 = TREE_OPERAND (arg1, 1);
7228     }
7229   else if (TREE_CODE (arg1) == INTEGER_CST)
7230     {
7231       arg10 = build_one_cst (type);
7232       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7233 	 the purpose of this canonicalization.  */
7234       if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7235 	  && negate_expr_p (arg1)
7236 	  && code == PLUS_EXPR)
7237 	{
7238 	  arg11 = negate_expr (arg1);
7239 	  code = MINUS_EXPR;
7240 	}
7241       else
7242 	arg11 = arg1;
7243     }
7244   else
7245     {
7246       /* We cannot generate constant 1 for fract.  */
7247       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7248 	return NULL_TREE;
7249       arg10 = arg1;
7250       arg11 = build_one_cst (type);
7251     }
7252   same = NULL_TREE;
7253 
7254   if (operand_equal_p (arg01, arg11, 0))
7255     same = arg01, alt0 = arg00, alt1 = arg10;
7256   else if (operand_equal_p (arg00, arg10, 0))
7257     same = arg00, alt0 = arg01, alt1 = arg11;
7258   else if (operand_equal_p (arg00, arg11, 0))
7259     same = arg00, alt0 = arg01, alt1 = arg10;
7260   else if (operand_equal_p (arg01, arg10, 0))
7261     same = arg01, alt0 = arg00, alt1 = arg11;
7262 
7263   /* No identical multiplicands; see if we can find a common
7264      power-of-two factor in non-power-of-two multiplies.  This
7265      can help in multi-dimensional array access.  */
7266   else if (tree_fits_shwi_p (arg01)
7267 	   && tree_fits_shwi_p (arg11))
7268     {
7269       HOST_WIDE_INT int01, int11, tmp;
7270       bool swap = false;
7271       tree maybe_same;
7272       int01 = tree_to_shwi (arg01);
7273       int11 = tree_to_shwi (arg11);
7274 
7275       /* Move min of absolute values to int11.  */
7276       if (absu_hwi (int01) < absu_hwi (int11))
7277         {
7278 	  tmp = int01, int01 = int11, int11 = tmp;
7279 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7280 	  maybe_same = arg01;
7281 	  swap = true;
7282 	}
7283       else
7284 	maybe_same = arg11;
7285 
7286       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7287 	  /* The remainder should not be a constant, otherwise we
7288 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7289 	     increase the number of multiplications necessary.  */
7290 	  && TREE_CODE (arg10) != INTEGER_CST)
7291         {
7292 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7293 			      build_int_cst (TREE_TYPE (arg00),
7294 					     int01 / int11));
7295 	  alt1 = arg10;
7296 	  same = maybe_same;
7297 	  if (swap)
7298 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7299 	}
7300     }
7301 
7302   if (same)
7303     return fold_build2_loc (loc, MULT_EXPR, type,
7304 			fold_build2_loc (loc, code, type,
7305 				     fold_convert_loc (loc, type, alt0),
7306 				     fold_convert_loc (loc, type, alt1)),
7307 			fold_convert_loc (loc, type, same));
7308 
7309   return NULL_TREE;
7310 }
7311 
7312 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7313    specified by EXPR into the buffer PTR of length LEN bytes.
7314    Return the number of bytes placed in the buffer, or zero
7315    upon failure.  */
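/* For example, the 32-bit constant 0x11223344 is encoded as the
   bytes 44 33 22 11 on a little-endian target and as 11 22 33 44 on
   a big-endian one.  */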
7316 
7317 static int
7318 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7319 {
7320   tree type = TREE_TYPE (expr);
7321   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7322   int byte, offset, word, words;
7323   unsigned char value;
7324 
7325   if ((off == -1 && total_bytes > len)
7326       || off >= total_bytes)
7327     return 0;
7328   if (off == -1)
7329     off = 0;
7330   words = total_bytes / UNITS_PER_WORD;
7331 
7332   for (byte = 0; byte < total_bytes; byte++)
7333     {
7334       int bitpos = byte * BITS_PER_UNIT;
7335       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7336 	 number of bytes.  */
7337       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7338 
7339       if (total_bytes > UNITS_PER_WORD)
7340 	{
7341 	  word = byte / UNITS_PER_WORD;
7342 	  if (WORDS_BIG_ENDIAN)
7343 	    word = (words - 1) - word;
7344 	  offset = word * UNITS_PER_WORD;
7345 	  if (BYTES_BIG_ENDIAN)
7346 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7347 	  else
7348 	    offset += byte % UNITS_PER_WORD;
7349 	}
7350       else
7351 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7352       if (offset >= off
7353 	  && offset - off < len)
7354 	ptr[offset - off] = value;
7355     }
7356   return MIN (len, total_bytes - off);
7357 }
7358 
7359 
7360 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7361    specified by EXPR into the buffer PTR of length LEN bytes.
7362    Return the number of bytes placed in the buffer, or zero
7363    upon failure.  */
7364 
7365 static int
7366 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7367 {
7368   tree type = TREE_TYPE (expr);
7369   machine_mode mode = TYPE_MODE (type);
7370   int total_bytes = GET_MODE_SIZE (mode);
7371   FIXED_VALUE_TYPE value;
7372   tree i_value, i_type;
7373 
7374   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7375     return 0;
7376 
7377   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7378 
7379   if (NULL_TREE == i_type
7380       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7381     return 0;
7382 
7383   value = TREE_FIXED_CST (expr);
7384   i_value = double_int_to_tree (i_type, value.data);
7385 
7386   return native_encode_int (i_value, ptr, len, off);
7387 }
7388 
7389 
7390 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7391    specified by EXPR into the buffer PTR of length LEN bytes.
7392    Return the number of bytes placed in the buffer, or zero
7393    upon failure.  */
7394 
7395 static int
7396 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7397 {
7398   tree type = TREE_TYPE (expr);
7399   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7400   int byte, offset, word, words, bitpos;
7401   unsigned char value;
7402 
7403   /* There are always 32 bits in each long, no matter the size of
7404      the host's long.  We handle floating point representations with
7405      up to 192 bits.  */
7406   long tmp[6];
7407 
7408   if ((off == -1 && total_bytes > len)
7409       || off >= total_bytes)
7410     return 0;
7411   if (off == -1)
7412     off = 0;
7413   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7414 
7415   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7416 
7417   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7418        bitpos += BITS_PER_UNIT)
7419     {
7420       byte = (bitpos / BITS_PER_UNIT) & 3;
7421       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7422 
7423       if (UNITS_PER_WORD < 4)
7424 	{
7425 	  word = byte / UNITS_PER_WORD;
7426 	  if (WORDS_BIG_ENDIAN)
7427 	    word = (words - 1) - word;
7428 	  offset = word * UNITS_PER_WORD;
7429 	  if (BYTES_BIG_ENDIAN)
7430 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7431 	  else
7432 	    offset += byte % UNITS_PER_WORD;
7433 	}
7434       else
7435 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7436       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7437       if (offset >= off
7438 	  && offset - off < len)
7439 	ptr[offset - off] = value;
7440     }
7441   return MIN (len, total_bytes - off);
7442 }
7443 
7444 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7445    specified by EXPR into the buffer PTR of length LEN bytes.
7446    Return the number of bytes placed in the buffer, or zero
7447    upon failure.  */
7448 
7449 static int
7450 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7451 {
7452   int rsize, isize;
7453   tree part;
7454 
7455   part = TREE_REALPART (expr);
7456   rsize = native_encode_expr (part, ptr, len, off);
7457   if (off == -1
7458       && rsize == 0)
7459     return 0;
7460   part = TREE_IMAGPART (expr);
7461   if (off != -1)
7462     off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7463   isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7464   if (off == -1
7465       && isize != rsize)
7466     return 0;
7467   return rsize + isize;
7468 }
7469 
7470 
7471 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7472    specified by EXPR into the buffer PTR of length LEN bytes.
7473    Return the number of bytes placed in the buffer, or zero
7474    upon failure.  */
7475 
7476 static int
7477 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7478 {
7479   unsigned i, count;
7480   int size, offset;
7481   tree itype, elem;
7482 
7483   offset = 0;
7484   count = VECTOR_CST_NELTS (expr);
7485   itype = TREE_TYPE (TREE_TYPE (expr));
7486   size = GET_MODE_SIZE (TYPE_MODE (itype));
7487   for (i = 0; i < count; i++)
7488     {
7489       if (off >= size)
7490 	{
7491 	  off -= size;
7492 	  continue;
7493 	}
7494       elem = VECTOR_CST_ELT (expr, i);
7495       int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7496       if ((off == -1 && res != size)
7497 	  || res == 0)
7498 	return 0;
7499       offset += res;
7500       if (offset >= len)
7501 	return offset;
7502       if (off != -1)
7503 	off = 0;
7504     }
7505   return offset;
7506 }
7507 
7508 
7509 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7510    specified by EXPR into the buffer PTR of length LEN bytes.
7511    Return the number of bytes placed in the buffer, or zero
7512    upon failure.  */
7513 
7514 static int
7515 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7516 {
7517   tree type = TREE_TYPE (expr);
7518   HOST_WIDE_INT total_bytes;
7519 
7520   if (TREE_CODE (type) != ARRAY_TYPE
7521       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7522       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7523       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7524     return 0;
7525   total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7526   if ((off == -1 && total_bytes > len)
7527       || off >= total_bytes)
7528     return 0;
7529   if (off == -1)
7530     off = 0;
7531   if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7532     {
7533       int written = 0;
7534       if (off < TREE_STRING_LENGTH (expr))
7535 	{
7536 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7537 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7538 	}
7539       memset (ptr + written, 0,
7540 	      MIN (total_bytes - written, len - written));
7541     }
7542   else
7543     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7544   return MIN (total_bytes - off, len);
7545 }
7546 
7547 
7548 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7549    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7550    buffer PTR of length LEN bytes.  If OFF is not -1 then start
7551    the encoding at byte offset OFF and encode at most LEN bytes.
7552    Return the number of bytes placed in the buffer, or zero upon failure.  */
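/* For example, passing OFF == -1 encodes the whole constant starting
   at byte 0 and fails if LEN is too small, while a nonnegative OFF
   encodes at most LEN bytes starting at that byte offset.  */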
7553 
7554 int
7555 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7556 {
7557   /* We don't support starting at a negative offset, and -1 is special.  */
7558   if (off < -1)
7559     return 0;
7560 
7561   switch (TREE_CODE (expr))
7562     {
7563     case INTEGER_CST:
7564       return native_encode_int (expr, ptr, len, off);
7565 
7566     case REAL_CST:
7567       return native_encode_real (expr, ptr, len, off);
7568 
7569     case FIXED_CST:
7570       return native_encode_fixed (expr, ptr, len, off);
7571 
7572     case COMPLEX_CST:
7573       return native_encode_complex (expr, ptr, len, off);
7574 
7575     case VECTOR_CST:
7576       return native_encode_vector (expr, ptr, len, off);
7577 
7578     case STRING_CST:
7579       return native_encode_string (expr, ptr, len, off);
7580 
7581     default:
7582       return 0;
7583     }
7584 }
7585 
7586 
7587 /* Subroutine of native_interpret_expr.  Interpret the contents of
7588    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7589    If the buffer cannot be interpreted, return NULL_TREE.  */
7590 
7591 static tree
7592 native_interpret_int (tree type, const unsigned char *ptr, int len)
7593 {
7594   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7595 
7596   if (total_bytes > len
7597       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7598     return NULL_TREE;
7599 
7600   wide_int result = wi::from_buffer (ptr, total_bytes);
7601 
7602   return wide_int_to_tree (type, result);
7603 }
7604 
7605 
7606 /* Subroutine of native_interpret_expr.  Interpret the contents of
7607    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7608    If the buffer cannot be interpreted, return NULL_TREE.  */
7609 
7610 static tree
7611 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7612 {
7613   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7614   double_int result;
7615   FIXED_VALUE_TYPE fixed_value;
7616 
7617   if (total_bytes > len
7618       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7619     return NULL_TREE;
7620 
7621   result = double_int::from_buffer (ptr, total_bytes);
7622   fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7623 
7624   return build_fixed (type, fixed_value);
7625 }
7626 
7627 
7628 /* Subroutine of native_interpret_expr.  Interpret the contents of
7629    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7630    If the buffer cannot be interpreted, return NULL_TREE.  */
7631 
7632 static tree
7633 native_interpret_real (tree type, const unsigned char *ptr, int len)
7634 {
7635   machine_mode mode = TYPE_MODE (type);
7636   int total_bytes = GET_MODE_SIZE (mode);
7637   int byte, offset, word, words, bitpos;
7638   unsigned char value;
7639   /* There are always 32 bits in each long, no matter the size of
7640      the host's long.  We handle floating point representations with
7641      up to 192 bits.  */
7642   REAL_VALUE_TYPE r;
7643   long tmp[6];
7644 
7645   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7646   if (total_bytes > len || total_bytes > 24)
7647     return NULL_TREE;
7648   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7649 
7650   memset (tmp, 0, sizeof (tmp));
7651   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7652        bitpos += BITS_PER_UNIT)
7653     {
7654       byte = (bitpos / BITS_PER_UNIT) & 3;
7655       if (UNITS_PER_WORD < 4)
7656 	{
7657 	  word = byte / UNITS_PER_WORD;
7658 	  if (WORDS_BIG_ENDIAN)
7659 	    word = (words - 1) - word;
7660 	  offset = word * UNITS_PER_WORD;
7661 	  if (BYTES_BIG_ENDIAN)
7662 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7663 	  else
7664 	    offset += byte % UNITS_PER_WORD;
7665 	}
7666       else
7667 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7668       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7669 
7670       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7671     }
7672 
7673   real_from_target (&r, tmp, mode);
7674   return build_real (type, r);
7675 }
7676 
7677 
7678 /* Subroutine of native_interpret_expr.  Interpret the contents of
7679    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7680    If the buffer cannot be interpreted, return NULL_TREE.  */
7681 
7682 static tree
7683 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7684 {
7685   tree etype, rpart, ipart;
7686   int size;
7687 
7688   etype = TREE_TYPE (type);
7689   size = GET_MODE_SIZE (TYPE_MODE (etype));
7690   if (size * 2 > len)
7691     return NULL_TREE;
7692   rpart = native_interpret_expr (etype, ptr, size);
7693   if (!rpart)
7694     return NULL_TREE;
7695   ipart = native_interpret_expr (etype, ptr+size, size);
7696   if (!ipart)
7697     return NULL_TREE;
7698   return build_complex (type, rpart, ipart);
7699 }
7700 
7701 
7702 /* Subroutine of native_interpret_expr.  Interpret the contents of
7703    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7704    If the buffer cannot be interpreted, return NULL_TREE.  */
7705 
7706 static tree
7707 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7708 {
7709   tree etype, elem;
7710   int i, size, count;
7711   tree *elements;
7712 
7713   etype = TREE_TYPE (type);
7714   size = GET_MODE_SIZE (TYPE_MODE (etype));
7715   count = TYPE_VECTOR_SUBPARTS (type);
7716   if (size * count > len)
7717     return NULL_TREE;
7718 
7719   elements = XALLOCAVEC (tree, count);
7720   for (i = count - 1; i >= 0; i--)
7721     {
7722       elem = native_interpret_expr (etype, ptr+(i*size), size);
7723       if (!elem)
7724 	return NULL_TREE;
7725       elements[i] = elem;
7726     }
7727   return build_vector (type, elements);
7728 }
7729 
7730 
7731 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7732    the buffer PTR of length LEN as a constant of type TYPE.  For
7733    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7734    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7735    return NULL_TREE.  */
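/* For example, interpreting the bytes 00 00 80 3f as a 32-bit float
   on a little-endian target yields the REAL_CST 1.0.  */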
7736 
7737 tree
7738 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7739 {
7740   switch (TREE_CODE (type))
7741     {
7742     case INTEGER_TYPE:
7743     case ENUMERAL_TYPE:
7744     case BOOLEAN_TYPE:
7745     case POINTER_TYPE:
7746     case REFERENCE_TYPE:
7747       return native_interpret_int (type, ptr, len);
7748 
7749     case REAL_TYPE:
7750       return native_interpret_real (type, ptr, len);
7751 
7752     case FIXED_POINT_TYPE:
7753       return native_interpret_fixed (type, ptr, len);
7754 
7755     case COMPLEX_TYPE:
7756       return native_interpret_complex (type, ptr, len);
7757 
7758     case VECTOR_TYPE:
7759       return native_interpret_vector (type, ptr, len);
7760 
7761     default:
7762       return NULL_TREE;
7763     }
7764 }
7765 
7766 /* Returns true if we can interpret the contents of a native encoding
7767    as TYPE.  */
7768 
7769 static bool
7770 can_native_interpret_type_p (tree type)
7771 {
7772   switch (TREE_CODE (type))
7773     {
7774     case INTEGER_TYPE:
7775     case ENUMERAL_TYPE:
7776     case BOOLEAN_TYPE:
7777     case POINTER_TYPE:
7778     case REFERENCE_TYPE:
7779     case FIXED_POINT_TYPE:
7780     case REAL_TYPE:
7781     case COMPLEX_TYPE:
7782     case VECTOR_TYPE:
7783       return true;
7784     default:
7785       return false;
7786     }
7787 }
7788 
7789 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7790    TYPE at compile-time.  If we're unable to perform the conversion
7791    return NULL_TREE.  */
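/* For example, VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000) by encoding the REAL_CST and reinterpreting its bytes
   as an INTEGER_CST.  */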
7792 
7793 static tree
7794 fold_view_convert_expr (tree type, tree expr)
7795 {
7796   /* We support up to 512-bit values (for V8DFmode).  */
7797   unsigned char buffer[64];
7798   int len;
7799 
7800   /* Check that the host and target are sane.  */
7801   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7802     return NULL_TREE;
7803 
7804   len = native_encode_expr (expr, buffer, sizeof (buffer));
7805   if (len == 0)
7806     return NULL_TREE;
7807 
7808   return native_interpret_expr (type, buffer, len);
7809 }
7810 
7811 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7812    to avoid confusing the gimplify process.  */
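/* For example, the address of `*p' folds back to `p' itself (with a
   cast if the pointer types differ) rather than building an
   ADDR_EXPR of an INDIRECT_REF.  */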
7813 
7814 tree
7815 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7816 {
7817   /* The size of the object is not relevant when talking about its address.  */
7818   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7819     t = TREE_OPERAND (t, 0);
7820 
7821   if (TREE_CODE (t) == INDIRECT_REF)
7822     {
7823       t = TREE_OPERAND (t, 0);
7824 
7825       if (TREE_TYPE (t) != ptrtype)
7826 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7827     }
7828   else if (TREE_CODE (t) == MEM_REF
7829 	   && integer_zerop (TREE_OPERAND (t, 1)))
7830     return TREE_OPERAND (t, 0);
7831   else if (TREE_CODE (t) == MEM_REF
7832 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7833     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7834 			TREE_OPERAND (t, 0),
7835 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7836   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7837     {
7838       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7839 
7840       if (TREE_TYPE (t) != ptrtype)
7841 	t = fold_convert_loc (loc, ptrtype, t);
7842     }
7843   else
7844     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7845 
7846   return t;
7847 }
7848 
7849 /* Build an expression for the address of T.  */
7850 
7851 tree
7852 build_fold_addr_expr_loc (location_t loc, tree t)
7853 {
7854   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7855 
7856   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7857 }
7858 
7859 /* Fold a unary expression of code CODE and type TYPE with operand
7860    OP0.  Return the folded expression if folding is successful.
7861    Otherwise, return NULL_TREE.  */
7862 
7863 tree
7864 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7865 {
7866   tree tem;
7867   tree arg0;
7868   enum tree_code_class kind = TREE_CODE_CLASS (code);
7869 
7870   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7871 	      && TREE_CODE_LENGTH (code) == 1);
7872 
7873   arg0 = op0;
7874   if (arg0)
7875     {
7876       if (CONVERT_EXPR_CODE_P (code)
7877 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7878 	{
7879 	  /* Don't use STRIP_NOPS, because signedness of argument type
7880 	     matters.  */
7881 	  STRIP_SIGN_NOPS (arg0);
7882 	}
7883       else
7884 	{
7885 	  /* Strip any conversions that don't change the mode.  This
7886 	     is safe for every expression, except for a comparison
7887 	     expression because its signedness is derived from its
7888 	     operands.
7889 
7890 	     Note that this is done as an internal manipulation within
7891 	     the constant folder, in order to find the simplest
7892 	     representation of the arguments so that their form can be
7893 	     studied.  In any case, the appropriate type conversions
7894 	     should be put back in the tree that will get out of the
7895 	     constant folder.  */
7896 	  STRIP_NOPS (arg0);
7897 	}
7898 
7899       if (CONSTANT_CLASS_P (arg0))
7900 	{
7901 	  tree tem = const_unop (code, type, arg0);
7902 	  if (tem)
7903 	    {
7904 	      if (TREE_TYPE (tem) != type)
7905 		tem = fold_convert_loc (loc, type, tem);
7906 	      return tem;
7907 	    }
7908 	}
7909     }
7910 
7911   tem = generic_simplify (loc, code, type, op0);
7912   if (tem)
7913     return tem;
7914 
7915   if (TREE_CODE_CLASS (code) == tcc_unary)
7916     {
7917       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7918 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7919 		       fold_build1_loc (loc, code, type,
7920 				    fold_convert_loc (loc, TREE_TYPE (op0),
7921 						      TREE_OPERAND (arg0, 1))));
7922       else if (TREE_CODE (arg0) == COND_EXPR)
7923 	{
7924 	  tree arg01 = TREE_OPERAND (arg0, 1);
7925 	  tree arg02 = TREE_OPERAND (arg0, 2);
7926 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7927 	    arg01 = fold_build1_loc (loc, code, type,
7928 				 fold_convert_loc (loc,
7929 						   TREE_TYPE (op0), arg01));
7930 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7931 	    arg02 = fold_build1_loc (loc, code, type,
7932 				 fold_convert_loc (loc,
7933 						   TREE_TYPE (op0), arg02));
7934 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7935 			     arg01, arg02);
7936 
7937 	  /* If this was a conversion, and all we did was to move it
7938 	     inside the COND_EXPR, bring it back out.  But leave it if
7939 	     it is a conversion from integer to integer and the
7940 	     result precision is no wider than a word since such a
7941 	     conversion is cheap and may be optimized away by combine,
7942 	     while it couldn't if it were outside the COND_EXPR.  Then return
7943 	     so we don't get into an infinite recursion loop taking the
7944 	     conversion out and then back in.  */
7945 
7946 	  if ((CONVERT_EXPR_CODE_P (code)
7947 	       || code == NON_LVALUE_EXPR)
7948 	      && TREE_CODE (tem) == COND_EXPR
7949 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7950 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7951 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7952 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7953 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7954 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7955 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7956 		     && (INTEGRAL_TYPE_P
7957 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7958 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7959 		  || flag_syntax_only))
7960 	    tem = build1_loc (loc, code, type,
7961 			      build3 (COND_EXPR,
7962 				      TREE_TYPE (TREE_OPERAND
7963 						 (TREE_OPERAND (tem, 1), 0)),
7964 				      TREE_OPERAND (tem, 0),
7965 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7966 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7967 						    0)));
7968 	  return tem;
7969 	}
7970     }
7971 
7972   switch (code)
7973     {
7974     case NON_LVALUE_EXPR:
7975       if (!maybe_lvalue_p (op0))
7976 	return fold_convert_loc (loc, type, op0);
7977       return NULL_TREE;
7978 
7979     CASE_CONVERT:
7980     case FLOAT_EXPR:
7981     case FIX_TRUNC_EXPR:
7982       if (COMPARISON_CLASS_P (op0))
7983 	{
7984 	  /* If we have (type) (a CMP b) and type is an integral type, return
7985 	     new expression involving the new type.  Canonicalize
7986 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7987 	     non-integral type.
7988 	     Do not fold the result, as that would not simplify further;
7989 	     folding again results in recursion.  */
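	  /* For example: (_Bool)(a < b) becomes the comparison a < b
	     rebuilt with _Bool type, while conceptually
	     (double)(a < b) becomes (a < b) ? 1.0 : 0.0.  */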
7990 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7991 	    return build2_loc (loc, TREE_CODE (op0), type,
7992 			       TREE_OPERAND (op0, 0),
7993 			       TREE_OPERAND (op0, 1));
7994 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7995 		   && TREE_CODE (type) != VECTOR_TYPE)
7996 	    return build3_loc (loc, COND_EXPR, type, op0,
7997 			       constant_boolean_node (true, type),
7998 			       constant_boolean_node (false, type));
7999 	}
8000 
8001       /* Handle (T *)&A.B.C for A being of type T and B and C
8002 	 living at offset zero.  This occurs frequently in
8003 	 C++ upcasting and then accessing the base.  */
8004       if (TREE_CODE (op0) == ADDR_EXPR
8005 	  && POINTER_TYPE_P (type)
8006 	  && handled_component_p (TREE_OPERAND (op0, 0)))
8007         {
8008 	  HOST_WIDE_INT bitsize, bitpos;
8009 	  tree offset;
8010 	  machine_mode mode;
8011 	  int unsignedp, volatilep;
8012           tree base = TREE_OPERAND (op0, 0);
8013 	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8014 				      &mode, &unsignedp, &volatilep, false);
8015 	  /* If the reference was to a (constant) zero offset, we can use
8016 	     the address of the base if it has the same base type
8017 	     as the result type and the pointer type is unqualified.  */
8018 	  if (! offset && bitpos == 0
8019 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8020 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8021 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8022 	    return fold_convert_loc (loc, type,
8023 				     build_fold_addr_expr_loc (loc, base));
8024         }
8025 
8026       if (TREE_CODE (op0) == MODIFY_EXPR
8027 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8028 	  /* Detect assigning a bitfield.  */
8029 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8030 	       && DECL_BIT_FIELD
8031 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8032 	{
8033 	  /* Don't leave an assignment inside a conversion
8034 	     unless assigning a bitfield.  */
8035 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8036 	  /* First do the assignment, then return converted constant.  */
8037 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8038 	  TREE_NO_WARNING (tem) = 1;
8039 	  TREE_USED (tem) = 1;
8040 	  return tem;
8041 	}
8042 
8043       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8044 	 constant (if x has signed type, the sign bit cannot be set
8045 	 in c).  This folds extension into the BIT_AND_EXPR.
8046 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8047 	 very likely don't have maximal range for their precision and this
8048 	 transformation effectively doesn't preserve non-maximal ranges.  */
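      /* For example, with int x: (unsigned long)(x & 0xff) becomes
	 (unsigned long)x & 0xff, since 0xff leaves the sign bit of x
	 clear.  */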
8049       if (TREE_CODE (type) == INTEGER_TYPE
8050 	  && TREE_CODE (op0) == BIT_AND_EXPR
8051 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8052 	{
8053 	  tree and_expr = op0;
8054 	  tree and0 = TREE_OPERAND (and_expr, 0);
8055 	  tree and1 = TREE_OPERAND (and_expr, 1);
8056 	  int change = 0;
8057 
8058 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8059 	      || (TYPE_PRECISION (type)
8060 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8061 	    change = 1;
8062 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
8063 		   <= HOST_BITS_PER_WIDE_INT
8064 		   && tree_fits_uhwi_p (and1))
8065 	    {
8066 	      unsigned HOST_WIDE_INT cst;
8067 
8068 	      cst = tree_to_uhwi (and1);
8069 	      cst &= HOST_WIDE_INT_M1U
8070 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8071 	      change = (cst == 0);
8072 #ifdef LOAD_EXTEND_OP
8073 	      if (change
8074 		  && !flag_syntax_only
8075 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8076 		      == ZERO_EXTEND))
8077 		{
8078 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
8079 		  and0 = fold_convert_loc (loc, uns, and0);
8080 		  and1 = fold_convert_loc (loc, uns, and1);
8081 		}
8082 #endif
8083 	    }
8084 	  if (change)
8085 	    {
8086 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
8087 				    TREE_OVERFLOW (and1));
8088 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8089 				      fold_convert_loc (loc, type, and0), tem);
8090 	    }
8091 	}
8092 
8093       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y) for pointer type T1,
8094          when one of the new casts will fold away.  Conservatively we assume
8095 	 that this happens when X or Y is a NOP_EXPR or Y is an INTEGER_CST.  */
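      /* For example: (char *)((int *)p p+ n), with p a char *, becomes
	 p p+ n, since converting (int *)p back to char * folds away.  */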
8096       if (POINTER_TYPE_P (type)
8097 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8098 	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8099 	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8100 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8101 	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8102 	{
8103 	  tree arg00 = TREE_OPERAND (arg0, 0);
8104 	  tree arg01 = TREE_OPERAND (arg0, 1);
8105 
8106 	  return fold_build_pointer_plus_loc
8107 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
8108 	}
8109 
8110       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8111 	 of the same precision, and X has an integer type not narrower than
8112 	 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
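      /* For example: (int)~(unsigned int)i, with i an int, becomes ~i,
	 since int and unsigned int have the same precision and the
	 inner cast is not an extension.  */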
8113       if (INTEGRAL_TYPE_P (type)
8114 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8115 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8116 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8117 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8118 	{
8119 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8120 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8121 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8122 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8123 				fold_convert_loc (loc, type, tem));
8124 	}
8125 
8126       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8127 	 type of X and Y (integer types only).  */
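      /* For example, with int a and b: (unsigned short)(a * b) becomes
	 (unsigned short)a * (unsigned short)b; if TYPE does not wrap on
	 overflow, the multiplication is done in the corresponding
	 unsigned type first.  */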
8128       if (INTEGRAL_TYPE_P (type)
8129 	  && TREE_CODE (op0) == MULT_EXPR
8130 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8131 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8132 	{
8133 	  /* Be careful not to introduce new overflows.  */
8134 	  tree mult_type;
8135           if (TYPE_OVERFLOW_WRAPS (type))
8136 	    mult_type = type;
8137 	  else
8138 	    mult_type = unsigned_type_for (type);
8139 
8140 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8141 	    {
8142 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8143 				 fold_convert_loc (loc, mult_type,
8144 						   TREE_OPERAND (op0, 0)),
8145 				 fold_convert_loc (loc, mult_type,
8146 						   TREE_OPERAND (op0, 1)));
8147 	      return fold_convert_loc (loc, type, tem);
8148 	    }
8149 	}
8150 
8151       return NULL_TREE;
8152 
8153     case VIEW_CONVERT_EXPR:
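      /* Fold VIEW_CONVERT_EXPR <T> (MEM_REF [p, off]) into a MEM_REF
	 of type T, carrying over the alignment of the original
	 access.  */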
8154       if (TREE_CODE (op0) == MEM_REF)
8155 	{
8156 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8157 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8158 	  tem = fold_build2_loc (loc, MEM_REF, type,
8159 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8160 	  return tem;
8161 	}
8162 
8163       return NULL_TREE;
8164 
8165     case NEGATE_EXPR:
8166       tem = fold_negate_expr (loc, arg0);
8167       if (tem)
8168 	return fold_convert_loc (loc, type, tem);
8169       return NULL_TREE;
8170 
8171     case ABS_EXPR:
8172       /* Convert fabs((double)float) into (double)fabsf(float).  */
8173       if (TREE_CODE (arg0) == NOP_EXPR
8174 	  && TREE_CODE (type) == REAL_TYPE)
8175 	{
8176 	  tree targ0 = strip_float_extensions (arg0);
8177 	  if (targ0 != arg0)
8178 	    return fold_convert_loc (loc, type,
8179 				     fold_build1_loc (loc, ABS_EXPR,
8180 						  TREE_TYPE (targ0),
8181 						  targ0));
8182 	}
8183       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8184       else if (TREE_CODE (arg0) == ABS_EXPR)
8185 	return arg0;
8186 
8187       /* Strip sign ops from argument.  */
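      /* For example: fabs (-x) folds to fabs (x), since sign-changing
	 operations such as negation cannot affect the result.  */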
8188       if (TREE_CODE (type) == REAL_TYPE)
8189 	{
8190 	  tem = fold_strip_sign_ops (arg0);
8191 	  if (tem)
8192 	    return fold_build1_loc (loc, ABS_EXPR, type,
8193 				fold_convert_loc (loc, type, tem));
8194 	}
8195       return NULL_TREE;
8196 
8197     case CONJ_EXPR:
8198       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8199 	return fold_convert_loc (loc, type, arg0);
8200       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8201 	{
8202 	  tree itype = TREE_TYPE (type);
8203 	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8204 	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8205 	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8206 			      negate_expr (ipart));
8207 	}
8208       if (TREE_CODE (arg0) == CONJ_EXPR)
8209 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8210       return NULL_TREE;
8211 
8212     case BIT_NOT_EXPR:
8213       /* Convert ~ (-A) to A - 1.  */
8214       if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8215 	return fold_build2_loc (loc, MINUS_EXPR, type,
8216 			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8217 			    build_int_cst (type, 1));
8218       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8219       else if (INTEGRAL_TYPE_P (type)
8220 	       && ((TREE_CODE (arg0) == MINUS_EXPR
8221 		    && integer_onep (TREE_OPERAND (arg0, 1)))
8222 		   || (TREE_CODE (arg0) == PLUS_EXPR
8223 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8224 	{
8225 	  /* Perform the negation in ARG0's type and only then convert
8226 	     to TYPE as to avoid introducing undefined behavior.  */
8227 	  tree t = fold_build1_loc (loc, NEGATE_EXPR,
8228 				    TREE_TYPE (TREE_OPERAND (arg0, 0)),
8229 				    TREE_OPERAND (arg0, 0));
8230 	  return fold_convert_loc (loc, type, t);
8231 	}
8232       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8233       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8234 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8235 			       	     fold_convert_loc (loc, type,
8236 						       TREE_OPERAND (arg0, 0)))))
8237 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8238 			    fold_convert_loc (loc, type,
8239 					      TREE_OPERAND (arg0, 1)));
8240       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8241 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8242 			       	     fold_convert_loc (loc, type,
8243 						       TREE_OPERAND (arg0, 1)))))
8244 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8245 			    fold_convert_loc (loc, type,
8246 					      TREE_OPERAND (arg0, 0)), tem);
8247 
8248       return NULL_TREE;
8249 
8250     case TRUTH_NOT_EXPR:
8251       /* Note that the operand of this must be an int
8252 	 and its values must be 0 or 1.
8253 	 ("true" is a fixed value perhaps depending on the language,
8254 	 but we don't handle values other than 1 correctly yet.)  */
8255       tem = fold_truth_not_expr (loc, arg0);
8256       if (!tem)
8257 	return NULL_TREE;
8258       return fold_convert_loc (loc, type, tem);
8259 
8260     case REALPART_EXPR:
8261       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8262 	return fold_convert_loc (loc, type, arg0);
8263       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8264 	{
8265 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8267 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8268 					  TREE_OPERAND (arg0, 0)),
8269 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8270 					  TREE_OPERAND (arg0, 1)));
8271 	  return fold_convert_loc (loc, type, tem);
8272 	}
8273       if (TREE_CODE (arg0) == CONJ_EXPR)
8274 	{
8275 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8276 	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8277 			     TREE_OPERAND (arg0, 0));
8278 	  return fold_convert_loc (loc, type, tem);
8279 	}
8280       if (TREE_CODE (arg0) == CALL_EXPR)
8281 	{
8282 	  tree fn = get_callee_fndecl (arg0);
8283 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8284 	    switch (DECL_FUNCTION_CODE (fn))
8285 	      {
8286 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8287 	        fn = mathfn_built_in (type, BUILT_IN_COS);
8288 		if (fn)
8289 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8290 		break;
8291 
8292 	      default:
8293 		break;
8294 	      }
8295 	}
8296       return NULL_TREE;
8297 
8298     case IMAGPART_EXPR:
8299       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8300 	return build_zero_cst (type);
8301       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8302 	{
8303 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8304 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8305 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8306 					  TREE_OPERAND (arg0, 0)),
8307 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8308 					  TREE_OPERAND (arg0, 1)));
8309 	  return fold_convert_loc (loc, type, tem);
8310 	}
8311       if (TREE_CODE (arg0) == CONJ_EXPR)
8312 	{
8313 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8314 	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8315 	  return fold_convert_loc (loc, type, negate_expr (tem));
8316 	}
8317       if (TREE_CODE (arg0) == CALL_EXPR)
8318 	{
8319 	  tree fn = get_callee_fndecl (arg0);
8320 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8321 	    switch (DECL_FUNCTION_CODE (fn))
8322 	      {
8323 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8324 	        fn = mathfn_built_in (type, BUILT_IN_SIN);
8325 		if (fn)
8326 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8327 		break;
8328 
8329 	      default:
8330 		break;
8331 	      }
8332 	}
8333       return NULL_TREE;
8334 
8335     case INDIRECT_REF:
8336       /* Fold *&X to X if X is an lvalue.  */
8337       if (TREE_CODE (op0) == ADDR_EXPR)
8338 	{
8339 	  tree op00 = TREE_OPERAND (op0, 0);
8340 	  if ((TREE_CODE (op00) == VAR_DECL
8341 	       || TREE_CODE (op00) == PARM_DECL
8342 	       || TREE_CODE (op00) == RESULT_DECL)
8343 	      && !TREE_READONLY (op00))
8344 	    return op00;
8345 	}
8346       return NULL_TREE;
8347 
8348     default:
8349       return NULL_TREE;
8350     } /* switch (code) */
8351 }
8352 
8353 
8354 /* If the operation was a conversion, do _not_ mark a resulting constant
8355    with TREE_OVERFLOW if the original constant was not.  These conversions
8356    have implementation-defined behavior and retaining the TREE_OVERFLOW
8357    flag here would confuse later passes such as VRP.  */
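/* For example: converting the INTEGER_CST 300 to signed char changes
   the value (typically to 44), but the result must not be marked with
   TREE_OVERFLOW, since the conversion is merely implementation
   defined.  */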
8358 tree
8359 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8360 				tree type, tree op0)
8361 {
8362   tree res = fold_unary_loc (loc, code, type, op0);
8363   if (res
8364       && TREE_CODE (res) == INTEGER_CST
8365       && TREE_CODE (op0) == INTEGER_CST
8366       && CONVERT_EXPR_CODE_P (code))
8367     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8368 
8369   return res;
8370 }
8371 
8372 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8373    operands OP0 and OP1.  LOC is the location of the resulting expression.
8374    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8375    Return the folded expression if folding is successful.  Otherwise,
8376    return NULL_TREE.  */
8377 static tree
8378 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8379 		  tree arg0, tree arg1, tree op0, tree op1)
8380 {
8381   tree tem;
8382 
8383   /* We only do these simplifications if we are optimizing.  */
8384   if (!optimize)
8385     return NULL_TREE;
8386 
8387   /* Check for things like (A || B) && (A || C).  We can convert this
8388      to A || (B && C).  Note that either operator can be any of the four
8389      truth and/or operations and the transformation will still be
8390      valid.   Also note that we only care about order for the
8391      ANDIF and ORIF operators.  If B contains side effects, this
8392      might change the truth-value of A.  */
8393   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8394       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8395 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8396 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8397 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8398       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8399     {
8400       tree a00 = TREE_OPERAND (arg0, 0);
8401       tree a01 = TREE_OPERAND (arg0, 1);
8402       tree a10 = TREE_OPERAND (arg1, 0);
8403       tree a11 = TREE_OPERAND (arg1, 1);
8404       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8405 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8406 			 && (code == TRUTH_AND_EXPR
8407 			     || code == TRUTH_OR_EXPR));
8408 
8409       if (operand_equal_p (a00, a10, 0))
8410 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8411 			    fold_build2_loc (loc, code, type, a01, a11));
8412       else if (commutative && operand_equal_p (a00, a11, 0))
8413 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8414 			    fold_build2_loc (loc, code, type, a01, a10));
8415       else if (commutative && operand_equal_p (a01, a10, 0))
8416 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8417 			    fold_build2_loc (loc, code, type, a00, a11));
8418 
8419       /* This case is tricky because we must either have commutative
8420 	 operators or else A10 must not have side-effects.  */
8421 
8422       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8423 	       && operand_equal_p (a01, a11, 0))
8424 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8425 			    fold_build2_loc (loc, code, type, a00, a10),
8426 			    a01);
8427     }
8428 
8429   /* See if we can build a range comparison.  */
8430   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8431     return tem;
8432 
8433   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8434       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8435     {
8436       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8437       if (tem)
8438 	return fold_build2_loc (loc, code, type, tem, arg1);
8439     }
8440 
8441   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8442       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8443     {
8444       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8445       if (tem)
8446 	return fold_build2_loc (loc, code, type, arg0, tem);
8447     }
8448 
8449   /* Check for the possibility of merging component references.  If our
8450      lhs is another similar operation, try to merge its rhs with our
8451      rhs.  Then try to merge our lhs and rhs.  */
8452   if (TREE_CODE (arg0) == code
8453       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8454 					 TREE_OPERAND (arg0, 1), arg1)))
8455     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8456 
8457   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8458     return tem;
8459 
8460   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8461       && (code == TRUTH_AND_EXPR
8462           || code == TRUTH_ANDIF_EXPR
8463           || code == TRUTH_OR_EXPR
8464           || code == TRUTH_ORIF_EXPR))
8465     {
8466       enum tree_code ncode, icode;
8467 
8468       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8469 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8470       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8471 
8472       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8473 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8474 	 We don't want to pack more than two leaves into a non-IF AND/OR
8475 	 expression.
8476 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8477 	 and isn't equal to IF-CODE, then we don't want to add the
8478 	 right-hand operand.  If the inner right-hand side of the
8479 	 left-hand operand has side-effects or isn't simple, then we can't
8480 	 add to it, as otherwise we might destroy the if-sequence.  */
8481       if (TREE_CODE (arg0) == icode
8482 	  && simple_operand_p_2 (arg1)
8483 	  /* Needed for sequence points to handle traps and
8484 	     side-effects.  */
8485 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8486 	{
8487 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8488 				 arg1);
8489 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8490 				  tem);
8491 	}
8492 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8493 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8494       else if (TREE_CODE (arg1) == icode
8495 	  && simple_operand_p_2 (arg0)
8496 	  /* Needed for sequence points to handle traps and
8497 	     side-effects.  */
8498 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8499 	{
8500 	  tem = fold_build2_loc (loc, ncode, type,
8501 				 arg0, TREE_OPERAND (arg1, 0));
8502 	  return fold_build2_loc (loc, icode, type, tem,
8503 				  TREE_OPERAND (arg1, 1));
8504 	}
8505       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8506 	 into (A OR B).
8507 	 For sequence point consistency, we need to check for trapping,
8508 	 and side-effects.  */
8509       else if (code == icode && simple_operand_p_2 (arg0)
8510                && simple_operand_p_2 (arg1))
8511 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8512     }
8513 
8514   return NULL_TREE;
8515 }
8516 
8517 /* Fold a binary expression of code CODE and type TYPE with operands
8518    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8519    Return the folded expression if folding is successful.  Otherwise,
8520    return NULL_TREE.  */
8521 
8522 static tree
8523 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8524 {
8525   enum tree_code compl_code;
8526 
8527   if (code == MIN_EXPR)
8528     compl_code = MAX_EXPR;
8529   else if (code == MAX_EXPR)
8530     compl_code = MIN_EXPR;
8531   else
8532     gcc_unreachable ();
8533 
8534   /* MIN (MAX (a, b), b) == b.  */
8535   if (TREE_CODE (op0) == compl_code
8536       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8537     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8538 
8539   /* MIN (MAX (b, a), b) == b.  */
8540   if (TREE_CODE (op0) == compl_code
8541       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8542       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8543     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8544 
8545   /* MIN (a, MAX (a, b)) == a.  */
8546   if (TREE_CODE (op1) == compl_code
8547       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8548       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8549     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8550 
8551   /* MIN (a, MAX (b, a)) == a.  */
8552   if (TREE_CODE (op1) == compl_code
8553       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8554       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8555     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8556 
8557   return NULL_TREE;
8558 }
8559 
8560 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8561    by changing CODE to reduce the magnitude of constants involved in
8562    ARG0 of the comparison.
8563    Returns a canonicalized comparison tree if a simplification was
8564    possible, otherwise returns NULL_TREE.
8565    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8566    valid if signed overflow is undefined.  */
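/* For example: a - 5 < b can be canonicalized to a - 4 <= b, reducing
   the magnitude of the constant; this is only valid when signed
   overflow in a - 5 is undefined.  */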
8567 
8568 static tree
8569 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8570 				 tree arg0, tree arg1,
8571 				 bool *strict_overflow_p)
8572 {
8573   enum tree_code code0 = TREE_CODE (arg0);
8574   tree t, cst0 = NULL_TREE;
8575   int sgn0;
8576   bool swap = false;
8577 
8578   /* Match A +- CST code arg1 and CST code arg1.  We can change the
8579      first form only if overflow is undefined.  */
8580   if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8581 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8582 	 /* In principle pointers also have undefined overflow behavior,
8583 	    but that causes problems elsewhere.  */
8584 	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8585 	 && (code0 == MINUS_EXPR
8586 	     || code0 == PLUS_EXPR)
8587          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8588 	|| code0 == INTEGER_CST))
8589     return NULL_TREE;
8590 
8591   /* Identify the constant in arg0 and its sign.  */
8592   if (code0 == INTEGER_CST)
8593     cst0 = arg0;
8594   else
8595     cst0 = TREE_OPERAND (arg0, 1);
8596   sgn0 = tree_int_cst_sgn (cst0);
8597 
8598   /* Overflowed constants and zero will cause problems.  */
8599   if (integer_zerop (cst0)
8600       || TREE_OVERFLOW (cst0))
8601     return NULL_TREE;
8602 
8603   /* See if we can reduce the magnitude of the constant in
8604      arg0 by changing the comparison code.  */
8605   if (code0 == INTEGER_CST)
8606     {
8607       /* CST <= arg1  ->  CST-1 < arg1.  */
8608       if (code == LE_EXPR && sgn0 == 1)
8609 	code = LT_EXPR;
8610       /* -CST < arg1  ->  -CST-1 <= arg1.  */
8611       else if (code == LT_EXPR && sgn0 == -1)
8612 	code = LE_EXPR;
8613       /* CST > arg1  ->  CST-1 >= arg1.  */
8614       else if (code == GT_EXPR && sgn0 == 1)
8615 	code = GE_EXPR;
8616       /* -CST >= arg1  ->  -CST-1 > arg1.  */
8617       else if (code == GE_EXPR && sgn0 == -1)
8618 	code = GT_EXPR;
8619       else
8620         return NULL_TREE;
8621       /* arg1 code' CST' might be more canonical.  */
8622       swap = true;
8623     }
8624   else
8625     {
8626       /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8627       if (code == LT_EXPR
8628 	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8629 	code = LE_EXPR;
8630       /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8631       else if (code == GT_EXPR
8632 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8633 	code = GE_EXPR;
8634       /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8635       else if (code == LE_EXPR
8636 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8637 	code = LT_EXPR;
8638       /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8639       else if (code == GE_EXPR
8640 	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8641 	code = GT_EXPR;
8642       else
8643 	return NULL_TREE;
8644       *strict_overflow_p = true;
8645     }
8646 
8647   /* Now build the constant reduced in magnitude.  But not if that
8648      would produce one outside of its type's range.  */
8649   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8650       && ((sgn0 == 1
8651 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8652 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8653 	  || (sgn0 == -1
8654 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8655 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8656     /* We cannot swap the comparison here as that would cause us to
8657        endlessly recurse.  */
8658     return NULL_TREE;
8659 
8660   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8661 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8662   if (code0 != INTEGER_CST)
8663     t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8664   t = fold_convert (TREE_TYPE (arg1), t);
8665 
8666   /* If swapping might yield a more canonical form, do so.  */
8667   if (swap)
8668     return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8669   else
8670     return fold_build2_loc (loc, code, type, t, arg1);
8671 }
8672 
8673 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8674    overflow further.  Try to decrease the magnitude of constants involved
8675    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8676    and put sole constants at the second argument position.
8677    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8678 
8679 static tree
8680 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8681 			       tree arg0, tree arg1)
8682 {
8683   tree t;
8684   bool strict_overflow_p;
8685   const char * const warnmsg = G_("assuming signed overflow does not occur "
8686 				  "when reducing constant in comparison");
8687 
8688   /* Try canonicalization by simplifying arg0.  */
8689   strict_overflow_p = false;
8690   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8691 				       &strict_overflow_p);
8692   if (t)
8693     {
8694       if (strict_overflow_p)
8695 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8696       return t;
8697     }
8698 
8699   /* Try canonicalization by simplifying arg1 using the swapped
8700      comparison.  */
8701   code = swap_tree_comparison (code);
8702   strict_overflow_p = false;
8703   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8704 				       &strict_overflow_p);
8705   if (t && strict_overflow_p)
8706     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8707   return t;
8708 }
8709 
8710 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8711    space.  This is used to avoid issuing overflow warnings for
8712    expressions like &p->x which cannot wrap.  */
8713 
8714 static bool
8715 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8716 {
8717   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8718     return true;
8719 
8720   if (bitpos < 0)
8721     return true;
8722 
8723   wide_int wi_offset;
8724   int precision = TYPE_PRECISION (TREE_TYPE (base));
8725   if (offset == NULL_TREE)
8726     wi_offset = wi::zero (precision);
8727   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8728     return true;
8729   else
8730     wi_offset = offset;
8731 
8732   bool overflow;
8733   wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8734   wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8735   if (overflow)
8736     return true;
8737 
8738   if (!wi::fits_uhwi_p (total))
8739     return true;
8740 
8741   HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8742   if (size <= 0)
8743     return true;
8744 
8745   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8746      array.  */
8747   if (TREE_CODE (base) == ADDR_EXPR)
8748     {
8749       HOST_WIDE_INT base_size;
8750 
8751       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8752       if (base_size > 0 && size < base_size)
8753 	size = base_size;
8754     }
8755 
8756   return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8757 }
8758 
8759 /* Subroutine of fold_binary.  This routine performs all of the
8760    transformations that are common to the equality/inequality
8761    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8762    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8763    fold_binary itself should call fold_binary instead.  Fold a comparison with
8764    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8765    the folded comparison or NULL_TREE.  */
8766 
8767 static tree
8768 fold_comparison (location_t loc, enum tree_code code, tree type,
8769 		 tree op0, tree op1)
8770 {
8771   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8772   tree arg0, arg1, tem;
8773 
8774   arg0 = op0;
8775   arg1 = op1;
8776 
8777   STRIP_SIGN_NOPS (arg0);
8778   STRIP_SIGN_NOPS (arg1);
8779 
8780   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
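  /* For example: x + 3 < 7 becomes x < 4, and x - 3 == 7 becomes
     x == 10.  */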
8781   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8782       && (equality_code
8783 	  || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8784 	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8785       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8787       && TREE_CODE (arg1) == INTEGER_CST
8788       && !TREE_OVERFLOW (arg1))
8789     {
8790       const enum tree_code
8791 	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8792       tree const1 = TREE_OPERAND (arg0, 1);
8793       tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8794       tree variable = TREE_OPERAND (arg0, 0);
8795       tree new_const = int_const_binop (reverse_op, const2, const1);
8796 
8797       /* If the constant operation overflowed this can be
8798 	 simplified as a comparison against INT_MAX/INT_MIN.  */
8799       if (TREE_OVERFLOW (new_const)
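      /* For example: with int x, x + 1 < INT_MIN would need the
	 constant INT_MIN - 1, which overflows; the comparison is
	 known to be always false.  */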
8800 	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8801 	{
8802 	  int const1_sgn = tree_int_cst_sgn (const1);
8803 	  enum tree_code code2 = code;
8804 
8805 	  /* Get the sign of the constant on the lhs if the
8806 	     operation were VARIABLE + CONST1.  */
8807 	  if (TREE_CODE (arg0) == MINUS_EXPR)
8808 	    const1_sgn = -const1_sgn;
8809 
8810 	  /* The sign of the constant determines if we overflowed
8811 	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8812 	     Canonicalize to the INT_MIN overflow by swapping the comparison
8813 	     if necessary.  */
8814 	  if (const1_sgn == -1)
8815 	    code2 = swap_tree_comparison (code);
8816 
8817 	  /* We now can look at the canonicalized case
8818 	       VARIABLE + 1  CODE2  INT_MIN
8819 	     and decide on the result.  */
8820 	  switch (code2)
8821 	    {
8822 	    case EQ_EXPR:
8823 	    case LT_EXPR:
8824 	    case LE_EXPR:
8825 	      return
8826 		omit_one_operand_loc (loc, type, boolean_false_node, variable);
8827 
8828 	    case NE_EXPR:
8829 	    case GE_EXPR:
8830 	    case GT_EXPR:
8831 	      return
8832 		omit_one_operand_loc (loc, type, boolean_true_node, variable);
8833 
8834 	    default:
8835 	      gcc_unreachable ();
8836 	    }
8837 	}
8838       else
8839 	{
8840 	  if (!equality_code)
8841 	    fold_overflow_warning ("assuming signed overflow does not occur "
8842 				   "when changing X +- C1 cmp C2 to "
8843 				   "X cmp C2 -+ C1",
8844 				   WARN_STRICT_OVERFLOW_COMPARISON);
8845 	  return fold_build2_loc (loc, code, type, variable, new_const);
8846 	}
8847     }
8848 
8849   /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
8850   if (TREE_CODE (arg0) == MINUS_EXPR
8851       && equality_code
8852       && integer_zerop (arg1))
8853     {
8854       /* ??? The transformation is valid for the other operators if overflow
8855 	 is undefined for the type, but performing it here badly interacts
8856 	 with the transformation in fold_cond_expr_with_comparison which
8857 	 attempts to synthesize ABS_EXPR.  */
8858       if (!equality_code)
8859 	fold_overflow_warning ("assuming signed overflow does not occur "
8860 			       "when changing X - Y cmp 0 to X cmp Y",
8861 			       WARN_STRICT_OVERFLOW_COMPARISON);
8862       return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8863 			      TREE_OPERAND (arg0, 1));
8864     }
8865 
8866   /* For comparisons of pointers we can decompose it to a compile time
8867      comparison of the base objects and the offsets into the object.
8868      This requires at least one operand being an ADDR_EXPR or a
8869      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
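  /* For example: &s.f0 == &s.f1 decomposes into the common base s and
     two constant bit positions, so the comparison folds at compile
     time.  */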
8870   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8871       && (TREE_CODE (arg0) == ADDR_EXPR
8872 	  || TREE_CODE (arg1) == ADDR_EXPR
8873 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8874 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8875     {
8876       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8877       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8878       machine_mode mode;
8879       int volatilep, unsignedp;
8880       bool indirect_base0 = false, indirect_base1 = false;
8881 
8882       /* Get base and offset for the access.  Strip ADDR_EXPR for
8883 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8884 	 off the base object if possible.  indirect_baseN will be true
8885 	 if baseN is not an address but refers to the object itself.  */
8886       base0 = arg0;
8887       if (TREE_CODE (arg0) == ADDR_EXPR)
8888 	{
8889 	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8890 				       &bitsize, &bitpos0, &offset0, &mode,
8891 				       &unsignedp, &volatilep, false);
8892 	  if (TREE_CODE (base0) == INDIRECT_REF)
8893 	    base0 = TREE_OPERAND (base0, 0);
8894 	  else
8895 	    indirect_base0 = true;
8896 	}
8897       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8898 	{
8899 	  base0 = TREE_OPERAND (arg0, 0);
8900 	  STRIP_SIGN_NOPS (base0);
8901 	  if (TREE_CODE (base0) == ADDR_EXPR)
8902 	    {
8903 	      base0
8904 		= get_inner_reference (TREE_OPERAND (base0, 0),
8905 				       &bitsize, &bitpos0, &offset0, &mode,
8906 				       &unsignedp, &volatilep, false);
8907 	      if (TREE_CODE (base0) == INDIRECT_REF)
8908 		base0 = TREE_OPERAND (base0, 0);
8909 	      else
8910 		indirect_base0 = true;
8911 	    }
8912 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8913 	    offset0 = TREE_OPERAND (arg0, 1);
8914 	  else
8915 	    offset0 = size_binop (PLUS_EXPR, offset0,
8916 				  TREE_OPERAND (arg0, 1));
8917 	  if (TREE_CODE (offset0) == INTEGER_CST)
8918 	    {
8919 	      offset_int tem = wi::sext (wi::to_offset (offset0),
8920 					 TYPE_PRECISION (sizetype));
8921 	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8922 	      tem += bitpos0;
8923 	      if (wi::fits_shwi_p (tem))
8924 		{
8925 		  bitpos0 = tem.to_shwi ();
8926 		  offset0 = NULL_TREE;
8927 		}
8928 	    }
8929 	}
8930 
8931       base1 = arg1;
8932       if (TREE_CODE (arg1) == ADDR_EXPR)
8933 	{
8934 	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8935 				       &bitsize, &bitpos1, &offset1, &mode,
8936 				       &unsignedp, &volatilep, false);
8937 	  if (TREE_CODE (base1) == INDIRECT_REF)
8938 	    base1 = TREE_OPERAND (base1, 0);
8939 	  else
8940 	    indirect_base1 = true;
8941 	}
8942       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8943 	{
8944 	  base1 = TREE_OPERAND (arg1, 0);
8945 	  STRIP_SIGN_NOPS (base1);
8946 	  if (TREE_CODE (base1) == ADDR_EXPR)
8947 	    {
8948 	      base1
8949 		= get_inner_reference (TREE_OPERAND (base1, 0),
8950 				       &bitsize, &bitpos1, &offset1, &mode,
8951 				       &unsignedp, &volatilep, false);
8952 	      if (TREE_CODE (base1) == INDIRECT_REF)
8953 		base1 = TREE_OPERAND (base1, 0);
8954 	      else
8955 		indirect_base1 = true;
8956 	    }
8957 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8958 	    offset1 = TREE_OPERAND (arg1, 1);
8959 	  else
8960 	    offset1 = size_binop (PLUS_EXPR, offset1,
8961 				  TREE_OPERAND (arg1, 1));
8962 	  if (TREE_CODE (offset1) == INTEGER_CST)
8963 	    {
8964 	      offset_int tem = wi::sext (wi::to_offset (offset1),
8965 					 TYPE_PRECISION (sizetype));
8966 	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8967 	      tem += bitpos1;
8968 	      if (wi::fits_shwi_p (tem))
8969 		{
8970 		  bitpos1 = tem.to_shwi ();
8971 		  offset1 = NULL_TREE;
8972 		}
8973 	    }
8974 	}
8975 
8976       /* A local variable can never be pointed to by
8977          the default SSA name of an incoming parameter.  */
8978       if ((TREE_CODE (arg0) == ADDR_EXPR
8979            && indirect_base0
8980            && TREE_CODE (base0) == VAR_DECL
8981            && auto_var_in_fn_p (base0, current_function_decl)
8982            && !indirect_base1
8983            && TREE_CODE (base1) == SSA_NAME
8984            && SSA_NAME_IS_DEFAULT_DEF (base1)
8985 	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8986           || (TREE_CODE (arg1) == ADDR_EXPR
8987               && indirect_base1
8988               && TREE_CODE (base1) == VAR_DECL
8989               && auto_var_in_fn_p (base1, current_function_decl)
8990               && !indirect_base0
8991               && TREE_CODE (base0) == SSA_NAME
8992               && SSA_NAME_IS_DEFAULT_DEF (base0)
8993 	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8994         {
8995           if (code == NE_EXPR)
8996             return constant_boolean_node (1, type);
8997           else if (code == EQ_EXPR)
8998             return constant_boolean_node (0, type);
8999         }
9000       /* If we have equivalent bases we might be able to simplify.  */
9001       else if (indirect_base0 == indirect_base1
9002                && operand_equal_p (base0, base1, 0))
9003 	{
9004 	  /* We can fold this expression to a constant if the non-constant
9005 	     offset parts are equal.  */
9006 	  if ((offset0 == offset1
9007 	       || (offset0 && offset1
9008 		   && operand_equal_p (offset0, offset1, 0)))
9009 	      && (code == EQ_EXPR
9010 		  || code == NE_EXPR
9011 		  || (indirect_base0 && DECL_P (base0))
9012 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
9013 
9014 	    {
9015 	      if (!equality_code
9016 		  && bitpos0 != bitpos1
9017 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9018 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9019 		fold_overflow_warning (("assuming pointer wraparound does not "
9020 					"occur when comparing P +- C1 with "
9021 					"P +- C2"),
9022 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9023 
9024 	      switch (code)
9025 		{
9026 		case EQ_EXPR:
9027 		  return constant_boolean_node (bitpos0 == bitpos1, type);
9028 		case NE_EXPR:
9029 		  return constant_boolean_node (bitpos0 != bitpos1, type);
9030 		case LT_EXPR:
9031 		  return constant_boolean_node (bitpos0 < bitpos1, type);
9032 		case LE_EXPR:
9033 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
9034 		case GE_EXPR:
9035 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
9036 		case GT_EXPR:
9037 		  return constant_boolean_node (bitpos0 > bitpos1, type);
9038 		default:;
9039 		}
9040 	    }
9041 	  /* We can simplify the comparison to a comparison of the variable
9042 	     offset parts if the constant offset parts are equal.
9043 	     Be careful to use signed sizetype here because otherwise we
9044 	     mess with array offsets in the wrong way.  This is possible
9045 	     because pointer arithmetic is restricted to remain within an
9046 	     object and overflow on pointer differences is undefined as of
9047 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9048 	  else if (bitpos0 == bitpos1
9049 		   && (equality_code
9050 		       || (indirect_base0 && DECL_P (base0))
9051 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9052 	    {
9053 	      /* By converting to signed sizetype we cover middle-end pointer
9054 	         arithmetic, which operates on unsigned pointer types as wide
9055 	         as sizetype, and ARRAY_REF offsets, which are properly sign or
9056 	         zero extended from their type in case it is narrower than
9057 	         sizetype.  */
9058 	      if (offset0 == NULL_TREE)
9059 		offset0 = build_int_cst (ssizetype, 0);
9060 	      else
9061 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9062 	      if (offset1 == NULL_TREE)
9063 		offset1 = build_int_cst (ssizetype, 0);
9064 	      else
9065 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9066 
9067 	      if (!equality_code
9068 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9069 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9070 		fold_overflow_warning (("assuming pointer wraparound does not "
9071 					"occur when comparing P +- C1 with "
9072 					"P +- C2"),
9073 				       WARN_STRICT_OVERFLOW_COMPARISON);
9074 
9075 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9076 	    }
9077 	}
9078       /* For non-equal bases we can simplify if they are addresses
9079 	 of declarations with different addresses.  */
9080       else if (indirect_base0 && indirect_base1
9081 	       /* We know that !operand_equal_p (base0, base1, 0)
9082 		  because the if condition was false.  But make
9083 		  sure the two decls are not the same.  */
9084 	       && base0 != base1
9085 	       && TREE_CODE (arg0) == ADDR_EXPR
9086 	       && TREE_CODE (arg1) == ADDR_EXPR
9087 	       && DECL_P (base0)
9088 	       && DECL_P (base1)
9089 	       /* Watch for aliases.  */
9090 	       && (!decl_in_symtab_p (base0)
9091 		   || !decl_in_symtab_p (base1)
9092 		   || !symtab_node::get_create (base0)->equal_address_to
9093 			 (symtab_node::get_create (base1))))
9094 	{
9095 	  if (code == EQ_EXPR)
9096 	    return omit_two_operands_loc (loc, type, boolean_false_node,
9097 				      arg0, arg1);
9098 	  else if (code == NE_EXPR)
9099 	    return omit_two_operands_loc (loc, type, boolean_true_node,
9100 				      arg0, arg1);
9101 	}
9102       /* For equal offsets we can simplify to a comparison of the
9103 	 base addresses.  */
9104       else if (bitpos0 == bitpos1
9105 	       && (indirect_base0
9106 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9107 	       && (indirect_base1
9108 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9109 	       && ((offset0 == offset1)
9110 		   || (offset0 && offset1
9111 		       && operand_equal_p (offset0, offset1, 0))))
9112 	{
9113 	  if (indirect_base0)
9114 	    base0 = build_fold_addr_expr_loc (loc, base0);
9115 	  if (indirect_base1)
9116 	    base1 = build_fold_addr_expr_loc (loc, base1);
9117 	  return fold_build2_loc (loc, code, type, base0, base1);
9118 	}
9119     }
9120 
9121   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9122      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9123      the resulting offset is smaller in absolute value than the
9124      original one and has the same sign.  */
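  /* For example: x + 2 < y + 5 becomes x < y + 3; the new constant 3
     is smaller in absolute value than 5 and has the same sign.  */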
9125   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9126       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9127       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9128       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9129 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9130       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9131       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9132 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9133     {
9134       tree const1 = TREE_OPERAND (arg0, 1);
9135       tree const2 = TREE_OPERAND (arg1, 1);
9136       tree variable1 = TREE_OPERAND (arg0, 0);
9137       tree variable2 = TREE_OPERAND (arg1, 0);
9138       tree cst;
9139       const char * const warnmsg = G_("assuming signed overflow does not "
9140 				      "occur when combining constants around "
9141 				      "a comparison");
9142 
9143       /* Put the constant on the side where it doesn't overflow and is
9144 	 of lower absolute value and of the same sign as before.  */
9145       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9146 			     ? MINUS_EXPR : PLUS_EXPR,
9147 			     const2, const1);
9148       if (!TREE_OVERFLOW (cst)
9149 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9150 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9151 	{
9152 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9153 	  return fold_build2_loc (loc, code, type,
9154 				  variable1,
9155 				  fold_build2_loc (loc, TREE_CODE (arg1),
9156 						   TREE_TYPE (arg1),
9157 						   variable2, cst));
9158 	}
9159 
9160       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9161 			     ? MINUS_EXPR : PLUS_EXPR,
9162 			     const1, const2);
9163       if (!TREE_OVERFLOW (cst)
9164 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9165 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9166 	{
9167 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9168 	  return fold_build2_loc (loc, code, type,
9169 				  fold_build2_loc (loc, TREE_CODE (arg0),
9170 						   TREE_TYPE (arg0),
9171 						   variable1, cst),
9172 				  variable2);
9173 	}
9174     }
9175 
9176   /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9177      signed arithmetic case.  That form is created by the compiler
9178      often enough for folding it to be of value.  One example is in
9179      computing loop trip counts after Operator Strength Reduction.  */
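  /* For example: x * 4 < 0 becomes x < 0, and x * -2 > 0 becomes
     x < 0, swapping the comparison for the negative multiplier.  */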
9180   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9181       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9182       && TREE_CODE (arg0) == MULT_EXPR
9183       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9184           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9185       && integer_zerop (arg1))
9186     {
9187       tree const1 = TREE_OPERAND (arg0, 1);
9188       tree const2 = arg1;                       /* zero */
9189       tree variable1 = TREE_OPERAND (arg0, 0);
9190       enum tree_code cmp_code = code;
9191 
9192       /* Handle unfolded multiplication by zero.  */
9193       if (integer_zerop (const1))
9194 	return fold_build2_loc (loc, cmp_code, type, const1, const2);
9195 
9196       fold_overflow_warning (("assuming signed overflow does not occur when "
9197 			      "eliminating multiplication in comparison "
9198 			      "with zero"),
9199 			     WARN_STRICT_OVERFLOW_COMPARISON);
9200 
9201       /* If const1 is negative we swap the sense of the comparison.  */
9202       if (tree_int_cst_sgn (const1) < 0)
9203         cmp_code = swap_tree_comparison (cmp_code);
9204 
9205       return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9206     }
9207 
9208   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9209   if (tem)
9210     return tem;
9211 
9212   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9213     {
9214       tree targ0 = strip_float_extensions (arg0);
9215       tree targ1 = strip_float_extensions (arg1);
9216       tree newtype = TREE_TYPE (targ0);
9217 
9218       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9219 	newtype = TREE_TYPE (targ1);
9220 
9221       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9222       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9223 	return fold_build2_loc (loc, code, type,
9224 			    fold_convert_loc (loc, newtype, targ0),
9225 			    fold_convert_loc (loc, newtype, targ1));
9226 
9227       /* (-a) CMP (-b) -> b CMP a  */
9228       if (TREE_CODE (arg0) == NEGATE_EXPR
9229 	  && TREE_CODE (arg1) == NEGATE_EXPR)
9230 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9231 			    TREE_OPERAND (arg0, 0));
9232 
9233       if (TREE_CODE (arg1) == REAL_CST)
9234 	{
9235 	  REAL_VALUE_TYPE cst;
9236 	  cst = TREE_REAL_CST (arg1);
9237 
9238 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9239 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
9240 	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
9241 				TREE_OPERAND (arg0, 0),
9242 				build_real (TREE_TYPE (arg1),
9243 					    real_value_negate (&cst)));
9244 
9245 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9246 	  /* a CMP (-0) -> a CMP 0  */
9247 	  if (REAL_VALUE_MINUS_ZERO (cst))
9248 	    return fold_build2_loc (loc, code, type, arg0,
9249 				build_real (TREE_TYPE (arg1), dconst0));
9250 
9251 	  /* x != NaN is always true, other ops are always false.  */
9252 	  if (REAL_VALUE_ISNAN (cst)
9253 	      && ! HONOR_SNANS (arg1))
9254 	    {
9255 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9256 	      return omit_one_operand_loc (loc, type, tem, arg0);
9257 	    }
9258 
9259 	  /* Fold comparisons against infinity.  */
9260 	  if (REAL_VALUE_ISINF (cst)
9261 	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9262 	    {
9263 	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
9264 	      if (tem != NULL_TREE)
9265 		return tem;
9266 	    }
9267 	}
9268 
9269       /* If this is a comparison of a real constant with a PLUS_EXPR
9270 	 or a MINUS_EXPR of a real constant, we can convert it into a
9271 	 comparison with a revised real constant as long as no overflow
9272 	 occurs when unsafe_math_optimizations are enabled.  */
9273       if (flag_unsafe_math_optimizations
9274 	  && TREE_CODE (arg1) == REAL_CST
9275 	  && (TREE_CODE (arg0) == PLUS_EXPR
9276 	      || TREE_CODE (arg0) == MINUS_EXPR)
9277 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9278 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9279 				      ? MINUS_EXPR : PLUS_EXPR,
9280 				      arg1, TREE_OPERAND (arg0, 1)))
9281 	  && !TREE_OVERFLOW (tem))
9282 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9283 
9284       /* Likewise, we can simplify a comparison of a real constant with
9285          a MINUS_EXPR whose first operand is also a real constant, i.e.
9286          (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9287          floating-point types only if -fassociative-math is set.  */
9288       if (flag_associative_math
9289 	  && TREE_CODE (arg1) == REAL_CST
9290 	  && TREE_CODE (arg0) == MINUS_EXPR
9291 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9292 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9293 				      arg1))
9294 	  && !TREE_OVERFLOW (tem))
9295 	return fold_build2_loc (loc, swap_tree_comparison (code), type,
9296 			    TREE_OPERAND (arg0, 1), tem);
9297 
9298       /* Fold comparisons against built-in math functions.  */
9299       if (TREE_CODE (arg1) == REAL_CST
9300 	  && flag_unsafe_math_optimizations
9301 	  && ! flag_errno_math)
9302 	{
9303 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
9304 
9305 	  if (fcode != END_BUILTINS)
9306 	    {
9307 	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9308 	      if (tem != NULL_TREE)
9309 		return tem;
9310 	    }
9311 	}
9312     }
9313 
9314   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9315       && CONVERT_EXPR_P (arg0))
9316     {
9317       /* If we are widening one operand of an integer comparison,
9318 	 see if the other operand is similarly being widened.  Perhaps we
9319 	 can do the comparison in the narrower type.  */
9320       tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9321       if (tem)
9322 	return tem;
9323 
9324       /* Or if we are changing signedness.  */
9325       tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9326       if (tem)
9327 	return tem;
9328     }
9329 
9330   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9331      constant, we can simplify it.  */
9332   if (TREE_CODE (arg1) == INTEGER_CST
9333       && (TREE_CODE (arg0) == MIN_EXPR
9334 	  || TREE_CODE (arg0) == MAX_EXPR)
9335       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9336     {
9337       tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9338       if (tem)
9339 	return tem;
9340     }
9341 
9342   /* Simplify comparison of something with itself.  (For IEEE
9343      floating-point, we can only do some of these simplifications.)  */
9344   if (operand_equal_p (arg0, arg1, 0))
9345     {
9346       switch (code)
9347 	{
9348 	case EQ_EXPR:
9349 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9350 	      || ! HONOR_NANS (arg0))
9351 	    return constant_boolean_node (1, type);
9352 	  break;
9353 
9354 	case GE_EXPR:
9355 	case LE_EXPR:
9356 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9357 	      || ! HONOR_NANS (arg0))
9358 	    return constant_boolean_node (1, type);
9359 	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9360 
9361 	case NE_EXPR:
9362 	  /* For NE, we can only do this simplification if the operands
9363 	     are integral or we don't honor IEEE floating-point NaNs.  */
9364 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9365 	      && HONOR_NANS (arg0))
9366 	    break;
9367 	  /* ... fall through ...  */
9368 	case GT_EXPR:
9369 	case LT_EXPR:
9370 	  return constant_boolean_node (0, type);
9371 	default:
9372 	  gcc_unreachable ();
9373 	}
9374     }
9375 
9376   /* If we are comparing an expression that just has comparisons
9377      of two integer values, arithmetic expressions of those comparisons,
9378      and constants, we can simplify it.  There are only three cases
9379      to check: the two values can either be equal, the first can be
9380      greater, or the second can be greater.  Fold the expression for
9381      those three values.  Since each value must be 0 or 1, we have
9382      eight possibilities, each of which corresponds to the constant 0
9383      or 1 or one of the six possible comparisons.
9384 
9385      This handles common cases like (a > b) == 0 but also handles
9386      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9387      occur in macroized code.  */
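  /* Worked example: for ((a > b) - (b > a)) > 0 the three evaluations
     below give high_result = 1 (first value maximal), equal_result = 0
     and low_result = 0, i.e. mask 4, so the expression folds to the
     single comparison a > b.  */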
9388 
9389   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9390     {
9391       tree cval1 = 0, cval2 = 0;
9392       int save_p = 0;
9393 
9394       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9395 	  /* Don't handle degenerate cases here; they should already
9396 	     have been handled anyway.  */
9397 	  && cval1 != 0 && cval2 != 0
9398 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9399 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9400 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9401 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9402 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9403 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9404 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9405 	{
9406 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9407 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9408 
9409 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9410 	     was the same as ARG1.  */
9411 
9412 	  tree high_result
9413 		= fold_build2_loc (loc, code, type,
9414 			       eval_subst (loc, arg0, cval1, maxval,
9415 					   cval2, minval),
9416 			       arg1);
9417 	  tree equal_result
9418 		= fold_build2_loc (loc, code, type,
9419 			       eval_subst (loc, arg0, cval1, maxval,
9420 					   cval2, maxval),
9421 			       arg1);
9422 	  tree low_result
9423 		= fold_build2_loc (loc, code, type,
9424 			       eval_subst (loc, arg0, cval1, minval,
9425 					   cval2, maxval),
9426 			       arg1);
9427 
9428 	  /* All three of these results should be 0 or 1.  Confirm they are.
9429 	     Then use those values to select the proper code to use.  */
9430 
9431 	  if (TREE_CODE (high_result) == INTEGER_CST
9432 	      && TREE_CODE (equal_result) == INTEGER_CST
9433 	      && TREE_CODE (low_result) == INTEGER_CST)
9434 	    {
9435 	      /* Make a 3-bit mask with the high-order bit being the
9436 		 value for `>', the next for `=', and the low for `<'.  */
9437 	      switch ((integer_onep (high_result) * 4)
9438 		      + (integer_onep (equal_result) * 2)
9439 		      + integer_onep (low_result))
9440 		{
9441 		case 0:
9442 		  /* Always false.  */
9443 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9444 		case 1:
9445 		  code = LT_EXPR;
9446 		  break;
9447 		case 2:
9448 		  code = EQ_EXPR;
9449 		  break;
9450 		case 3:
9451 		  code = LE_EXPR;
9452 		  break;
9453 		case 4:
9454 		  code = GT_EXPR;
9455 		  break;
9456 		case 5:
9457 		  code = NE_EXPR;
9458 		  break;
9459 		case 6:
9460 		  code = GE_EXPR;
9461 		  break;
9462 		case 7:
9463 		  /* Always true.  */
9464 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9465 		}
9466 
9467 	      if (save_p)
9468 		{
9469 		  tem = save_expr (build2 (code, type, cval1, cval2));
9470 		  SET_EXPR_LOCATION (tem, loc);
9471 		  return tem;
9472 		}
9473 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9474 	    }
9475 	}
9476     }
9477 
9478   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9479      into a single range test.  */
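  /* For example, with unsigned X the test X / 4 == 2 is equivalent to
     the range test 8 <= X && X <= 11, which is the kind of form
     fold_div_compare produces.  */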
9480   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9481        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9482       && TREE_CODE (arg1) == INTEGER_CST
9483       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9484       && !integer_zerop (TREE_OPERAND (arg0, 1))
9485       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9486       && !TREE_OVERFLOW (arg1))
9487     {
9488       tem = fold_div_compare (loc, code, type, arg0, arg1);
9489       if (tem != NULL_TREE)
9490 	return tem;
9491     }
9492 
9493   /* Fold ~X op ~Y as Y op X.  */
9494   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9495       && TREE_CODE (arg1) == BIT_NOT_EXPR)
9496     {
9497       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9498       return fold_build2_loc (loc, code, type,
9499 			  fold_convert_loc (loc, cmp_type,
9500 					    TREE_OPERAND (arg1, 0)),
9501 			  TREE_OPERAND (arg0, 0));
9502     }
9503 
9504   /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9505   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9506       && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9507     {
9508       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9509       return fold_build2_loc (loc, swap_tree_comparison (code), type,
9510 			  TREE_OPERAND (arg0, 0),
9511 			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9512 				       fold_convert_loc (loc, cmp_type, arg1)));
9513     }
9514 
9515   return NULL_TREE;
9516 }
9517 
9518 
9519 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9520    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9521    argument EXPR represents the expression "z" of type TYPE.  */
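/* Illustration: for z = a + b*i, z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b with a zero imaginary part, which is exactly the
   COMPLEX_EXPR constructed below.  */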
9522 
9523 static tree
9524 fold_mult_zconjz (location_t loc, tree type, tree expr)
9525 {
9526   tree itype = TREE_TYPE (type);
9527   tree rpart, ipart, tem;
9528 
9529   if (TREE_CODE (expr) == COMPLEX_EXPR)
9530     {
9531       rpart = TREE_OPERAND (expr, 0);
9532       ipart = TREE_OPERAND (expr, 1);
9533     }
9534   else if (TREE_CODE (expr) == COMPLEX_CST)
9535     {
9536       rpart = TREE_REALPART (expr);
9537       ipart = TREE_IMAGPART (expr);
9538     }
9539   else
9540     {
9541       expr = save_expr (expr);
9542       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9543       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9544     }
9545 
9546   rpart = save_expr (rpart);
9547   ipart = save_expr (ipart);
9548   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9549 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9550 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9551   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9552 			  build_zero_cst (itype));
9553 }
9554 
9555 
9556 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9557    power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9558    guarantees that P and N have the same least significant log2(M) bits.
9559    N is not otherwise constrained.  In particular, N is not normalized to
9560    0 <= N < M as is common.  In general, the precise value of P is unknown.
9561    M is chosen as large as possible such that constant N can be determined.
9562 
9563    Returns M and sets *RESIDUE to N.
9564 
9565    If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9566    account.  This is not always possible due to PR 35705.
9567  */
9568 
9569 static unsigned HOST_WIDE_INT
9570 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9571 				 bool allow_func_align)
9572 {
9573   enum tree_code code;
9574 
9575   *residue = 0;
9576 
9577   code = TREE_CODE (expr);
9578   if (code == ADDR_EXPR)
9579     {
9580       unsigned int bitalign;
9581       get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9582       *residue /= BITS_PER_UNIT;
9583       return bitalign / BITS_PER_UNIT;
9584     }
9585   else if (code == POINTER_PLUS_EXPR)
9586     {
9587       tree op0, op1;
9588       unsigned HOST_WIDE_INT modulus;
9589       enum tree_code inner_code;
9590 
9591       op0 = TREE_OPERAND (expr, 0);
9592       STRIP_NOPS (op0);
9593       modulus = get_pointer_modulus_and_residue (op0, residue,
9594 						 allow_func_align);
9595 
9596       op1 = TREE_OPERAND (expr, 1);
9597       STRIP_NOPS (op1);
9598       inner_code = TREE_CODE (op1);
9599       if (inner_code == INTEGER_CST)
9600 	{
9601 	  *residue += TREE_INT_CST_LOW (op1);
9602 	  return modulus;
9603 	}
9604       else if (inner_code == MULT_EXPR)
9605 	{
9606 	  op1 = TREE_OPERAND (op1, 1);
9607 	  if (TREE_CODE (op1) == INTEGER_CST)
9608 	    {
9609 	      unsigned HOST_WIDE_INT align;
9610 
9611 	      /* Compute the greatest power-of-2 divisor of op1.  */
9612 	      align = TREE_INT_CST_LOW (op1);
9613 	      align &= -align;
9614 
9615 	      /* If align is non-zero and less than modulus, replace
9616 		 modulus with align.  If align is 0, then either op1 is 0
9617 		 or the greatest power-of-2 divisor of op1 doesn't fit in an
9618 		 unsigned HOST_WIDE_INT.  In either case, no additional
9619 		 constraint is imposed.  */
9620 	      if (align)
9621 		modulus = MIN (modulus, align);
9622 
9623 	      return modulus;
9624 	    }
9625 	}
9626     }
9627 
9628   /* If we get here, we were unable to determine anything useful about the
9629      expression.  */
9630   return 1;
9631 }
9632 
9633 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9634    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
9635 
9636 static bool
9637 vec_cst_ctor_to_array (tree arg, tree *elts)
9638 {
9639   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9640 
9641   if (TREE_CODE (arg) == VECTOR_CST)
9642     {
9643       for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9644 	elts[i] = VECTOR_CST_ELT (arg, i);
9645     }
9646   else if (TREE_CODE (arg) == CONSTRUCTOR)
9647     {
9648       constructor_elt *elt;
9649 
9650       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9651 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9652 	  return false;
9653 	else
9654 	  elts[i] = elt->value;
9655     }
9656   else
9657     return false;
9658   for (; i < nelts; i++)
9659     elts[i]
9660       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9661   return true;
9662 }
9663 
9664 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9665    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9666    NULL_TREE otherwise.  */
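/* For example, with four-element vectors
     arg0 = { a0, a1, a2, a3 }, arg1 = { b0, b1, b2, b3 }
   and sel = { 0, 5, 2, 7 }, the result is { a0, b1, a2, b3 }:
   selector indices 0..3 pick from ARG0 and 4..7 from ARG1.  */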
9667 
9668 static tree
9669 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9670 {
9671   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9672   tree *elts;
9673   bool need_ctor = false;
9674 
9675   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9676 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9677   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9678       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9679     return NULL_TREE;
9680 
9681   elts = XALLOCAVEC (tree, nelts * 3);
9682   if (!vec_cst_ctor_to_array (arg0, elts)
9683       || !vec_cst_ctor_to_array (arg1, elts + nelts))
9684     return NULL_TREE;
9685 
9686   for (i = 0; i < nelts; i++)
9687     {
9688       if (!CONSTANT_CLASS_P (elts[sel[i]]))
9689 	need_ctor = true;
9690       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9691     }
9692 
9693   if (need_ctor)
9694     {
9695       vec<constructor_elt, va_gc> *v;
9696       vec_alloc (v, nelts);
9697       for (i = 0; i < nelts; i++)
9698 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9699       return build_constructor (type, v);
9700     }
9701   else
9702     return build_vector (type, &elts[2 * nelts]);
9703 }
9704 
9705 /* Try to fold a pointer difference of type TYPE between two address
9706    expressions of array references AREF0 and AREF1 using location LOC.
9707    Return a simplified expression for the difference or NULL_TREE.  */
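/* For instance, &a[i] - &a[j] becomes (i - j) * sizeof (a[0]) in the
   difference type; when the bases are themselves array references or
   pointer indirections, the routine recurses on them first.  */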
9708 
9709 static tree
9710 fold_addr_of_array_ref_difference (location_t loc, tree type,
9711 				   tree aref0, tree aref1)
9712 {
9713   tree base0 = TREE_OPERAND (aref0, 0);
9714   tree base1 = TREE_OPERAND (aref1, 0);
9715   tree base_offset = build_int_cst (type, 0);
9716 
9717   /* If the bases are array references as well, recurse.  If the bases
9718      are pointer indirections compute the difference of the pointers.
9719      If the bases are equal, we are set.  */
9720   if ((TREE_CODE (base0) == ARRAY_REF
9721        && TREE_CODE (base1) == ARRAY_REF
9722        && (base_offset
9723 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9724       || (INDIRECT_REF_P (base0)
9725 	  && INDIRECT_REF_P (base1)
9726 	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9727 					     TREE_OPERAND (base0, 0),
9728 					     TREE_OPERAND (base1, 0))))
9729       || operand_equal_p (base0, base1, 0))
9730     {
9731       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9732       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9733       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9734       tree diff = build2 (MINUS_EXPR, type, op0, op1);
9735       return fold_build2_loc (loc, PLUS_EXPR, type,
9736 			      base_offset,
9737 			      fold_build2_loc (loc, MULT_EXPR, type,
9738 					       diff, esz));
9739     }
9740   return NULL_TREE;
9741 }
9742 
9743 /* If the real or vector real constant CST of type TYPE has an exact
9744    inverse, return it, else return NULL.  */
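/* For example, CST = 4.0 yields 0.25, which is exact in binary
   floating point, whereas CST = 3.0 yields NULL_TREE because 1/3 has
   no exact representation.  */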
9745 
9746 tree
9747 exact_inverse (tree type, tree cst)
9748 {
9749   REAL_VALUE_TYPE r;
9750   tree unit_type, *elts;
9751   machine_mode mode;
9752   unsigned vec_nelts, i;
9753 
9754   switch (TREE_CODE (cst))
9755     {
9756     case REAL_CST:
9757       r = TREE_REAL_CST (cst);
9758 
9759       if (exact_real_inverse (TYPE_MODE (type), &r))
9760 	return build_real (type, r);
9761 
9762       return NULL_TREE;
9763 
9764     case VECTOR_CST:
9765       vec_nelts = VECTOR_CST_NELTS (cst);
9766       elts = XALLOCAVEC (tree, vec_nelts);
9767       unit_type = TREE_TYPE (type);
9768       mode = TYPE_MODE (unit_type);
9769 
9770       for (i = 0; i < vec_nelts; i++)
9771 	{
9772 	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9773 	  if (!exact_real_inverse (mode, &r))
9774 	    return NULL_TREE;
9775 	  elts[i] = build_real (unit_type, r);
9776 	}
9777 
9778       return build_vector (type, elts);
9779 
9780     default:
9781       return NULL_TREE;
9782     }
9783 }
9784 
9785 /*  Mask out the tz least significant bits of X of type TYPE where
9786     tz is the number of trailing zeroes in Y.  */
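/* For example, with Y = 24 (three trailing zero bits) and X = 45
   (0b101101), the result is 40 (0b101000): the three least
   significant bits of X are cleared.  */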
9787 static wide_int
9788 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9789 {
9790   int tz = wi::ctz (y);
9791   if (tz > 0)
9792     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9793   return x;
9794 }
9795 
9796 /* Return true when T is an address and is known to be nonzero.
9797    For floating point we further ensure that T is not denormal.
9798    Similar logic is present in nonzero_address_p in rtlanal.c.
9799 
9800    If the return value is based on the assumption that signed overflow
9801    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9802    change *STRICT_OVERFLOW_P.  */
9803 
9804 static bool
9805 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9806 {
9807   tree type = TREE_TYPE (t);
9808   enum tree_code code;
9809 
9810   /* Doing something useful for floating point would need more work.  */
9811   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9812     return false;
9813 
9814   code = TREE_CODE (t);
9815   switch (TREE_CODE_CLASS (code))
9816     {
9817     case tcc_unary:
9818       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9819 					      strict_overflow_p);
9820     case tcc_binary:
9821     case tcc_comparison:
9822       return tree_binary_nonzero_warnv_p (code, type,
9823 					       TREE_OPERAND (t, 0),
9824 					       TREE_OPERAND (t, 1),
9825 					       strict_overflow_p);
9826     case tcc_constant:
9827     case tcc_declaration:
9828     case tcc_reference:
9829       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9830 
9831     default:
9832       break;
9833     }
9834 
9835   switch (code)
9836     {
9837     case TRUTH_NOT_EXPR:
9838       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9839 					      strict_overflow_p);
9840 
9841     case TRUTH_AND_EXPR:
9842     case TRUTH_OR_EXPR:
9843     case TRUTH_XOR_EXPR:
9844       return tree_binary_nonzero_warnv_p (code, type,
9845 					       TREE_OPERAND (t, 0),
9846 					       TREE_OPERAND (t, 1),
9847 					       strict_overflow_p);
9848 
9849     case COND_EXPR:
9850     case CONSTRUCTOR:
9851     case OBJ_TYPE_REF:
9852     case ASSERT_EXPR:
9853     case ADDR_EXPR:
9854     case WITH_SIZE_EXPR:
9855     case SSA_NAME:
9856       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9857 
9858     case COMPOUND_EXPR:
9859     case MODIFY_EXPR:
9860     case BIND_EXPR:
9861       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9862 					strict_overflow_p);
9863 
9864     case SAVE_EXPR:
9865       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9866 					strict_overflow_p);
9867 
9868     case CALL_EXPR:
9869       {
9870 	tree fndecl = get_callee_fndecl (t);
9871 	if (!fndecl) return false;
9872 	if (flag_delete_null_pointer_checks && !flag_check_new
9873 	    && DECL_IS_OPERATOR_NEW (fndecl)
9874 	    && !TREE_NOTHROW (fndecl))
9875 	  return true;
9876 	if (flag_delete_null_pointer_checks
9877 	    && lookup_attribute ("returns_nonnull",
9878 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9879 	  return true;
9880 	return alloca_call_p (t);
9881       }
9882 
9883     default:
9884       break;
9885     }
9886   return false;
9887 }
9888 
9889 /* Return true when T is an address and is known to be nonzero.
9890    Handle warnings about undefined signed overflow.  */
9891 
9892 static bool
9893 tree_expr_nonzero_p (tree t)
9894 {
9895   bool ret, strict_overflow_p;
9896 
9897   strict_overflow_p = false;
9898   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9899   if (strict_overflow_p)
9900     fold_overflow_warning (("assuming signed overflow does not occur when "
9901 			    "determining that expression is always "
9902 			    "non-zero"),
9903 			   WARN_STRICT_OVERFLOW_MISC);
9904   return ret;
9905 }
9906 
9907 /* Fold a binary expression of code CODE and type TYPE with operands
9908    OP0 and OP1.  LOC is the location of the resulting expression.
9909    Return the folded expression if folding is successful.  Otherwise,
9910    return NULL_TREE.  */
9911 
9912 tree
9913 fold_binary_loc (location_t loc,
9914 	     enum tree_code code, tree type, tree op0, tree op1)
9915 {
9916   enum tree_code_class kind = TREE_CODE_CLASS (code);
9917   tree arg0, arg1, tem;
9918   tree t1 = NULL_TREE;
9919   bool strict_overflow_p;
9920   unsigned int prec;
9921 
9922   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9923 	      && TREE_CODE_LENGTH (code) == 2
9924 	      && op0 != NULL_TREE
9925 	      && op1 != NULL_TREE);
9926 
9927   arg0 = op0;
9928   arg1 = op1;
9929 
9930   /* Strip any conversions that don't change the mode.  This is
9931      safe for every expression, except for a comparison expression
9932      because its signedness is derived from its operands.  So, in
9933      the latter case, only strip conversions that don't change the
9934      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9935      preserved.
9936 
9937      Note that this is done as an internal manipulation within the
9938      constant folder, in order to find the simplest representation
9939      of the arguments so that their form can be studied.  In any
9940      case, the appropriate type conversions should be put back in
9941      the tree that will get out of the constant folder.  */
9942 
9943   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9944     {
9945       STRIP_SIGN_NOPS (arg0);
9946       STRIP_SIGN_NOPS (arg1);
9947     }
9948   else
9949     {
9950       STRIP_NOPS (arg0);
9951       STRIP_NOPS (arg1);
9952     }
9953 
9954   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9955      constant but we can't do arithmetic on them.  */
9956   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9957     {
9958       tem = const_binop (code, type, arg0, arg1);
9959       if (tem != NULL_TREE)
9960 	{
9961 	  if (TREE_TYPE (tem) != type)
9962 	    tem = fold_convert_loc (loc, type, tem);
9963 	  return tem;
9964 	}
9965     }
9966 
9967   /* If this is a commutative operation, and ARG0 is a constant, move it
9968      to ARG1 to reduce the number of tests below.  */
9969   if (commutative_tree_code (code)
9970       && tree_swap_operands_p (arg0, arg1, true))
9971     return fold_build2_loc (loc, code, type, op1, op0);
9972 
9973   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9974      to ARG1 to reduce the number of tests below.  */
9975   if (kind == tcc_comparison
9976       && tree_swap_operands_p (arg0, arg1, true))
9977     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9978 
9979   tem = generic_simplify (loc, code, type, op0, op1);
9980   if (tem)
9981     return tem;
9982 
9983   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9984 
9985      First check for cases where an arithmetic operation is applied to a
9986      compound, conditional, or comparison operation.  Push the arithmetic
9987      operation inside the compound or conditional to see if any folding
9988      can then be done.  Convert comparison to conditional for this purpose.
9989      This also optimizes non-constant cases that used to be done in
9990      expand_expr.
9991 
9992      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9993      where one of the operands is a truth value and the other is a truth
9994      value or a BIT_AND_EXPR with the constant 1.  In that case, the
9995      code below would make the expression more complex.  Change it to a
9996      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9997      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9998 
9999   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10000        || code == EQ_EXPR || code == NE_EXPR)
10001       && TREE_CODE (type) != VECTOR_TYPE
10002       && ((truth_value_p (TREE_CODE (arg0))
10003 	   && (truth_value_p (TREE_CODE (arg1))
10004 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10005 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10006 	  || (truth_value_p (TREE_CODE (arg1))
10007 	      && (truth_value_p (TREE_CODE (arg0))
10008 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10009 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10010     {
10011       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10012 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10013 			 : TRUTH_XOR_EXPR,
10014 			 boolean_type_node,
10015 			 fold_convert_loc (loc, boolean_type_node, arg0),
10016 			 fold_convert_loc (loc, boolean_type_node, arg1));
10017 
10018       if (code == EQ_EXPR)
10019 	tem = invert_truthvalue_loc (loc, tem);
10020 
10021       return fold_convert_loc (loc, type, tem);
10022     }
10023 
10024   if (TREE_CODE_CLASS (code) == tcc_binary
10025       || TREE_CODE_CLASS (code) == tcc_comparison)
10026     {
10027       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10028 	{
10029 	  tem = fold_build2_loc (loc, code, type,
10030 			     fold_convert_loc (loc, TREE_TYPE (op0),
10031 					       TREE_OPERAND (arg0, 1)), op1);
10032 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10033 			     tem);
10034 	}
10035       if (TREE_CODE (arg1) == COMPOUND_EXPR
10036 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10037 	{
10038 	  tem = fold_build2_loc (loc, code, type, op0,
10039 			     fold_convert_loc (loc, TREE_TYPE (op1),
10040 					       TREE_OPERAND (arg1, 1)));
10041 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10042 			     tem);
10043 	}
10044 
10045       if (TREE_CODE (arg0) == COND_EXPR
10046 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10047 	  || COMPARISON_CLASS_P (arg0))
10048 	{
10049 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10050 						     arg0, arg1,
10051 						     /*cond_first_p=*/1);
10052 	  if (tem != NULL_TREE)
10053 	    return tem;
10054 	}
10055 
10056       if (TREE_CODE (arg1) == COND_EXPR
10057 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10058 	  || COMPARISON_CLASS_P (arg1))
10059 	{
10060 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10061 						     arg1, arg0,
10062 					             /*cond_first_p=*/0);
10063 	  if (tem != NULL_TREE)
10064 	    return tem;
10065 	}
10066     }
10067 
10068   switch (code)
10069     {
10070     case MEM_REF:
10071       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10072       if (TREE_CODE (arg0) == ADDR_EXPR
10073 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10074 	{
10075 	  tree iref = TREE_OPERAND (arg0, 0);
10076 	  return fold_build2 (MEM_REF, type,
10077 			      TREE_OPERAND (iref, 0),
10078 			      int_const_binop (PLUS_EXPR, arg1,
10079 					       TREE_OPERAND (iref, 1)));
10080 	}
10081 
10082       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10083       if (TREE_CODE (arg0) == ADDR_EXPR
10084 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10085 	{
10086 	  tree base;
10087 	  HOST_WIDE_INT coffset;
10088 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10089 						&coffset);
10090 	  if (!base)
10091 	    return NULL_TREE;
10092 	  return fold_build2 (MEM_REF, type,
10093 			      build_fold_addr_expr (base),
10094 			      int_const_binop (PLUS_EXPR, arg1,
10095 					       size_int (coffset)));
10096 	}
10097 
10098       return NULL_TREE;
10099 
10100     case POINTER_PLUS_EXPR:
10101       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10102       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10103 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10104         return fold_convert_loc (loc, type,
10105 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10106 					      fold_convert_loc (loc, sizetype,
10107 								arg1),
10108 					      fold_convert_loc (loc, sizetype,
10109 								arg0)));
10110 
10111       return NULL_TREE;
10112 
10113     case PLUS_EXPR:
10114       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10115 	{
10116 	  /* X + (X / CST) * -CST is X % CST.  */
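	  /* E.g. X + (X / 4) * -4 rewrites to X % 4 under the truncating
	     division semantics assumed here.  */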
10117 	  if (TREE_CODE (arg1) == MULT_EXPR
10118 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10119 	      && operand_equal_p (arg0,
10120 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10121 	    {
10122 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10123 	      tree cst1 = TREE_OPERAND (arg1, 1);
10124 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10125 				      cst1, cst0);
10126 	      if (sum && integer_zerop (sum))
10127 		return fold_convert_loc (loc, type,
10128 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10129 						      TREE_TYPE (arg0), arg0,
10130 						      cst0));
10131 	    }
10132 	}
10133 
10134       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10135 	 one.  Make sure the type is not saturating and has the signedness of
10136 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10137 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10138       if ((TREE_CODE (arg0) == MULT_EXPR
10139 	   || TREE_CODE (arg1) == MULT_EXPR)
10140 	  && !TYPE_SATURATING (type)
10141 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10142 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10143 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10144         {
10145 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10146 	  if (tem)
10147 	    return tem;
10148 	}
10149 
10150       if (! FLOAT_TYPE_P (type))
10151 	{
10152 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10153 	     with a constant, and the two constants have no bits in common,
10154 	     we should treat this as a BIT_IOR_EXPR since this may produce more
10155 	     simplifications.  */
10156 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10157 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10158 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10159 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10160 	      && wi::bit_and (TREE_OPERAND (arg0, 1),
10161 			      TREE_OPERAND (arg1, 1)) == 0)
10162 	    {
10163 	      code = BIT_IOR_EXPR;
10164 	      goto bit_ior;
10165 	    }
10166 
10167 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10168 	     (plus (plus (mult) (mult)) (foo)) so that we can
10169 	     take advantage of the factoring cases below.  */
10170 	  if (ANY_INTEGRAL_TYPE_P (type)
10171 	      && TYPE_OVERFLOW_WRAPS (type)
10172 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10173 		    || TREE_CODE (arg0) == MINUS_EXPR)
10174 		   && TREE_CODE (arg1) == MULT_EXPR)
10175 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10176 		       || TREE_CODE (arg1) == MINUS_EXPR)
10177 		      && TREE_CODE (arg0) == MULT_EXPR)))
10178 	    {
10179 	      tree parg0, parg1, parg, marg;
10180 	      enum tree_code pcode;
10181 
10182 	      if (TREE_CODE (arg1) == MULT_EXPR)
10183 		parg = arg0, marg = arg1;
10184 	      else
10185 		parg = arg1, marg = arg0;
10186 	      pcode = TREE_CODE (parg);
10187 	      parg0 = TREE_OPERAND (parg, 0);
10188 	      parg1 = TREE_OPERAND (parg, 1);
10189 	      STRIP_NOPS (parg0);
10190 	      STRIP_NOPS (parg1);
10191 
10192 	      if (TREE_CODE (parg0) == MULT_EXPR
10193 		  && TREE_CODE (parg1) != MULT_EXPR)
10194 		return fold_build2_loc (loc, pcode, type,
10195 				    fold_build2_loc (loc, PLUS_EXPR, type,
10196 						 fold_convert_loc (loc, type,
10197 								   parg0),
10198 						 fold_convert_loc (loc, type,
10199 								   marg)),
10200 				    fold_convert_loc (loc, type, parg1));
10201 	      if (TREE_CODE (parg0) != MULT_EXPR
10202 		  && TREE_CODE (parg1) == MULT_EXPR)
10203 		return
10204 		  fold_build2_loc (loc, PLUS_EXPR, type,
10205 			       fold_convert_loc (loc, type, parg0),
10206 			       fold_build2_loc (loc, pcode, type,
10207 					    fold_convert_loc (loc, type, marg),
10208 					    fold_convert_loc (loc, type,
10209 							      parg1)));
10210 	    }
10211 	}
10212       else
10213 	{
10214 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10215 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10216 	     if signed zeros are involved.  */
10217 	  if (!HONOR_SNANS (element_mode (arg0))
10218               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10219 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10220 	    {
10221 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10222 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10223 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10224 	      bool arg0rz = false, arg0iz = false;
10225 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10226 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10227 		{
10228 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10229 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10230 		  if (arg0rz && arg1i && real_zerop (arg1i))
10231 		    {
10232 		      tree rp = arg1r ? arg1r
10233 				  : build1 (REALPART_EXPR, rtype, arg1);
10234 		      tree ip = arg0i ? arg0i
10235 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10236 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10237 		    }
10238 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10239 		    {
10240 		      tree rp = arg0r ? arg0r
10241 				  : build1 (REALPART_EXPR, rtype, arg0);
10242 		      tree ip = arg1i ? arg1i
10243 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10244 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10245 		    }
10246 		}
10247 	    }
10248 
10249 	  if (flag_unsafe_math_optimizations
10250 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10251 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10252 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10253 	    return tem;
10254 
10255           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10256              We associate floats only if the user has specified
10257              -fassociative-math.  */
10258           if (flag_associative_math
10259               && TREE_CODE (arg1) == PLUS_EXPR
10260               && TREE_CODE (arg0) != MULT_EXPR)
10261             {
10262               tree tree10 = TREE_OPERAND (arg1, 0);
10263               tree tree11 = TREE_OPERAND (arg1, 1);
10264               if (TREE_CODE (tree11) == MULT_EXPR
10265 		  && TREE_CODE (tree10) == MULT_EXPR)
10266                 {
10267                   tree tree0;
10268                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10269                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10270                 }
10271             }
10272           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10273              We associate floats only if the user has specified
10274              -fassociative-math.  */
10275           if (flag_associative_math
10276               && TREE_CODE (arg0) == PLUS_EXPR
10277               && TREE_CODE (arg1) != MULT_EXPR)
10278             {
10279               tree tree00 = TREE_OPERAND (arg0, 0);
10280               tree tree01 = TREE_OPERAND (arg0, 1);
10281               if (TREE_CODE (tree01) == MULT_EXPR
10282 		  && TREE_CODE (tree00) == MULT_EXPR)
10283                 {
10284                   tree tree0;
10285                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10286                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10287                 }
10288             }
10289 	}
10290 
10291      bit_rotate:
10292       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10293 	 is a rotate of A by C1 bits.  */
10294       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10295 	 is a rotate of A by B bits.  */
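      /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
	 matches the first form and becomes A rotated left by 3, while
	 (A << B) + (A >> (32 - B)) matches the second.  */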
10296       {
10297 	enum tree_code code0, code1;
10298 	tree rtype;
10299 	code0 = TREE_CODE (arg0);
10300 	code1 = TREE_CODE (arg1);
10301 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10302 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10303 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10304 			        TREE_OPERAND (arg1, 0), 0)
10305 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10306 	        TYPE_UNSIGNED (rtype))
10307 	    /* Only create rotates in complete modes.  Other cases are not
10308 	       expanded properly.  */
10309 	    && (element_precision (rtype)
10310 		== element_precision (TYPE_MODE (rtype))))
10311 	  {
10312 	    tree tree01, tree11;
10313 	    enum tree_code code01, code11;
10314 
10315 	    tree01 = TREE_OPERAND (arg0, 1);
10316 	    tree11 = TREE_OPERAND (arg1, 1);
10317 	    STRIP_NOPS (tree01);
10318 	    STRIP_NOPS (tree11);
10319 	    code01 = TREE_CODE (tree01);
10320 	    code11 = TREE_CODE (tree11);
10321 	    if (code01 == INTEGER_CST
10322 		&& code11 == INTEGER_CST
10323 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
10324 		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10325 	      {
10326 		tem = build2_loc (loc, LROTATE_EXPR,
10327 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
10328 				  TREE_OPERAND (arg0, 0),
10329 				  code0 == LSHIFT_EXPR
10330 				  ? TREE_OPERAND (arg0, 1)
10331 				  : TREE_OPERAND (arg1, 1));
10332 		return fold_convert_loc (loc, type, tem);
10333 	      }
10334 	    else if (code11 == MINUS_EXPR)
10335 	      {
10336 		tree tree110, tree111;
10337 		tree110 = TREE_OPERAND (tree11, 0);
10338 		tree111 = TREE_OPERAND (tree11, 1);
10339 		STRIP_NOPS (tree110);
10340 		STRIP_NOPS (tree111);
10341 		if (TREE_CODE (tree110) == INTEGER_CST
10342 		    && 0 == compare_tree_int (tree110,
10343 					      element_precision
10344 					      (TREE_TYPE (TREE_OPERAND
10345 							  (arg0, 0))))
10346 		    && operand_equal_p (tree01, tree111, 0))
10347 		  return
10348 		    fold_convert_loc (loc, type,
10349 				      build2 ((code0 == LSHIFT_EXPR
10350 					       ? LROTATE_EXPR
10351 					       : RROTATE_EXPR),
10352 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10353 					      TREE_OPERAND (arg0, 0),
10354 					      TREE_OPERAND (arg0, 1)));
10355 	      }
10356 	    else if (code01 == MINUS_EXPR)
10357 	      {
10358 		tree tree010, tree011;
10359 		tree010 = TREE_OPERAND (tree01, 0);
10360 		tree011 = TREE_OPERAND (tree01, 1);
10361 		STRIP_NOPS (tree010);
10362 		STRIP_NOPS (tree011);
10363 		if (TREE_CODE (tree010) == INTEGER_CST
10364 		    && 0 == compare_tree_int (tree010,
10365 					      element_precision
10366 					      (TREE_TYPE (TREE_OPERAND
10367 							  (arg0, 0))))
10368 		    && operand_equal_p (tree11, tree011, 0))
10369 		    return fold_convert_loc
10370 		      (loc, type,
10371 		       build2 ((code0 != LSHIFT_EXPR
10372 				? LROTATE_EXPR
10373 				: RROTATE_EXPR),
10374 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10375 			       TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10376 	      }
10377 	  }
10378       }
10379 
10380     associate:
10381       /* In most languages, we can't associate operations on floats through
10382 	 parentheses.  Rather than remember where the parentheses were, we
10383 	 don't associate floats at all, unless the user has specified
10384 	 -fassociative-math.
10385 	 And, we need to make sure type is not saturating.  */
10386 
10387       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10388 	  && !TYPE_SATURATING (type))
10389 	{
10390 	  tree var0, con0, lit0, minus_lit0;
10391 	  tree var1, con1, lit1, minus_lit1;
10392 	  tree atype = type;
10393 	  bool ok = true;
10394 
10395 	  /* Split both trees into variables, constants, and literals.  Then
10396 	     associate each group together, the constants with literals,
10397 	     then the result with variables.  This increases the chances of
10398 	     literals being recombined later and of generating relocatable
10399 	     expressions for the sum of a constant and literal.  */
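	  /* For example, in (X + 1) + (Y + 2) the variables X and Y and
	     the literals 1 and 2 are grouped separately, so the sum can
	     be recombined as (X + Y) + 3.  */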
10400 	  var0 = split_tree (loc, arg0, type, code,
10401 			     &con0, &lit0, &minus_lit0, 0);
10402 	  var1 = split_tree (loc, arg1, type, code,
10403 			     &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
10404 
10405 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10406 	  if (code == MINUS_EXPR)
10407 	    code = PLUS_EXPR;
10408 
10409 	  /* With undefined overflow prefer doing association in a type
10410 	     which wraps on overflow, if that is one of the operand types.  */
10411 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10412 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10413 	    {
10414 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10415 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10416 		atype = TREE_TYPE (arg0);
10417 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10418 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10419 		atype = TREE_TYPE (arg1);
10420 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10421 	    }
10422 
10423 	  /* With undefined overflow we can only associate constants with one
10424 	     variable, and constants whose association doesn't overflow.  */
10425 	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10426 	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10427 	    {
10428 	      if (var0 && var1)
10429 		{
10430 		  tree tmp0 = var0;
10431 		  tree tmp1 = var1;
10432 		  bool one_neg = false;
10433 
10434 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10435 		    {
10436 		      tmp0 = TREE_OPERAND (tmp0, 0);
10437 		      one_neg = !one_neg;
10438 		    }
10439 		  if (CONVERT_EXPR_P (tmp0)
10440 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10441 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10442 			  <= TYPE_PRECISION (atype)))
10443 		    tmp0 = TREE_OPERAND (tmp0, 0);
10444 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10445 		    {
10446 		      tmp1 = TREE_OPERAND (tmp1, 0);
10447 		      one_neg = !one_neg;
10448 		    }
10449 		  if (CONVERT_EXPR_P (tmp1)
10450 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10451 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10452 			  <= TYPE_PRECISION (atype)))
10453 		    tmp1 = TREE_OPERAND (tmp1, 0);
10454 		  /* The only case we can still associate with two variables
10455 		     is if they cancel out.  */
10456 		  if (!one_neg
10457 		      || !operand_equal_p (tmp0, tmp1, 0))
10458 		    ok = false;
10459 		}
10460 	    }
10461 
10462 	  /* Only do something if we found more than two objects.  Otherwise,
10463 	     nothing has changed and we risk infinite recursion.  */
10464 	  if (ok
10465 	      && (2 < ((var0 != 0) + (var1 != 0)
10466 		       + (con0 != 0) + (con1 != 0)
10467 		       + (lit0 != 0) + (lit1 != 0)
10468 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10469 	    {
10470 	      bool any_overflows = false;
10471 	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10472 	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10473 	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10474 	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10475 	      var0 = associate_trees (loc, var0, var1, code, atype);
10476 	      con0 = associate_trees (loc, con0, con1, code, atype);
10477 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
10478 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10479 					    code, atype);
10480 
10481 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10482 		 greater than the positive part.  Otherwise, the multiplicative
10483 		 folding code (i.e. extract_muldiv) may be fooled in case
10484 		 unsigned constants are subtracted, like in the following
10485 		 example: ((X*2 + 4) - 8U)/2.  */
10486 	      if (minus_lit0 && lit0)
10487 		{
10488 		  if (TREE_CODE (lit0) == INTEGER_CST
10489 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10490 		      && tree_int_cst_lt (lit0, minus_lit0))
10491 		    {
10492 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10493 						    MINUS_EXPR, atype);
10494 		      lit0 = 0;
10495 		    }
10496 		  else
10497 		    {
10498 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10499 					      MINUS_EXPR, atype);
10500 		      minus_lit0 = 0;
10501 		    }
10502 		}
10503 
10504 	      /* Don't introduce overflows through reassociation.  */
10505 	      if (!any_overflows
10506 		  && ((lit0 && TREE_OVERFLOW_P (lit0))
10507 		      || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10508 		return NULL_TREE;
10509 
10510 	      if (minus_lit0)
10511 		{
10512 		  if (con0 == 0)
10513 		    return
10514 		      fold_convert_loc (loc, type,
10515 					associate_trees (loc, var0, minus_lit0,
10516 							 MINUS_EXPR, atype));
10517 		  else
10518 		    {
10519 		      con0 = associate_trees (loc, con0, minus_lit0,
10520 					      MINUS_EXPR, atype);
10521 		      return
10522 			fold_convert_loc (loc, type,
10523 					  associate_trees (loc, var0, con0,
10524 							   PLUS_EXPR, atype));
10525 		    }
10526 		}
10527 
10528 	      con0 = associate_trees (loc, con0, lit0, code, atype);
10529 	      return
10530 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10531 							      code, atype));
10532 	    }
10533 	}
10534 
10535       return NULL_TREE;
10536 
10537     case MINUS_EXPR:
10538       /* Pointer simplifications for subtraction, simple reassociations. */
10539       if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10540 	{
10541 	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10542 	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10543 	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10544 	    {
10545 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10546 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10547 	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10548 	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10549 	      return fold_build2_loc (loc, PLUS_EXPR, type,
10550 				  fold_build2_loc (loc, MINUS_EXPR, type,
10551 					       arg00, arg10),
10552 				  fold_build2_loc (loc, MINUS_EXPR, type,
10553 					       arg01, arg11));
10554 	    }
10555 	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10556 	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10557 	    {
10558 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10559 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10560 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10561 				      fold_convert_loc (loc, type, arg1));
10562 	      if (tmp)
10563 	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10564 	    }
10565 	  /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10566 	     simplifies. */
10567 	  else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10568 	    {
10569 	      tree arg10 = fold_convert_loc (loc, type,
10570 					     TREE_OPERAND (arg1, 0));
10571 	      tree arg11 = fold_convert_loc (loc, type,
10572 					     TREE_OPERAND (arg1, 1));
10573 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10574 					  fold_convert_loc (loc, type, arg0),
10575 					  arg10);
10576 	      if (tmp)
10577 		return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10578 	    }
10579 	}
10580       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10581       if (TREE_CODE (arg0) == NEGATE_EXPR
10582 	  && negate_expr_p (op1)
10583 	  && reorder_operands_p (arg0, arg1))
10584 	return fold_build2_loc (loc, MINUS_EXPR, type,
10585 				negate_expr (op1),
10586 				fold_convert_loc (loc, type,
10587 						  TREE_OPERAND (arg0, 0)));
10588 
10589       /* X - (X / Y) * Y is X % Y.  */
10590       if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10591 	  && TREE_CODE (arg1) == MULT_EXPR
10592 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10593 	  && operand_equal_p (arg0,
10594 			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10595 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10596 			      TREE_OPERAND (arg1, 1), 0))
10597 	return
10598 	  fold_convert_loc (loc, type,
10599 			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10600 					 arg0, TREE_OPERAND (arg1, 1)));
10601 
10602       if (! FLOAT_TYPE_P (type))
10603 	{
10604 	  /* Fold A - (A & B) into ~B & A.  */
10605 	  if (!TREE_SIDE_EFFECTS (arg0)
10606 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10607 	    {
10608 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10609 		{
10610 		  tree arg10 = fold_convert_loc (loc, type,
10611 						 TREE_OPERAND (arg1, 0));
10612 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10613 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10614 						   type, arg10),
10615 				      fold_convert_loc (loc, type, arg0));
10616 		}
10617 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10618 		{
10619 		  tree arg11 = fold_convert_loc (loc,
10620 						 type, TREE_OPERAND (arg1, 1));
10621 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10622 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10623 						   type, arg11),
10624 				      fold_convert_loc (loc, type, arg0));
10625 		}
10626 	    }
10627 
10628 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10629 	     any power of 2 minus 1.  */
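	  /* E.g. with B = 7: (A & ~7) - (A & 7) equals (A ^ 7) - 7,
	     because XOR with an all-ones low mask flips exactly those
	     low bits.  */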
10630 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10631 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10632 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10633 				  TREE_OPERAND (arg1, 0), 0))
10634 	    {
10635 	      tree mask0 = TREE_OPERAND (arg0, 1);
10636 	      tree mask1 = TREE_OPERAND (arg1, 1);
10637 	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10638 
10639 	      if (operand_equal_p (tem, mask1, 0))
10640 		{
10641 		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10642 				     TREE_OPERAND (arg0, 0), mask1);
10643 		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10644 		}
10645 	    }
10646 	}
10647 
10648       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10649 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10650 	 signed zeros are involved.  */
10651       if (!HONOR_SNANS (element_mode (arg0))
10652 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10653 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10654         {
10655 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10656 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10657 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10658 	  bool arg0rz = false, arg0iz = false;
10659 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10660 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10661 	    {
10662 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10663 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10664 	      if (arg0rz && arg1i && real_zerop (arg1i))
10665 	        {
10666 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10667 					 arg1r ? arg1r
10668 					 : build1 (REALPART_EXPR, rtype, arg1));
10669 		  tree ip = arg0i ? arg0i
10670 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10671 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10672 		}
10673 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10674 	        {
10675 		  tree rp = arg0r ? arg0r
10676 		    : build1 (REALPART_EXPR, rtype, arg0);
10677 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10678 					 arg1i ? arg1i
10679 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10680 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10681 		}
10682 	    }
10683 	}
10684 
10685       /* A - B -> A + (-B) if B is easily negatable.  */
10686       if (negate_expr_p (op1)
10687 	  && ! TYPE_OVERFLOW_SANITIZED (type)
10688 	  && ((FLOAT_TYPE_P (type)
10689                /* Avoid this transformation if B is a positive REAL_CST.  */
10690 	       && (TREE_CODE (op1) != REAL_CST
10691 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10692 	      || INTEGRAL_TYPE_P (type)))
10693 	return fold_build2_loc (loc, PLUS_EXPR, type,
10694 				fold_convert_loc (loc, type, arg0),
10695 				negate_expr (op1));
10696 
10697       /* Try folding difference of addresses.  */
10698       {
10699 	HOST_WIDE_INT diff;
10700 
10701 	if ((TREE_CODE (arg0) == ADDR_EXPR
10702 	     || TREE_CODE (arg1) == ADDR_EXPR)
10703 	    && ptr_difference_const (arg0, arg1, &diff))
10704 	  return build_int_cst_type (type, diff);
10705       }
10706 
10707       /* Fold &a[i] - &a[j] to i-j.  */
10708       if (TREE_CODE (arg0) == ADDR_EXPR
10709 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10710 	  && TREE_CODE (arg1) == ADDR_EXPR
10711 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10712         {
10713 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10714 							TREE_OPERAND (arg0, 0),
10715 							TREE_OPERAND (arg1, 0));
10716 	  if (tem)
10717 	    return tem;
10718 	}
10719 
10720       if (FLOAT_TYPE_P (type)
10721 	  && flag_unsafe_math_optimizations
10722 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10723 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10724 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10725 	return tem;
10726 
10727       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10728 	 one.  Make sure the type is not saturating and has the signedness of
10729 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10730 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10731       if ((TREE_CODE (arg0) == MULT_EXPR
10732 	   || TREE_CODE (arg1) == MULT_EXPR)
10733 	  && !TYPE_SATURATING (type)
10734 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10735 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10736 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10737         {
10738 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10739 	  if (tem)
10740 	    return tem;
10741 	}
10742 
10743       goto associate;
10744 
10745     case MULT_EXPR:
10746       /* (-A) * (-B) -> A * B  */
10747       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10748 	return fold_build2_loc (loc, MULT_EXPR, type,
10749 			    fold_convert_loc (loc, type,
10750 					      TREE_OPERAND (arg0, 0)),
10751 			    fold_convert_loc (loc, type,
10752 					      negate_expr (arg1)));
10753       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10754 	return fold_build2_loc (loc, MULT_EXPR, type,
10755 			    fold_convert_loc (loc, type,
10756 					      negate_expr (arg0)),
10757 			    fold_convert_loc (loc, type,
10758 					      TREE_OPERAND (arg1, 0)));
10759 
10760       if (! FLOAT_TYPE_P (type))
10761 	{
10762 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10763 	  if (TREE_CODE (op1) == INTEGER_CST
10764 	      && tree_int_cst_sgn (op1) == -1
10765 	      && negate_expr_p (op0)
10766 	      && (tem = negate_expr (op1)) != op1
10767 	      && ! TREE_OVERFLOW (tem))
10768 	    return fold_build2_loc (loc, MULT_EXPR, type,
10769 				    fold_convert_loc (loc, type,
10770 						      negate_expr (op0)), tem);
10771 
10772 	  /* (a * (1 << b)) is (a << b)  */
10773 	  if (TREE_CODE (arg1) == LSHIFT_EXPR
10774 	      && integer_onep (TREE_OPERAND (arg1, 0)))
10775 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10776 				TREE_OPERAND (arg1, 1));
10777 	  if (TREE_CODE (arg0) == LSHIFT_EXPR
10778 	      && integer_onep (TREE_OPERAND (arg0, 0)))
10779 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10780 				TREE_OPERAND (arg0, 1));
10781 
10782 	  /* (A + A) * C -> A * 2 * C  */
10783 	  if (TREE_CODE (arg0) == PLUS_EXPR
10784 	      && TREE_CODE (arg1) == INTEGER_CST
10785 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10786 			          TREE_OPERAND (arg0, 1), 0))
10787 	    return fold_build2_loc (loc, MULT_EXPR, type,
10788 				omit_one_operand_loc (loc, type,
10789 						  TREE_OPERAND (arg0, 0),
10790 						  TREE_OPERAND (arg0, 1)),
10791 				fold_build2_loc (loc, MULT_EXPR, type,
10792 					     build_int_cst (type, 2) , arg1));
10793 
10794 	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10795 	     sign-changing only.  */
10796 	  if (TREE_CODE (arg1) == INTEGER_CST
10797 	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
10798 	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10799 	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10800 
10801 	  strict_overflow_p = false;
10802 	  if (TREE_CODE (arg1) == INTEGER_CST
10803 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10804 					     &strict_overflow_p)))
10805 	    {
10806 	      if (strict_overflow_p)
10807 		fold_overflow_warning (("assuming signed overflow does not "
10808 					"occur when simplifying "
10809 					"multiplication"),
10810 				       WARN_STRICT_OVERFLOW_MISC);
10811 	      return fold_convert_loc (loc, type, tem);
10812 	    }
10813 
10814 	  /* Optimize z * conj(z) for integer complex numbers.  */
10815 	  if (TREE_CODE (arg0) == CONJ_EXPR
10816 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10817 	    return fold_mult_zconjz (loc, type, arg1);
10818 	  if (TREE_CODE (arg1) == CONJ_EXPR
10819 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10820 	    return fold_mult_zconjz (loc, type, arg0);
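	  /* E.g. for z = a + b*i, "z * conj (z)" folds to the complex
	     value (a*a + b*b) + 0*i; fold_mult_zconjz builds that
	     expansion directly.  */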
10821 	}
10822       else
10823 	{
10824 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10825 	     the result for floating-point types due to rounding, so it is
10826 	     applied only if -fassociative-math was specified.  */
10827 	  if (flag_associative_math
10828 	      && TREE_CODE (arg0) == RDIV_EXPR
10829 	      && TREE_CODE (arg1) == REAL_CST
10830 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10831 	    {
10832 	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10833 				      arg1);
10834 	      if (tem)
10835 		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10836 				    TREE_OPERAND (arg0, 1));
10837 	    }
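	  /* E.g. with -fassociative-math, "(2.0 / x) * 3.0" becomes
	     "6.0 / x"; const_binop combines the two constants before the
	     division is rebuilt.  */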
10838 
10839           /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
10840 	  if (operand_equal_p (arg0, arg1, 0))
10841 	    {
10842 	      tree tem = fold_strip_sign_ops (arg0);
10843 	      if (tem != NULL_TREE)
10844 		{
10845 		  tem = fold_convert_loc (loc, type, tem);
10846 		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10847 		}
10848 	    }
10849 
10850 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10851 	     This is not the same for NaNs or if signed zeros are
10852 	     involved.  */
10853 	  if (!HONOR_NANS (arg0)
10854               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10855 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10856 	      && TREE_CODE (arg1) == COMPLEX_CST
10857 	      && real_zerop (TREE_REALPART (arg1)))
10858 	    {
10859 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10860 	      if (real_onep (TREE_IMAGPART (arg1)))
10861 		return
10862 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10863 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10864 							     rtype, arg0)),
10865 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10866 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10867 		return
10868 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10869 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10870 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10871 							     rtype, arg0)));
10872 	    }
10873 
10874 	  /* Optimize z * conj(z) for floating point complex numbers.
10875 	     Guarded by flag_unsafe_math_optimizations as non-finite
10876 	     imaginary components don't produce scalar results.  */
10877 	  if (flag_unsafe_math_optimizations
10878 	      && TREE_CODE (arg0) == CONJ_EXPR
10879 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10880 	    return fold_mult_zconjz (loc, type, arg1);
10881 	  if (flag_unsafe_math_optimizations
10882 	      && TREE_CODE (arg1) == CONJ_EXPR
10883 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10884 	    return fold_mult_zconjz (loc, type, arg0);
10885 
10886 	  if (flag_unsafe_math_optimizations)
10887 	    {
10888 	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10889 	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10890 
10891 	      /* Optimizations of root(...)*root(...).  */
10892 	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10893 		{
10894 		  tree rootfn, arg;
10895 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10896 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10897 
10898 		  /* Optimize sqrt(x)*sqrt(x) as x.  */
10899 		  if (BUILTIN_SQRT_P (fcode0)
10900 		      && operand_equal_p (arg00, arg10, 0)
10901 		      && ! HONOR_SNANS (element_mode (type)))
10902 		    return arg00;
10903 
10904 	          /* Optimize root(x)*root(y) as root(x*y).  */
10905 		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10906 		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10907 		  return build_call_expr_loc (loc, rootfn, 1, arg);
10908 		}
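	      /* E.g. under -funsafe-math-optimizations, "sqrt (x) * sqrt (x)"
		 folds straight to "x" (when signaling NaNs need not be
		 honored), and "sqrt (x) * sqrt (y)" becomes "sqrt (x * y)";
		 the same pattern applies to cbrt.  */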
10909 
10910 	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
10911 	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10912 		{
10913 		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10914 		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10915 					  CALL_EXPR_ARG (arg0, 0),
10916 					  CALL_EXPR_ARG (arg1, 0));
10917 		  return build_call_expr_loc (loc, expfn, 1, arg);
10918 		}
10919 
10920 	      /* Optimizations of pow(...)*pow(...).  */
10921 	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10922 		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10923 		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10924 		{
10925 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10926 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10927 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10928 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10929 
10930 		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
10931 		  if (operand_equal_p (arg01, arg11, 0))
10932 		    {
10933 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10934 		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10935 					      arg00, arg10);
10936 		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10937 		    }
10938 
10939 		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
10940 		  if (operand_equal_p (arg00, arg10, 0))
10941 		    {
10942 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10943 		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10944 					      arg01, arg11);
10945 		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10946 		    }
10947 		}
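	      /* E.g. "pow (x, 4.0) * pow (z, 4.0)" becomes "pow (x * z, 4.0)",
		 while "pow (x, 2.0) * pow (x, 3.0)" becomes "pow (x, 5.0)".  */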
10948 
10949 	      /* Optimize tan(x)*cos(x) as sin(x).  */
10950 	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10951 		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10952 		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10953 		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10954 		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10955 		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10956 		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10957 				      CALL_EXPR_ARG (arg1, 0), 0))
10958 		{
10959 		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10960 
10961 		  if (sinfn != NULL_TREE)
10962 		    return build_call_expr_loc (loc, sinfn, 1,
10963 					    CALL_EXPR_ARG (arg0, 0));
10964 		}
10965 
10966 	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
10967 	      if (fcode1 == BUILT_IN_POW
10968 		  || fcode1 == BUILT_IN_POWF
10969 		  || fcode1 == BUILT_IN_POWL)
10970 		{
10971 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10972 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10973 		  if (TREE_CODE (arg11) == REAL_CST
10974 		      && !TREE_OVERFLOW (arg11)
10975 		      && operand_equal_p (arg0, arg10, 0))
10976 		    {
10977 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10978 		      REAL_VALUE_TYPE c;
10979 		      tree arg;
10980 
10981 		      c = TREE_REAL_CST (arg11);
10982 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10983 		      arg = build_real (type, c);
10984 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10985 		    }
10986 		}
10987 
10988 	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
10989 	      if (fcode0 == BUILT_IN_POW
10990 		  || fcode0 == BUILT_IN_POWF
10991 		  || fcode0 == BUILT_IN_POWL)
10992 		{
10993 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10994 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10995 		  if (TREE_CODE (arg01) == REAL_CST
10996 		      && !TREE_OVERFLOW (arg01)
10997 		      && operand_equal_p (arg1, arg00, 0))
10998 		    {
10999 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11000 		      REAL_VALUE_TYPE c;
11001 		      tree arg;
11002 
11003 		      c = TREE_REAL_CST (arg01);
11004 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11005 		      arg = build_real (type, c);
11006 		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11007 		    }
11008 		}
11009 
11010 	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
11011 	      if (!in_gimple_form
11012 		  && optimize
11013 		  && operand_equal_p (arg0, arg1, 0))
11014 		{
11015 		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11016 
11017 		  if (powfn)
11018 		    {
11019 		      tree arg = build_real (type, dconst2);
11020 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11021 		    }
11022 		}
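	      /* E.g. "x * x" for an arbitrary expression x is canonicalized
		 to "pow (x, 2.0)" here; expansion turns it back into a plain
		 multiplication, so no pow call remains.  */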
11023 	    }
11024 	}
11025       goto associate;
11026 
11027     case BIT_IOR_EXPR:
11028     bit_ior:
11029       /* ~X | X is -1.  */
11030       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11031 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11032 	{
11033 	  t1 = build_zero_cst (type);
11034 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11035 	  return omit_one_operand_loc (loc, type, t1, arg1);
11036 	}
11037 
11038       /* X | ~X is -1.  */
11039       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11040 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11041 	{
11042 	  t1 = build_zero_cst (type);
11043 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11044 	  return omit_one_operand_loc (loc, type, t1, arg0);
11045 	}
11046 
11047       /* Canonicalize (X & C1) | C2.  */
11048       if (TREE_CODE (arg0) == BIT_AND_EXPR
11049 	  && TREE_CODE (arg1) == INTEGER_CST
11050 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11051 	{
11052 	  int width = TYPE_PRECISION (type), w;
11053 	  wide_int c1 = TREE_OPERAND (arg0, 1);
11054 	  wide_int c2 = arg1;
11055 
11056 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11057 	  if ((c1 & c2) == c1)
11058 	    return omit_one_operand_loc (loc, type, arg1,
11059 					 TREE_OPERAND (arg0, 0));
11060 
11061 	  wide_int msk = wi::mask (width, false,
11062 				   TYPE_PRECISION (TREE_TYPE (arg1)));
11063 
11064 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11065 	  if (msk.and_not (c1 | c2) == 0)
11066 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11067 				    TREE_OPERAND (arg0, 0), arg1);
11068 
11069 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11070 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11071 	     mode which allows further optimizations.  */
11072 	  c1 &= msk;
11073 	  c2 &= msk;
11074 	  wide_int c3 = c1.and_not (c2);
11075 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11076 	    {
11077 	      wide_int mask = wi::mask (w, false,
11078 					TYPE_PRECISION (type));
11079 	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11080 		{
11081 		  c3 = mask;
11082 		  break;
11083 		}
11084 	    }
11085 
11086 	  if (c3 != c1)
11087 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11088 				    fold_build2_loc (loc, BIT_AND_EXPR, type,
11089 						     TREE_OPERAND (arg0, 0),
11090 						     wide_int_to_tree (type,
11091 								       c3)),
11092 				    arg1);
11093 	}
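      /* Worked example, assuming an 8-bit unsigned type: for
	 "(x & 0x0F) | 0xFF" the first test fires ((C1 & C2) == C1) and the
	 result is just 0xFF; for "(x & 0xF0) | 0x0F" we have
	 (C1 | C2) == ~0, so it becomes "x | 0x0F"; and for
	 "(x & 0x3C) | 0x0F" the mask is narrowed, giving
	 "(x & 0x30) | 0x0F".  */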
11094 
11095       /* (X & ~Y) | (~X & Y) is X ^ Y.  */
11096       if (TREE_CODE (arg0) == BIT_AND_EXPR
11097 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11098         {
11099 	  tree a0, a1, l0, l1, n0, n1;
11100 
11101 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11102 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11103 
11104 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11105 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11106 
11107 	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11108 	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11109 
11110 	  if ((operand_equal_p (n0, a0, 0)
11111 	       && operand_equal_p (n1, a1, 0))
11112 	      || (operand_equal_p (n0, a1, 0)
11113 		  && operand_equal_p (n1, a0, 0)))
11114 	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11115 	}
11116 
11117       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11118       if (t1 != NULL_TREE)
11119 	return t1;
11120 
11121       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11122 
11123 	 This results in more efficient code for machines without a NAND
11124 	 instruction.  Combine will canonicalize to the first form
11125 	 which will allow use of NAND instructions provided by the
11126 	 backend if they exist.  */
11127       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11128 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11129 	{
11130 	  return
11131 	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
11132 			 build2 (BIT_AND_EXPR, type,
11133 				 fold_convert_loc (loc, type,
11134 						   TREE_OPERAND (arg0, 0)),
11135 				 fold_convert_loc (loc, type,
11136 						   TREE_OPERAND (arg1, 0))));
11137 	}
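      /* E.g. "~a | ~b" is rewritten as "~(a & b)", the usual De Morgan
	 form, so a target NAND instruction can match the inner AND plus
	 the outer complement.  */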
11138 
11139       /* See if this can be simplified into a rotate first.  If that
11140 	 is unsuccessful continue in the association code.  */
11141       goto bit_rotate;
11142 
11143     case BIT_XOR_EXPR:
11144       /* ~X ^ X is -1.  */
11145       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11146 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11147 	{
11148 	  t1 = build_zero_cst (type);
11149 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11150 	  return omit_one_operand_loc (loc, type, t1, arg1);
11151 	}
11152 
11153       /* X ^ ~X is -1.  */
11154       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11155 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11156 	{
11157 	  t1 = build_zero_cst (type);
11158 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11159 	  return omit_one_operand_loc (loc, type, t1, arg0);
11160 	}
11161 
11162       /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11163          with a constant, and the two constants have no bits in common,
11164 	 we should treat this as a BIT_IOR_EXPR since this may produce more
11165 	 simplifications.  */
11166       if (TREE_CODE (arg0) == BIT_AND_EXPR
11167 	  && TREE_CODE (arg1) == BIT_AND_EXPR
11168 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11169 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11170 	  && wi::bit_and (TREE_OPERAND (arg0, 1),
11171 			  TREE_OPERAND (arg1, 1)) == 0)
11172 	{
11173 	  code = BIT_IOR_EXPR;
11174 	  goto bit_ior;
11175 	}
11176 
11177       /* (X | Y) ^ X -> Y & ~X.  */
11178       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11179           && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11180         {
11181 	  tree t2 = TREE_OPERAND (arg0, 1);
11182 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11183 			    arg1);
11184 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11185 			    fold_convert_loc (loc, type, t2),
11186 			    fold_convert_loc (loc, type, t1));
11187 	  return t1;
11188 	}
11189 
11190       /* (Y | X) ^ X -> Y & ~X.  */
11191       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11192           && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11193         {
11194 	  tree t2 = TREE_OPERAND (arg0, 0);
11195 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11196 			    arg1);
11197 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11198 			    fold_convert_loc (loc, type, t2),
11199 			    fold_convert_loc (loc, type, t1));
11200 	  return t1;
11201 	}
11202 
11203       /* X ^ (X | Y) -> Y & ~X.  */
11204       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11205           && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11206         {
11207 	  tree t2 = TREE_OPERAND (arg1, 1);
11208 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11209 			    arg0);
11210 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11211 			    fold_convert_loc (loc, type, t2),
11212 			    fold_convert_loc (loc, type, t1));
11213 	  return t1;
11214 	}
11215 
11216       /* X ^ (Y | X) -> Y & ~X.  */
11217       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11218           && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11219         {
11220 	  tree t2 = TREE_OPERAND (arg1, 0);
11221 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11222 			    arg0);
11223 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11224 			    fold_convert_loc (loc, type, t2),
11225 			    fold_convert_loc (loc, type, t1));
11226 	  return t1;
11227 	}
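      /* The four cases above are one identity up to commutativity:
	 e.g. "(x | y) ^ x" keeps exactly the bits of y not already in x,
	 i.e. "y & ~x".  */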
11228 
11229       /* Convert ~X ^ ~Y to X ^ Y.  */
11230       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11231 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11232 	return fold_build2_loc (loc, code, type,
11233 			    fold_convert_loc (loc, type,
11234 					      TREE_OPERAND (arg0, 0)),
11235 			    fold_convert_loc (loc, type,
11236 					      TREE_OPERAND (arg1, 0)));
11237 
11238       /* Convert ~X ^ C to X ^ ~C.  */
11239       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11240 	  && TREE_CODE (arg1) == INTEGER_CST)
11241 	return fold_build2_loc (loc, code, type,
11242 			    fold_convert_loc (loc, type,
11243 					      TREE_OPERAND (arg0, 0)),
11244 			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11245 
11246       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11247       if (TREE_CODE (arg0) == BIT_AND_EXPR
11248 	  && INTEGRAL_TYPE_P (type)
11249 	  && integer_onep (TREE_OPERAND (arg0, 1))
11250 	  && integer_onep (arg1))
11251 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11252 				build_zero_cst (TREE_TYPE (arg0)));
11253 
11254       /* Fold (X & Y) ^ Y as ~X & Y.  */
11255       if (TREE_CODE (arg0) == BIT_AND_EXPR
11256 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11257 	{
11258 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11259 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11260 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11261 			      fold_convert_loc (loc, type, arg1));
11262 	}
11263       /* Fold (X & Y) ^ X as ~Y & X.  */
11264       if (TREE_CODE (arg0) == BIT_AND_EXPR
11265 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11266 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11267 	{
11268 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11269 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11270 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11271 			      fold_convert_loc (loc, type, arg1));
11272 	}
11273       /* Fold X ^ (X & Y) as X & ~Y.  */
11274       if (TREE_CODE (arg1) == BIT_AND_EXPR
11275 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11276 	{
11277 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11278 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11279 			      fold_convert_loc (loc, type, arg0),
11280 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11281 	}
11282       /* Fold X ^ (Y & X) as ~Y & X.  */
11283       if (TREE_CODE (arg1) == BIT_AND_EXPR
11284 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11285 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11286 	{
11287 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11288 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11290 			      fold_convert_loc (loc, type, arg0));
11291 	}
11292 
11293       /* See if this can be simplified into a rotate first.  If that
11294 	 is unsuccessful continue in the association code.  */
11295       goto bit_rotate;
11296 
11297     case BIT_AND_EXPR:
11298       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
11299       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11300 	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11301 	   || (TREE_CODE (arg0) == EQ_EXPR
11302 	       && integer_zerop (TREE_OPERAND (arg0, 1))))
11303 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11304 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11305 
11306       /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11307       if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11308 	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11309 	   || (TREE_CODE (arg1) == EQ_EXPR
11310 	       && integer_zerop (TREE_OPERAND (arg1, 1))))
11311 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11312 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11313 
11314       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11315       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11316 	  && INTEGRAL_TYPE_P (type)
11317 	  && integer_onep (TREE_OPERAND (arg0, 1))
11318 	  && integer_onep (arg1))
11319 	{
11320 	  tree tem2;
11321 	  tem = TREE_OPERAND (arg0, 0);
11322 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11323 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11324 				  tem, tem2);
11325 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11326 				  build_zero_cst (TREE_TYPE (tem)));
11327 	}
11328       /* Fold ~X & 1 as (X & 1) == 0.  */
11329       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11330 	  && INTEGRAL_TYPE_P (type)
11331 	  && integer_onep (arg1))
11332 	{
11333 	  tree tem2;
11334 	  tem = TREE_OPERAND (arg0, 0);
11335 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11336 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11337 				  tem, tem2);
11338 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11339 				  build_zero_cst (TREE_TYPE (tem)));
11340 	}
11341       /* Fold !X & 1 as X == 0.  */
11342       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11343 	  && integer_onep (arg1))
11344 	{
11345 	  tem = TREE_OPERAND (arg0, 0);
11346 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11347 				  build_zero_cst (TREE_TYPE (tem)));
11348 	}
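      /* E.g. "(x ^ 1) & 1" tests the inverted low bit and so becomes
	 "(x & 1) == 0"; "~x & 1" folds the same way, and "!x & 1" is
	 simply "x == 0".  */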
11349 
11350       /* Fold (X ^ Y) & Y as ~X & Y.  */
11351       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11352 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11353 	{
11354 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11355 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11356 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11357 			      fold_convert_loc (loc, type, arg1));
11358 	}
11359       /* Fold (X ^ Y) & X as ~Y & X.  */
11360       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11361 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11362 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11363 	{
11364 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11365 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11366 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11367 			      fold_convert_loc (loc, type, arg1));
11368 	}
11369       /* Fold X & (X ^ Y) as X & ~Y.  */
11370       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11371 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11372 	{
11373 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11374 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11375 			      fold_convert_loc (loc, type, arg0),
11376 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11377 	}
11378       /* Fold X & (Y ^ X) as ~Y & X.  */
11379       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11380 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11381 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11382 	{
11383 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11384 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11385 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11386 			      fold_convert_loc (loc, type, arg0));
11387 	}
11388 
11389       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11390          multiple of 1 << CST.  */
11391       if (TREE_CODE (arg1) == INTEGER_CST)
11392 	{
11393 	  wide_int cst1 = arg1;
11394 	  wide_int ncst1 = -cst1;
11395 	  if ((cst1 & ncst1) == ncst1
11396 	      && multiple_of_p (type, arg0,
11397 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11398 	    return fold_convert_loc (loc, type, arg0);
11399 	}
11400 
11401       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11402          bits from CST2.  */
11403       if (TREE_CODE (arg1) == INTEGER_CST
11404 	  && TREE_CODE (arg0) == MULT_EXPR
11405 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11406 	{
11407 	  wide_int warg1 = arg1;
11408 	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11409 
11410 	  if (masked == 0)
11411 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11412 	                                  arg0, arg1);
11413 	  else if (masked != warg1)
11414 	    {
11415 	      /* Avoid the transform if arg1 is a mask of some
11416 	         mode which allows further optimizations.  */
11417 	      int pop = wi::popcount (warg1);
11418 	      if (!(pop >= BITS_PER_UNIT
11419 		    && exact_log2 (pop) != -1
11420 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11421 		return fold_build2_loc (loc, code, type, op0,
11422 					wide_int_to_tree (type, masked));
11423 	    }
11424 	}
11425 
11426       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11427 	 ((A & N) + B) & M -> (A + B) & M
11428 	 Similarly if (N & M) == 0,
11429 	 ((A | N) + B) & M -> (A + B) & M
11430 	 and for - instead of + (or unary - instead of +)
11431 	 and/or ^ instead of |.
11432 	 If B is constant and (B & M) == 0, fold into A & M.  */
11433       if (TREE_CODE (arg1) == INTEGER_CST)
11434 	{
11435 	  wide_int cst1 = arg1;
11436 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11437 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11438 	      && (TREE_CODE (arg0) == PLUS_EXPR
11439 		  || TREE_CODE (arg0) == MINUS_EXPR
11440 		  || TREE_CODE (arg0) == NEGATE_EXPR)
11441 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11442 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11443 	    {
11444 	      tree pmop[2];
11445 	      int which = 0;
11446 	      wide_int cst0;
11447 
11448 	      /* Now we know that arg0 is (C + D) or (C - D) or
11449 		 -C and arg1 (M) == (1LL << cst) - 1.
11450 		 Store C into PMOP[0] and D into PMOP[1].  */
11451 	      pmop[0] = TREE_OPERAND (arg0, 0);
11452 	      pmop[1] = NULL;
11453 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
11454 		{
11455 		  pmop[1] = TREE_OPERAND (arg0, 1);
11456 		  which = 1;
11457 		}
11458 
11459 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11460 		which = -1;
11461 
11462 	      for (; which >= 0; which--)
11463 		switch (TREE_CODE (pmop[which]))
11464 		  {
11465 		  case BIT_AND_EXPR:
11466 		  case BIT_IOR_EXPR:
11467 		  case BIT_XOR_EXPR:
11468 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11469 			!= INTEGER_CST)
11470 		      break;
11471 		    cst0 = TREE_OPERAND (pmop[which], 1);
11472 		    cst0 &= cst1;
11473 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11474 		      {
11475 			if (cst0 != cst1)
11476 			  break;
11477 		      }
11478 		    else if (cst0 != 0)
11479 		      break;
11480 		    /* If C or D is of the form (A & N) where
11481 		       (N & M) == M, or of the form (A | N) or
11482 		       (A ^ N) where (N & M) == 0, replace it with A.  */
11483 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
11484 		    break;
11485 		  case INTEGER_CST:
11486 		    /* If C or D is a constant N where (N & M) == 0, it can be
11487 		       omitted (assumed 0).  */
11488 		    if ((TREE_CODE (arg0) == PLUS_EXPR
11489 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11490 			&& (cst1 & pmop[which]) == 0)
11491 		      pmop[which] = NULL;
11492 		    break;
11493 		  default:
11494 		    break;
11495 		  }
11496 
11497 	      /* Only build anything new if we optimized one or both arguments
11498 		 above.  */
11499 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
11500 		  || (TREE_CODE (arg0) != NEGATE_EXPR
11501 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
11502 		{
11503 		  tree utype = TREE_TYPE (arg0);
11504 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11505 		    {
11506 		      /* Perform the operations in a type that has defined
11507 			 overflow behavior.  */
11508 		      utype = unsigned_type_for (TREE_TYPE (arg0));
11509 		      if (pmop[0] != NULL)
11510 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11511 		      if (pmop[1] != NULL)
11512 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11513 		    }
11514 
11515 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
11516 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11517 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
11518 		    {
11519 		      if (pmop[0] != NULL && pmop[1] != NULL)
11520 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11521 					       pmop[0], pmop[1]);
11522 		      else if (pmop[0] != NULL)
11523 			tem = pmop[0];
11524 		      else if (pmop[1] != NULL)
11525 			tem = pmop[1];
11526 		      else
11527 			return build_int_cst (type, 0);
11528 		    }
11529 		  else if (pmop[0] == NULL)
11530 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11531 		  else
11532 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11533 					   pmop[0], pmop[1]);
11534 		  /* TEM is now the new binary +, - or unary - replacement.  */
11535 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11536 					 fold_convert_loc (loc, utype, arg1));
11537 		  return fold_convert_loc (loc, type, tem);
11538 		}
11539 	    }
11540 	}
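      /* Worked example with M == 3 (so M + 1 is a power of two):
	 "((a | 4) + b) & 3" folds to "(a + b) & 3", because (4 & 3) == 0
	 means the OR cannot affect the two low bits that survive the
	 mask; the arithmetic is rebuilt in an unsigned type when overflow
	 does not wrap.  */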
11541 
11542       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11543       if (t1 != NULL_TREE)
11544 	return t1;
11545       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11546       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11547 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11548 	{
11549 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11550 
11551 	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11552 	  if (mask == -1)
11553 	    return
11554 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11555 	}
11556 
11557       /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11558 
11559 	 This results in more efficient code for machines without a NOR
11560 	 instruction.  Combine will canonicalize to the first form
11561 	 which will allow use of NOR instructions provided by the
11562 	 backend if they exist.  */
11563       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11564 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11565 	{
11566 	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11567 			      build2 (BIT_IOR_EXPR, type,
11568 				      fold_convert_loc (loc, type,
11569 							TREE_OPERAND (arg0, 0)),
11570 				      fold_convert_loc (loc, type,
11571 							TREE_OPERAND (arg1, 0))));
11572 	}
11573 
11574       /* If arg0 is derived from the address of an object or function, we may
11575 	 be able to fold this expression using the object or function's
11576 	 alignment.  */
11577       if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11578 	{
11579 	  unsigned HOST_WIDE_INT modulus, residue;
11580 	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11581 
11582 	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
11583 						     integer_onep (arg1));
11584 
11585 	  /* This works because modulus is a power of 2.  If this weren't the
11586 	     case, we'd have to replace it by its greatest power-of-2
11587 	     divisor: modulus & -modulus.  */
11588 	  if (low < modulus)
11589 	    return build_int_cst (type, residue & low);
11590 	}
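      /* Sketch: if arg0 is the address of a variable v known to be
	 8-byte aligned, get_pointer_modulus_and_residue reports
	 modulus == 8 and residue == 0, so "&v & 7" (as such expressions
	 appear internally) folds to the constant 0.  */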
11591 
11592       /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11593 	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11594 	 if the new mask might be further optimized.  */
11595       if ((TREE_CODE (arg0) == LSHIFT_EXPR
11596 	   || TREE_CODE (arg0) == RSHIFT_EXPR)
11597 	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11598 	  && TREE_CODE (arg1) == INTEGER_CST
11599 	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11600 	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11601 	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11602 	      < TYPE_PRECISION (TREE_TYPE (arg0))))
11603 	{
11604 	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11605 	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11606 	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
11607 	  tree shift_type = TREE_TYPE (arg0);
11608 
11609 	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
11610 	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11611 	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
11612 		   && TYPE_PRECISION (TREE_TYPE (arg0))
11613 		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11614 	    {
11615 	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
11616 	      tree arg00 = TREE_OPERAND (arg0, 0);
11617 	      /* See if more bits can be proven as zero because of
11618 		 zero extension.  */
11619 	      if (TREE_CODE (arg00) == NOP_EXPR
11620 		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11621 		{
11622 		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11623 		  if (TYPE_PRECISION (inner_type)
11624 		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11625 		      && TYPE_PRECISION (inner_type) < prec)
11626 		    {
11627 		      prec = TYPE_PRECISION (inner_type);
11628 		      /* See if we can shorten the right shift.  */
11629 		      if (shiftc < prec)
11630 			shift_type = inner_type;
11631 		      /* Otherwise X >> C1 is all zeros, so we'll optimize
11632 			 it into (X, 0) later on by making sure zerobits
11633 			 is all ones.  */
11634 		    }
11635 		}
11636 	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
11637 	      if (shiftc < prec)
11638 		{
11639 		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11640 		  zerobits <<= prec - shiftc;
11641 		}
11642 	      /* For an arithmetic shift, if the sign bit could be set,
11643 		 zerobits may actually contain sign bits, so no transformation
11644 		 is possible unless MASK masks them all away.  In that case
11645 		 the shift needs to be converted into a logical shift.  */
11646 	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11647 		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11648 		{
11649 		  if ((mask & zerobits) == 0)
11650 		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11651 		  else
11652 		    zerobits = 0;
11653 		}
11654 	    }
11655 
11656 	  /* ((X << 16) & 0xff00) is (X, 0).  */
11657 	  if ((mask & zerobits) == mask)
11658 	    return omit_one_operand_loc (loc, type,
11659 					 build_int_cst (type, 0), arg0);
11660 
11661 	  newmask = mask | zerobits;
11662 	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
11663 	    {
11664 	      /* Only do the transformation if NEWMASK is some integer
11665 		 mode's mask.  */
11666 	      for (prec = BITS_PER_UNIT;
11667 		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11668 		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11669 		  break;
11670 	      if (prec < HOST_BITS_PER_WIDE_INT
11671 		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11672 		{
11673 		  tree newmaskt;
11674 
11675 		  if (shift_type != TREE_TYPE (arg0))
11676 		    {
11677 		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11678 					 fold_convert_loc (loc, shift_type,
11679 							   TREE_OPERAND (arg0, 0)),
11680 					 TREE_OPERAND (arg0, 1));
11681 		      tem = fold_convert_loc (loc, type, tem);
11682 		    }
11683 		  else
11684 		    tem = op0;
11685 		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11686 		  if (!tree_int_cst_equal (newmaskt, arg1))
11687 		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11688 		}
11689 	    }
11690 	}
11691 
11692       goto associate;
11693 
11694     case RDIV_EXPR:
11695       /* Don't touch a floating-point divide by zero unless the mode
11696 	 of the constant can represent infinity.  */
11697       if (TREE_CODE (arg1) == REAL_CST
11698 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11699 	  && real_zerop (arg1))
11700 	return NULL_TREE;
11701 
11702       /* (-A) / (-B) -> A / B  */
11703       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11704 	return fold_build2_loc (loc, RDIV_EXPR, type,
11705 			    TREE_OPERAND (arg0, 0),
11706 			    negate_expr (arg1));
11707       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11708 	return fold_build2_loc (loc, RDIV_EXPR, type,
11709 			    negate_expr (arg0),
11710 			    TREE_OPERAND (arg1, 0));
11711 
11712       /* Convert A/B/C to A/(B*C).  */
11713       if (flag_reciprocal_math
11714 	  && TREE_CODE (arg0) == RDIV_EXPR)
11715 	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11716 			    fold_build2_loc (loc, MULT_EXPR, type,
11717 					 TREE_OPERAND (arg0, 1), arg1));
11718 
11719       /* Convert A/(B/C) to (A/B)*C.  */
11720       if (flag_reciprocal_math
11721 	  && TREE_CODE (arg1) == RDIV_EXPR)
11722 	return fold_build2_loc (loc, MULT_EXPR, type,
11723 			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11724 					 TREE_OPERAND (arg1, 0)),
11725 			    TREE_OPERAND (arg1, 1));
11726 
11727       /* Convert C1/(X*C2) into (C1/C2)/X.  */
11728       if (flag_reciprocal_math
11729 	  && TREE_CODE (arg1) == MULT_EXPR
11730 	  && TREE_CODE (arg0) == REAL_CST
11731 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11732 	{
11733 	  tree tem = const_binop (RDIV_EXPR, arg0,
11734 				  TREE_OPERAND (arg1, 1));
11735 	  if (tem)
11736 	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11737 				TREE_OPERAND (arg1, 0));
11738 	}
11739 
11740       if (flag_unsafe_math_optimizations)
11741 	{
11742 	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11743 	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11744 
11745 	  /* Optimize sin(x)/cos(x) as tan(x).  */
11746 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11747 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11748 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11749 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11750 				  CALL_EXPR_ARG (arg1, 0), 0))
11751 	    {
11752 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11753 
11754 	      if (tanfn != NULL_TREE)
11755 		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11756 	    }
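	  /* E.g. under -funsafe-math-optimizations, "sin (x) / cos (x)"
	     becomes the single call "tan (x)" whenever a tan built-in
	     exists for the type.  */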
11757 
11758 	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
11759 	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11760 	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11761 	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11762 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11763 				  CALL_EXPR_ARG (arg1, 0), 0))
11764 	    {
11765 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11766 
11767 	      if (tanfn != NULL_TREE)
11768 		{
11769 		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
11770 					      CALL_EXPR_ARG (arg0, 0));
11771 		  return fold_build2_loc (loc, RDIV_EXPR, type,
11772 				      build_real (type, dconst1), tmp);
11773 		}
11774 	    }
11775 
11776  	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11777 	     NaNs or Infinities.  */
11778  	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11779  	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11780  	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11781 	    {
11782 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11783 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11784 
11785 	      if (! HONOR_NANS (arg00)
11786 		  && ! HONOR_INFINITIES (element_mode (arg00))
11787 		  && operand_equal_p (arg00, arg01, 0))
11788 		{
11789 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11790 
11791 		  if (cosfn != NULL_TREE)
11792 		    return build_call_expr_loc (loc, cosfn, 1, arg00);
11793 		}
11794 	    }
11795 
11796  	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11797 	     NaNs or Infinities.  */
11798  	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11799  	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11800  	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11801 	    {
11802 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11803 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11804 
11805 	      if (! HONOR_NANS (arg00)
11806 		  && ! HONOR_INFINITIES (element_mode (arg00))
11807 		  && operand_equal_p (arg00, arg01, 0))
11808 		{
11809 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11810 
11811 		  if (cosfn != NULL_TREE)
11812 		    {
11813 		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11814 		      return fold_build2_loc (loc, RDIV_EXPR, type,
11815 					  build_real (type, dconst1),
11816 					  tmp);
11817 		    }
11818 		}
11819 	    }
11820 
11821 	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
11822 	  if (fcode0 == BUILT_IN_POW
11823 	      || fcode0 == BUILT_IN_POWF
11824 	      || fcode0 == BUILT_IN_POWL)
11825 	    {
11826 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11827 	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
11828 	      if (TREE_CODE (arg01) == REAL_CST
11829 		  && !TREE_OVERFLOW (arg01)
11830 		  && operand_equal_p (arg1, arg00, 0))
11831 		{
11832 		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11833 		  REAL_VALUE_TYPE c;
11834 		  tree arg;
11835 
11836 		  c = TREE_REAL_CST (arg01);
11837 		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11838 		  arg = build_real (type, c);
11839 		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11840 		}
11841 	    }
11842 
11843 	  /* Optimize a/root(b/c) into a*root(c/b).  */
11844 	  if (BUILTIN_ROOT_P (fcode1))
11845 	    {
11846 	      tree rootarg = CALL_EXPR_ARG (arg1, 0);
11847 
11848 	      if (TREE_CODE (rootarg) == RDIV_EXPR)
11849 		{
11850 		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11851 		  tree b = TREE_OPERAND (rootarg, 0);
11852 		  tree c = TREE_OPERAND (rootarg, 1);
11853 
11854 		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11855 
11856 		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11857 		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11858 		}
11859 	    }
11860 
11861 	  /* Optimize x/expN(y) into x*expN(-y).  */
11862 	  if (BUILTIN_EXPONENT_P (fcode1))
11863 	    {
11864 	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11865 	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11866 	      arg1 = build_call_expr_loc (loc,
11867 				      expfn, 1,
11868 				      fold_convert_loc (loc, type, arg));
11869 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11870 	    }
11871 
11872 	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
11873 	  if (fcode1 == BUILT_IN_POW
11874 	      || fcode1 == BUILT_IN_POWF
11875 	      || fcode1 == BUILT_IN_POWL)
11876 	    {
11877 	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11878 	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
11879 	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
11880 	      tree neg11 = fold_convert_loc (loc, type,
11881 					     negate_expr (arg11));
11882 	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11883 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11884 	    }
11885 	}
11886       return NULL_TREE;
11887 
11888     case TRUNC_DIV_EXPR:
11889       /* Optimize (X & (-A)) / A where A is a power of 2,
11890 	 to X >> log2(A).  */
11891       if (TREE_CODE (arg0) == BIT_AND_EXPR
11892 	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11893 	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11894 	{
11895 	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11896 				      arg1, TREE_OPERAND (arg0, 1));
11897 	  if (sum && integer_zerop (sum))
	    {
11898 	      tree pow2 = build_int_cst (integer_type_node,
11899 					 wi::exact_log2 (arg1));
11900 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11901 				      TREE_OPERAND (arg0, 0), pow2);
11902 	    }
11903 	}
11904 
11905       /* Fall through */
11906 
11907     case FLOOR_DIV_EXPR:
11908       /* Simplify A / (B << N) where A and B are positive and B is
11909 	 a power of 2, to A >> (N + log2(B)).  */
11910       strict_overflow_p = false;
11911       if (TREE_CODE (arg1) == LSHIFT_EXPR
11912 	  && (TYPE_UNSIGNED (type)
11913 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11914 	{
11915 	  tree sval = TREE_OPERAND (arg1, 0);
11916 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11917 	    {
11918 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11919 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11920 					 wi::exact_log2 (sval));
11921 
11922 	      if (strict_overflow_p)
11923 		fold_overflow_warning (("assuming signed overflow does not "
11924 					"occur when simplifying A / (B << N)"),
11925 				       WARN_STRICT_OVERFLOW_MISC);
11926 
11927 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11928 					sh_cnt, pow2);
11929 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11930 				      fold_convert_loc (loc, type, arg0), sh_cnt);
11931 	    }
11932 	}
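      /* E.g. for unsigned a, "a / (4 << n)" becomes "a >> (n + 2)",
	 since log2 (4) == 2 and the divisor is a power of two shifted
	 left by n.  */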
11933 
11934       /* Fall through */
11935 
11936     case ROUND_DIV_EXPR:
11937     case CEIL_DIV_EXPR:
11938     case EXACT_DIV_EXPR:
11939       if (integer_zerop (arg1))
11940 	return NULL_TREE;
11941 
11942       /* Convert -A / -B to A / B when the type is signed and overflow is
11943 	 undefined.  */
11944       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11945 	  && TREE_CODE (op0) == NEGATE_EXPR
11946 	  && negate_expr_p (op1))
11947 	{
11948 	  if (INTEGRAL_TYPE_P (type))
11949 	    fold_overflow_warning (("assuming signed overflow does not occur "
11950 				    "when distributing negation across "
11951 				    "division"),
11952 				   WARN_STRICT_OVERFLOW_MISC);
11953 	  return fold_build2_loc (loc, code, type,
11954 				  fold_convert_loc (loc, type,
11955 						    TREE_OPERAND (arg0, 0)),
11956 				  negate_expr (op1));
11957 	}
11958       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11959 	  && TREE_CODE (op1) == NEGATE_EXPR
11960 	  && negate_expr_p (op0))
11961 	{
11962 	  if (INTEGRAL_TYPE_P (type))
11963 	    fold_overflow_warning (("assuming signed overflow does not occur "
11964 				    "when distributing negation across "
11965 				    "division"),
11966 				   WARN_STRICT_OVERFLOW_MISC);
11967 	  return fold_build2_loc (loc, code, type,
11968 				  negate_expr (op0),
11969 				  fold_convert_loc (loc, type,
11970 						    TREE_OPERAND (arg1, 0)));
11971 	}
11972 
11973       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11974 	 operation, EXACT_DIV_EXPR.
11975 
11976 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11977 	 At one time others generated faster code; it's not clear whether they
11978 	 still do after the last round of changes to the DIV code in expmed.c.  */
11979       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11980 	  && multiple_of_p (type, arg0, arg1))
11981 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11982 
11983       strict_overflow_p = false;
11984       if (TREE_CODE (arg1) == INTEGER_CST
11985 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11986 					 &strict_overflow_p)))
11987 	{
11988 	  if (strict_overflow_p)
11989 	    fold_overflow_warning (("assuming signed overflow does not occur "
11990 				    "when simplifying division"),
11991 				   WARN_STRICT_OVERFLOW_MISC);
11992 	  return fold_convert_loc (loc, type, tem);
11993 	}
11994 
11995       return NULL_TREE;
11996 
11997     case CEIL_MOD_EXPR:
11998     case FLOOR_MOD_EXPR:
11999     case ROUND_MOD_EXPR:
12000     case TRUNC_MOD_EXPR:
12001       /* X % -Y is the same as X % Y.  */
12002       if (code == TRUNC_MOD_EXPR
12003 	  && !TYPE_UNSIGNED (type)
12004 	  && TREE_CODE (arg1) == NEGATE_EXPR
12005 	  && !TYPE_OVERFLOW_TRAPS (type))
12006 	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12007 			    fold_convert_loc (loc, type,
12008 					      TREE_OPERAND (arg1, 0)));
12009 
12010       strict_overflow_p = false;
12011       if (TREE_CODE (arg1) == INTEGER_CST
12012 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12013 					 &strict_overflow_p)))
12014 	{
12015 	  if (strict_overflow_p)
12016 	    fold_overflow_warning (("assuming signed overflow does not occur "
12017 				    "when simplifying modulus"),
12018 				   WARN_STRICT_OVERFLOW_MISC);
12019 	  return fold_convert_loc (loc, type, tem);
12020 	}
12021 
12022       /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12023          i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
12024       if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12025 	  && (TYPE_UNSIGNED (type)
12026 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12027 	{
12028 	  tree c = arg1;
12029 	  /* Also optimize A % (C << N)  where C is a power of 2,
12030 	     to A & ((C << N) - 1).  */
12031 	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
12032 	    c = TREE_OPERAND (arg1, 0);
12033 
12034 	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12035 	    {
12036 	      tree mask
12037 		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12038 				   build_int_cst (TREE_TYPE (arg1), 1));
12039 	      if (strict_overflow_p)
12040 		fold_overflow_warning (("assuming signed overflow does not "
12041 					"occur when simplifying "
12042 					"X % (power of two)"),
12043 				       WARN_STRICT_OVERFLOW_MISC);
12044 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
12045 				      fold_convert_loc (loc, type, arg0),
12046 				      fold_convert_loc (loc, type, mask));
12047 	    }
12048 	}
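      /* E.g. for unsigned x, "x % 16" becomes "x & 15", and
	 "x % (2 << n)" becomes "x & ((2 << n) - 1)" via the LSHIFT_EXPR
	 case above.  */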
12049 
12050       return NULL_TREE;
12051 
12052     case LROTATE_EXPR:
12053     case RROTATE_EXPR:
12054     case RSHIFT_EXPR:
12055     case LSHIFT_EXPR:
12056       /* Since a negative shift count is not well-defined,
12057 	 don't try to compute it in the compiler.  */
12058       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12059 	return NULL_TREE;
12060 
12061       prec = element_precision (type);
12062 
12063       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
12064       if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12065 	  && tree_to_uhwi (arg1) < prec
12066 	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12067 	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12068 	{
12069 	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12070 			      + tree_to_uhwi (arg1));
12071 
12072 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12073 	     being well defined.  */
12074 	  if (low >= prec)
12075 	    {
12076 	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12077 	        low = low % prec;
12078 	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12079 		return omit_one_operand_loc (loc, type, build_zero_cst (type),
12080 					 TREE_OPERAND (arg0, 0));
12081 	      else
12082 		low = prec - 1;
12083 	    }
12084 
12085 	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12086 				  build_int_cst (TREE_TYPE (arg1), low));
12087 	}
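      /* E.g. "(a >> 3) >> 2" becomes "a >> 5".  If the counts sum to the
	 precision or more, "(a << 3) << 30" in a 32-bit unsigned type
	 folds to 0, while rotate counts simply wrap modulo the
	 precision.  */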
12088 
12089       /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12090          into x & ((unsigned)-1 >> c) for unsigned types.  */
12091       if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12092            || (TYPE_UNSIGNED (type)
12093 	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12094 	  && tree_fits_uhwi_p (arg1)
12095 	  && tree_to_uhwi (arg1) < prec
12096 	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12097 	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12098 	{
12099 	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12100 	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12101 	  tree lshift;
12102 	  tree arg00;
12103 
12104 	  if (low0 == low1)
12105 	    {
12106 	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12107 
12108 	      lshift = build_minus_one_cst (type);
12109 	      lshift = const_binop (code, lshift, arg1);
12110 
12111 	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12112 	    }
12113 	}
12114 
12115       /* If we have a rotate of a bit operation with the rotate count and
12116 	 the second operand of the bit operation both constant,
12117 	 permute the two operations.  */
12118       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12119 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
12120 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
12121 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
12122 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12123 	{
12124 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12125 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12126 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
12127 				  fold_build2_loc (loc, code, type,
12128 						   arg00, arg1),
12129 				  fold_build2_loc (loc, code, type,
12130 						   arg01, arg1));
12131 	}
12132 
12133       /* Two consecutive rotates adding up to some integer
12134 	 multiple of the precision of the type can be ignored.  */
12135       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12136 	  && TREE_CODE (arg0) == RROTATE_EXPR
12137 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12138 	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12139 			     prec) == 0)
12140 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
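      /* E.g. in a 32-bit type, rotating right by 13 and then by 19
	 rotates by 32 in total, which is the identity, so both rotates
	 disappear.  */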
12141 
12142       /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12143 	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12144 	 if the latter can be further optimized.  */
12145       if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12146 	  && TREE_CODE (arg0) == BIT_AND_EXPR
12147 	  && TREE_CODE (arg1) == INTEGER_CST
12148 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12149 	{
12150 	  tree mask = fold_build2_loc (loc, code, type,
12151 				   fold_convert_loc (loc, type,
12152 						     TREE_OPERAND (arg0, 1)),
12153 				   arg1);
12154 	  tree shift = fold_build2_loc (loc, code, type,
12155 				    fold_convert_loc (loc, type,
12156 						      TREE_OPERAND (arg0, 0)),
12157 				    arg1);
12158 	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12159 	  if (tem)
12160 	    return tem;
12161 	}
12162 
12163       return NULL_TREE;
12164 
12165     case MIN_EXPR:
12166       tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12167       if (tem)
12168 	return tem;
12169       goto associate;
12170 
12171     case MAX_EXPR:
12172       tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12173       if (tem)
12174 	return tem;
12175       goto associate;
12176 
12177     case TRUTH_ANDIF_EXPR:
12178       /* Note that the operands of this must be ints
12179 	 and their values must be 0 or 1.
12180 	 ("true" is a fixed value perhaps depending on the language.)  */
12181       /* If first arg is constant zero, return it.  */
12182       if (integer_zerop (arg0))
12183 	return fold_convert_loc (loc, type, arg0);
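      /* Fall through */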
12184     case TRUTH_AND_EXPR:
12185       /* If either arg is constant true, drop it.  */
12186       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12187 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12188       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12189 	  /* Preserve sequence points.  */
12190 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12191 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12192       /* If second arg is constant zero, result is zero, but first arg
12193 	 must be evaluated.  */
12194       if (integer_zerop (arg1))
12195 	return omit_one_operand_loc (loc, type, arg1, arg0);
12196       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12197 	 case will be handled here.  */
12198       if (integer_zerop (arg0))
12199 	return omit_one_operand_loc (loc, type, arg0, arg1);
12200 
12201       /* !X && X is always false.  */
12202       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12203 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12204 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12205       /* X && !X is always false.  */
12206       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12207 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12208 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12209 
12210       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12211 	 means A >= Y && A != MAX, but in this case we know that
12212 	 A < X <= MAX.  */
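      /* For instance, a < x && a + 1 > y folds to a < x && a >= y:
	 a + 1 > y can disagree with a >= y only when a + 1 wraps,
	 i.e. a == MAX, which a < x already rules out.  */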
12213 
12214       if (!TREE_SIDE_EFFECTS (arg0)
12215 	  && !TREE_SIDE_EFFECTS (arg1))
12216 	{
12217 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12218 	  if (tem && !operand_equal_p (tem, arg0, 0))
12219 	    return fold_build2_loc (loc, code, type, tem, arg1);
12220 
12221 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12222 	  if (tem && !operand_equal_p (tem, arg1, 0))
12223 	    return fold_build2_loc (loc, code, type, arg0, tem);
12224 	}
12225 
12226       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12227           != NULL_TREE)
12228         return tem;
12229 
12230       return NULL_TREE;
12231 
12232     case TRUTH_ORIF_EXPR:
12233       /* Note that the operands of this must be ints
12234 	 and their values must be 0 or 1.
12235 	 ("true" is a fixed value perhaps depending on the language.)  */
12236       /* If first arg is constant true, return it.  */
12237       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12238 	return fold_convert_loc (loc, type, arg0);
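      /* ... fall through ...  */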
12239     case TRUTH_OR_EXPR:
12240       /* If either arg is constant zero, drop it.  */
12241       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12242 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12243       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12244 	  /* Preserve sequence points.  */
12245 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12246 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12247       /* If second arg is constant true, result is true, but we must
12248 	 evaluate first arg.  */
12249       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12250 	return omit_one_operand_loc (loc, type, arg1, arg0);
12251       /* Likewise for first arg, but note this only occurs here for
12252 	 TRUTH_OR_EXPR.  */
12253       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12254 	return omit_one_operand_loc (loc, type, arg0, arg1);
12255 
12256       /* !X || X is always true.  */
12257       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12258 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12259 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12260       /* X || !X is always true.  */
12261       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12262 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12263 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12264 
12265       /* (X && !Y) || (!X && Y) is X ^ Y */
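      /* E.g. (a && !b) || (!a && b) folds to a ^ b once the operands
	 of the two TRUTH_ANDs are recognized as complements.  */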
12266       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12267 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12268         {
12269 	  tree a0, a1, l0, l1, n0, n1;
12270 
12271 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12272 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12273 
12274 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12275 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12276 
12277 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12278 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12279 
12280 	  if ((operand_equal_p (n0, a0, 0)
12281 	       && operand_equal_p (n1, a1, 0))
12282 	      || (operand_equal_p (n0, a1, 0)
12283 		  && operand_equal_p (n1, a0, 0)))
12284 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12285 	}
12286 
12287       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12288           != NULL_TREE)
12289         return tem;
12290 
12291       return NULL_TREE;
12292 
12293     case TRUTH_XOR_EXPR:
12294       /* If the second arg is constant zero, drop it.  */
12295       if (integer_zerop (arg1))
12296 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12297       /* If the second arg is constant true, this is a logical inversion.  */
12298       if (integer_onep (arg1))
12299 	{
12300 	  tem = invert_truthvalue_loc (loc, arg0);
12301 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12302 	}
12303       /* Identical arguments cancel to zero.  */
12304       if (operand_equal_p (arg0, arg1, 0))
12305 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12306 
12307       /* !X ^ X is always true.  */
12308       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12309 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12310 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12311 
12312       /* X ^ !X is always true.  */
12313       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12314 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12315 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12316 
12317       return NULL_TREE;
12318 
12319     case EQ_EXPR:
12320     case NE_EXPR:
12321       STRIP_NOPS (arg0);
12322       STRIP_NOPS (arg1);
12323 
12324       tem = fold_comparison (loc, code, type, op0, op1);
12325       if (tem != NULL_TREE)
12326 	return tem;
12327 
12328       /* bool_var != 0 becomes bool_var. */
12329       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12330           && code == NE_EXPR)
12331         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12332 
12333       /* bool_var == 1 becomes bool_var. */
12334       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12335           && code == EQ_EXPR)
12336         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12337 
12338       /* bool_var != 1 becomes !bool_var. */
12339       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12340           && code == NE_EXPR)
12341         return fold_convert_loc (loc, type,
12342 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12343 						  TREE_TYPE (arg0), arg0));
12344 
12345       /* bool_var == 0 becomes !bool_var. */
12346       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12347           && code == EQ_EXPR)
12348         return fold_convert_loc (loc, type,
12349 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12350 						  TREE_TYPE (arg0), arg0));
12351 
12352       /* !exp != 0 becomes !exp */
12353       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12354 	  && code == NE_EXPR)
12355         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12356 
12357       /* If this is an equality comparison of the address of two non-weak,
12358 	 unaliased symbols neither of which are extern (since we do not
12359 	 have access to attributes for externs), then we know the result.  */
12360       if (TREE_CODE (arg0) == ADDR_EXPR
12361 	  && DECL_P (TREE_OPERAND (arg0, 0))
12362 	  && TREE_CODE (arg1) == ADDR_EXPR
12363 	  && DECL_P (TREE_OPERAND (arg1, 0)))
12364 	{
12365 	  int equal;
12366 
12367 	  if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12368 	      && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12369 	    equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12370 		    ->equal_address_to (symtab_node::get_create
12371 					  (TREE_OPERAND (arg1, 0)));
12372 	  else
12373 	    equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12374 	  if (equal != 2)
12375 	    return constant_boolean_node (equal
12376 				          ? code == EQ_EXPR : code != EQ_EXPR,
12377 				          type);
12378 	}
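      /* E.g. for two distinct non-weak, unaliased decls x and y,
	 &x == &y folds to 0 and &x != &y folds to 1;
	 equal_address_to returns 2 when the answer is unknown at
	 compile time, in which case no folding is done.  */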
12379 
12380       /* Similarly for a NEGATE_EXPR.  */
12381       if (TREE_CODE (arg0) == NEGATE_EXPR
12382 	  && TREE_CODE (arg1) == INTEGER_CST
12383 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12384 							arg1)))
12385 	  && TREE_CODE (tem) == INTEGER_CST
12386 	  && !TREE_OVERFLOW (tem))
12387 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12388 
12389       /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
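      /* E.g. (x ^ 5) == 3 folds to x == 6, since 5 ^ 3 == 6.  */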
12390       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12391 	  && TREE_CODE (arg1) == INTEGER_CST
12392 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12393 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12394 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12395 					 fold_convert_loc (loc,
12396 							   TREE_TYPE (arg0),
12397 							   arg1),
12398 					 TREE_OPERAND (arg0, 1)));
12399 
12400       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
12401       if ((TREE_CODE (arg0) == PLUS_EXPR
12402 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12403 	   || TREE_CODE (arg0) == MINUS_EXPR)
12404 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12405 									0)),
12406 			      arg1, 0)
12407 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12408 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
12409 	{
12410 	  tree val = TREE_OPERAND (arg0, 1);
12411 	  val = fold_build2_loc (loc, code, type, val,
12412 				 build_int_cst (TREE_TYPE (val), 0));
12413 	  return omit_two_operands_loc (loc, type, val,
12414 					TREE_OPERAND (arg0, 0), arg1);
12415 	}
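      /* E.g. x + y == x folds to y == 0 and x - y != x to y != 0;
	 for equality this holds even under modulo wrapping, and any
	 side effects of the omitted operands are preserved.  */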
12416 
12417       /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
12418       if ((TREE_CODE (arg1) == PLUS_EXPR
12419 	   || TREE_CODE (arg1) == POINTER_PLUS_EXPR
12420 	   || TREE_CODE (arg1) == MINUS_EXPR)
12421 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
12422 									0)),
12423 			      arg0, 0)
12424 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
12425 	      || POINTER_TYPE_P (TREE_TYPE (arg1))))
12426 	{
12427 	  tree val = TREE_OPERAND (arg1, 1);
12428 	  val = fold_build2_loc (loc, code, type, val,
12429 				 build_int_cst (TREE_TYPE (val), 0));
12430 	  return omit_two_operands_loc (loc, type, val,
12431 					TREE_OPERAND (arg1, 0), arg0);
12432 	}
12433 
12434       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
12435       if (TREE_CODE (arg0) == MINUS_EXPR
12436 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12437 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12438 									1)),
12439 			      arg1, 0)
12440 	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12441 	return omit_two_operands_loc (loc, type,
12442 				      code == NE_EXPR
12443 				      ? boolean_true_node : boolean_false_node,
12444 				      TREE_OPERAND (arg0, 1), arg1);
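      /* E.g. 7 - x == x folds to false (and 7 - x != x to true):
	 it would require 2 * x == 7, which is impossible because
	 2 * x is even, under wrapping arithmetic too, while 7 is odd.  */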
12445 
12446       /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
12447       if (TREE_CODE (arg1) == MINUS_EXPR
12448 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
12449 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
12450 									1)),
12451 			      arg0, 0)
12452 	  && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
12453 	return omit_two_operands_loc (loc, type,
12454 				      code == NE_EXPR
12455 				      ? boolean_true_node : boolean_false_node,
12456 				      TREE_OPERAND (arg1, 1), arg0);
12457 
12458       /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12459       if (TREE_CODE (arg0) == ABS_EXPR
12460 	  && (integer_zerop (arg1) || real_zerop (arg1)))
12461 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12462 
12463       /* If this is an EQ or NE comparison with zero and ARG0 is
12464 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12465 	 two operations, but the latter can be done in one less insn
12466 	 on machines that have only two-operand insns or on which a
12467 	 constant cannot be the first operand.  */
12468       if (TREE_CODE (arg0) == BIT_AND_EXPR
12469 	  && integer_zerop (arg1))
12470 	{
12471 	  tree arg00 = TREE_OPERAND (arg0, 0);
12472 	  tree arg01 = TREE_OPERAND (arg0, 1);
12473 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12474 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12475 	    {
12476 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12477 				      arg01, TREE_OPERAND (arg00, 1));
12478 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12479 				 build_int_cst (TREE_TYPE (arg0), 1));
12480 	      return fold_build2_loc (loc, code, type,
12481 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12482 				  arg1);
12483 	    }
12484 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12485 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12486 	    {
12487 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12488 				      arg00, TREE_OPERAND (arg01, 1));
12489 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12490 				 build_int_cst (TREE_TYPE (arg0), 1));
12491 	      return fold_build2_loc (loc, code, type,
12492 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12493 				  arg1);
12494 	    }
12495 	}
12496 
12497       /* If this is an NE or EQ comparison of zero against the result of a
12498 	 signed MOD operation whose second operand is a power of 2, make
12499 	 the MOD operation unsigned since it is simpler and equivalent.  */
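      /* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4U == 0;
	 divisibility by a power of two depends only on the low-order
	 bits, which the sign does not disturb in two's complement.  */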
12500       if (integer_zerop (arg1)
12501 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12502 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12503 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
12504 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12505 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12506 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12507 	{
12508 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12509 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12510 				     fold_convert_loc (loc, newtype,
12511 						       TREE_OPERAND (arg0, 0)),
12512 				     fold_convert_loc (loc, newtype,
12513 						       TREE_OPERAND (arg0, 1)));
12514 
12515 	  return fold_build2_loc (loc, code, type, newmod,
12516 			      fold_convert_loc (loc, newtype, arg1));
12517 	}
12518 
12519       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12520 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12521 	 a single bit.  */
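      /* E.g. ((x >> 3) & 4) != 0 tests bit 5 of x and so becomes
	 (x & 32) != 0, provided the shifted mask still fits in the
	 precision of the type.  */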
12522       if (TREE_CODE (arg0) == BIT_AND_EXPR
12523 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12524 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12525 	     == INTEGER_CST
12526 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12527 	  && integer_zerop (arg1))
12528 	{
12529 	  tree itype = TREE_TYPE (arg0);
12530 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12531 	  prec = TYPE_PRECISION (itype);
12532 
12533 	  /* Check for a valid shift count.  */
12534 	  if (wi::ltu_p (arg001, prec))
12535 	    {
12536 	      tree arg01 = TREE_OPERAND (arg0, 1);
12537 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12538 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12539 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12540 		 can be rewritten as (X & (C2 << C1)) != 0.  */
12541 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12542 		{
12543 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12544 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12545 		  return fold_build2_loc (loc, code, type, tem,
12546 					  fold_convert_loc (loc, itype, arg1));
12547 		}
12548 	      /* Otherwise, for signed (arithmetic) shifts,
12549 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12550 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12551 	      else if (!TYPE_UNSIGNED (itype))
12552 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12553 				    arg000, build_int_cst (itype, 0));
12554 	      /* Otherwise, for unsigned (logical) shifts,
12555 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12556 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12557 	      else
12558 		return omit_one_operand_loc (loc, type,
12559 					 code == EQ_EXPR ? integer_one_node
12560 							 : integer_zero_node,
12561 					 arg000);
12562 	    }
12563 	}
12564 
12565       /* If we have (A & C) == C where C is a power of 2, convert this into
12566 	 (A & C) != 0.  Similarly for NE_EXPR.  */
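      /* E.g. (x & 8) == 8 becomes (x & 8) != 0, a direct single-bit
	 test.  */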
12567       if (TREE_CODE (arg0) == BIT_AND_EXPR
12568 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12569 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12570 	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12571 			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12572 						    integer_zero_node));
12573 
12574       /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12575 	 bit, then fold the expression into A < 0 or A >= 0.  */
12576       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12577       if (tem)
12578 	return tem;
12579 
12580       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12581 	 Similarly for NE_EXPR.  */
12582       if (TREE_CODE (arg0) == BIT_AND_EXPR
12583 	  && TREE_CODE (arg1) == INTEGER_CST
12584 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12585 	{
12586 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12587 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
12588 				   TREE_OPERAND (arg0, 1));
12589 	  tree dandnotc
12590 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12591 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12592 			       notc);
12593 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12594 	  if (integer_nonzerop (dandnotc))
12595 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12596 	}
12597 
12598       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12599 	 Similarly for NE_EXPR.  */
12600       if (TREE_CODE (arg0) == BIT_IOR_EXPR
12601 	  && TREE_CODE (arg1) == INTEGER_CST
12602 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12603 	{
12604 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12605 	  tree candnotd
12606 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12607 			       TREE_OPERAND (arg0, 1),
12608 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12609 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12610 	  if (integer_nonzerop (candnotd))
12611 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12612 	}
12613 
12614       /* If this is a comparison of a field, we may be able to simplify it.  */
12615       if ((TREE_CODE (arg0) == COMPONENT_REF
12616 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12617 	  /* Handle the constant case even without -O
12618 	     to make sure the warnings are given.  */
12619 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12620 	{
12621 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12622 	  if (t1)
12623 	    return t1;
12624 	}
12625 
12626       /* Optimize comparisons of strlen vs zero to a compare of the
12627 	 first character of the string vs zero.  To wit,
12628 		strlen(ptr) == 0   =>  *ptr == 0
12629 		strlen(ptr) != 0   =>  *ptr != 0
12630 	 Other cases should reduce to one of these two (or a constant)
12631 	 due to the return value of strlen being unsigned.  */
12632       if (TREE_CODE (arg0) == CALL_EXPR
12633 	  && integer_zerop (arg1))
12634 	{
12635 	  tree fndecl = get_callee_fndecl (arg0);
12636 
12637 	  if (fndecl
12638 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12639 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12640 	      && call_expr_nargs (arg0) == 1
12641 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12642 	    {
12643 	      tree iref = build_fold_indirect_ref_loc (loc,
12644 						   CALL_EXPR_ARG (arg0, 0));
12645 	      return fold_build2_loc (loc, code, type, iref,
12646 				  build_int_cst (TREE_TYPE (iref), 0));
12647 	    }
12648 	}
12649 
12650       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12651 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
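      /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0; an
	 unsigned x is first converted to the corresponding signed
	 type.  */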
12652       if (TREE_CODE (arg0) == RSHIFT_EXPR
12653 	  && integer_zerop (arg1)
12654 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12655 	{
12656 	  tree arg00 = TREE_OPERAND (arg0, 0);
12657 	  tree arg01 = TREE_OPERAND (arg0, 1);
12658 	  tree itype = TREE_TYPE (arg00);
12659 	  if (wi::eq_p (arg01, element_precision (itype) - 1))
12660 	    {
12661 	      if (TYPE_UNSIGNED (itype))
12662 		{
12663 		  itype = signed_type_for (itype);
12664 		  arg00 = fold_convert_loc (loc, itype, arg00);
12665 		}
12666 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12667 				  type, arg00, build_zero_cst (itype));
12668 	    }
12669 	}
12670 
12671       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
12672       if (integer_zerop (arg1)
12673 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
12674 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12675 			    TREE_OPERAND (arg0, 1));
12676 
12677       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
12678       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12679 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12680 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12681 				build_zero_cst (TREE_TYPE (arg0)));
12682       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
12683       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12684 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12685 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12686 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12687 				build_zero_cst (TREE_TYPE (arg0)));
12688 
12689       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
12690       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12691 	  && TREE_CODE (arg1) == INTEGER_CST
12692 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12693 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12694 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12695 					 TREE_OPERAND (arg0, 1), arg1));
12696 
12697       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12698 	 (X & C) == 0 when C is a single bit.  */
12699       if (TREE_CODE (arg0) == BIT_AND_EXPR
12700 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12701 	  && integer_zerop (arg1)
12702 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12703 	{
12704 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12705 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12706 				 TREE_OPERAND (arg0, 1));
12707 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12708 				  type, tem,
12709 				  fold_convert_loc (loc, TREE_TYPE (arg0),
12710 						    arg1));
12711 	}
12712 
12713       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12714 	 constant C is a power of two, i.e. a single bit.  */
12715       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12716 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12717 	  && integer_zerop (arg1)
12718 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12719 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12720 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12721 	{
12722 	  tree arg00 = TREE_OPERAND (arg0, 0);
12723 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12724 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
12725 	}
12726 
12727       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12728 	 when C is a power of two, i.e. a single bit.  */
12729       if (TREE_CODE (arg0) == BIT_AND_EXPR
12730 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12731 	  && integer_zerop (arg1)
12732 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12733 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12734 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12735 	{
12736 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12737 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12738 			     arg000, TREE_OPERAND (arg0, 1));
12739 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12740 			      tem, build_int_cst (TREE_TYPE (tem), 0));
12741 	}
12742 
12743       if (integer_zerop (arg1)
12744 	  && tree_expr_nonzero_p (arg0))
12745         {
12746 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12747 	  return omit_one_operand_loc (loc, type, res, arg0);
12748 	}
12749 
12750       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
12751       if (TREE_CODE (arg0) == NEGATE_EXPR
12752           && TREE_CODE (arg1) == NEGATE_EXPR)
12753 	return fold_build2_loc (loc, code, type,
12754 				TREE_OPERAND (arg0, 0),
12755 				fold_convert_loc (loc, TREE_TYPE (arg0),
12756 						  TREE_OPERAND (arg1, 0)));
12757 
12758       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
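      /* E.g. (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0: the
	 masked values agree exactly when x and y match in every bit
	 of the mask.  */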
12759       if (TREE_CODE (arg0) == BIT_AND_EXPR
12760 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
12761 	{
12762 	  tree arg00 = TREE_OPERAND (arg0, 0);
12763 	  tree arg01 = TREE_OPERAND (arg0, 1);
12764 	  tree arg10 = TREE_OPERAND (arg1, 0);
12765 	  tree arg11 = TREE_OPERAND (arg1, 1);
12766 	  tree itype = TREE_TYPE (arg0);
12767 
12768 	  if (operand_equal_p (arg01, arg11, 0))
12769 	    return fold_build2_loc (loc, code, type,
12770 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12771 					     fold_build2_loc (loc,
12772 							  BIT_XOR_EXPR, itype,
12773 							  arg00, arg10),
12774 					     arg01),
12775 				build_zero_cst (itype));
12776 
12777 	  if (operand_equal_p (arg01, arg10, 0))
12778 	    return fold_build2_loc (loc, code, type,
12779 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12780 					     fold_build2_loc (loc,
12781 							  BIT_XOR_EXPR, itype,
12782 							  arg00, arg11),
12783 					     arg01),
12784 				build_zero_cst (itype));
12785 
12786 	  if (operand_equal_p (arg00, arg11, 0))
12787 	    return fold_build2_loc (loc, code, type,
12788 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12789 					     fold_build2_loc (loc,
12790 							  BIT_XOR_EXPR, itype,
12791 							  arg01, arg10),
12792 					     arg00),
12793 				build_zero_cst (itype));
12794 
12795 	  if (operand_equal_p (arg00, arg10, 0))
12796 	    return fold_build2_loc (loc, code, type,
12797 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12798 					     fold_build2_loc (loc,
12799 							  BIT_XOR_EXPR, itype,
12800 							  arg01, arg11),
12801 					     arg00),
12802 				build_zero_cst (itype));
12803 	}
12804 
12805       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12806 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
12807 	{
12808 	  tree arg00 = TREE_OPERAND (arg0, 0);
12809 	  tree arg01 = TREE_OPERAND (arg0, 1);
12810 	  tree arg10 = TREE_OPERAND (arg1, 0);
12811 	  tree arg11 = TREE_OPERAND (arg1, 1);
12812 	  tree itype = TREE_TYPE (arg0);
12813 
12814 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12815 	     operand_equal_p guarantees no side-effects so we don't need
12816 	     to use omit_one_operand on Z.  */
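	  /* E.g. (x ^ z) == (y ^ z) folds to x == y, since XORing
	     both sides with the same z cancels out.  */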
12817 	  if (operand_equal_p (arg01, arg11, 0))
12818 	    return fold_build2_loc (loc, code, type, arg00,
12819 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12820 						      arg10));
12821 	  if (operand_equal_p (arg01, arg10, 0))
12822 	    return fold_build2_loc (loc, code, type, arg00,
12823 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12824 						      arg11));
12825 	  if (operand_equal_p (arg00, arg11, 0))
12826 	    return fold_build2_loc (loc, code, type, arg01,
12827 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12828 						      arg10));
12829 	  if (operand_equal_p (arg00, arg10, 0))
12830 	    return fold_build2_loc (loc, code, type, arg01,
12831 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12832 						      arg11));
12833 
12834 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
12835 	  if (TREE_CODE (arg01) == INTEGER_CST
12836 	      && TREE_CODE (arg11) == INTEGER_CST)
12837 	    {
12838 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12839 				     fold_convert_loc (loc, itype, arg11));
12840 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12841 	      return fold_build2_loc (loc, code, type, tem,
12842 				      fold_convert_loc (loc, itype, arg10));
12843 	    }
12844 	}
12845 
12846       /* Attempt to simplify equality/inequality comparisons of complex
12847 	 values.  Only lower the comparison if the result is known or
12848 	 can be simplified to a single scalar comparison.  */
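      /* E.g. when the real parts compare unequal at compile time,
	 z1 == z2 folds to false outright; when they compare equal,
	 it reduces to a comparison of the imaginary parts alone.  */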
12849       if ((TREE_CODE (arg0) == COMPLEX_EXPR
12850 	   || TREE_CODE (arg0) == COMPLEX_CST)
12851 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
12852 	      || TREE_CODE (arg1) == COMPLEX_CST))
12853 	{
12854 	  tree real0, imag0, real1, imag1;
12855 	  tree rcond, icond;
12856 
12857 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
12858 	    {
12859 	      real0 = TREE_OPERAND (arg0, 0);
12860 	      imag0 = TREE_OPERAND (arg0, 1);
12861 	    }
12862 	  else
12863 	    {
12864 	      real0 = TREE_REALPART (arg0);
12865 	      imag0 = TREE_IMAGPART (arg0);
12866 	    }
12867 
12868 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
12869 	    {
12870 	      real1 = TREE_OPERAND (arg1, 0);
12871 	      imag1 = TREE_OPERAND (arg1, 1);
12872 	    }
12873 	  else
12874 	    {
12875 	      real1 = TREE_REALPART (arg1);
12876 	      imag1 = TREE_IMAGPART (arg1);
12877 	    }
12878 
12879 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
12880 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12881 	    {
12882 	      if (integer_zerop (rcond))
12883 		{
12884 		  if (code == EQ_EXPR)
12885 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12886 					      imag0, imag1);
12887 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12888 		}
12889 	      else
12890 		{
12891 		  if (code == NE_EXPR)
12892 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12893 					      imag0, imag1);
12894 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12895 		}
12896 	    }
12897 
12898 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
12899 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
12900 	    {
12901 	      if (integer_zerop (icond))
12902 		{
12903 		  if (code == EQ_EXPR)
12904 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12905 					      real0, real1);
12906 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12907 		}
12908 	      else
12909 		{
12910 		  if (code == NE_EXPR)
12911 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12912 					      real0, real1);
12913 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12914 		}
12915 	    }
12916 	}
12917 
12918       return NULL_TREE;
12919 
12920     case LT_EXPR:
12921     case GT_EXPR:
12922     case LE_EXPR:
12923     case GE_EXPR:
12924       tem = fold_comparison (loc, code, type, op0, op1);
12925       if (tem != NULL_TREE)
12926 	return tem;
12927 
12928       /* Transform comparisons of the form X +- C CMP X.  */
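      /* E.g. for signed x with undefined overflow, x + 1 > x folds
	 to true and x + 1 <= x to false, each warning under
	 -Wstrict-overflow; the floating-point <= and >= forms also
	 require that NaNs be ignored.  */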
12929       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12930 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12931 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12932 	       && !HONOR_SNANS (arg0))
12933 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12934 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12935 	{
12936 	  tree arg01 = TREE_OPERAND (arg0, 1);
12937 	  enum tree_code code0 = TREE_CODE (arg0);
12938 	  int is_positive;
12939 
12940 	  if (TREE_CODE (arg01) == REAL_CST)
12941 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12942 	  else
12943 	    is_positive = tree_int_cst_sgn (arg01);
12944 
12945 	  /* (X - c) > X becomes false.  */
12946 	  if (code == GT_EXPR
12947 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12948 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12949 	    {
12950 	      if (TREE_CODE (arg01) == INTEGER_CST
12951 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12952 		fold_overflow_warning (("assuming signed overflow does not "
12953 					"occur when assuming that (X - c) > X "
12954 					"is always false"),
12955 				       WARN_STRICT_OVERFLOW_ALL);
12956 	      return constant_boolean_node (0, type);
12957 	    }
12958 
12959 	  /* Likewise (X + c) < X becomes false.  */
12960 	  if (code == LT_EXPR
12961 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12962 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12963 	    {
12964 	      if (TREE_CODE (arg01) == INTEGER_CST
12965 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12966 		fold_overflow_warning (("assuming signed overflow does not "
12967 					"occur when assuming that "
12968 					"(X + c) < X is always false"),
12969 				       WARN_STRICT_OVERFLOW_ALL);
12970 	      return constant_boolean_node (0, type);
12971 	    }
12972 
12973 	  /* Convert (X - c) <= X to true.  */
12974 	  if (!HONOR_NANS (arg1)
12975 	      && code == LE_EXPR
12976 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12977 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12978 	    {
12979 	      if (TREE_CODE (arg01) == INTEGER_CST
12980 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12981 		fold_overflow_warning (("assuming signed overflow does not "
12982 					"occur when assuming that "
12983 					"(X - c) <= X is always true"),
12984 				       WARN_STRICT_OVERFLOW_ALL);
12985 	      return constant_boolean_node (1, type);
12986 	    }
12987 
12988 	  /* Convert (X + c) >= X to true.  */
12989 	  if (!HONOR_NANS (arg1)
12990 	      && code == GE_EXPR
12991 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12992 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12993 	    {
12994 	      if (TREE_CODE (arg01) == INTEGER_CST
12995 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12996 		fold_overflow_warning (("assuming signed overflow does not "
12997 					"occur when assuming that "
12998 					"(X + c) >= X is always true"),
12999 				       WARN_STRICT_OVERFLOW_ALL);
13000 	      return constant_boolean_node (1, type);
13001 	    }
13002 
13003 	  if (TREE_CODE (arg01) == INTEGER_CST)
13004 	    {
13005 	      /* Convert X + c > X and X - c < X to true for integers.  */
13006 	      if (code == GT_EXPR
13007 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13008 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13009 		{
13010 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13011 		    fold_overflow_warning (("assuming signed overflow does "
13012 					    "not occur when assuming that "
13013 					    "(X + c) > X is always true"),
13014 					   WARN_STRICT_OVERFLOW_ALL);
13015 		  return constant_boolean_node (1, type);
13016 		}
13017 
13018 	      if (code == LT_EXPR
13019 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13020 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13021 		{
13022 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13023 		    fold_overflow_warning (("assuming signed overflow does "
13024 					    "not occur when assuming that "
13025 					    "(X - c) < X is always true"),
13026 					   WARN_STRICT_OVERFLOW_ALL);
13027 		  return constant_boolean_node (1, type);
13028 		}
13029 
13030 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
13031 	      if (code == LE_EXPR
13032 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13033 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13034 		{
13035 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13036 		    fold_overflow_warning (("assuming signed overflow does "
13037 					    "not occur when assuming that "
13038 					    "(X + c) <= X is always false"),
13039 					   WARN_STRICT_OVERFLOW_ALL);
13040 		  return constant_boolean_node (0, type);
13041 		}
13042 
13043 	      if (code == GE_EXPR
13044 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13045 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13046 		{
13047 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13048 		    fold_overflow_warning (("assuming signed overflow does "
13049 					    "not occur when assuming that "
13050 					    "(X - c) >= X is always false"),
13051 					   WARN_STRICT_OVERFLOW_ALL);
13052 		  return constant_boolean_node (0, type);
13053 		}
13054 	    }
13055 	}
13056 
13057       /* Comparisons with the highest or lowest possible integer of
13058 	 the specified precision will have known values.  */
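      /* E.g. for signed char c, c > 127 folds to false and c <= 127
	 to true, while c > 126 becomes c == 127 and c <= 126 becomes
	 c != 127.  */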
13059       {
13060 	tree arg1_type = TREE_TYPE (arg1);
13061 	unsigned int prec = TYPE_PRECISION (arg1_type);
13062 
13063 	if (TREE_CODE (arg1) == INTEGER_CST
13064 	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13065 	  {
13066 	    wide_int max = wi::max_value (arg1_type);
13067 	    wide_int signed_max = wi::max_value (prec, SIGNED);
13068 	    wide_int min = wi::min_value (arg1_type);
13069 
13070 	    if (wi::eq_p (arg1, max))
13071 	      switch (code)
13072 		{
13073 		case GT_EXPR:
13074 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13075 
13076 		case GE_EXPR:
13077 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13078 
13079 		case LE_EXPR:
13080 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13081 
13082 		case LT_EXPR:
13083 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13084 
13085 		/* The GE_EXPR and LT_EXPR cases above are not normally
13086 		   reached because of previous transformations.  */
13087 
13088 		default:
13089 		  break;
13090 		}
13091 	    else if (wi::eq_p (arg1, max - 1))
13092 	      switch (code)
13093 		{
13094 		case GT_EXPR:
13095 		  arg1 = const_binop (PLUS_EXPR, arg1,
13096 				      build_int_cst (TREE_TYPE (arg1), 1));
13097 		  return fold_build2_loc (loc, EQ_EXPR, type,
13098 				      fold_convert_loc (loc,
13099 							TREE_TYPE (arg1), arg0),
13100 				      arg1);
13101 		case LE_EXPR:
13102 		  arg1 = const_binop (PLUS_EXPR, arg1,
13103 				      build_int_cst (TREE_TYPE (arg1), 1));
13104 		  return fold_build2_loc (loc, NE_EXPR, type,
13105 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13106 							arg0),
13107 				      arg1);
13108 		default:
13109 		  break;
13110 		}
13111 	    else if (wi::eq_p (arg1, min))
13112 	      switch (code)
13113 		{
13114 		case LT_EXPR:
13115 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13116 
13117 		case LE_EXPR:
13118 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13119 
13120 		case GE_EXPR:
13121 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13122 
13123 		case GT_EXPR:
13124 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13125 
13126 		default:
13127 		  break;
13128 		}
13129 	    else if (wi::eq_p (arg1, min + 1))
13130 	      switch (code)
13131 		{
13132 		case GE_EXPR:
13133 		  arg1 = const_binop (MINUS_EXPR, arg1,
13134 				      build_int_cst (TREE_TYPE (arg1), 1));
13135 		  return fold_build2_loc (loc, NE_EXPR, type,
13136 				      fold_convert_loc (loc,
13137 							TREE_TYPE (arg1), arg0),
13138 				      arg1);
13139 		case LT_EXPR:
13140 		  arg1 = const_binop (MINUS_EXPR, arg1,
13141 				      build_int_cst (TREE_TYPE (arg1), 1));
13142 		  return fold_build2_loc (loc, EQ_EXPR, type,
13143 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13144 							arg0),
13145 				      arg1);
13146 		default:
13147 		  break;
13148 		}
13149 
13150 	    else if (wi::eq_p (arg1, signed_max)
13151 		     && TYPE_UNSIGNED (arg1_type)
13152 		     /* We will flip the signedness of the comparison operator
13153 			associated with the mode of arg1, so the sign bit is
13154 			specified by this mode.  Check that arg1 is the signed
13155 			max associated with this sign bit.  */
13156 		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13157 		     /* signed_type does not work on pointer types.  */
13158 		     && INTEGRAL_TYPE_P (arg1_type))
13159 	      {
13160 		/* The following case also applies to X < signed_max+1
13161 		   and X >= signed_max+1 because of previous transformations.  */
13162 		if (code == LE_EXPR || code == GT_EXPR)
13163 		  {
13164 		    tree st = signed_type_for (arg1_type);
13165 		    return fold_build2_loc (loc,
13166 					code == LE_EXPR ? GE_EXPR : LT_EXPR,
13167 					type, fold_convert_loc (loc, st, arg0),
13168 					build_int_cst (st, 0));
13169 		  }
13170 	      }
13171 	  }
13172       }
13173 
13174       /* If we are comparing an ABS_EXPR with a constant, we can
13175 	 convert all the cases into explicit comparisons, but they may
13176 	 well not be faster than doing the ABS and one comparison.
13177 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
13178 	 and a comparison, and is probably faster.  */
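      /* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, provided the
	 negated bound is representable without overflow.  */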
13179       if (code == LE_EXPR
13180 	  && TREE_CODE (arg1) == INTEGER_CST
13181 	  && TREE_CODE (arg0) == ABS_EXPR
13182 	  && ! TREE_SIDE_EFFECTS (arg0)
13183 	  && (0 != (tem = negate_expr (arg1)))
13184 	  && TREE_CODE (tem) == INTEGER_CST
13185 	  && !TREE_OVERFLOW (tem))
13186 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13187 			    build2 (GE_EXPR, type,
13188 				    TREE_OPERAND (arg0, 0), tem),
13189 			    build2 (LE_EXPR, type,
13190 				    TREE_OPERAND (arg0, 0), arg1));
13191 
13192       /* Convert ABS_EXPR<x> >= 0 to true.  */
13193       strict_overflow_p = false;
13194       if (code == GE_EXPR
13195 	  && (integer_zerop (arg1)
13196 	      || (! HONOR_NANS (arg0)
13197 		  && real_zerop (arg1)))
13198 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13199 	{
13200 	  if (strict_overflow_p)
13201 	    fold_overflow_warning (("assuming signed overflow does not occur "
13202 				    "when simplifying comparison of "
13203 				    "absolute value and zero"),
13204 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13205 	  return omit_one_operand_loc (loc, type,
13206 				       constant_boolean_node (true, type),
13207 				       arg0);
13208 	}
13209 
13210       /* Convert ABS_EXPR<x> < 0 to false.  */
13211       strict_overflow_p = false;
13212       if (code == LT_EXPR
13213 	  && (integer_zerop (arg1) || real_zerop (arg1))
13214 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13215 	{
13216 	  if (strict_overflow_p)
13217 	    fold_overflow_warning (("assuming signed overflow does not occur "
13218 				    "when simplifying comparison of "
13219 				    "absolute value and zero"),
13220 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13221 	  return omit_one_operand_loc (loc, type,
13222 				       constant_boolean_node (false, type),
13223 				       arg0);
13224 	}
13225 
13226       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13227 	 and similarly for >= into !=.  */
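      /* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0,
	 trading the shift of the constant for a shift of x.  */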
13228       if ((code == LT_EXPR || code == GE_EXPR)
13229 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13230 	  && TREE_CODE (arg1) == LSHIFT_EXPR
13231 	  && integer_onep (TREE_OPERAND (arg1, 0)))
13232 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13233 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13234 				   TREE_OPERAND (arg1, 1)),
13235 			   build_zero_cst (TREE_TYPE (arg0)));
13236 
13237       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13238 	 otherwise Y might be >= # of bits in X's type and thus e.g.
13239 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13240 	 If the cast is widening, then 1 << Y should have unsigned type,
13241 	 otherwise if Y is the number of bits in the signed shift type minus 1,
13242 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
13243 	 Y == 31 might be 0xffffffff80000000.  */
13244       if ((code == LT_EXPR || code == GE_EXPR)
13245 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13246 	  && CONVERT_EXPR_P (arg1)
13247 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13248 	  && (element_precision (TREE_TYPE (arg1))
13249 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13250 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13251 	      || (element_precision (TREE_TYPE (arg1))
13252 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13253 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13254 	{
13255 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13256 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13257 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13258 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13259 			     build_zero_cst (TREE_TYPE (arg0)));
13260 	}
13261 
13262       return NULL_TREE;
13263 
13264     case UNORDERED_EXPR:
13265     case ORDERED_EXPR:
13266     case UNLT_EXPR:
13267     case UNLE_EXPR:
13268     case UNGT_EXPR:
13269     case UNGE_EXPR:
13270     case UNEQ_EXPR:
13271     case LTGT_EXPR:
13272       if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13273 	{
13274 	  t1 = fold_relational_const (code, type, arg0, arg1);
13275 	  if (t1 != NULL_TREE)
13276 	    return t1;
13277 	}
13278 
13279       /* If the first operand is NaN, the result is constant.  */
13280       if (TREE_CODE (arg0) == REAL_CST
13281 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13282 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13283 	{
13284 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13285 	       ? integer_zero_node
13286 	       : integer_one_node;
13287 	  return omit_one_operand_loc (loc, type, t1, arg1);
13288 	}
13289 
13290       /* If the second operand is NaN, the result is constant.  */
13291       if (TREE_CODE (arg1) == REAL_CST
13292 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13293 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13294 	{
13295 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13296 	       ? integer_zero_node
13297 	       : integer_one_node;
13298 	  return omit_one_operand_loc (loc, type, t1, arg0);
13299 	}
13300 
13301       /* Simplify unordered comparison of something with itself.  */
13302       if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13303 	  && operand_equal_p (arg0, arg1, 0))
13304 	return constant_boolean_node (1, type);
13305 
13306       if (code == LTGT_EXPR
13307 	  && !flag_trapping_math
13308 	  && operand_equal_p (arg0, arg1, 0))
13309 	return constant_boolean_node (0, type);
13310 
13311       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
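      /* E.g. for floats f and g, (double) f UNLT (double) g is
	 compared directly as f UNLT g, since the widening is exact
	 and cannot change the result.  */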
13312       {
13313 	tree targ0 = strip_float_extensions (arg0);
13314 	tree targ1 = strip_float_extensions (arg1);
13315 	tree newtype = TREE_TYPE (targ0);
13316 
13317 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13318 	  newtype = TREE_TYPE (targ1);
13319 
13320 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13321 	  return fold_build2_loc (loc, code, type,
13322 			      fold_convert_loc (loc, newtype, targ0),
13323 			      fold_convert_loc (loc, newtype, targ1));
13324       }
13325 
13326       return NULL_TREE;
13327 
13328     case COMPOUND_EXPR:
13329       /* When pedantic, a compound expression can be neither an lvalue
13330 	 nor an integer constant expression.  */
13331       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13332 	return NULL_TREE;
13333       /* Don't let (0, 0) be a null pointer constant.  */
13334       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13335 				 : fold_convert_loc (loc, type, arg1);
13336       return pedantic_non_lvalue_loc (loc, tem);
13337 
13338     case ASSERT_EXPR:
13339       /* An ASSERT_EXPR should never be passed to fold_binary.  */
13340       gcc_unreachable ();
13341 
13342     default:
13343       return NULL_TREE;
13344     } /* switch (code) */
13345 }
13346 
13347 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13348    a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13349    of GOTO_EXPR.  */
13350 
13351 static tree
13352 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13353 {
13354   switch (TREE_CODE (*tp))
13355     {
13356     case LABEL_EXPR:
13357       return *tp;
13358 
13359     case GOTO_EXPR:
13360       *walk_subtrees = 0;
13361 
13362       /* ... fall through ...  */
13363 
13364     default:
13365       return NULL_TREE;
13366     }
13367 }
13368 
13369 /* Return whether the sub-tree ST contains a label which is accessible from
13370    outside the sub-tree.  */
13371 
13372 static bool
13373 contains_label_p (tree st)
13374 {
13375   return
13376    (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13377 }
13378 
13379 /* Fold a ternary expression of code CODE and type TYPE with operands
13380    OP0, OP1, and OP2.  Return the folded expression if folding is
13381    successful.  Otherwise, return NULL_TREE.  */
13382 
13383 tree
13384 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13385 		  tree op0, tree op1, tree op2)
13386 {
13387   tree tem;
13388   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13389   enum tree_code_class kind = TREE_CODE_CLASS (code);
13390 
13391   gcc_assert (IS_EXPR_CODE_CLASS (kind)
13392 	      && TREE_CODE_LENGTH (code) == 3);
13393 
13394   /* If this is a commutative operation, and OP0 is a constant, move it
13395      to OP1 to reduce the number of tests below.  */
13396   if (commutative_ternary_tree_code (code)
13397       && tree_swap_operands_p (op0, op1, true))
13398     return fold_build3_loc (loc, code, type, op1, op0, op2);
13399 
13400   tem = generic_simplify (loc, code, type, op0, op1, op2);
13401   if (tem)
13402     return tem;
13403 
13404   /* Strip any conversions that don't change the mode.  This is safe
13405      for every expression, except for a comparison expression because
13406      its signedness is derived from its operands.  So, in the latter
13407      case, only strip conversions that don't change the signedness.
13408 
13409      Note that this is done as an internal manipulation within the
13410      constant folder, in order to find the simplest representation of
13411      the arguments so that their form can be studied.  In any case,
13412      the appropriate type conversions should be put back in the tree
13413      that will get out of the constant folder.  */
13414   if (op0)
13415     {
13416       arg0 = op0;
13417       STRIP_NOPS (arg0);
13418     }
13419 
13420   if (op1)
13421     {
13422       arg1 = op1;
13423       STRIP_NOPS (arg1);
13424     }
13425 
13426   if (op2)
13427     {
13428       arg2 = op2;
13429       STRIP_NOPS (arg2);
13430     }
13431 
13432   switch (code)
13433     {
13434     case COMPONENT_REF:
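      /* A COMPONENT_REF of a known CONSTRUCTOR folds to the matching
	 field's value, e.g. ((struct s) { 1, 2 }).b folds to 2.  */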
13435       if (TREE_CODE (arg0) == CONSTRUCTOR
13436 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13437 	{
13438 	  unsigned HOST_WIDE_INT idx;
13439 	  tree field, value;
13440 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13441 	    if (field == arg1)
13442 	      return value;
13443 	}
13444       return NULL_TREE;
13445 
13446     case COND_EXPR:
13447     case VEC_COND_EXPR:
13448       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13449 	 so all simple results must be passed through pedantic_non_lvalue.  */
13450       if (TREE_CODE (arg0) == INTEGER_CST)
13451 	{
13452 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
13453 	  tem = integer_zerop (arg0) ? op2 : op1;
13454 	  /* Only optimize constant conditions when the selected branch
13455 	     has the same type as the COND_EXPR.  This avoids optimizing
13456              away "c ? x : throw", where the throw has a void type.
13457              Also avoid throwing away the unused operand when it
13458              contains a label.  */
13458           if ((!TREE_SIDE_EFFECTS (unused_op)
13459                || !contains_label_p (unused_op))
13460               && (! VOID_TYPE_P (TREE_TYPE (tem))
13461                   || VOID_TYPE_P (type)))
13462 	    return pedantic_non_lvalue_loc (loc, tem);
13463 	  return NULL_TREE;
13464 	}
13465       else if (TREE_CODE (arg0) == VECTOR_CST)
13466 	{
13467 	  if ((TREE_CODE (arg1) == VECTOR_CST
13468 	       || TREE_CODE (arg1) == CONSTRUCTOR)
13469 	      && (TREE_CODE (arg2) == VECTOR_CST
13470 		  || TREE_CODE (arg2) == CONSTRUCTOR))
13471 	    {
13472 	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13473 	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13474 	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13475 	      for (i = 0; i < nelts; i++)
13476 		{
13477 		  tree val = VECTOR_CST_ELT (arg0, i);
13478 		  if (integer_all_onesp (val))
13479 		    sel[i] = i;
13480 		  else if (integer_zerop (val))
13481 		    sel[i] = nelts + i;
13482 		  else /* Currently unreachable.  */
13483 		    return NULL_TREE;
13484 		}
13485 	      tree t = fold_vec_perm (type, arg1, arg2, sel);
13486 	      if (t != NULL_TREE)
13487 		return t;
13488 	    }
13489 	}
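      /* A constant vector condition selects elementwise, so e.g. a
	 mask of { -1, 0, -1, 0 } becomes a constant permutation that
	 takes elements 0 and 2 from the THEN vector and elements 1
	 and 3 from the ELSE vector.  */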
13490 
13491       /* If we have A op B ? A : C, we may be able to convert this to a
13492 	 simpler expression, depending on the operation and the values
13493 	 of B and C.  Signed zeros prevent all of these transformations,
13494 	 for reasons given above each one.
13495 
13496          Also try swapping the arguments and inverting the conditional.  */
13497       if (COMPARISON_CLASS_P (arg0)
13498 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13499 					     arg1, TREE_OPERAND (arg0, 1))
13500 	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13501 	{
13502 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13503 	  if (tem)
13504 	    return tem;
13505 	}
13506 
13507       if (COMPARISON_CLASS_P (arg0)
13508 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13509 					     op2,
13510 					     TREE_OPERAND (arg0, 1))
13511 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13512 	{
13513 	  location_t loc0 = expr_location_or (arg0, loc);
13514 	  tem = fold_invert_truthvalue (loc0, arg0);
13515 	  if (tem && COMPARISON_CLASS_P (tem))
13516 	    {
13517 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13518 	      if (tem)
13519 		return tem;
13520 	    }
13521 	}
13522 
13523       /* If the second operand is simpler than the third, swap them
13524 	 since that produces better jump optimization results.  */
13525       if (truth_value_p (TREE_CODE (arg0))
13526 	  && tree_swap_operands_p (op1, op2, false))
13527 	{
13528 	  location_t loc0 = expr_location_or (arg0, loc);
13529 	  /* See if this can be inverted.  If it can't, possibly because
13530 	     it was a floating-point inequality comparison, don't do
13531 	     anything.  */
13532 	  tem = fold_invert_truthvalue (loc0, arg0);
13533 	  if (tem)
13534 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
13535 	}
13536 
13537       /* Convert A ? 1 : 0 to simply A.  */
13538       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13539 				 : (integer_onep (op1)
13540 				    && !VECTOR_TYPE_P (type)))
13541 	  && integer_zerop (op2)
13542 	  /* If we try to convert OP0 to our type, the
13543 	     call to fold will try to move the conversion inside
13544 	     a COND, which will recurse.  In that case, the COND_EXPR
13545 	     is probably the best choice, so leave it alone.  */
13546 	  && type == TREE_TYPE (arg0))
13547 	return pedantic_non_lvalue_loc (loc, arg0);
13548 
13549       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13550 	 over COND_EXPR in cases such as floating point comparisons.  */
13551       if (integer_zerop (op1)
13552 	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13553 				    : (integer_onep (op2)
13554 				       && !VECTOR_TYPE_P (type)))
13555 	  && truth_value_p (TREE_CODE (arg0)))
13556 	return pedantic_non_lvalue_loc (loc,
13557 				    fold_convert_loc (loc, type,
13558 					      invert_truthvalue_loc (loc,
13559 								     arg0)));
13560 
13561       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
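      /* E.g. for 32-bit int a, a < 0 ? INT_MIN : 0 folds to
	 a & INT_MIN, a single mask of the sign bit.  */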
13562       if (TREE_CODE (arg0) == LT_EXPR
13563 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13564 	  && integer_zerop (op2)
13565 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13566 	{
13567 	  /* sign_bit_p looks through both zero and sign extensions,
13568 	     but for this optimization only sign extensions are
13569 	     usable.  */
13570 	  tree tem2 = TREE_OPERAND (arg0, 0);
13571 	  while (tem != tem2)
13572 	    {
13573 	      if (TREE_CODE (tem2) != NOP_EXPR
13574 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13575 		{
13576 		  tem = NULL_TREE;
13577 		  break;
13578 		}
13579 	      tem2 = TREE_OPERAND (tem2, 0);
13580 	    }
13581 	  /* sign_bit_p only checks ARG1 bits within A's precision.
13582 	     If <sign bit of A> has a wider type than A, the bits outside
13583 	     of A's precision in <sign bit of A> need to be checked.
13584 	     If they are all 0, this optimization needs to be done
13585 	     in unsigned A's type; if they are all 1, in signed A's type;
13586 	     otherwise it can't be done.  */
13587 	  if (tem
13588 	      && TYPE_PRECISION (TREE_TYPE (tem))
13589 		 < TYPE_PRECISION (TREE_TYPE (arg1))
13590 	      && TYPE_PRECISION (TREE_TYPE (tem))
13591 		 < TYPE_PRECISION (type))
13592 	    {
13593 	      int inner_width, outer_width;
13594 	      tree tem_type;
13595 
13596 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13597 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13598 	      if (outer_width > TYPE_PRECISION (type))
13599 		outer_width = TYPE_PRECISION (type);
13600 
13601 	      wide_int mask = wi::shifted_mask
13602 		(inner_width, outer_width - inner_width, false,
13603 		 TYPE_PRECISION (TREE_TYPE (arg1)));
13604 
13605 	      wide_int common = mask & arg1;
13606 	      if (common == mask)
13607 		{
13608 		  tem_type = signed_type_for (TREE_TYPE (tem));
13609 		  tem = fold_convert_loc (loc, tem_type, tem);
13610 		}
13611 	      else if (common == 0)
13612 		{
13613 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13614 		  tem = fold_convert_loc (loc, tem_type, tem);
13615 		}
13616 	      else
13617 		tem = NULL;
13618 	    }
13619 
13620 	  if (tem)
13621 	    return
13622 	      fold_convert_loc (loc, type,
13623 				fold_build2_loc (loc, BIT_AND_EXPR,
13624 					     TREE_TYPE (tem), tem,
13625 					     fold_convert_loc (loc,
13626 							       TREE_TYPE (tem),
13627 							       arg1)));
13628 	}
13629 
13630       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13631 	 already handled above.  */
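      /* Illustrative:  (a >> 3) & 1 ? 8 : 0  folds to  a & 8, since the
	 selected bit already sits at position 3 in A.  */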
13632       if (TREE_CODE (arg0) == BIT_AND_EXPR
13633 	  && integer_onep (TREE_OPERAND (arg0, 1))
13634 	  && integer_zerop (op2)
13635 	  && integer_pow2p (arg1))
13636 	{
13637 	  tree tem = TREE_OPERAND (arg0, 0);
13638 	  STRIP_NOPS (tem);
13639 	  if (TREE_CODE (tem) == RSHIFT_EXPR
13640 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13641 	      && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13642 		  == tree_to_uhwi (TREE_OPERAND (tem, 1))))
13643 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13644 				TREE_OPERAND (tem, 0), arg1);
13645 	}
13646 
13647       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13648 	 is probably obsolete because the first operand should be a
13649 	 truth value (that's why we have the two cases above), but let's
13650 	 leave it in until we can confirm this for all front-ends.  */
13651       if (integer_zerop (op2)
13652 	  && TREE_CODE (arg0) == NE_EXPR
13653 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13654 	  && integer_pow2p (arg1)
13655 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13656 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13657 			      arg1, OEP_ONLY_CONST))
13658 	return pedantic_non_lvalue_loc (loc,
13659 				    fold_convert_loc (loc, type,
13660 						      TREE_OPERAND (arg0, 0)));
13661 
13662       /* Disable the transformations below for vectors, since
13663 	 fold_binary_op_with_conditional_arg may undo them immediately,
13664 	 yielding an infinite loop.  */
13665       if (code == VEC_COND_EXPR)
13666 	return NULL_TREE;
13667 
13668       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13669       if (integer_zerop (op2)
13670 	  && truth_value_p (TREE_CODE (arg0))
13671 	  && truth_value_p (TREE_CODE (arg1))
13672 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13673 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13674 							   : TRUTH_ANDIF_EXPR,
13675 				type, fold_convert_loc (loc, type, arg0), arg1);
13676 
13677       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13678       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13679 	  && truth_value_p (TREE_CODE (arg0))
13680 	  && truth_value_p (TREE_CODE (arg1))
13681 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13682 	{
13683 	  location_t loc0 = expr_location_or (arg0, loc);
13684 	  /* Only perform transformation if ARG0 is easily inverted.  */
13685 	  tem = fold_invert_truthvalue (loc0, arg0);
13686 	  if (tem)
13687 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13688 					 ? BIT_IOR_EXPR
13689 					 : TRUTH_ORIF_EXPR,
13690 				    type, fold_convert_loc (loc, type, tem),
13691 				    arg1);
13692 	}
13693 
13694       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13695       if (integer_zerop (arg1)
13696 	  && truth_value_p (TREE_CODE (arg0))
13697 	  && truth_value_p (TREE_CODE (op2))
13698 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13699 	{
13700 	  location_t loc0 = expr_location_or (arg0, loc);
13701 	  /* Only perform transformation if ARG0 is easily inverted.  */
13702 	  tem = fold_invert_truthvalue (loc0, arg0);
13703 	  if (tem)
13704 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13705 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13706 				    type, fold_convert_loc (loc, type, tem),
13707 				    op2);
13708 	}
13709 
13710       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
13711       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13712 	  && truth_value_p (TREE_CODE (arg0))
13713 	  && truth_value_p (TREE_CODE (op2))
13714 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13715 	return fold_build2_loc (loc, code == VEC_COND_EXPR
13716 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13717 				type, fold_convert_loc (loc, type, arg0), op2);
13718 
13719       return NULL_TREE;
13720 
13721     case CALL_EXPR:
13722       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
13723 	 of fold_ternary on them.  */
13724       gcc_unreachable ();
13725 
13726     case BIT_FIELD_REF:
13727       if ((TREE_CODE (arg0) == VECTOR_CST
13728 	   || (TREE_CODE (arg0) == CONSTRUCTOR
13729 	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13730 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
13731 	      || (TREE_CODE (type) == VECTOR_TYPE
13732 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13733 	{
13734 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13735 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13736 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13737 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13738 
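	  /* Illustrative: BIT_FIELD_REF <v, 64, 64> of a vector of four
	     32-bit elements selects elements 2 and 3, i.e. idx == 2 and
	     n == 2 after the divisions below.  */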
13739 	  if (n != 0
13740 	      && (idx % width) == 0
13741 	      && (n % width) == 0
13742 	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13743 	    {
13744 	      idx = idx / width;
13745 	      n = n / width;
13746 
13747 	      if (TREE_CODE (arg0) == VECTOR_CST)
13748 		{
13749 		  if (n == 1)
13750 		    return VECTOR_CST_ELT (arg0, idx);
13751 
13752 		  tree *vals = XALLOCAVEC (tree, n);
13753 		  for (unsigned i = 0; i < n; ++i)
13754 		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13755 		  return build_vector (type, vals);
13756 		}
13757 
13758 	      /* Constructor elements can be subvectors.  */
13759 	      unsigned HOST_WIDE_INT k = 1;
13760 	      if (CONSTRUCTOR_NELTS (arg0) != 0)
13761 		{
13762 		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13763 		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13764 		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
13765 		}
13766 
13767 	      /* We keep an exact subset of the constructor elements.  */
13768 	      if ((idx % k) == 0 && (n % k) == 0)
13769 		{
13770 		  if (CONSTRUCTOR_NELTS (arg0) == 0)
13771 		    return build_constructor (type, NULL);
13772 		  idx /= k;
13773 		  n /= k;
13774 		  if (n == 1)
13775 		    {
13776 		      if (idx < CONSTRUCTOR_NELTS (arg0))
13777 			return CONSTRUCTOR_ELT (arg0, idx)->value;
13778 		      return build_zero_cst (type);
13779 		    }
13780 
13781 		  vec<constructor_elt, va_gc> *vals;
13782 		  vec_alloc (vals, n);
13783 		  for (unsigned i = 0;
13784 		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13785 		       ++i)
13786 		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13787 					    CONSTRUCTOR_ELT
13788 					      (arg0, idx + i)->value);
13789 		  return build_constructor (type, vals);
13790 		}
13791 	      /* The bitfield references a single constructor element.  */
13792 	      else if (idx + n <= (idx / k + 1) * k)
13793 		{
13794 		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13795 		    return build_zero_cst (type);
13796 		  else if (n == k)
13797 		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13798 		  else
13799 		    return fold_build3_loc (loc, code, type,
13800 		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13801 		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13802 		}
13803 	    }
13804 	}
13805 
13806       /* A bit-field-ref that references the full argument can be stripped.  */
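      /* Illustrative: BIT_FIELD_REF <a, 32, 0> of a 32-bit integer A is
	 simply A converted to TYPE.  */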
13807       if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13808 	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13809 	  && integer_zerop (op2))
13810 	return fold_convert_loc (loc, type, arg0);
13811 
13812       /* On constants we can use native encode/interpret to constant
13813          fold (nearly) all BIT_FIELD_REFs.  */
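      /* Illustrative: a byte-aligned BIT_FIELD_REF of a 64-bit INTEGER_CST
	 can be folded by encoding the constant into a byte buffer and
	 re-interpreting the selected bytes in TYPE.  */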
13814       if (CONSTANT_CLASS_P (arg0)
13815 	  && can_native_interpret_type_p (type)
13816 	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13817 	  /* This limitation should not be necessary, we just need to
13818 	     round this up to mode size.  */
13819 	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13820 	  /* Need bit-shifting of the buffer to relax the following.  */
13821 	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13822 	{
13823 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13824 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13825 	  unsigned HOST_WIDE_INT clen;
13826 	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13827 	  /* ???  We cannot tell native_encode_expr to start at
13828 	     some random byte only, so limit ourselves to a reasonable
13829 	     amount of work.  */
13830 	  if (clen <= 4096)
13831 	    {
13832 	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
13833 	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13834 	      if (len > 0
13835 		  && len * BITS_PER_UNIT >= bitpos + bitsize)
13836 		{
13837 		  tree v = native_interpret_expr (type,
13838 						  b + bitpos / BITS_PER_UNIT,
13839 						  bitsize / BITS_PER_UNIT);
13840 		  if (v)
13841 		    return v;
13842 		}
13843 	    }
13844 	}
13845 
13846       return NULL_TREE;
13847 
13848     case FMA_EXPR:
13849       /* For integers we can decompose the FMA if possible.  */
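      /* Illustrative: FMA_EXPR <2, 3, c> becomes 6 + c, and
	 FMA_EXPR <a, b, 0> becomes a * b.  */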
13850       if (TREE_CODE (arg0) == INTEGER_CST
13851 	  && TREE_CODE (arg1) == INTEGER_CST)
13852 	return fold_build2_loc (loc, PLUS_EXPR, type,
13853 				const_binop (MULT_EXPR, arg0, arg1), arg2);
13854       if (integer_zerop (arg2))
13855 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13856 
13857       return fold_fma (loc, type, arg0, arg1, arg2);
13858 
13859     case VEC_PERM_EXPR:
13860       if (TREE_CODE (arg2) == VECTOR_CST)
13861 	{
13862 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13863 	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13864 	  unsigned char *sel2 = sel + nelts;
13865 	  bool need_mask_canon = false;
13866 	  bool need_mask_canon2 = false;
13867 	  bool all_in_vec0 = true;
13868 	  bool all_in_vec1 = true;
13869 	  bool maybe_identity = true;
13870 	  bool single_arg = (op0 == op1);
13871 	  bool changed = false;
13872 
13873 	  mask2 = 2 * nelts - 1;
13874 	  mask = single_arg ? (nelts - 1) : mask2;
13875 	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13876 	  for (i = 0; i < nelts; i++)
13877 	    {
13878 	      tree val = VECTOR_CST_ELT (arg2, i);
13879 	      if (TREE_CODE (val) != INTEGER_CST)
13880 		return NULL_TREE;
13881 
13882 	      /* Make sure that the perm value is in an acceptable
13883 		 range.  */
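	      /* Illustrative: with nelts == 4, a single-argument permute
		 reduces selector value 5 to 5 & 3 == 1, while a two-argument
		 permute keeps 5 & 7 == 5, i.e. element 1 of the second
		 vector.  */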
13884 	      wide_int t = val;
13885 	      need_mask_canon |= wi::gtu_p (t, mask);
13886 	      need_mask_canon2 |= wi::gtu_p (t, mask2);
13887 	      sel[i] = t.to_uhwi () & mask;
13888 	      sel2[i] = t.to_uhwi () & mask2;
13889 
13890 	      if (sel[i] < nelts)
13891 		all_in_vec1 = false;
13892 	      else
13893 		all_in_vec0 = false;
13894 
13895 	      if ((sel[i] & (nelts-1)) != i)
13896 		maybe_identity = false;
13897 	    }
13898 
13899 	  if (maybe_identity)
13900 	    {
13901 	      if (all_in_vec0)
13902 		return op0;
13903 	      if (all_in_vec1)
13904 		return op1;
13905 	    }
13906 
13907 	  if (all_in_vec0)
13908 	    op1 = op0;
13909 	  else if (all_in_vec1)
13910 	    {
13911 	      op0 = op1;
13912 	      for (i = 0; i < nelts; i++)
13913 		sel[i] -= nelts;
13914 	      need_mask_canon = true;
13915 	    }
13916 
13917 	  if ((TREE_CODE (op0) == VECTOR_CST
13918 	       || TREE_CODE (op0) == CONSTRUCTOR)
13919 	      && (TREE_CODE (op1) == VECTOR_CST
13920 		  || TREE_CODE (op1) == CONSTRUCTOR))
13921 	    {
13922 	      tree t = fold_vec_perm (type, op0, op1, sel);
13923 	      if (t != NULL_TREE)
13924 		return t;
13925 	    }
13926 
13927 	  if (op0 == op1 && !single_arg)
13928 	    changed = true;
13929 
13930 	  /* Some targets are deficient and fail to expand a single
13931 	     argument permutation while still allowing an equivalent
13932 	     2-argument version.  */
13933 	  if (need_mask_canon && arg2 == op2
13934 	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13935 	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13936 	    {
13937 	      need_mask_canon = need_mask_canon2;
13938 	      sel = sel2;
13939 	    }
13940 
13941 	  if (need_mask_canon && arg2 == op2)
13942 	    {
13943 	      tree *tsel = XALLOCAVEC (tree, nelts);
13944 	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13945 	      for (i = 0; i < nelts; i++)
13946 		tsel[i] = build_int_cst (eltype, sel[i]);
13947 	      op2 = build_vector (TREE_TYPE (arg2), tsel);
13948 	      changed = true;
13949 	    }
13950 
13951 	  if (changed)
13952 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13953 	}
13954       return NULL_TREE;
13955 
13956     default:
13957       return NULL_TREE;
13958     } /* switch (code) */
13959 }
13960 
13961 /* Perform constant folding and related simplification of EXPR.
13962    The related simplifications include x*1 => x, x*0 => 0, etc.,
13963    and application of the associative law.
13964    NOP_EXPR conversions may be removed freely (as long as we
13965    are careful not to change the type of the overall expression).
13966    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13967    but we can constant-fold them if they have constant operands.  */
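
/* Illustrative usage sketch (editorial; not part of the original file):
   a front end might build 1 + 2 and let fold collapse it to a single
   INTEGER_CST:

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      integer_one_node,
			      build_int_cst (integer_type_node, 2)));

   SUM is then an INTEGER_CST with value 3 rather than a PLUS_EXPR.  */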
13968 
13969 #ifdef ENABLE_FOLD_CHECKING
13970 # define fold(x) fold_1 (x)
13971 static tree fold_1 (tree);
13972 static
13973 #endif
13974 tree
13975 fold (tree expr)
13976 {
13977   const tree t = expr;
13978   enum tree_code code = TREE_CODE (t);
13979   enum tree_code_class kind = TREE_CODE_CLASS (code);
13980   tree tem;
13981   location_t loc = EXPR_LOCATION (expr);
13982 
13983   /* Return right away if a constant.  */
13984   if (kind == tcc_constant)
13985     return t;
13986 
13987   /* CALL_EXPR-like objects with variable numbers of operands are
13988      treated specially.  */
13989   if (kind == tcc_vl_exp)
13990     {
13991       if (code == CALL_EXPR)
13992 	{
13993 	  tem = fold_call_expr (loc, expr, false);
13994 	  return tem ? tem : expr;
13995 	}
13996       return expr;
13997     }
13998 
13999   if (IS_EXPR_CODE_CLASS (kind))
14000     {
14001       tree type = TREE_TYPE (t);
14002       tree op0, op1, op2;
14003 
14004       switch (TREE_CODE_LENGTH (code))
14005 	{
14006 	case 1:
14007 	  op0 = TREE_OPERAND (t, 0);
14008 	  tem = fold_unary_loc (loc, code, type, op0);
14009 	  return tem ? tem : expr;
14010 	case 2:
14011 	  op0 = TREE_OPERAND (t, 0);
14012 	  op1 = TREE_OPERAND (t, 1);
14013 	  tem = fold_binary_loc (loc, code, type, op0, op1);
14014 	  return tem ? tem : expr;
14015 	case 3:
14016 	  op0 = TREE_OPERAND (t, 0);
14017 	  op1 = TREE_OPERAND (t, 1);
14018 	  op2 = TREE_OPERAND (t, 2);
14019 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14020 	  return tem ? tem : expr;
14021 	default:
14022 	  break;
14023 	}
14024     }
14025 
14026   switch (code)
14027     {
14028     case ARRAY_REF:
14029       {
14030 	tree op0 = TREE_OPERAND (t, 0);
14031 	tree op1 = TREE_OPERAND (t, 1);
14032 
14033 	if (TREE_CODE (op1) == INTEGER_CST
14034 	    && TREE_CODE (op0) == CONSTRUCTOR
14035 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14036 	  {
14037 	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14038 	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14039 	    unsigned HOST_WIDE_INT begin = 0;
14040 
14041 	    /* Find a matching index by means of a binary search.  */
14042 	    while (begin != end)
14043 	      {
14044 		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14045 		tree index = (*elts)[middle].index;
14046 
14047 		if (TREE_CODE (index) == INTEGER_CST
14048 		    && tree_int_cst_lt (index, op1))
14049 		  begin = middle + 1;
14050 		else if (TREE_CODE (index) == INTEGER_CST
14051 			 && tree_int_cst_lt (op1, index))
14052 		  end = middle;
14053 		else if (TREE_CODE (index) == RANGE_EXPR
14054 			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14055 		  begin = middle + 1;
14056 		else if (TREE_CODE (index) == RANGE_EXPR
14057 			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14058 		  end = middle;
14059 		else
14060 		  return (*elts)[middle].value;
14061 	      }
14062 	  }
14063 
14064 	return t;
14065       }
14066 
14067       /* Return a VECTOR_CST if possible.  */
14068     case CONSTRUCTOR:
14069       {
14070 	tree type = TREE_TYPE (t);
14071 	if (TREE_CODE (type) != VECTOR_TYPE)
14072 	  return t;
14073 
14074 	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14075 	unsigned HOST_WIDE_INT idx, pos = 0;
14076 	tree value;
14077 
14078 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14079 	  {
14080 	    if (!CONSTANT_CLASS_P (value))
14081 	      return t;
14082 	    if (TREE_CODE (value) == VECTOR_CST)
14083 	      {
14084 		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14085 		  vec[pos++] = VECTOR_CST_ELT (value, i);
14086 	      }
14087 	    else
14088 	      vec[pos++] = value;
14089 	  }
14090 	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14091 	  vec[pos] = build_zero_cst (TREE_TYPE (type));
14092 
14093 	return build_vector (type, vec);
14094       }
14095 
14096     case CONST_DECL:
14097       return fold (DECL_INITIAL (t));
14098 
14099     default:
14100       return t;
14101     } /* switch (code) */
14102 }
14103 
14104 #ifdef ENABLE_FOLD_CHECKING
14105 #undef fold
14106 
14107 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14108 				hash_table<pointer_hash<const tree_node> > *);
14109 static void fold_check_failed (const_tree, const_tree);
14110 void print_fold_checksum (const_tree);
14111 
14112 /* When --enable-checking=fold is used, compute a digest of EXPR before
14113    and after the actual fold call to verify that fold did not
14114    accidentally change the original EXPR.  */
14115 
14116 tree
14117 fold (tree expr)
14118 {
14119   tree ret;
14120   struct md5_ctx ctx;
14121   unsigned char checksum_before[16], checksum_after[16];
14122   hash_table<pointer_hash<const tree_node> > ht (32);
14123 
14124   md5_init_ctx (&ctx);
14125   fold_checksum_tree (expr, &ctx, &ht);
14126   md5_finish_ctx (&ctx, checksum_before);
14127   ht.empty ();
14128 
14129   ret = fold_1 (expr);
14130 
14131   md5_init_ctx (&ctx);
14132   fold_checksum_tree (expr, &ctx, &ht);
14133   md5_finish_ctx (&ctx, checksum_after);
14134 
14135   if (memcmp (checksum_before, checksum_after, 16))
14136     fold_check_failed (expr, ret);
14137 
14138   return ret;
14139 }
14140 
14141 void
14142 print_fold_checksum (const_tree expr)
14143 {
14144   struct md5_ctx ctx;
14145   unsigned char checksum[16], cnt;
14146   hash_table<pointer_hash<const tree_node> > ht (32);
14147 
14148   md5_init_ctx (&ctx);
14149   fold_checksum_tree (expr, &ctx, &ht);
14150   md5_finish_ctx (&ctx, checksum);
14151   for (cnt = 0; cnt < 16; ++cnt)
14152     fprintf (stderr, "%02x", checksum[cnt]);
14153   putc ('\n', stderr);
14154 }
14155 
14156 static void
14157 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14158 {
14159   internal_error ("fold check: original tree changed by fold");
14160 }
14161 
14162 static void
14163 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14164 		    hash_table<pointer_hash <const tree_node> > *ht)
14165 {
14166   const tree_node **slot;
14167   enum tree_code code;
14168   union tree_node buf;
14169   int i, len;
14170 
14171  recursive_label:
14172   if (expr == NULL)
14173     return;
14174   slot = ht->find_slot (expr, INSERT);
14175   if (*slot != NULL)
14176     return;
14177   *slot = expr;
14178   code = TREE_CODE (expr);
14179   if (TREE_CODE_CLASS (code) == tcc_declaration
14180       && HAS_DECL_ASSEMBLER_NAME_P (expr))
14181     {
14182       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
14183       memcpy ((char *) &buf, expr, tree_size (expr));
14184       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14185       buf.decl_with_vis.symtab_node = NULL;
14186       expr = (tree) &buf;
14187     }
14188   else if (TREE_CODE_CLASS (code) == tcc_type
14189 	   && (TYPE_POINTER_TO (expr)
14190 	       || TYPE_REFERENCE_TO (expr)
14191 	       || TYPE_CACHED_VALUES_P (expr)
14192 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14193 	       || TYPE_NEXT_VARIANT (expr)))
14194     {
14195       /* Allow these fields to be modified.  */
14196       tree tmp;
14197       memcpy ((char *) &buf, expr, tree_size (expr));
14198       expr = tmp = (tree) &buf;
14199       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14200       TYPE_POINTER_TO (tmp) = NULL;
14201       TYPE_REFERENCE_TO (tmp) = NULL;
14202       TYPE_NEXT_VARIANT (tmp) = NULL;
14203       if (TYPE_CACHED_VALUES_P (tmp))
14204 	{
14205 	  TYPE_CACHED_VALUES_P (tmp) = 0;
14206 	  TYPE_CACHED_VALUES (tmp) = NULL;
14207 	}
14208     }
14209   md5_process_bytes (expr, tree_size (expr), ctx);
14210   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14211     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14212   if (TREE_CODE_CLASS (code) != tcc_type
14213       && TREE_CODE_CLASS (code) != tcc_declaration
14214       && code != TREE_LIST
14215       && code != SSA_NAME
14216       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14217     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14218   switch (TREE_CODE_CLASS (code))
14219     {
14220     case tcc_constant:
14221       switch (code)
14222 	{
14223 	case STRING_CST:
14224 	  md5_process_bytes (TREE_STRING_POINTER (expr),
14225 			     TREE_STRING_LENGTH (expr), ctx);
14226 	  break;
14227 	case COMPLEX_CST:
14228 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14229 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14230 	  break;
14231 	case VECTOR_CST:
14232 	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14233 	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14234 	  break;
14235 	default:
14236 	  break;
14237 	}
14238       break;
14239     case tcc_exceptional:
14240       switch (code)
14241 	{
14242 	case TREE_LIST:
14243 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14244 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14245 	  expr = TREE_CHAIN (expr);
14246 	  goto recursive_label;
14247 	  break;
14248 	case TREE_VEC:
14249 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14250 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14251 	  break;
14252 	default:
14253 	  break;
14254 	}
14255       break;
14256     case tcc_expression:
14257     case tcc_reference:
14258     case tcc_comparison:
14259     case tcc_unary:
14260     case tcc_binary:
14261     case tcc_statement:
14262     case tcc_vl_exp:
14263       len = TREE_OPERAND_LENGTH (expr);
14264       for (i = 0; i < len; ++i)
14265 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14266       break;
14267     case tcc_declaration:
14268       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14269       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14270       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14271 	{
14272 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14273 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14274 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14275 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14276 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14277 	}
14278 
14279       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14280 	{
14281 	  if (TREE_CODE (expr) == FUNCTION_DECL)
14282 	    {
14283 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14284 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14285 	    }
14286 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14287 	}
14288       break;
14289     case tcc_type:
14290       if (TREE_CODE (expr) == ENUMERAL_TYPE)
14291         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14292       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14293       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14294       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14295       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14296       if (INTEGRAL_TYPE_P (expr)
14297           || SCALAR_FLOAT_TYPE_P (expr))
14298 	{
14299 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14300 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14301 	}
14302       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14303       if (TREE_CODE (expr) == RECORD_TYPE
14304 	  || TREE_CODE (expr) == UNION_TYPE
14305 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
14306 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14307       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14308       break;
14309     default:
14310       break;
14311     }
14312 }
14313 
14314 /* Helper function for outputting the checksum of a tree T.  When
14315    debugging with gdb, you can "define mynext" to be "next" followed
14316    by "call debug_fold_checksum (op0)", then just trace down until the
14317    outputs differ.  */
14318 
14319 DEBUG_FUNCTION void
14320 debug_fold_checksum (const_tree t)
14321 {
14322   int i;
14323   unsigned char checksum[16];
14324   struct md5_ctx ctx;
14325   hash_table<pointer_hash<const tree_node> > ht (32);
14326 
14327   md5_init_ctx (&ctx);
14328   fold_checksum_tree (t, &ctx, &ht);
14329   md5_finish_ctx (&ctx, checksum);
14330   ht.empty ();
14331 
14332   for (i = 0; i < 16; i++)
14333     fprintf (stderr, "%d ", checksum[i]);
14334 
14335   fprintf (stderr, "\n");
14336 }
14337 
14338 #endif
14339 
14340 /* Fold a unary tree expression with code CODE of type TYPE with an
14341    operand OP0.  LOC is the location of the resulting expression.
14342    Return a folded expression if successful.  Otherwise, return a tree
14343    expression with code CODE of type TYPE with an operand OP0.  */
14344 
14345 tree
14346 fold_build1_stat_loc (location_t loc,
14347 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14348 {
14349   tree tem;
14350 #ifdef ENABLE_FOLD_CHECKING
14351   unsigned char checksum_before[16], checksum_after[16];
14352   struct md5_ctx ctx;
14353   hash_table<pointer_hash<const tree_node> > ht (32);
14354 
14355   md5_init_ctx (&ctx);
14356   fold_checksum_tree (op0, &ctx, &ht);
14357   md5_finish_ctx (&ctx, checksum_before);
14358   ht.empty ();
14359 #endif
14360 
14361   tem = fold_unary_loc (loc, code, type, op0);
14362   if (!tem)
14363     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14364 
14365 #ifdef ENABLE_FOLD_CHECKING
14366   md5_init_ctx (&ctx);
14367   fold_checksum_tree (op0, &ctx, &ht);
14368   md5_finish_ctx (&ctx, checksum_after);
14369 
14370   if (memcmp (checksum_before, checksum_after, 16))
14371     fold_check_failed (op0, tem);
14372 #endif
14373   return tem;
14374 }
14375 
14376 /* Fold a binary tree expression with code CODE of type TYPE with
14377    operands OP0 and OP1.  LOC is the location of the resulting
14378    expression.  Return a folded expression if successful.  Otherwise,
14379    return a tree expression with code CODE of type TYPE with operands
14380    OP0 and OP1.  */
14381 
14382 tree
14383 fold_build2_stat_loc (location_t loc,
14384 		      enum tree_code code, tree type, tree op0, tree op1
14385 		      MEM_STAT_DECL)
14386 {
14387   tree tem;
14388 #ifdef ENABLE_FOLD_CHECKING
14389   unsigned char checksum_before_op0[16],
14390                 checksum_before_op1[16],
14391 		checksum_after_op0[16],
14392 		checksum_after_op1[16];
14393   struct md5_ctx ctx;
14394   hash_table<pointer_hash<const tree_node> > ht (32);
14395 
14396   md5_init_ctx (&ctx);
14397   fold_checksum_tree (op0, &ctx, &ht);
14398   md5_finish_ctx (&ctx, checksum_before_op0);
14399   ht.empty ();
14400 
14401   md5_init_ctx (&ctx);
14402   fold_checksum_tree (op1, &ctx, &ht);
14403   md5_finish_ctx (&ctx, checksum_before_op1);
14404   ht.empty ();
14405 #endif
14406 
14407   tem = fold_binary_loc (loc, code, type, op0, op1);
14408   if (!tem)
14409     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14410 
14411 #ifdef ENABLE_FOLD_CHECKING
14412   md5_init_ctx (&ctx);
14413   fold_checksum_tree (op0, &ctx, &ht);
14414   md5_finish_ctx (&ctx, checksum_after_op0);
14415   ht.empty ();
14416 
14417   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14418     fold_check_failed (op0, tem);
14419 
14420   md5_init_ctx (&ctx);
14421   fold_checksum_tree (op1, &ctx, &ht);
14422   md5_finish_ctx (&ctx, checksum_after_op1);
14423 
14424   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14425     fold_check_failed (op1, tem);
14426 #endif
14427   return tem;
14428 }
14429 
14430 /* Fold a ternary tree expression with code CODE of type TYPE with
14431    operands OP0, OP1, and OP2.  Return a folded expression if
14432    successful.  Otherwise, return a tree expression with code CODE of
14433    type TYPE with operands OP0, OP1, and OP2.  */
14434 
14435 tree
14436 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14437 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14438 {
14439   tree tem;
14440 #ifdef ENABLE_FOLD_CHECKING
14441   unsigned char checksum_before_op0[16],
14442                 checksum_before_op1[16],
14443                 checksum_before_op2[16],
14444 		checksum_after_op0[16],
14445 		checksum_after_op1[16],
14446 		checksum_after_op2[16];
14447   struct md5_ctx ctx;
14448   hash_table<pointer_hash<const tree_node> > ht (32);
14449 
14450   md5_init_ctx (&ctx);
14451   fold_checksum_tree (op0, &ctx, &ht);
14452   md5_finish_ctx (&ctx, checksum_before_op0);
14453   ht.empty ();
14454 
14455   md5_init_ctx (&ctx);
14456   fold_checksum_tree (op1, &ctx, &ht);
14457   md5_finish_ctx (&ctx, checksum_before_op1);
14458   ht.empty ();
14459 
14460   md5_init_ctx (&ctx);
14461   fold_checksum_tree (op2, &ctx, &ht);
14462   md5_finish_ctx (&ctx, checksum_before_op2);
14463   ht.empty ();
14464 #endif
14465 
14466   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14467   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14468   if (!tem)
14469     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14470 
14471 #ifdef ENABLE_FOLD_CHECKING
14472   md5_init_ctx (&ctx);
14473   fold_checksum_tree (op0, &ctx, &ht);
14474   md5_finish_ctx (&ctx, checksum_after_op0);
14475   ht.empty ();
14476 
14477   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14478     fold_check_failed (op0, tem);
14479 
14480   md5_init_ctx (&ctx);
14481   fold_checksum_tree (op1, &ctx, &ht);
14482   md5_finish_ctx (&ctx, checksum_after_op1);
14483   ht.empty ();
14484 
14485   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14486     fold_check_failed (op1, tem);
14487 
14488   md5_init_ctx (&ctx);
14489   fold_checksum_tree (op2, &ctx, &ht);
14490   md5_finish_ctx (&ctx, checksum_after_op2);
14491 
14492   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14493     fold_check_failed (op2, tem);
14494 #endif
14495   return tem;
14496 }
14497 
14498 /* Fold a CALL_EXPR of type TYPE calling function FN, with the NARGS
14499    arguments in ARGARRAY and a null static chain.
14500    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
14501    of type TYPE from the given operands as constructed by build_call_array.  */
14502 
14503 tree
14504 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14505 			   int nargs, tree *argarray)
14506 {
14507   tree tem;
14508 #ifdef ENABLE_FOLD_CHECKING
14509   unsigned char checksum_before_fn[16],
14510                 checksum_before_arglist[16],
14511 		checksum_after_fn[16],
14512 		checksum_after_arglist[16];
14513   struct md5_ctx ctx;
14514   hash_table<pointer_hash<const tree_node> > ht (32);
14515   int i;
14516 
14517   md5_init_ctx (&ctx);
14518   fold_checksum_tree (fn, &ctx, &ht);
14519   md5_finish_ctx (&ctx, checksum_before_fn);
14520   ht.empty ();
14521 
14522   md5_init_ctx (&ctx);
14523   for (i = 0; i < nargs; i++)
14524     fold_checksum_tree (argarray[i], &ctx, &ht);
14525   md5_finish_ctx (&ctx, checksum_before_arglist);
14526   ht.empty ();
14527 #endif
14528 
14529   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14530   if (!tem)
14531     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14532 
14533 #ifdef ENABLE_FOLD_CHECKING
14534   md5_init_ctx (&ctx);
14535   fold_checksum_tree (fn, &ctx, &ht);
14536   md5_finish_ctx (&ctx, checksum_after_fn);
14537   ht.empty ();
14538 
14539   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14540     fold_check_failed (fn, tem);
14541 
14542   md5_init_ctx (&ctx);
14543   for (i = 0; i < nargs; i++)
14544     fold_checksum_tree (argarray[i], &ctx, &ht);
14545   md5_finish_ctx (&ctx, checksum_after_arglist);
14546 
14547   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14548     fold_check_failed (NULL_TREE, tem);
14549 #endif
14550   return tem;
14551 }
14552 
14553 /* Perform constant folding and related simplification of initializer
14554    expression EXPR.  The functions below behave like "fold_buildN" but
14555    ignore potential run-time traps and exceptions that fold must preserve.  */
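
/* Illustrative: in a static initializer a constant expression such as
   10.0 / 3.0 can be folded even with -frounding-math or -ftrapping-math
   in effect, because an initializer is evaluated once at translation
   time rather than at run time.  */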
14556 
14557 #define START_FOLD_INIT \
14558   int saved_signaling_nans = flag_signaling_nans;\
14559   int saved_trapping_math = flag_trapping_math;\
14560   int saved_rounding_math = flag_rounding_math;\
14561   int saved_trapv = flag_trapv;\
14562   int saved_folding_initializer = folding_initializer;\
14563   flag_signaling_nans = 0;\
14564   flag_trapping_math = 0;\
14565   flag_rounding_math = 0;\
14566   flag_trapv = 0;\
14567   folding_initializer = 1;
14568 
14569 #define END_FOLD_INIT \
14570   flag_signaling_nans = saved_signaling_nans;\
14571   flag_trapping_math = saved_trapping_math;\
14572   flag_rounding_math = saved_rounding_math;\
14573   flag_trapv = saved_trapv;\
14574   folding_initializer = saved_folding_initializer;
14575 
14576 tree
14577 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14578 			     tree type, tree op)
14579 {
14580   tree result;
14581   START_FOLD_INIT;
14582 
14583   result = fold_build1_loc (loc, code, type, op);
14584 
14585   END_FOLD_INIT;
14586   return result;
14587 }
14588 
14589 tree
14590 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14591 			     tree type, tree op0, tree op1)
14592 {
14593   tree result;
14594   START_FOLD_INIT;
14595 
14596   result = fold_build2_loc (loc, code, type, op0, op1);
14597 
14598   END_FOLD_INIT;
14599   return result;
14600 }
14601 
14602 tree
14603 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14604 				       int nargs, tree *argarray)
14605 {
14606   tree result;
14607   START_FOLD_INIT;
14608 
14609   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14610 
14611   END_FOLD_INIT;
14612   return result;
14613 }
14614 
14615 #undef START_FOLD_INIT
14616 #undef END_FOLD_INIT
14617 
14618 /* Determine if the first argument is a multiple of the second argument.
14619    Return 0 if it is not, or if we cannot easily determine that it is.
14620 
14621    An example of the sort of thing we care about (at this point; this routine
14622    could surely be made more general, and expanded to do what the *_DIV_EXPR's
14623    fold cases do now) is discovering that
14624 
14625      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14626 
14627    is a multiple of
14628 
14629      SAVE_EXPR (J * 8)
14630 
14631    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14632 
14633    This code also handles discovering that
14634 
14635      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14636 
14637    is a multiple of 8 so we don't have to worry about dealing with a
14638    possible remainder.
14639 
14640    Note that we *look* inside a SAVE_EXPR only to determine how it was
14641    calculated; it is not safe for fold to do much of anything else with the
14642    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14643    at run time.  For example, the latter example above *cannot* be implemented
14644    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14645    evaluation time of the original SAVE_EXPR is not necessarily the same at
14646    the time the new expression is evaluated.  The only optimization of this
14647    sort that would be valid is changing
14648 
14649      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14650 
14651    divided by 8 to
14652 
14653      SAVE_EXPR (I) * SAVE_EXPR (J)
14654 
14655    (where the same SAVE_EXPR (J) is used in the original and the
14656    transformed version).  */
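
/* Illustrative: multiple_of_p (sizetype, J * 8, 8) returns 1 through the
   MULT_EXPR case, because the constant operand 8 is trivially a multiple
   of BOTTOM.  */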
14657 
14658 int
14659 multiple_of_p (tree type, const_tree top, const_tree bottom)
14660 {
14661   if (operand_equal_p (top, bottom, 0))
14662     return 1;
14663 
14664   if (TREE_CODE (type) != INTEGER_TYPE)
14665     return 0;
14666 
14667   switch (TREE_CODE (top))
14668     {
14669     case BIT_AND_EXPR:
14670       /* An AND with a multiple of the power-of-two BOTTOM still has the
14671	 corresponding low bits clear, so TOP is a multiple of BOTTOM.  */
14672       if (!integer_pow2p (bottom))
14673 	return 0;
14674       /* FALLTHRU */
14675 
14676     case MULT_EXPR:
14677       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14678 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14679 
14680     case PLUS_EXPR:
14681     case MINUS_EXPR:
14682       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14683 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14684 
14685     case LSHIFT_EXPR:
14686       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14687 	{
14688 	  tree op1, t1;
14689 
14690 	  op1 = TREE_OPERAND (top, 1);
14691 	  /* const_binop may not detect overflow correctly,
14692 	     so check for it explicitly here.  */
14693 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14694 	      && 0 != (t1 = fold_convert (type,
14695 					  const_binop (LSHIFT_EXPR,
14696 						       size_one_node,
14697 						       op1)))
14698 	      && !TREE_OVERFLOW (t1))
14699 	    return multiple_of_p (type, t1, bottom);
14700 	}
14701       return 0;
14702 
14703     case NOP_EXPR:
14704       /* Can't handle conversions from non-integral or wider integral type.  */
14705       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14706 	  || (TYPE_PRECISION (type)
14707 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14708 	return 0;
14709 
14710       /* ... fall through ...  */
14711 
14712     case SAVE_EXPR:
14713       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14714 
14715     case COND_EXPR:
14716       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14717 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14718 
14719     case INTEGER_CST:
14720       if (TREE_CODE (bottom) != INTEGER_CST
14721 	  || integer_zerop (bottom)
14722 	  || (TYPE_UNSIGNED (type)
14723 	      && (tree_int_cst_sgn (top) < 0
14724 		  || tree_int_cst_sgn (bottom) < 0)))
14725 	return 0;
14726       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14727 				SIGNED);
14728 
14729     default:
14730       return 0;
14731     }
14732 }
14733 
14734 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
14735 
14736 static bool
14737 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14738 {
14739   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14740       && truth_value_p (code))
14741     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14742        have a signed:1 type (where the values are -1 and 0).  */
14743     return true;
14744   return false;
14745 }
14746 
14747 /* Return true if (CODE OP0) is known to be non-negative.  If the return
14748    value is based on the assumption that signed overflow is undefined,
14749    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14750    *STRICT_OVERFLOW_P.  */
14751 
14752 bool
14753 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14754 				bool *strict_overflow_p)
14755 {
14756   if (TYPE_UNSIGNED (type))
14757     return true;
14758 
14759   switch (code)
14760     {
14761     case ABS_EXPR:
14762       /* We can't return 1 if flag_wrapv is set because
14763 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
14764       if (!INTEGRAL_TYPE_P (type))
14765 	return true;
14766       if (TYPE_OVERFLOW_UNDEFINED (type))
14767 	{
14768 	  *strict_overflow_p = true;
14769 	  return true;
14770 	}
14771       break;
14772 
14773     case NON_LVALUE_EXPR:
14774     case FLOAT_EXPR:
14775     case FIX_TRUNC_EXPR:
14776       return tree_expr_nonnegative_warnv_p (op0,
14777 					    strict_overflow_p);
14778 
14779     CASE_CONVERT:
14780       {
14781 	tree inner_type = TREE_TYPE (op0);
14782 	tree outer_type = type;
14783 
14784 	if (TREE_CODE (outer_type) == REAL_TYPE)
14785 	  {
14786 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14787 	      return tree_expr_nonnegative_warnv_p (op0,
14788 						    strict_overflow_p);
14789 	    if (INTEGRAL_TYPE_P (inner_type))
14790 	      {
14791 		if (TYPE_UNSIGNED (inner_type))
14792 		  return true;
14793 		return tree_expr_nonnegative_warnv_p (op0,
14794 						      strict_overflow_p);
14795 	      }
14796 	  }
14797 	else if (INTEGRAL_TYPE_P (outer_type))
14798 	  {
14799 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14800 	      return tree_expr_nonnegative_warnv_p (op0,
14801 						    strict_overflow_p);
14802 	    if (INTEGRAL_TYPE_P (inner_type))
14803 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14804 		      && TYPE_UNSIGNED (inner_type);
14805 	  }
14806       }
14807       break;
14808 
14809     default:
14810       return tree_simple_nonnegative_warnv_p (code, type);
14811     }
14812 
14813   /* We don't know sign of `t', so be conservative and return false.  */
14814   return false;
14815 }
14816 
14817 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14818    value is based on the assumption that signed overflow is undefined,
14819    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14820    *STRICT_OVERFLOW_P.  */
14821 
14822 bool
14823 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14824 				      tree op1, bool *strict_overflow_p)
14825 {
14826   if (TYPE_UNSIGNED (type))
14827     return true;
14828 
14829   switch (code)
14830     {
14831     case POINTER_PLUS_EXPR:
14832     case PLUS_EXPR:
14833       if (FLOAT_TYPE_P (type))
14834 	return (tree_expr_nonnegative_warnv_p (op0,
14835 					       strict_overflow_p)
14836 		&& tree_expr_nonnegative_warnv_p (op1,
14837 						  strict_overflow_p));
14838 
14839       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14840 	 both unsigned and at least 2 bits shorter than the result.  */
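      /* Illustrative: (int) (unsigned char) x + (int) (unsigned char) y
	 is at most 255 + 255 == 510, which cannot reach the sign bit of
	 a 32-bit int.  */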
14841       if (TREE_CODE (type) == INTEGER_TYPE
14842 	  && TREE_CODE (op0) == NOP_EXPR
14843 	  && TREE_CODE (op1) == NOP_EXPR)
14844 	{
14845 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14846 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14847 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14848 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14849 	    {
14850 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
14851 				       TYPE_PRECISION (inner2)) + 1;
14852 	      return prec < TYPE_PRECISION (type);
14853 	    }
14854 	}
14855       break;
14856 
14857     case MULT_EXPR:
14858       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14859 	{
14860 	  /* x * x is always non-negative for floating point x
14861 	     or without overflow.  */
14862 	  if (operand_equal_p (op0, op1, 0)
14863 	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14864 		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14865 	    {
14866 	      if (ANY_INTEGRAL_TYPE_P (type)
14867 		  && TYPE_OVERFLOW_UNDEFINED (type))
14868 		*strict_overflow_p = true;
14869 	      return true;
14870 	    }
14871 	}
14872 
14873       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14874 	 both unsigned and their combined precision is less than the result's.  */
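      /* Illustrative: (int) (unsigned char) x * (int) (unsigned char) y
	 is at most 255 * 255 == 65025, which needs only 8 + 8 == 16 bits,
	 so the 32-bit product cannot be negative.  */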
14875       if (TREE_CODE (type) == INTEGER_TYPE
14876 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14877 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14878 	{
14879 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14880 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
14881 	    : TREE_TYPE (op0);
14882 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14883 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
14884 	    : TREE_TYPE (op1);
14885 
14886 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
14887 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
14888 
14889 	  if (TREE_CODE (op0) == INTEGER_CST)
14890 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14891 
14892 	  if (TREE_CODE (op1) == INTEGER_CST)
14893 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14894 
14895 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14896 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14897 	    {
14898 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14899 		? tree_int_cst_min_precision (op0, UNSIGNED)
14900 		: TYPE_PRECISION (inner0);
14901 
14902 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14903 		? tree_int_cst_min_precision (op1, UNSIGNED)
14904 		: TYPE_PRECISION (inner1);
14905 
14906 	      return precision0 + precision1 < TYPE_PRECISION (type);
14907 	    }
14908 	}
14909       return false;
14910 
14911     case BIT_AND_EXPR:
14912     case MAX_EXPR:
14913       return (tree_expr_nonnegative_warnv_p (op0,
14914 					     strict_overflow_p)
14915 	      || tree_expr_nonnegative_warnv_p (op1,
14916 						strict_overflow_p));
14917 
14918     case BIT_IOR_EXPR:
14919     case BIT_XOR_EXPR:
14920     case MIN_EXPR:
14921     case RDIV_EXPR:
14922     case TRUNC_DIV_EXPR:
14923     case CEIL_DIV_EXPR:
14924     case FLOOR_DIV_EXPR:
14925     case ROUND_DIV_EXPR:
14926       return (tree_expr_nonnegative_warnv_p (op0,
14927 					     strict_overflow_p)
14928 	      && tree_expr_nonnegative_warnv_p (op1,
14929 						strict_overflow_p));
14930 
14931     case TRUNC_MOD_EXPR:
14932     case CEIL_MOD_EXPR:
14933     case FLOOR_MOD_EXPR:
14934     case ROUND_MOD_EXPR:
14935       return tree_expr_nonnegative_warnv_p (op0,
14936 					    strict_overflow_p);
14937     default:
14938       return tree_simple_nonnegative_warnv_p (code, type);
14939     }
14940 
14941   /* We don't know sign of `t', so be conservative and return false.  */
14942   return false;
14943 }
14944 
14945 /* Return true if T is known to be non-negative.  If the return
14946    value is based on the assumption that signed overflow is undefined,
14947    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14948    *STRICT_OVERFLOW_P.  */
14949 
14950 bool
14951 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14952 {
14953   if (TYPE_UNSIGNED (TREE_TYPE (t)))
14954     return true;
14955 
14956   switch (TREE_CODE (t))
14957     {
14958     case INTEGER_CST:
14959       return tree_int_cst_sgn (t) >= 0;
14960 
14961     case REAL_CST:
14962       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14963 
14964     case FIXED_CST:
14965       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14966 
14967     case COND_EXPR:
14968       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14969 					     strict_overflow_p)
14970 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14971 						strict_overflow_p));
14972     default:
14973       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14974 						   TREE_TYPE (t));
14975     }
14976   /* We don't know sign of `t', so be conservative and return false.  */
14977   return false;
14978 }
14979 
14980 /* Return true if T is known to be non-negative.  If the return
14981    value is based on the assumption that signed overflow is undefined,
14982    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14983    *STRICT_OVERFLOW_P.  */
14984 
14985 bool
14986 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14987 			       tree arg0, tree arg1, bool *strict_overflow_p)
14988 {
14989   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14990     switch (DECL_FUNCTION_CODE (fndecl))
14991       {
14992 	CASE_FLT_FN (BUILT_IN_ACOS):
14993 	CASE_FLT_FN (BUILT_IN_ACOSH):
14994 	CASE_FLT_FN (BUILT_IN_CABS):
14995 	CASE_FLT_FN (BUILT_IN_COSH):
14996 	CASE_FLT_FN (BUILT_IN_ERFC):
14997 	CASE_FLT_FN (BUILT_IN_EXP):
14998 	CASE_FLT_FN (BUILT_IN_EXP10):
14999 	CASE_FLT_FN (BUILT_IN_EXP2):
15000 	CASE_FLT_FN (BUILT_IN_FABS):
15001 	CASE_FLT_FN (BUILT_IN_FDIM):
15002 	CASE_FLT_FN (BUILT_IN_HYPOT):
15003 	CASE_FLT_FN (BUILT_IN_POW10):
15004 	CASE_INT_FN (BUILT_IN_FFS):
15005 	CASE_INT_FN (BUILT_IN_PARITY):
15006 	CASE_INT_FN (BUILT_IN_POPCOUNT):
15007 	CASE_INT_FN (BUILT_IN_CLZ):
15008 	CASE_INT_FN (BUILT_IN_CLRSB):
15009       case BUILT_IN_BSWAP32:
15010       case BUILT_IN_BSWAP64:
15011 	/* Always true.  */
15012 	return true;
15013 
15014 	CASE_FLT_FN (BUILT_IN_SQRT):
15015 	/* sqrt(-0.0) is -0.0.  */
15016 	if (!HONOR_SIGNED_ZEROS (element_mode (type)))
15017 	  return true;
15018 	return tree_expr_nonnegative_warnv_p (arg0,
15019 					      strict_overflow_p);
15020 
15021 	CASE_FLT_FN (BUILT_IN_ASINH):
15022 	CASE_FLT_FN (BUILT_IN_ATAN):
15023 	CASE_FLT_FN (BUILT_IN_ATANH):
15024 	CASE_FLT_FN (BUILT_IN_CBRT):
15025 	CASE_FLT_FN (BUILT_IN_CEIL):
15026 	CASE_FLT_FN (BUILT_IN_ERF):
15027 	CASE_FLT_FN (BUILT_IN_EXPM1):
15028 	CASE_FLT_FN (BUILT_IN_FLOOR):
15029 	CASE_FLT_FN (BUILT_IN_FMOD):
15030 	CASE_FLT_FN (BUILT_IN_FREXP):
15031 	CASE_FLT_FN (BUILT_IN_ICEIL):
15032 	CASE_FLT_FN (BUILT_IN_IFLOOR):
15033 	CASE_FLT_FN (BUILT_IN_IRINT):
15034 	CASE_FLT_FN (BUILT_IN_IROUND):
15035 	CASE_FLT_FN (BUILT_IN_LCEIL):
15036 	CASE_FLT_FN (BUILT_IN_LDEXP):
15037 	CASE_FLT_FN (BUILT_IN_LFLOOR):
15038 	CASE_FLT_FN (BUILT_IN_LLCEIL):
15039 	CASE_FLT_FN (BUILT_IN_LLFLOOR):
15040 	CASE_FLT_FN (BUILT_IN_LLRINT):
15041 	CASE_FLT_FN (BUILT_IN_LLROUND):
15042 	CASE_FLT_FN (BUILT_IN_LRINT):
15043 	CASE_FLT_FN (BUILT_IN_LROUND):
15044 	CASE_FLT_FN (BUILT_IN_MODF):
15045 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
15046 	CASE_FLT_FN (BUILT_IN_RINT):
15047 	CASE_FLT_FN (BUILT_IN_ROUND):
15048 	CASE_FLT_FN (BUILT_IN_SCALB):
15049 	CASE_FLT_FN (BUILT_IN_SCALBLN):
15050 	CASE_FLT_FN (BUILT_IN_SCALBN):
15051 	CASE_FLT_FN (BUILT_IN_SIGNBIT):
15052 	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15053 	CASE_FLT_FN (BUILT_IN_SINH):
15054 	CASE_FLT_FN (BUILT_IN_TANH):
15055 	CASE_FLT_FN (BUILT_IN_TRUNC):
15056 	/* True if the 1st argument is nonnegative.  */
15057 	return tree_expr_nonnegative_warnv_p (arg0,
15058 					      strict_overflow_p);
15059 
15060 	CASE_FLT_FN (BUILT_IN_FMAX):
15061 	/* True if the 1st OR 2nd arguments are nonnegative.  */
15062 	return (tree_expr_nonnegative_warnv_p (arg0,
15063 					       strict_overflow_p)
15064 		|| (tree_expr_nonnegative_warnv_p (arg1,
15065 						   strict_overflow_p)));
15066 
15067 	CASE_FLT_FN (BUILT_IN_FMIN):
15068 	/* True if the 1st AND 2nd arguments are nonnegative.  */
15069 	return (tree_expr_nonnegative_warnv_p (arg0,
15070 					       strict_overflow_p)
15071 		&& (tree_expr_nonnegative_warnv_p (arg1,
15072 						   strict_overflow_p)));
15073 
15074 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
15075 	/* True if the 2nd argument is nonnegative.  */
15076 	return tree_expr_nonnegative_warnv_p (arg1,
15077 					      strict_overflow_p);
15078 
15079 	CASE_FLT_FN (BUILT_IN_POWI):
15080 	/* True if the 1st argument is nonnegative or the second
15081 	   argument is an even integer.  */
15082 	if (TREE_CODE (arg1) == INTEGER_CST
15083 	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15084 	  return true;
15085 	return tree_expr_nonnegative_warnv_p (arg0,
15086 					      strict_overflow_p);
15087 
15088 	CASE_FLT_FN (BUILT_IN_POW):
15089 	/* True if the 1st argument is nonnegative or the second
15090 	   argument is an even integer-valued real.  */
15091 	if (TREE_CODE (arg1) == REAL_CST)
15092 	  {
15093 	    REAL_VALUE_TYPE c;
15094 	    HOST_WIDE_INT n;
15095 
15096 	    c = TREE_REAL_CST (arg1);
15097 	    n = real_to_integer (&c);
15098 	    if ((n & 1) == 0)
15099 	      {
15100 		REAL_VALUE_TYPE cint;
15101 		real_from_integer (&cint, VOIDmode, n, SIGNED);
15102 		if (real_identical (&c, &cint))
15103 		  return true;
15104 	      }
15105 	  }
15106 	return tree_expr_nonnegative_warnv_p (arg0,
15107 					      strict_overflow_p);
15108 
15109       default:
15110 	break;
15111       }
15112   return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15113 					  type);
15114 }
15115 
15116 /* Return true if T is known to be non-negative.  If the return
15117    value is based on the assumption that signed overflow is undefined,
15118    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15119    *STRICT_OVERFLOW_P.  */
15120 
15121 static bool
15122 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15123 {
15124   enum tree_code code = TREE_CODE (t);
15125   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15126     return true;
15127 
15128   switch (code)
15129     {
15130     case TARGET_EXPR:
15131       {
15132 	tree temp = TARGET_EXPR_SLOT (t);
15133 	t = TARGET_EXPR_INITIAL (t);
15134 
15135 	/* If the initializer is non-void, then it's a normal expression
15136 	   that will be assigned to the slot.  */
15137 	if (!VOID_TYPE_P (t))
15138 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15139 
15140 	/* Otherwise, the initializer sets the slot in some way.  One common
15141 	   way is an assignment statement at the end of the initializer.  */
15142 	while (1)
15143 	  {
15144 	    if (TREE_CODE (t) == BIND_EXPR)
15145 	      t = expr_last (BIND_EXPR_BODY (t));
15146 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15147 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15148 	      t = expr_last (TREE_OPERAND (t, 0));
15149 	    else if (TREE_CODE (t) == STATEMENT_LIST)
15150 	      t = expr_last (t);
15151 	    else
15152 	      break;
15153 	  }
15154 	if (TREE_CODE (t) == MODIFY_EXPR
15155 	    && TREE_OPERAND (t, 0) == temp)
15156 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15157 						strict_overflow_p);
15158 
15159 	return false;
15160       }
15161 
15162     case CALL_EXPR:
15163       {
15164 	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15165 	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15166 
15167 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15168 					      get_callee_fndecl (t),
15169 					      arg0,
15170 					      arg1,
15171 					      strict_overflow_p);
15172       }
15173     case COMPOUND_EXPR:
15174     case MODIFY_EXPR:
15175       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15176 					    strict_overflow_p);
15177     case BIND_EXPR:
15178       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15179 					    strict_overflow_p);
15180     case SAVE_EXPR:
15181       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15182 					    strict_overflow_p);
15183 
15184     default:
15185       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15186 					       TREE_TYPE (t));
15187     }
15188 
15189   /* We don't know the sign of `t', so be conservative and return false.  */
15190   return false;
15191 }
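
/* For example, a TARGET_EXPR whose initializer is a STATEMENT_LIST
   ending in "slot = <expr>" is known nonnegative exactly when <expr>
   is: the loop above walks BIND_EXPRs, TRY blocks and statement lists
   to find that final assignment to the slot.  */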
15192 
15193 /* Return true if T is known to be non-negative.  If the return
15194    value is based on the assumption that signed overflow is undefined,
15195    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15196    *STRICT_OVERFLOW_P.  */
15197 
15198 bool
15199 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15200 {
15201   enum tree_code code;
15202   if (t == error_mark_node)
15203     return false;
15204 
15205   code = TREE_CODE (t);
15206   switch (TREE_CODE_CLASS (code))
15207     {
15208     case tcc_binary:
15209     case tcc_comparison:
15210       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15211 					      TREE_TYPE (t),
15212 					      TREE_OPERAND (t, 0),
15213 					      TREE_OPERAND (t, 1),
15214 					      strict_overflow_p);
15215 
15216     case tcc_unary:
15217       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15218 					     TREE_TYPE (t),
15219 					     TREE_OPERAND (t, 0),
15220 					     strict_overflow_p);
15221 
15222     case tcc_constant:
15223     case tcc_declaration:
15224     case tcc_reference:
15225       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15226 
15227     default:
15228       break;
15229     }
15230 
15231   switch (code)
15232     {
15233     case TRUTH_AND_EXPR:
15234     case TRUTH_OR_EXPR:
15235     case TRUTH_XOR_EXPR:
15236       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15237 					      TREE_TYPE (t),
15238 					      TREE_OPERAND (t, 0),
15239 					      TREE_OPERAND (t, 1),
15240 					      strict_overflow_p);
15241     case TRUTH_NOT_EXPR:
15242       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15243 					     TREE_TYPE (t),
15244 					     TREE_OPERAND (t, 0),
15245 					     strict_overflow_p);
15246 
15247     case COND_EXPR:
15248     case CONSTRUCTOR:
15249     case OBJ_TYPE_REF:
15250     case ASSERT_EXPR:
15251     case ADDR_EXPR:
15252     case WITH_SIZE_EXPR:
15253     case SSA_NAME:
15254       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15255 
15256     default:
15257       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15258     }
15259 }
15260 
15261 /* Return true if `t' is known to be non-negative.  Handle warnings
15262    about undefined signed overflow.  */
15263 
15264 bool
15265 tree_expr_nonnegative_p (tree t)
15266 {
15267   bool ret, strict_overflow_p;
15268 
15269   strict_overflow_p = false;
15270   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15271   if (strict_overflow_p)
15272     fold_overflow_warning (("assuming signed overflow does not occur when "
15273 			    "determining that expression is always "
15274 			    "non-negative"),
15275 			   WARN_STRICT_OVERFLOW_MISC);
15276   return ret;
15277 }
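
/* A minimal usage sketch (the helper name below is hypothetical and
   not part of the original file): an unsigned constant is trivially
   known to be nonnegative, so the query returns true and no
   strict-overflow warning is emitted.  */

static bool ATTRIBUTE_UNUSED
tree_expr_nonnegative_example (void)
{
  tree x = build_int_cst (unsigned_type_node, 42);
  return tree_expr_nonnegative_p (x);
}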
15278 
15279 
15280 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15281    For floating point we further ensure that T is not denormal.
15282    Similar logic is present in nonzero_address in rtlanal.c.
15283 
15284    If the return value is based on the assumption that signed overflow
15285    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15286    change *STRICT_OVERFLOW_P.  */
15287 
15288 bool
15289 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15290 			    bool *strict_overflow_p)
15291 {
15292   switch (code)
15293     {
15294     case ABS_EXPR:
15295       return tree_expr_nonzero_warnv_p (op0,
15296 					strict_overflow_p);
15297 
15298     case NOP_EXPR:
15299       {
15300 	tree inner_type = TREE_TYPE (op0);
15301 	tree outer_type = type;
15302 
15303 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15304 		&& tree_expr_nonzero_warnv_p (op0,
15305 					      strict_overflow_p));
15306       }
15307       break;
15308 
15309     case NON_LVALUE_EXPR:
15310       return tree_expr_nonzero_warnv_p (op0,
15311 					strict_overflow_p);
15312 
15313     default:
15314       break;
15315     }
15316 
15317   return false;
15318 }
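
/* For example, a widening NOP_EXPR such as (long) x preserves the
   nonzero property of x, but a narrowing conversion does not:
   truncating the int 0x100 to an unsigned char yields 0.  Hence the
   precision check in the NOP_EXPR case above.  */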
15319 
15320 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15321    For floating point we further ensure that T is not denormal.
15322    Similar logic is present in nonzero_address in rtlanal.c.
15323 
15324    If the return value is based on the assumption that signed overflow
15325    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15326    change *STRICT_OVERFLOW_P.  */
15327 
15328 bool
15329 tree_binary_nonzero_warnv_p (enum tree_code code,
15330 			     tree type,
15331 			     tree op0,
15332 			     tree op1, bool *strict_overflow_p)
15333 {
15334   bool sub_strict_overflow_p;
15335   switch (code)
15336     {
15337     case POINTER_PLUS_EXPR:
15338     case PLUS_EXPR:
15339       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15340 	{
15341 	  /* In the presence of negative values it is hard
15342 	     to say anything definite.  */
15343 	  sub_strict_overflow_p = false;
15344 	  if (!tree_expr_nonnegative_warnv_p (op0,
15345 					      &sub_strict_overflow_p)
15346 	      || !tree_expr_nonnegative_warnv_p (op1,
15347 						 &sub_strict_overflow_p))
15348 	    return false;
15349 	  /* One of the operands must be positive and the other non-negative.  */
15350 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15351 	     overflows, on a twos-complement machine the sum of two
15352 	     nonnegative numbers can never be zero.  */
15353 	  return (tree_expr_nonzero_warnv_p (op0,
15354 					     strict_overflow_p)
15355 		  || tree_expr_nonzero_warnv_p (op1,
15356 						strict_overflow_p));
15357 	}
15358       break;
15359 
15360     case MULT_EXPR:
15361       if (TYPE_OVERFLOW_UNDEFINED (type))
15362 	{
15363 	  if (tree_expr_nonzero_warnv_p (op0,
15364 					 strict_overflow_p)
15365 	      && tree_expr_nonzero_warnv_p (op1,
15366 					    strict_overflow_p))
15367 	    {
15368 	      *strict_overflow_p = true;
15369 	      return true;
15370 	    }
15371 	}
15372       break;
15373 
15374     case MIN_EXPR:
15375       sub_strict_overflow_p = false;
15376       if (tree_expr_nonzero_warnv_p (op0,
15377 				     &sub_strict_overflow_p)
15378 	  && tree_expr_nonzero_warnv_p (op1,
15379 					&sub_strict_overflow_p))
15380 	{
15381 	  if (sub_strict_overflow_p)
15382 	    *strict_overflow_p = true;
	  /* MIN is always one of its operands, so if both are nonzero
	     the minimum is nonzero as well.  */
	  return true;
15383 	}
15384       break;
15385 
15386     case MAX_EXPR:
15387       sub_strict_overflow_p = false;
15388       if (tree_expr_nonzero_warnv_p (op0,
15389 				     &sub_strict_overflow_p))
15390 	{
15391 	  if (sub_strict_overflow_p)
15392 	    *strict_overflow_p = true;
15393 
15394 	  /* When both operands are nonzero, then MAX must be too.  */
15395 	  if (tree_expr_nonzero_warnv_p (op1,
15396 					 strict_overflow_p))
15397 	    return true;
15398 
15399 	  /* MAX where operand 0 is positive is positive.  */
15400 	  return tree_expr_nonnegative_warnv_p (op0,
15401 						strict_overflow_p);
15402 	}
15403       /* MAX where operand 1 is positive is positive.  */
15404       else if (tree_expr_nonzero_warnv_p (op1,
15405 					  &sub_strict_overflow_p)
15406 	       && tree_expr_nonnegative_warnv_p (op1,
15407 						 &sub_strict_overflow_p))
15408 	{
15409 	  if (sub_strict_overflow_p)
15410 	    *strict_overflow_p = true;
15411 	  return true;
15412 	}
15413       break;
15414 
15415     case BIT_IOR_EXPR:
15416       return (tree_expr_nonzero_warnv_p (op1,
15417 					 strict_overflow_p)
15418 	      || tree_expr_nonzero_warnv_p (op0,
15419 					    strict_overflow_p));
15420 
15421     default:
15422       break;
15423     }
15424 
15425   return false;
15426 }
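
/* For example, with unsigned (wrapping) arithmetic 1 + UINT_MAX is 0,
   which is why the PLUS_EXPR case above requires
   TYPE_OVERFLOW_UNDEFINED before concluding that the sum of two
   nonnegative operands, at least one of them nonzero, is itself
   nonzero.  */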
15427 
15428 /* Return true when T is an address and is known to be nonzero.
15429    For floating point we further ensure that T is not denormal.
15430    Similar logic is present in nonzero_address in rtlanal.c.
15431 
15432    If the return value is based on the assumption that signed overflow
15433    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15434    change *STRICT_OVERFLOW_P.  */
15435 
15436 bool
15437 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15438 {
15439   bool sub_strict_overflow_p;
15440   switch (TREE_CODE (t))
15441     {
15442     case INTEGER_CST:
15443       return !integer_zerop (t);
15444 
15445     case ADDR_EXPR:
15446       {
15447 	tree base = TREE_OPERAND (t, 0);
15448 
15449 	if (!DECL_P (base))
15450 	  base = get_base_address (base);
15451 
15452 	if (!base)
15453 	  return false;
15454 
15455 	/* For objects in the symbol table, check whether we know they are nonzero.
15456 	   Don't do anything for variables and functions before symtab is built;
15457 	   it is quite possible that they will be declared weak later.  */
15458 	if (DECL_P (base) && decl_in_symtab_p (base))
15459 	  {
15460 	    struct symtab_node *symbol;
15461 
15462 	    symbol = symtab_node::get_create (base);
15463 	    if (symbol)
15464 	      return symbol->nonzero_address ();
15465 	    else
15466 	      return false;
15467 	  }
15468 
15469 	/* Function local objects are never NULL.  */
15470 	if (DECL_P (base)
15471 	    && (DECL_CONTEXT (base)
15472 		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15473 		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15474 	  return true;
15475 
15476 	/* Constants are never weak.  */
15477 	if (CONSTANT_CLASS_P (base))
15478 	  return true;
15479 
15480 	return false;
15481       }
15482 
15483     case COND_EXPR:
15484       sub_strict_overflow_p = false;
15485       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15486 				     &sub_strict_overflow_p)
15487 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15488 					&sub_strict_overflow_p))
15489 	{
15490 	  if (sub_strict_overflow_p)
15491 	    *strict_overflow_p = true;
15492 	  return true;
15493 	}
15494       break;
15495 
15496     default:
15497       break;
15498     }
15499   return false;
15500 }
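
/* For example, the address of a function-local auto variable is always
   nonzero, whereas the address of a symbol that may later be declared
   weak cannot be assumed nonzero; that is why the ADDR_EXPR case above
   defers to symtab_node::nonzero_address for symtab objects.  */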
15501 
15502 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15503    attempt to fold the expression to a constant without modifying TYPE,
15504    OP0 or OP1.
15505 
15506    If the expression could be simplified to a constant, then return
15507    the constant.  If the expression would not be simplified to a
15508    constant, then return NULL_TREE.  */
15509 
15510 tree
15511 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15512 {
15513   tree tem = fold_binary (code, type, op0, op1);
15514   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15515 }
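
/* A minimal sketch (the helper name below is hypothetical and not part
   of the original file): folding 2 + 3 in integer_type_node yields the
   INTEGER_CST 5, whereas folding an expression with a non-constant
   operand would yield NULL_TREE.  */

static tree ATTRIBUTE_UNUSED
fold_binary_to_constant_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}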
15516 
15517 /* Given the components of a unary expression CODE, TYPE and OP0,
15518    attempt to fold the expression to a constant without modifying
15519    TYPE or OP0.
15520 
15521    If the expression could be simplified to a constant, then return
15522    the constant.  If the expression would not be simplified to a
15523    constant, then return NULL_TREE.  */
15524 
15525 tree
15526 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15527 {
15528   tree tem = fold_unary (code, type, op0);
15529   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15530 }
15531 
15532 /* If EXP represents referencing an element in a constant string
15533    (either via pointer arithmetic or array indexing), return the
15534    tree representing the value accessed, otherwise return NULL.  */
15535 
15536 tree
15537 fold_read_from_constant_string (tree exp)
15538 {
15539   if ((TREE_CODE (exp) == INDIRECT_REF
15540        || TREE_CODE (exp) == ARRAY_REF)
15541       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15542     {
15543       tree exp1 = TREE_OPERAND (exp, 0);
15544       tree index;
15545       tree string;
15546       location_t loc = EXPR_LOCATION (exp);
15547 
15548       if (TREE_CODE (exp) == INDIRECT_REF)
15549 	string = string_constant (exp1, &index);
15550       else
15551 	{
15552 	  tree low_bound = array_ref_low_bound (exp);
15553 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15554 
15555 	  /* Optimize the special-case of a zero lower bound.
15556 
15557 	     We convert the low_bound to sizetype to avoid some problems
15558 	     with constant folding.  (E.g. suppose the lower bound is 1,
15559 	     and its mode is QI.  Without the conversion, (ARRAY
15560 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15561 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15562 	  if (! integer_zerop (low_bound))
15563 	    index = size_diffop_loc (loc, index,
15564 				 fold_convert_loc (loc, sizetype, low_bound));
15565 
15566 	  string = exp1;
15567 	}
15568 
15569       if (string
15570 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15571 	  && TREE_CODE (string) == STRING_CST
15572 	  && TREE_CODE (index) == INTEGER_CST
15573 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15574 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15575 	      == MODE_INT)
15576 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15577 	return build_int_cst_type (TREE_TYPE (exp),
15578 				   (TREE_STRING_POINTER (string)
15579 				    [TREE_INT_CST_LOW (index)]));
15580     }
15581   return NULL;
15582 }
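
/* For example, the C expression "abc"[1] satisfies all of the checks
   above (constant string, constant in-range index, one-byte integer
   element mode) and folds to the character constant 'b'.  */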
15583 
15584 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15585    an integer constant, real, or fixed-point constant.
15586 
15587    TYPE is the type of the result.  */
15588 
15589 static tree
15590 fold_negate_const (tree arg0, tree type)
15591 {
15592   tree t = NULL_TREE;
15593 
15594   switch (TREE_CODE (arg0))
15595     {
15596     case INTEGER_CST:
15597       {
15598 	bool overflow;
15599 	wide_int val = wi::neg (arg0, &overflow);
15600 	t = force_fit_type (type, val, 1,
15601 			    (overflow | TREE_OVERFLOW (arg0))
15602 			    && !TYPE_UNSIGNED (type));
15603 	break;
15604       }
15605 
15606     case REAL_CST:
15607       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15608       break;
15609 
15610     case FIXED_CST:
15611       {
15612         FIXED_VALUE_TYPE f;
15613         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15614 					    &(TREE_FIXED_CST (arg0)), NULL,
15615 					    TYPE_SATURATING (type));
15616 	t = build_fixed (type, f);
15617 	/* Propagate overflow flags.  */
15618 	if (overflow_p | TREE_OVERFLOW (arg0))
15619 	  TREE_OVERFLOW (t) = 1;
15620 	break;
15621       }
15622 
15623     default:
15624       gcc_unreachable ();
15625     }
15626 
15627   return t;
15628 }
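
/* Worked example: negating the most negative value of a signed type
   overflows; in an 8-bit signed type, -(-128) wraps back to -128, so
   force_fit_type is told about the overflow above.  For unsigned types
   negation is well defined modulo 2^precision and no overflow is
   recorded.  */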
15629 
15630 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15631    an integer constant or real constant.
15632 
15633    TYPE is the type of the result.  */
15634 
15635 tree
15636 fold_abs_const (tree arg0, tree type)
15637 {
15638   tree t = NULL_TREE;
15639 
15640   switch (TREE_CODE (arg0))
15641     {
15642     case INTEGER_CST:
15643       {
15644         /* If the value is unsigned or non-negative, then the absolute value
15645 	   is the same as the ordinary value.  */
15646 	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15647 	  t = arg0;
15648 
15649 	/* If the value is negative, then the absolute value is
15650 	   its negation.  */
15651 	else
15652 	  {
15653 	    bool overflow;
15654 	    wide_int val = wi::neg (arg0, &overflow);
15655 	    t = force_fit_type (type, val, -1,
15656 				overflow | TREE_OVERFLOW (arg0));
15657 	  }
15658       }
15659       break;
15660 
15661     case REAL_CST:
15662       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15663 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15664       else
15665 	t = arg0;
15666       break;
15667 
15668     default:
15669       gcc_unreachable ();
15670     }
15671 
15672   return t;
15673 }
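
/* Worked example: in an 8-bit signed type, abs (-128) is again -128
   with TREE_OVERFLOW set, since +128 is not representable; a
   nonnegative constant is returned unchanged.  */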
15674 
15675 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15676    constant.  TYPE is the type of the result.  */
15677 
15678 static tree
15679 fold_not_const (const_tree arg0, tree type)
15680 {
15681   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15682 
15683   return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15684 }
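
/* Worked example: for an 8-bit unsigned type, ~0x0f is 0xf0;
   wi::bit_not flips every bit and force_fit_type truncates the result
   to the precision of TYPE.  */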
15685 
15686 /* Given CODE, a relational operator, the target type, TYPE and two
15687    constant operands OP0 and OP1, return the result of the
15688    relational operation.  If the result is not a compile time
15689    constant, then return NULL_TREE.  */
15690 
15691 static tree
15692 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15693 {
15694   int result, invert;
15695 
15696   /* From here on, the only cases we handle are when the result is
15697      known to be a constant.  */
15698 
15699   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15700     {
15701       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15702       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15703 
15704       /* Handle the cases where either operand is a NaN.  */
15705       if (real_isnan (c0) || real_isnan (c1))
15706 	{
15707 	  switch (code)
15708 	    {
15709 	    case EQ_EXPR:
15710 	    case ORDERED_EXPR:
15711 	      result = 0;
15712 	      break;
15713 
15714 	    case NE_EXPR:
15715 	    case UNORDERED_EXPR:
15716 	    case UNLT_EXPR:
15717 	    case UNLE_EXPR:
15718 	    case UNGT_EXPR:
15719 	    case UNGE_EXPR:
15720 	    case UNEQ_EXPR:
15721 	      result = 1;
15722 	      break;
15723 
15724 	    case LT_EXPR:
15725 	    case LE_EXPR:
15726 	    case GT_EXPR:
15727 	    case GE_EXPR:
15728 	    case LTGT_EXPR:
15729 	      if (flag_trapping_math)
15730 		return NULL_TREE;
15731 	      result = 0;
15732 	      break;
15733 
15734 	    default:
15735 	      gcc_unreachable ();
15736 	    }
15737 
15738 	  return constant_boolean_node (result, type);
15739 	}
15740 
15741       return constant_boolean_node (real_compare (code, c0, c1), type);
15742     }
15743 
15744   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15745     {
15746       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15747       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15748       return constant_boolean_node (fixed_compare (code, c0, c1), type);
15749     }
15750 
15751   /* Handle equality/inequality of complex constants.  */
15752   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15753     {
15754       tree rcond = fold_relational_const (code, type,
15755 					  TREE_REALPART (op0),
15756 					  TREE_REALPART (op1));
15757       tree icond = fold_relational_const (code, type,
15758 					  TREE_IMAGPART (op0),
15759 					  TREE_IMAGPART (op1));
15760       if (code == EQ_EXPR)
15761 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15762       else if (code == NE_EXPR)
15763 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15764       else
15765 	return NULL_TREE;
15766     }
15767 
15768   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15769     {
15770       unsigned count = VECTOR_CST_NELTS (op0);
15771       tree *elts = XALLOCAVEC (tree, count);
15772       gcc_assert (VECTOR_CST_NELTS (op1) == count
15773 		  && TYPE_VECTOR_SUBPARTS (type) == count);
15774 
15775       for (unsigned i = 0; i < count; i++)
15776 	{
15777 	  tree elem_type = TREE_TYPE (type);
15778 	  tree elem0 = VECTOR_CST_ELT (op0, i);
15779 	  tree elem1 = VECTOR_CST_ELT (op1, i);
15780 
15781 	  tree tem = fold_relational_const (code, elem_type,
15782 					    elem0, elem1);
15783 
15784 	  if (tem == NULL_TREE)
15785 	    return NULL_TREE;
15786 
15787 	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15788 	}
15789 
15790       return build_vector (type, elts);
15791     }
15792 
15793   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15794 
15795      To compute GT, swap the arguments and do LT.
15796      To compute GE, do LT and invert the result.
15797      To compute LE, swap the arguments, do LT and invert the result.
15798      To compute NE, do EQ and invert the result.
15799 
15800      Therefore, the code below must handle only EQ and LT.  */
15801 
15802   if (code == LE_EXPR || code == GT_EXPR)
15803     {
15804       tree tem = op0;
15805       op0 = op1;
15806       op1 = tem;
15807       code = swap_tree_comparison (code);
15808     }
15809 
15810   /* Note that it is safe to invert for real values here because we
15811      have already handled the one case where it matters.  */
15812 
15813   invert = 0;
15814   if (code == NE_EXPR || code == GE_EXPR)
15815     {
15816       invert = 1;
15817       code = invert_tree_comparison (code, false);
15818     }
15819 
15820   /* Compute a result for LT or EQ if args permit;
15821      otherwise return NULL_TREE.  */
15822   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15823     {
15824       if (code == EQ_EXPR)
15825 	result = tree_int_cst_equal (op0, op1);
15826       else
15827 	result = tree_int_cst_lt (op0, op1);
15828     }
15829   else
15830     return NULL_TREE;
15831 
15832   if (invert)
15833     result ^= 1;
15834   return constant_boolean_node (result, type);
15835 }
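
/* For example, 1.0 < NaN folds to false only when trapping math is
   disabled; with -ftrapping-math the ordered comparison may raise an
   invalid operand exception, so NULL_TREE is returned and the
   comparison is left for run time.  Unordered variants such as
   UNLT_EXPR fold to true against a NaN unconditionally.  */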
15836 
15837 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15838    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
15839    itself.  */
15840 
15841 tree
15842 fold_build_cleanup_point_expr (tree type, tree expr)
15843 {
15844   /* If the expression does not have side effects then we don't have to wrap
15845      it with a cleanup point expression.  */
15846   if (!TREE_SIDE_EFFECTS (expr))
15847     return expr;
15848 
15849   /* If the expression is a return, check whether the expression inside
15850      the return, or the right-hand side of the MODIFY_EXPR it contains,
15851      has side effects.  If either has none, we don't need to wrap the
15852      expression in a cleanup point expression.  Note we don't check the
15853      left-hand side of the MODIFY_EXPR since it is always the return decl.  */
15854   if (TREE_CODE (expr) == RETURN_EXPR)
15855     {
15856       tree op = TREE_OPERAND (expr, 0);
15857       if (!op || !TREE_SIDE_EFFECTS (op))
15858         return expr;
15859       op = TREE_OPERAND (op, 1);
15860       if (!TREE_SIDE_EFFECTS (op))
15861         return expr;
15862     }
15863 
15864   return build1 (CLEANUP_POINT_EXPR, type, expr);
15865 }
15866 
15867 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15868    of an indirection through OP0, or NULL_TREE if no simplification is
15869    possible.  */
15870 
15871 tree
15872 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15873 {
15874   tree sub = op0;
15875   tree subtype;
15876 
15877   STRIP_NOPS (sub);
15878   subtype = TREE_TYPE (sub);
15879   if (!POINTER_TYPE_P (subtype))
15880     return NULL_TREE;
15881 
15882   if (TREE_CODE (sub) == ADDR_EXPR)
15883     {
15884       tree op = TREE_OPERAND (sub, 0);
15885       tree optype = TREE_TYPE (op);
15886       /* *&CONST_DECL -> to the value of the const decl.  */
15887       if (TREE_CODE (op) == CONST_DECL)
15888 	return DECL_INITIAL (op);
15889       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
15890       if (type == optype)
15891 	{
15892 	  tree fop = fold_read_from_constant_string (op);
15893 	  if (fop)
15894 	    return fop;
15895 	  else
15896 	    return op;
15897 	}
15898       /* *(foo *)&fooarray => fooarray[0] */
15899       else if (TREE_CODE (optype) == ARRAY_TYPE
15900 	       && type == TREE_TYPE (optype)
15901 	       && (!in_gimple_form
15902 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15903 	{
15904 	  tree type_domain = TYPE_DOMAIN (optype);
15905 	  tree min_val = size_zero_node;
15906 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
15907 	    min_val = TYPE_MIN_VALUE (type_domain);
15908 	  if (in_gimple_form
15909 	      && TREE_CODE (min_val) != INTEGER_CST)
15910 	    return NULL_TREE;
15911 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
15912 			     NULL_TREE, NULL_TREE);
15913 	}
15914       /* *(foo *)&complexfoo => __real__ complexfoo */
15915       else if (TREE_CODE (optype) == COMPLEX_TYPE
15916 	       && type == TREE_TYPE (optype))
15917 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
15918       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15919       else if (TREE_CODE (optype) == VECTOR_TYPE
15920 	       && type == TREE_TYPE (optype))
15921 	{
15922 	  tree part_width = TYPE_SIZE (type);
15923 	  tree index = bitsize_int (0);
15924 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15925 	}
15926     }
15927 
15928   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15929       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15930     {
15931       tree op00 = TREE_OPERAND (sub, 0);
15932       tree op01 = TREE_OPERAND (sub, 1);
15933 
15934       STRIP_NOPS (op00);
15935       if (TREE_CODE (op00) == ADDR_EXPR)
15936 	{
15937 	  tree op00type;
15938 	  op00 = TREE_OPERAND (op00, 0);
15939 	  op00type = TREE_TYPE (op00);
15940 
15941 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15942 	  if (TREE_CODE (op00type) == VECTOR_TYPE
15943 	      && type == TREE_TYPE (op00type))
15944 	    {
15945 	      tree part_width = TYPE_SIZE (type);
15946 	      unsigned HOST_WIDE_INT max_offset
15947 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
15948 		   * TYPE_VECTOR_SUBPARTS (op00type));
15949 	      if (tree_int_cst_sign_bit (op01) == 0
15950 		  && compare_tree_int (op01, max_offset) == -1)
15951 		{
15952 		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
15953 		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15954 		  tree index = bitsize_int (indexi);
15955 		  return fold_build3_loc (loc,
15956 					  BIT_FIELD_REF, type, op00,
15957 					  part_width, index);
15958 		}
15959 	    }
15960 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15961 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
15962 		   && type == TREE_TYPE (op00type))
15963 	    {
15964 	      tree size = TYPE_SIZE_UNIT (type);
15965 	      if (tree_int_cst_equal (size, op01))
15966 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15967 	    }
15968 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
15969 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
15970 		   && type == TREE_TYPE (op00type))
15971 	    {
15972 	      tree type_domain = TYPE_DOMAIN (op00type);
15973 	      tree min_val = size_zero_node;
15974 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
15975 		min_val = TYPE_MIN_VALUE (type_domain);
15976 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15977 				     TYPE_SIZE_UNIT (type));
15978 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15979 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
15980 				 NULL_TREE, NULL_TREE);
15981 	    }
15982 	}
15983     }
15984 
15985   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15986   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15987       && type == TREE_TYPE (TREE_TYPE (subtype))
15988       && (!in_gimple_form
15989 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15990     {
15991       tree type_domain;
15992       tree min_val = size_zero_node;
15993       sub = build_fold_indirect_ref_loc (loc, sub);
15994       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15995       if (type_domain && TYPE_MIN_VALUE (type_domain))
15996 	min_val = TYPE_MIN_VALUE (type_domain);
15997       if (in_gimple_form
15998 	  && TREE_CODE (min_val) != INTEGER_CST)
15999 	return NULL_TREE;
16000       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16001 			 NULL_TREE);
16002     }
16003 
16004   return NULL_TREE;
16005 }
16006 
16007 /* Builds an expression for an indirection through T, simplifying some
16008    cases.  */
16009 
16010 tree
16011 build_fold_indirect_ref_loc (location_t loc, tree t)
16012 {
16013   tree type = TREE_TYPE (TREE_TYPE (t));
16014   tree sub = fold_indirect_ref_1 (loc, type, t);
16015 
16016   if (sub)
16017     return sub;
16018 
16019   return build1_loc (loc, INDIRECT_REF, type, t);
16020 }
16021 
16022 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16023 
16024 tree
16025 fold_indirect_ref_loc (location_t loc, tree t)
16026 {
16027   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16028 
16029   if (sub)
16030     return sub;
16031   else
16032     return t;
16033 }
16034 
16035 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16036    whose result is ignored.  The type of the returned tree need not be
16037    the same as that of the original expression.  */
16038 
16039 tree
16040 fold_ignored_result (tree t)
16041 {
16042   if (!TREE_SIDE_EFFECTS (t))
16043     return integer_zero_node;
16044 
16045   for (;;)
16046     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16047       {
16048       case tcc_unary:
16049 	t = TREE_OPERAND (t, 0);
16050 	break;
16051 
16052       case tcc_binary:
16053       case tcc_comparison:
16054 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16055 	  t = TREE_OPERAND (t, 0);
16056 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16057 	  t = TREE_OPERAND (t, 1);
16058 	else
16059 	  return t;
16060 	break;
16061 
16062       case tcc_expression:
16063 	switch (TREE_CODE (t))
16064 	  {
16065 	  case COMPOUND_EXPR:
16066 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16067 	      return t;
16068 	    t = TREE_OPERAND (t, 0);
16069 	    break;
16070 
16071 	  case COND_EXPR:
16072 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16073 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16074 	      return t;
16075 	    t = TREE_OPERAND (t, 0);
16076 	    break;
16077 
16078 	  default:
16079 	    return t;
16080 	  }
16081 	break;
16082 
16083       default:
16084 	return t;
16085       }
16086 }
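
/* For example, if the value of (foo (), x + y) is ignored, the
   side-effect-free x + y is stripped and only the call foo () is
   kept; a COND_EXPR is kept whole whenever either of its arms has
   side effects.  */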
16087 
16088 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16089 
16090 tree
16091 round_up_loc (location_t loc, tree value, unsigned int divisor)
16092 {
16093   tree div = NULL_TREE;
16094 
16095   if (divisor == 1)
16096     return value;
16097 
16098   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16099      have to do anything.  Only do this check when VALUE is not a
16100      constant, because for a constant the check is more expensive than
16101      just doing the rounding.  */
16102   if (TREE_CODE (value) != INTEGER_CST)
16103     {
16104       div = build_int_cst (TREE_TYPE (value), divisor);
16105 
16106       if (multiple_of_p (TREE_TYPE (value), value, div))
16107 	return value;
16108     }
16109 
16110   /* If divisor is a power of two, simplify this to bit manipulation.  */
16111   if (divisor == (divisor & -divisor))
16112     {
16113       if (TREE_CODE (value) == INTEGER_CST)
16114 	{
16115 	  wide_int val = value;
16116 	  bool overflow_p;
16117 
16118 	  if ((val & (divisor - 1)) == 0)
16119 	    return value;
16120 
16121 	  overflow_p = TREE_OVERFLOW (value);
16122 	  val += divisor - 1;
16123 	  val &= - (int) divisor;
16124 	  if (val == 0)
16125 	    overflow_p = true;
16126 
16127 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16128 	}
16129       else
16130 	{
16131 	  tree t;
16132 
16133 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16134 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16135 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16136 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16137 	}
16138     }
16139   else
16140     {
16141       if (!div)
16142 	div = build_int_cst (TREE_TYPE (value), divisor);
16143       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16144       value = size_binop_loc (loc, MULT_EXPR, value, div);
16145     }
16146 
16147   return value;
16148 }
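
/* Worked example: rounding 37 up to a multiple of 8 takes the
   power-of-two path: (37 + 7) & -8 == 40.  For a divisor such as 12,
   the CEIL_DIV_EXPR/MULT_EXPR path gives ((37 + 11) / 12) * 12 == 48.  */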
16149 
16150 /* Likewise, but round down.  */
16151 
16152 tree
16153 round_down_loc (location_t loc, tree value, int divisor)
16154 {
16155   tree div = NULL_TREE;
16156 
16157   gcc_assert (divisor > 0);
16158   if (divisor == 1)
16159     return value;
16160 
16161   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16162      have to do anything.  Only do this check when VALUE is not a
16163      constant, because for a constant the check is more expensive than
16164      just doing the rounding.  */
16165   if (TREE_CODE (value) != INTEGER_CST)
16166     {
16167       div = build_int_cst (TREE_TYPE (value), divisor);
16168 
16169       if (multiple_of_p (TREE_TYPE (value), value, div))
16170 	return value;
16171     }
16172 
16173   /* If divisor is a power of two, simplify this to bit manipulation.  */
16174   if (divisor == (divisor & -divisor))
16175     {
16176       tree t;
16177 
16178       t = build_int_cst (TREE_TYPE (value), -divisor);
16179       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16180     }
16181   else
16182     {
16183       if (!div)
16184 	div = build_int_cst (TREE_TYPE (value), divisor);
16185       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16186       value = size_binop_loc (loc, MULT_EXPR, value, div);
16187     }
16188 
16189   return value;
16190 }
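
/* Worked example: rounding 37 down to a multiple of 8 is just
   37 & -8 == 32; for divisor 12 the FLOOR_DIV_EXPR path yields
   (37 / 12) * 12 == 36.  */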
16191 
16192 /* Returns the pointer to the base of the object addressed by EXP and
16193    extracts the information about the offset of the access, storing it
16194    to PBITPOS and POFFSET.  */
16195 
16196 static tree
16197 split_address_to_core_and_offset (tree exp,
16198 				  HOST_WIDE_INT *pbitpos, tree *poffset)
16199 {
16200   tree core;
16201   machine_mode mode;
16202   int unsignedp, volatilep;
16203   HOST_WIDE_INT bitsize;
16204   location_t loc = EXPR_LOCATION (exp);
16205 
16206   if (TREE_CODE (exp) == ADDR_EXPR)
16207     {
16208       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16209 				  poffset, &mode, &unsignedp, &volatilep,
16210 				  false);
16211       core = build_fold_addr_expr_loc (loc, core);
16212     }
16213   else
16214     {
16215       core = exp;
16216       *pbitpos = 0;
16217       *poffset = NULL_TREE;
16218     }
16219 
16220   return core;
16221 }
16222 
16223 /* Returns true if addresses of E1 and E2 differ by a constant, false
16224    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16225 
16226 bool
16227 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16228 {
16229   tree core1, core2;
16230   HOST_WIDE_INT bitpos1, bitpos2;
16231   tree toffset1, toffset2, tdiff, type;
16232 
16233   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16234   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16235 
16236   if (bitpos1 % BITS_PER_UNIT != 0
16237       || bitpos2 % BITS_PER_UNIT != 0
16238       || !operand_equal_p (core1, core2, 0))
16239     return false;
16240 
16241   if (toffset1 && toffset2)
16242     {
16243       type = TREE_TYPE (toffset1);
16244       if (type != TREE_TYPE (toffset2))
16245 	toffset2 = fold_convert (type, toffset2);
16246 
16247       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16248       if (!cst_and_fits_in_hwi (tdiff))
16249 	return false;
16250 
16251       *diff = int_cst_value (tdiff);
16252     }
16253   else if (toffset1 || toffset2)
16254     {
16255       /* If only one of the offsets is non-constant, the difference cannot
16256 	 be a constant.  */
16257       return false;
16258     }
16259   else
16260     *diff = 0;
16261 
16262   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16263   return true;
16264 }
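
/* For example, &a[3] and &a[1] share the core address &a and their
   byte offsets differ by 2 * sizeof (a[0]), so *DIFF is set to that
   constant.  For &a[i] versus &a[1] only one offset is constant and
   the function returns false.  */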
16265 
16266 /* Simplify the floating point expression EXP when the sign of the
16267    result is not significant.  Return NULL_TREE if no simplification
16268    is possible.  */
16269 
16270 tree
16271 fold_strip_sign_ops (tree exp)
16272 {
16273   tree arg0, arg1;
16274   location_t loc = EXPR_LOCATION (exp);
16275 
16276   switch (TREE_CODE (exp))
16277     {
16278     case ABS_EXPR:
16279     case NEGATE_EXPR:
16280       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16281       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16282 
16283     case MULT_EXPR:
16284     case RDIV_EXPR:
16285       if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16286 	return NULL_TREE;
16287       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16288       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16289       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16290 	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16291 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
16292 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
16293       break;
16294 
16295     case COMPOUND_EXPR:
16296       arg0 = TREE_OPERAND (exp, 0);
16297       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16298       if (arg1)
16299 	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16300       break;
16301 
16302     case COND_EXPR:
16303       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16304       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16305       if (arg0 || arg1)
16306 	return fold_build3_loc (loc,
16307 			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16308 			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
16309 			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
16310       break;
16311 
16312     case CALL_EXPR:
16313       {
16314 	const enum built_in_function fcode = builtin_mathfn_code (exp);
16315 	switch (fcode)
16316 	{
16317 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
16318 	  /* Strip copysign function call, return the 1st argument. */
16319 	  arg0 = CALL_EXPR_ARG (exp, 0);
16320 	  arg1 = CALL_EXPR_ARG (exp, 1);
16321 	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16322 
16323 	default:
16324 	  /* Strip sign ops from the argument of "odd" math functions.  */
16325 	  if (negate_mathfn_p (fcode))
16326             {
16327 	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16328 	      if (arg0)
16329 		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16330 	    }
16331 	  break;
16332 	}
16333       }
16334       break;
16335 
16336     default:
16337       break;
16338     }
16339   return NULL_TREE;
16340 }
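
/* For example, when folding fabs (-x * y) the sign of the product is
   irrelevant, so the NEGATE_EXPR can be stripped and the argument
   becomes x * y; likewise copysign (a, b) in a sign-insensitive
   context reduces to a, keeping b only for its side effects.  */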
16341 
16342 /* Return OFF converted to a pointer offset type suitable as an offset for
16343    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
16344 tree
16345 convert_to_ptrofftype_loc (location_t loc, tree off)
16346 {
16347   return fold_convert_loc (loc, sizetype, off);
16348 }
16349 
16350 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
16351 tree
16352 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16353 {
16354   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16355 			  ptr, convert_to_ptrofftype_loc (loc, off));
16356 }
16357 
16358 /* Likewise, but with the offset given as a HOST_WIDE_INT.  */
16359 tree
16360 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16361 {
16362   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16363 			  ptr, size_int (off));
16364 }
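
/* A minimal sketch (the helper name below is hypothetical and not part
   of the original file): advance PTR by four elements of its
   pointed-to type using the helpers above.  Assumes the pointed-to
   type is complete, so TYPE_SIZE_UNIT is non-null.  */

static tree ATTRIBUTE_UNUSED
fold_build_pointer_plus_example (tree ptr)
{
  tree elt_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr)));
  tree off = size_binop (MULT_EXPR, elt_size, size_int (4));
  return fold_build_pointer_plus_loc (UNKNOWN_LOCATION, ptr, off);
}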
16365