xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/fold-const.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /*@@ This file should be rewritten to use an arbitrary precision
21   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23   @@ The routines that translate from the ap rep should
24   @@ warn if precision et al. is lost.
25   @@ This would also make life easier when this technology is used
26   @@ for cross-compilers.  */
27 
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29 
30    fold takes a tree as argument and returns a simplified tree.
31 
32    size_binop takes a tree code for an arithmetic operation
33    and two operands that are trees, and produces a tree for the
34    result, assuming the type comes from `sizetype'.
35 
36    size_int takes an integer value, and creates a tree constant
37    with type from `sizetype'.
38 
39    Note: Since the folders get called on non-gimple code as well as
40    gimple code, we need to handle GIMPLE tuples as well as their
41    corresponding tree equivalents.  */
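
/* An illustrative sketch of how a (hypothetical) caller combines these
   entry points, with A and B trees of sizetype:

     tree sum  = size_binop (PLUS_EXPR, a, b);
     tree four = size_int (4);
     tree prod = fold_build2 (MULT_EXPR, sizetype, sum, four);

   fold and the fold_build* helpers return a simplified tree when they
   can, otherwise a tree equivalent to their input.  */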
42 
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
86 
87 /* Nonzero if we are folding constants inside an initializer; zero
88    otherwise.  */
89 int folding_initializer = 0;
90 
91 /* The following constants represent a bit-based encoding of GCC's
92    comparison operators.  This encoding simplifies transformations on
93    relational comparisons, such as combining them with AND or OR.  */
94 enum comparison_code {
95   COMPCODE_FALSE = 0,
96   COMPCODE_LT = 1,
97   COMPCODE_EQ = 2,
98   COMPCODE_LE = 3,
99   COMPCODE_GT = 4,
100   COMPCODE_LTGT = 5,
101   COMPCODE_GE = 6,
102   COMPCODE_ORD = 7,
103   COMPCODE_UNORD = 8,
104   COMPCODE_UNLT = 9,
105   COMPCODE_UNEQ = 10,
106   COMPCODE_UNLE = 11,
107   COMPCODE_UNGT = 12,
108   COMPCODE_NE = 13,
109   COMPCODE_UNGE = 14,
110   COMPCODE_TRUE = 15
111 };
112 
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static int twoval_comparison_p (tree, tree *, tree *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree optimize_bit_field_compare (location_t, enum tree_code,
121 					tree, tree, tree);
122 static int simple_operand_p (const_tree);
123 static bool simple_operand_p_2 (tree);
124 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
125 static tree range_predecessor (tree);
126 static tree range_successor (tree);
127 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 						 enum tree_code, tree,
134 						 tree, tree,
135 						 tree, tree, int);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140 static tree fold_view_convert_expr (tree, tree);
141 static tree fold_negate_expr (location_t, tree);
142 
143 
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145    Otherwise, return LOC.  */
146 
147 static location_t
148 expr_location_or (tree t, location_t loc)
149 {
150   location_t tloc = EXPR_LOCATION (t);
151   return tloc == UNKNOWN_LOCATION ? loc : tloc;
152 }
153 
154 /* Similar to protected_set_expr_location, but never modify x in place;
155    if the location can and needs to be set, unshare it.  */
156 
157 static inline tree
158 protected_set_expr_location_unshare (tree x, location_t loc)
159 {
160   if (CAN_HAVE_LOCATION_P (x)
161       && EXPR_LOCATION (x) != loc
162       && !(TREE_CODE (x) == SAVE_EXPR
163 	   || TREE_CODE (x) == TARGET_EXPR
164 	   || TREE_CODE (x) == BIND_EXPR))
165     {
166       x = copy_node (x);
167       SET_EXPR_LOCATION (x, loc);
168     }
169   return x;
170 }
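
/* The SAVE_EXPR, TARGET_EXPR and BIND_EXPR exclusions matter because
   such nodes carry identity semantics; a SAVE_EXPR, e.g., must be
   evaluated exactly once, and copying it would silently introduce a
   second, independent evaluation.  */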
171 
172 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
173    division and returns the quotient.  Otherwise returns
174    NULL_TREE.  */
175 
176 tree
177 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 {
179   widest_int quo;
180 
181   if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
182 			 SIGNED, &quo))
183     return wide_int_to_tree (TREE_TYPE (arg1), quo);
184 
185   return NULL_TREE;
186 }
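
/* For example, writing integer constants loosely for their INTEGER_CST
   trees: div_if_zero_remainder (12, 4) yields 3, while
   div_if_zero_remainder (12, 5) yields NULL_TREE because the division
   leaves a remainder.  */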
187 
188 /* This is nonzero if we should defer warnings about undefined
189    overflow.  This facility exists because these warnings are a
190    special case.  The code to estimate loop iterations does not want
191    to issue any warnings, since it works with expressions which do not
192    occur in user code.  Various bits of cleanup code call fold(), but
193    only use the result if it has certain characteristics (e.g., is a
194    constant); that code only wants to issue a warning if the result is
195    used.  */
196 
197 static int fold_deferring_overflow_warnings;
198 
199 /* If a warning about undefined overflow is deferred, this is the
200    warning.  Note that this may cause us to turn two warnings into
201    one, but that is fine since it is sufficient to only give one
202    warning per expression.  */
203 
204 static const char* fold_deferred_overflow_warning;
205 
206 /* If a warning about undefined overflow is deferred, this is the
207    level at which the warning should be emitted.  */
208 
209 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210 
211 /* Start deferring overflow warnings.  We could use a stack here to
212    permit nested calls, but at present it is not necessary.  */
213 
214 void
215 fold_defer_overflow_warnings (void)
216 {
217   ++fold_deferring_overflow_warnings;
218 }
219 
220 /* Stop deferring overflow warnings.  If there is a pending warning,
221    and ISSUE is true, then issue the warning if appropriate.  STMT is
222    the statement with which the warning should be associated (used for
223    location information); STMT may be NULL.  CODE is the level of the
224    warning--a warn_strict_overflow_code value.  This function will use
225    the smaller of CODE and the deferred code when deciding whether to
226    issue the warning.  CODE may be zero to mean to always use the
227    deferred code.  */
228 
229 void
230 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 {
232   const char *warnmsg;
233   location_t locus;
234 
235   gcc_assert (fold_deferring_overflow_warnings > 0);
236   --fold_deferring_overflow_warnings;
237   if (fold_deferring_overflow_warnings > 0)
238     {
239       if (fold_deferred_overflow_warning != NULL
240 	  && code != 0
241 	  && code < (int) fold_deferred_overflow_code)
242 	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
243       return;
244     }
245 
246   warnmsg = fold_deferred_overflow_warning;
247   fold_deferred_overflow_warning = NULL;
248 
249   if (!issue || warnmsg == NULL)
250     return;
251 
252   if (gimple_no_warning_p (stmt))
253     return;
254 
255   /* Use the smallest code level when deciding to issue the
256      warning.  */
257   if (code == 0 || code > (int) fold_deferred_overflow_code)
258     code = fold_deferred_overflow_code;
259 
260   if (!issue_strict_overflow_warning (code))
261     return;
262 
263   if (stmt == NULL)
264     locus = input_location;
265   else
266     locus = gimple_location (stmt);
267   warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
268 }
269 
270 /* Stop deferring overflow warnings, ignoring any deferred
271    warnings.  */
272 
273 void
274 fold_undefer_and_ignore_overflow_warnings (void)
275 {
276   fold_undefer_overflow_warnings (false, NULL, 0);
277 }
278 
279 /* Whether we are deferring overflow warnings.  */
280 
281 bool
282 fold_deferring_overflow_warnings_p (void)
283 {
284   return fold_deferring_overflow_warnings > 0;
285 }
286 
287 /* This is called when we fold something based on the fact that signed
288    overflow is undefined.  */
289 
290 void
291 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 {
293   if (fold_deferring_overflow_warnings > 0)
294     {
295       if (fold_deferred_overflow_warning == NULL
296 	  || wc < fold_deferred_overflow_code)
297 	{
298 	  fold_deferred_overflow_warning = gmsgid;
299 	  fold_deferred_overflow_code = wc;
300 	}
301     }
302   else if (issue_strict_overflow_warning (wc))
303     warning (OPT_Wstrict_overflow, gmsgid);
304 }
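
/* The deferral machinery above brackets a folding region.  A hypothetical
   caller, sketched (KEEP_RESULT_P stands for whatever test decides that
   the folded tree is actually used):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (keep_result_p (folded), stmt, 0);

   Passing 0 as CODE means the warning level recorded at deferral time is
   used unchanged.  */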
305 
306 /* Return true if the built-in mathematical function specified by FN
307    is odd, i.e. -f(x) == f(-x).  */
308 
309 bool
310 negate_mathfn_p (combined_fn fn)
311 {
312   switch (fn)
313     {
314     CASE_CFN_ASIN:
315     CASE_CFN_ASINH:
316     CASE_CFN_ATAN:
317     CASE_CFN_ATANH:
318     CASE_CFN_CASIN:
319     CASE_CFN_CASINH:
320     CASE_CFN_CATAN:
321     CASE_CFN_CATANH:
322     CASE_CFN_CBRT:
323     CASE_CFN_CPROJ:
324     CASE_CFN_CSIN:
325     CASE_CFN_CSINH:
326     CASE_CFN_CTAN:
327     CASE_CFN_CTANH:
328     CASE_CFN_ERF:
329     CASE_CFN_LLROUND:
330     CASE_CFN_LROUND:
331     CASE_CFN_ROUND:
332     CASE_CFN_SIN:
333     CASE_CFN_SINH:
334     CASE_CFN_TAN:
335     CASE_CFN_TANH:
336     CASE_CFN_TRUNC:
337       return true;
338 
339     CASE_CFN_LLRINT:
340     CASE_CFN_LRINT:
341     CASE_CFN_NEARBYINT:
342     CASE_CFN_RINT:
343       return !flag_rounding_math;
344 
345     default:
346       break;
347     }
348   return false;
349 }
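
/* For instance, sin qualifies because -sin(x) == sin(-x) for all x,
   while cos does not (cos(-x) == cos(x)) and so is absent above.  The
   rint family is odd only under the default rounding mode, hence the
   !flag_rounding_math condition: rounding towards +inf, rint(-0.5) is
   -0.0 but -rint(0.5) is -1.0.  */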
350 
351 /* Check whether we may negate an integer constant T without causing
352    overflow.  */
353 
354 bool
355 may_negate_without_overflow_p (const_tree t)
356 {
357   tree type;
358 
359   gcc_assert (TREE_CODE (t) == INTEGER_CST);
360 
361   type = TREE_TYPE (t);
362   if (TYPE_UNSIGNED (type))
363     return false;
364 
365   return !wi::only_sign_bit_p (wi::to_wide (t));
366 }
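
/* For a 32-bit signed type this accepts every value except 0x80000000
   (INT_MIN, the lone value with only the sign bit set), whose negation
   is not representable; wi::only_sign_bit_p detects exactly that bit
   pattern.  */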
367 
368 /* Determine whether an expression T can be cheaply negated using
369    the function negate_expr without introducing undefined overflow.  */
370 
371 static bool
372 negate_expr_p (tree t)
373 {
374   tree type;
375 
376   if (t == 0)
377     return false;
378 
379   type = TREE_TYPE (t);
380 
381   STRIP_SIGN_NOPS (t);
382   switch (TREE_CODE (t))
383     {
384     case INTEGER_CST:
385       if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
386 	return true;
387 
388       /* Check that -CST will not overflow type.  */
389       return may_negate_without_overflow_p (t);
390     case BIT_NOT_EXPR:
391       return (INTEGRAL_TYPE_P (type)
392 	      && TYPE_OVERFLOW_WRAPS (type));
393 
394     case FIXED_CST:
395       return true;
396 
397     case NEGATE_EXPR:
398       return !TYPE_OVERFLOW_SANITIZED (type);
399 
400     case REAL_CST:
401       /* We want to canonicalize to positive real constants.  Pretend
402          that only negative ones can be easily negated.  */
403       return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
404 
405     case COMPLEX_CST:
406       return negate_expr_p (TREE_REALPART (t))
407 	     && negate_expr_p (TREE_IMAGPART (t));
408 
409     case VECTOR_CST:
410       {
411 	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
412 	  return true;
413 
414 	/* Steps don't prevent negation.  */
415 	unsigned int count = vector_cst_encoded_nelts (t);
416 	for (unsigned int i = 0; i < count; ++i)
417 	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
418 	    return false;
419 
420 	return true;
421       }
422 
423     case COMPLEX_EXPR:
424       return negate_expr_p (TREE_OPERAND (t, 0))
425 	     && negate_expr_p (TREE_OPERAND (t, 1));
426 
427     case CONJ_EXPR:
428       return negate_expr_p (TREE_OPERAND (t, 0));
429 
430     case PLUS_EXPR:
431       if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
432 	  || HONOR_SIGNED_ZEROS (element_mode (type))
433 	  || (ANY_INTEGRAL_TYPE_P (type)
434 	      && ! TYPE_OVERFLOW_WRAPS (type)))
435 	return false;
436       /* -(A + B) -> (-B) - A.  */
437       if (negate_expr_p (TREE_OPERAND (t, 1)))
438 	return true;
439       /* -(A + B) -> (-A) - B.  */
440       return negate_expr_p (TREE_OPERAND (t, 0));
441 
442     case MINUS_EXPR:
443       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
444       return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
445 	     && !HONOR_SIGNED_ZEROS (element_mode (type))
446 	     && (! ANY_INTEGRAL_TYPE_P (type)
447 		 || TYPE_OVERFLOW_WRAPS (type));
448 
449     case MULT_EXPR:
450       if (TYPE_UNSIGNED (type))
451 	break;
452       /* INT_MIN/n * n doesn't overflow, but negating one of its operands
453          does if n is a (negative) power of two.  */
454       if (INTEGRAL_TYPE_P (TREE_TYPE (t))
455 	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
456 	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
457 		 && (wi::popcount
458 		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
459 		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
460 		    && (wi::popcount
461 			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
462 	break;
463 
464       /* Fall through.  */
465 
466     case RDIV_EXPR:
467       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
468 	return negate_expr_p (TREE_OPERAND (t, 1))
469 	       || negate_expr_p (TREE_OPERAND (t, 0));
470       break;
471 
472     case TRUNC_DIV_EXPR:
473     case ROUND_DIV_EXPR:
474     case EXACT_DIV_EXPR:
475       if (TYPE_UNSIGNED (type))
476 	break;
477       /* In general we can't negate A in A / B, because if A is INT_MIN and
478          B is not 1 we change the sign of the result.  */
479       if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
480 	  && negate_expr_p (TREE_OPERAND (t, 0)))
481 	return true;
482       /* In general we can't negate B in A / B, because if A is INT_MIN and
483 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
484 	 and actually traps on some architectures.  */
485       if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
486 	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
487 	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
488 	      && ! integer_onep (TREE_OPERAND (t, 1))))
489 	return negate_expr_p (TREE_OPERAND (t, 1));
490       break;
491 
492     case NOP_EXPR:
493       /* Negate -((double)float) as (double)(-float).  */
494       if (TREE_CODE (type) == REAL_TYPE)
495 	{
496 	  tree tem = strip_float_extensions (t);
497 	  if (tem != t)
498 	    return negate_expr_p (tem);
499 	}
500       break;
501 
502     case CALL_EXPR:
503       /* Negate -f(x) as f(-x).  */
504       if (negate_mathfn_p (get_call_combined_fn (t)))
505 	return negate_expr_p (CALL_EXPR_ARG (t, 0));
506       break;
507 
508     case RSHIFT_EXPR:
509       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
510       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
511 	{
512 	  tree op1 = TREE_OPERAND (t, 1);
513 	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
514 	    return true;
515 	}
516       break;
517 
518     default:
519       break;
520     }
521   return false;
522 }
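
/* Illustration: the INTEGER_CST 5 is negatable, INT_MIN is not (its
   negation is unrepresentable), and ~X counts as negatable only in
   wrapping types, where -(~X) can later be folded to X + 1.  */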
523 
524 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
525    no simplification is possible.
526    If negate_expr_p would return true for T, NULL_TREE will never be
527    returned.  */
528 
529 static tree
530 fold_negate_expr_1 (location_t loc, tree t)
531 {
532   tree type = TREE_TYPE (t);
533   tree tem;
534 
535   switch (TREE_CODE (t))
536     {
537     /* Convert - (~A) to A + 1.  */
538     case BIT_NOT_EXPR:
539       if (INTEGRAL_TYPE_P (type))
540         return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
541 				build_one_cst (type));
542       break;
543 
544     case INTEGER_CST:
545       tem = fold_negate_const (t, type);
546       if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
547 	  || (ANY_INTEGRAL_TYPE_P (type)
548 	      && !TYPE_OVERFLOW_TRAPS (type)
549 	      && TYPE_OVERFLOW_WRAPS (type))
550 	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
551 	return tem;
552       break;
553 
554     case POLY_INT_CST:
555     case REAL_CST:
556     case FIXED_CST:
557       tem = fold_negate_const (t, type);
558       return tem;
559 
560     case COMPLEX_CST:
561       {
562 	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
563 	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
564 	if (rpart && ipart)
565 	  return build_complex (type, rpart, ipart);
566       }
567       break;
568 
569     case VECTOR_CST:
570       {
571 	tree_vector_builder elts;
572 	elts.new_unary_operation (type, t, true);
573 	unsigned int count = elts.encoded_nelts ();
574 	for (unsigned int i = 0; i < count; ++i)
575 	  {
576 	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
577 	    if (elt == NULL_TREE)
578 	      return NULL_TREE;
579 	    elts.quick_push (elt);
580 	  }
581 
582 	return elts.build ();
583       }
584 
585     case COMPLEX_EXPR:
586       if (negate_expr_p (t))
587 	return fold_build2_loc (loc, COMPLEX_EXPR, type,
588 				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
589 				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
590       break;
591 
592     case CONJ_EXPR:
593       if (negate_expr_p (t))
594 	return fold_build1_loc (loc, CONJ_EXPR, type,
595 				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
596       break;
597 
598     case NEGATE_EXPR:
599       if (!TYPE_OVERFLOW_SANITIZED (type))
600 	return TREE_OPERAND (t, 0);
601       break;
602 
603     case PLUS_EXPR:
604       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
605 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
606 	{
607 	  /* -(A + B) -> (-B) - A.  */
608 	  if (negate_expr_p (TREE_OPERAND (t, 1)))
609 	    {
610 	      tem = negate_expr (TREE_OPERAND (t, 1));
611 	      return fold_build2_loc (loc, MINUS_EXPR, type,
612 				      tem, TREE_OPERAND (t, 0));
613 	    }
614 
615 	  /* -(A + B) -> (-A) - B.  */
616 	  if (negate_expr_p (TREE_OPERAND (t, 0)))
617 	    {
618 	      tem = negate_expr (TREE_OPERAND (t, 0));
619 	      return fold_build2_loc (loc, MINUS_EXPR, type,
620 				      tem, TREE_OPERAND (t, 1));
621 	    }
622 	}
623       break;
624 
625     case MINUS_EXPR:
626       /* - (A - B) -> B - A  */
627       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
628 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
629 	return fold_build2_loc (loc, MINUS_EXPR, type,
630 				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
631       break;
632 
633     case MULT_EXPR:
634       if (TYPE_UNSIGNED (type))
635         break;
636 
637       /* Fall through.  */
638 
639     case RDIV_EXPR:
640       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
641 	{
642 	  tem = TREE_OPERAND (t, 1);
643 	  if (negate_expr_p (tem))
644 	    return fold_build2_loc (loc, TREE_CODE (t), type,
645 				    TREE_OPERAND (t, 0), negate_expr (tem));
646 	  tem = TREE_OPERAND (t, 0);
647 	  if (negate_expr_p (tem))
648 	    return fold_build2_loc (loc, TREE_CODE (t), type,
649 				    negate_expr (tem), TREE_OPERAND (t, 1));
650 	}
651       break;
652 
653     case TRUNC_DIV_EXPR:
654     case ROUND_DIV_EXPR:
655     case EXACT_DIV_EXPR:
656       if (TYPE_UNSIGNED (type))
657 	break;
658       /* In general we can't negate A in A / B, because if A is INT_MIN and
659 	 B is not 1 we change the sign of the result.  */
660       if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
661 	  && negate_expr_p (TREE_OPERAND (t, 0)))
662 	return fold_build2_loc (loc, TREE_CODE (t), type,
663 				negate_expr (TREE_OPERAND (t, 0)),
664 				TREE_OPERAND (t, 1));
665       /* In general we can't negate B in A / B, because if A is INT_MIN and
666 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
667 	 and actually traps on some architectures.  */
668       if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
669 	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
670 	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
671 	       && ! integer_onep (TREE_OPERAND (t, 1))))
672 	  && negate_expr_p (TREE_OPERAND (t, 1)))
673 	return fold_build2_loc (loc, TREE_CODE (t), type,
674 				TREE_OPERAND (t, 0),
675 				negate_expr (TREE_OPERAND (t, 1)));
676       break;
677 
678     case NOP_EXPR:
679       /* Convert -((double)float) into (double)(-float).  */
680       if (TREE_CODE (type) == REAL_TYPE)
681 	{
682 	  tem = strip_float_extensions (t);
683 	  if (tem != t && negate_expr_p (tem))
684 	    return fold_convert_loc (loc, type, negate_expr (tem));
685 	}
686       break;
687 
688     case CALL_EXPR:
689       /* Negate -f(x) as f(-x).  */
690       if (negate_mathfn_p (get_call_combined_fn (t))
691 	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
692 	{
693 	  tree fndecl, arg;
694 
695 	  fndecl = get_callee_fndecl (t);
696 	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
697 	  return build_call_expr_loc (loc, fndecl, 1, arg);
698 	}
699       break;
700 
701     case RSHIFT_EXPR:
702       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
703       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
704 	{
705 	  tree op1 = TREE_OPERAND (t, 1);
706 	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
707 	    {
708 	      tree ntype = TYPE_UNSIGNED (type)
709 			   ? signed_type_for (type)
710 			   : unsigned_type_for (type);
711 	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
712 	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
713 	      return fold_convert_loc (loc, type, temp);
714 	    }
715 	}
716       break;
717 
718     default:
719       break;
720     }
721 
722   return NULL_TREE;
723 }
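
/* E.g. with 32-bit int, the RSHIFT_EXPR rule above turns -(x >> 31) into
   (unsigned) x >> 31: the arithmetic shift produces 0 or -1, so its
   negation, 0 or 1, equals the logical shift of the sign bit.  */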
724 
725 /* A wrapper for fold_negate_expr_1.  */
726 
727 static tree
728 fold_negate_expr (location_t loc, tree t)
729 {
730   tree type = TREE_TYPE (t);
731   STRIP_SIGN_NOPS (t);
732   tree tem = fold_negate_expr_1 (loc, t);
733   if (tem == NULL_TREE)
734     return NULL_TREE;
735   return fold_convert_loc (loc, type, tem);
736 }
737 
738 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
739    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
740    return NULL_TREE.  */
741 
742 static tree
743 negate_expr (tree t)
744 {
745   tree type, tem;
746   location_t loc;
747 
748   if (t == NULL_TREE)
749     return NULL_TREE;
750 
751   loc = EXPR_LOCATION (t);
752   type = TREE_TYPE (t);
753   STRIP_SIGN_NOPS (t);
754 
755   tem = fold_negate_expr (loc, t);
756   if (!tem)
757     tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758   return fold_convert_loc (loc, type, tem);
759 }
760 
761 /* Split a tree IN into constant, literal and variable parts that could be
762    combined with CODE to make IN.  "constant" means an expression with
763    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
764    commutative arithmetic operation.  Store the constant part into *CONP,
765    the literal in *LITP and return the variable part.  If a part isn't
766    present, set it to null.  If the tree does not decompose in this way,
767    return the entire tree as the variable part and the other parts as null.
768 
769    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
770    case, we negate an operand that was subtracted, except if it is a
771    literal, for which we use *MINUS_LITP instead.
772 
773    If NEGATE_P is true, we are negating all of IN, again except a literal
774    for which we use *MINUS_LITP instead.  If a variable part is of pointer
775    type, it is negated after converting to TYPE.  This prevents us from
776    generating an illegal MINUS pointer expression.  LOC is the location of
777    the converted variable part.
778 
779    If IN is itself a literal or constant, return it as appropriate.
780 
781    Note that we do not guarantee that any of the three values will be the
782    same type as IN, but they will have the same signedness and mode.  */
783 
784 static tree
785 split_tree (tree in, tree type, enum tree_code code,
786 	    tree *minus_varp, tree *conp, tree *minus_conp,
787 	    tree *litp, tree *minus_litp, int negate_p)
788 {
789   tree var = 0;
790   *minus_varp = 0;
791   *conp = 0;
792   *minus_conp = 0;
793   *litp = 0;
794   *minus_litp = 0;
795 
796   /* Strip any conversions that don't change the machine mode or signedness.  */
797   STRIP_SIGN_NOPS (in);
798 
799   if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
800       || TREE_CODE (in) == FIXED_CST)
801     *litp = in;
802   else if (TREE_CODE (in) == code
803 	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
804 	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
805 	       /* We can associate addition and subtraction together (even
806 		  though the C standard doesn't say so) for integers because
807 		  the value is not affected.  For reals, the value might be
808 		  affected, so we can't.  */
809 	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
810 		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
811 		   || (code == MINUS_EXPR
812 		       && (TREE_CODE (in) == PLUS_EXPR
813 			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
814     {
815       tree op0 = TREE_OPERAND (in, 0);
816       tree op1 = TREE_OPERAND (in, 1);
817       int neg1_p = TREE_CODE (in) == MINUS_EXPR;
818       int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
819 
820       /* First see if either of the operands is a literal, then a constant.  */
821       if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
822 	  || TREE_CODE (op0) == FIXED_CST)
823 	*litp = op0, op0 = 0;
824       else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
825 	       || TREE_CODE (op1) == FIXED_CST)
826 	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
827 
828       if (op0 != 0 && TREE_CONSTANT (op0))
829 	*conp = op0, op0 = 0;
830       else if (op1 != 0 && TREE_CONSTANT (op1))
831 	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
832 
833       /* If we haven't dealt with either operand, this is not a case we can
834 	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
835       if (op0 != 0 && op1 != 0)
836 	var = in;
837       else if (op0 != 0)
838 	var = op0;
839       else
840 	var = op1, neg_var_p = neg1_p;
841 
842       /* Now do any needed negations.  */
843       if (neg_litp_p)
844 	*minus_litp = *litp, *litp = 0;
845       if (neg_conp_p && *conp)
846 	*minus_conp = *conp, *conp = 0;
847       if (neg_var_p && var)
848 	*minus_varp = var, var = 0;
849     }
850   else if (TREE_CONSTANT (in))
851     *conp = in;
852   else if (TREE_CODE (in) == BIT_NOT_EXPR
853 	   && code == PLUS_EXPR)
854     {
855       /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
856          when IN is constant.  */
857       *litp = build_minus_one_cst (type);
858       *minus_varp = TREE_OPERAND (in, 0);
859     }
860   else
861     var = in;
862 
863   if (negate_p)
864     {
865       if (*litp)
866 	*minus_litp = *litp, *litp = 0;
867       else if (*minus_litp)
868 	*litp = *minus_litp, *minus_litp = 0;
869       if (*conp)
870 	*minus_conp = *conp, *conp = 0;
871       else if (*minus_conp)
872 	*conp = *minus_conp, *minus_conp = 0;
873       if (var)
874 	*minus_varp = var, var = 0;
875       else if (*minus_varp)
876 	var = *minus_varp, *minus_varp = 0;
877     }
878 
879   if (*litp
880       && TREE_OVERFLOW_P (*litp))
881     *litp = drop_tree_overflow (*litp);
882   if (*minus_litp
883       && TREE_OVERFLOW_P (*minus_litp))
884     *minus_litp = drop_tree_overflow (*minus_litp);
885 
886   return var;
887 }
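
/* A small example of the decomposition: with CODE == PLUS_EXPR and
   IN == A - 4, the MINUS_EXPR qualifies for splitting, the subtracted
   literal 4 lands in *MINUS_LITP, A is returned as the variable part,
   and the remaining outputs are set to null.  */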
888 
889 /* Re-associate trees split by the above function.  T1 and T2 are
890    either expressions to associate or null.  Return the new
891    expression, if any.  LOC is the location of the new expression.  If
892    we build an operation, do it in TYPE and with CODE.  */
893 
894 static tree
895 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
896 {
897   if (t1 == 0)
898     {
899       gcc_assert (t2 == 0 || code != MINUS_EXPR);
900       return t2;
901     }
902   else if (t2 == 0)
903     return t1;
904 
905   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
906      try to fold this since we will have infinite recursion.  But do
907      deal with any NEGATE_EXPRs.  */
908   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
909       || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
910       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
911     {
912       if (code == PLUS_EXPR)
913 	{
914 	  if (TREE_CODE (t1) == NEGATE_EXPR)
915 	    return build2_loc (loc, MINUS_EXPR, type,
916 			       fold_convert_loc (loc, type, t2),
917 			       fold_convert_loc (loc, type,
918 						 TREE_OPERAND (t1, 0)));
919 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
920 	    return build2_loc (loc, MINUS_EXPR, type,
921 			       fold_convert_loc (loc, type, t1),
922 			       fold_convert_loc (loc, type,
923 						 TREE_OPERAND (t2, 0)));
924 	  else if (integer_zerop (t2))
925 	    return fold_convert_loc (loc, type, t1);
926 	}
927       else if (code == MINUS_EXPR)
928 	{
929 	  if (integer_zerop (t2))
930 	    return fold_convert_loc (loc, type, t1);
931 	}
932 
933       return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
934 			 fold_convert_loc (loc, type, t2));
935     }
936 
937   return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
938 			  fold_convert_loc (loc, type, t2));
939 }
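
/* For instance, with CODE PLUS_EXPR, T1 the PLUS_EXPR A + B and T2 the
   NEGATE_EXPR -C, the branch above skips folding (which could recurse
   indefinitely) but still absorbs the negation, emitting (A + B) - C.  */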
940 
941 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
942    for use in int_const_binop, size_binop and size_diffop.  */
943 
944 static bool
945 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
946 {
947   if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
948     return false;
949   if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
950     return false;
951 
952   switch (code)
953     {
954     case LSHIFT_EXPR:
955     case RSHIFT_EXPR:
956     case LROTATE_EXPR:
957     case RROTATE_EXPR:
958       return true;
959 
960     default:
961       break;
962     }
963 
964   return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
965 	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
966 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
967 }
968 
969 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
970    a new constant in RES.  Return FALSE if we don't know how to
971    evaluate CODE at compile-time.  */
972 
973 bool
974 wide_int_binop (wide_int &res,
975 		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
976 		signop sign, wi::overflow_type *overflow)
977 {
978   wide_int tmp;
979   *overflow = wi::OVF_NONE;
980   switch (code)
981     {
982     case BIT_IOR_EXPR:
983       res = wi::bit_or (arg1, arg2);
984       break;
985 
986     case BIT_XOR_EXPR:
987       res = wi::bit_xor (arg1, arg2);
988       break;
989 
990     case BIT_AND_EXPR:
991       res = wi::bit_and (arg1, arg2);
992       break;
993 
994     case RSHIFT_EXPR:
995     case LSHIFT_EXPR:
996       if (wi::neg_p (arg2))
997 	{
998 	  tmp = -arg2;
999 	  if (code == RSHIFT_EXPR)
1000 	    code = LSHIFT_EXPR;
1001 	  else
1002 	    code = RSHIFT_EXPR;
1003 	}
1004       else
1005         tmp = arg2;
1006 
1007       if (code == RSHIFT_EXPR)
1008 	/* It's unclear from the C standard whether shifts can overflow.
1009 	   The following code ignores overflow; perhaps a C standard
1010 	   interpretation ruling is needed.  */
1011 	res = wi::rshift (arg1, tmp, sign);
1012       else
1013 	res = wi::lshift (arg1, tmp);
1014       break;
1015 
1016     case RROTATE_EXPR:
1017     case LROTATE_EXPR:
1018       if (wi::neg_p (arg2))
1019 	{
1020 	  tmp = -arg2;
1021 	  if (code == RROTATE_EXPR)
1022 	    code = LROTATE_EXPR;
1023 	  else
1024 	    code = RROTATE_EXPR;
1025 	}
1026       else
1027         tmp = arg2;
1028 
1029       if (code == RROTATE_EXPR)
1030 	res = wi::rrotate (arg1, tmp);
1031       else
1032 	res = wi::lrotate (arg1, tmp);
1033       break;
1034 
1035     case PLUS_EXPR:
1036       res = wi::add (arg1, arg2, sign, overflow);
1037       break;
1038 
1039     case MINUS_EXPR:
1040       res = wi::sub (arg1, arg2, sign, overflow);
1041       break;
1042 
1043     case MULT_EXPR:
1044       res = wi::mul (arg1, arg2, sign, overflow);
1045       break;
1046 
1047     case MULT_HIGHPART_EXPR:
1048       res = wi::mul_high (arg1, arg2, sign);
1049       break;
1050 
1051     case TRUNC_DIV_EXPR:
1052     case EXACT_DIV_EXPR:
1053       if (arg2 == 0)
1054 	return false;
1055       res = wi::div_trunc (arg1, arg2, sign, overflow);
1056       break;
1057 
1058     case FLOOR_DIV_EXPR:
1059       if (arg2 == 0)
1060 	return false;
1061       res = wi::div_floor (arg1, arg2, sign, overflow);
1062       break;
1063 
1064     case CEIL_DIV_EXPR:
1065       if (arg2 == 0)
1066 	return false;
1067       res = wi::div_ceil (arg1, arg2, sign, overflow);
1068       break;
1069 
1070     case ROUND_DIV_EXPR:
1071       if (arg2 == 0)
1072 	return false;
1073       res = wi::div_round (arg1, arg2, sign, overflow);
1074       break;
1075 
1076     case TRUNC_MOD_EXPR:
1077       if (arg2 == 0)
1078 	return false;
1079       res = wi::mod_trunc (arg1, arg2, sign, overflow);
1080       break;
1081 
1082     case FLOOR_MOD_EXPR:
1083       if (arg2 == 0)
1084 	return false;
1085       res = wi::mod_floor (arg1, arg2, sign, overflow);
1086       break;
1087 
1088     case CEIL_MOD_EXPR:
1089       if (arg2 == 0)
1090 	return false;
1091       res = wi::mod_ceil (arg1, arg2, sign, overflow);
1092       break;
1093 
1094     case ROUND_MOD_EXPR:
1095       if (arg2 == 0)
1096 	return false;
1097       res = wi::mod_round (arg1, arg2, sign, overflow);
1098       break;
1099 
1100     case MIN_EXPR:
1101       res = wi::min (arg1, arg2, sign);
1102       break;
1103 
1104     case MAX_EXPR:
1105       res = wi::max (arg1, arg2, sign);
1106       break;
1107 
1108     default:
1109       return false;
1110     }
1111   return true;
1112 }
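
/* Worked example: a LSHIFT_EXPR by a negative count is canonicalized
   above into the opposite shift, so shifting 16 left by -2 is evaluated
   as 16 >> 2 == 4.  All the division and modulus cases refuse a zero
   divisor by returning false rather than folding.  */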
1113 
1114 /* Combine two poly ints ARG1 and ARG2 under operation CODE to
1115    produce a new constant in RES.  Return FALSE if we don't know how
1116    to evaluate CODE at compile-time.  */
1117 
1118 static bool
1119 poly_int_binop (poly_wide_int &res, enum tree_code code,
1120 		const_tree arg1, const_tree arg2,
1121 		signop sign, wi::overflow_type *overflow)
1122 {
1123   gcc_assert (NUM_POLY_INT_COEFFS != 1);
1124   gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1125   switch (code)
1126     {
1127     case PLUS_EXPR:
1128       res = wi::add (wi::to_poly_wide (arg1),
1129 		     wi::to_poly_wide (arg2), sign, overflow);
1130       break;
1131 
1132     case MINUS_EXPR:
1133       res = wi::sub (wi::to_poly_wide (arg1),
1134 		     wi::to_poly_wide (arg2), sign, overflow);
1135       break;
1136 
1137     case MULT_EXPR:
1138       if (TREE_CODE (arg2) == INTEGER_CST)
1139 	res = wi::mul (wi::to_poly_wide (arg1),
1140 		       wi::to_wide (arg2), sign, overflow);
1141       else if (TREE_CODE (arg1) == INTEGER_CST)
1142 	res = wi::mul (wi::to_poly_wide (arg2),
1143 		       wi::to_wide (arg1), sign, overflow);
1144       else
1145 	return false;
1146       break;
1147 
1148     case LSHIFT_EXPR:
1149       if (TREE_CODE (arg2) == INTEGER_CST)
1150 	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1151       else
1152 	return false;
1153       break;
1154 
1155     case BIT_IOR_EXPR:
1156       if (TREE_CODE (arg2) != INTEGER_CST
1157 	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1158 			 &res))
1159 	return false;
1160       break;
1161 
1162     default:
1163       return false;
1164     }
1165   return true;
1166 }
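
/* Poly ints describe values of the form a + b*x for a runtime parameter
   x (e.g. a scalable vector length), so as a sketch, adding [2, 3] to
   [1, 1] gives [3, 4], i.e. (2 + 3x) + (1 + x) == 3 + 4x.  MULT_EXPR
   only folds when one operand is a plain INTEGER_CST, since the product
   of two non-constant poly ints is no longer linear in x.  */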
1167 
1168 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1169    produce a new constant.  Return NULL_TREE if we don't know how to
1170    evaluate CODE at compile-time.  */
1171 
1172 tree
1173 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1174 		 int overflowable)
1175 {
1176   poly_wide_int poly_res;
1177   tree type = TREE_TYPE (arg1);
1178   signop sign = TYPE_SIGN (type);
1179   wi::overflow_type overflow = wi::OVF_NONE;
1180 
1181   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1182     {
1183       wide_int warg1 = wi::to_wide (arg1), res;
1184       wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1185       if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1186 	return NULL_TREE;
1187       poly_res = res;
1188     }
1189   else if (!poly_int_tree_p (arg1)
1190 	   || !poly_int_tree_p (arg2)
1191 	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1192     return NULL_TREE;
1193   return force_fit_type (type, poly_res, overflowable,
1194 			 (((sign == SIGNED || overflowable == -1)
1195 			   && overflow)
1196 			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1197 }
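
/* Writing constants loosely for their INTEGER_CST trees:
   int_const_binop (PLUS_EXPR, 7, 3, 1) folds to 10, and adding 1 to the
   largest value of a signed type still folds but returns a tree with
   TREE_OVERFLOW set, since the overflow indication from the wide-int
   arithmetic is forwarded to force_fit_type.  */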
1198 
1199 /* Return true if binary operation OP distributes over addition in operand
1200    OPNO, with the other operand being held constant.  OPNO counts from 1.  */
1201 
1202 static bool
1203 distributes_over_addition_p (tree_code op, int opno)
1204 {
1205   switch (op)
1206     {
1207     case PLUS_EXPR:
1208     case MINUS_EXPR:
1209     case MULT_EXPR:
1210       return true;
1211 
1212     case LSHIFT_EXPR:
1213       return opno == 1;
1214 
1215     default:
1216       return false;
1217     }
1218 }
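
/* E.g. (a + b) << c == (a << c) + (b << c) modulo wrapping, but
   a << (b + c) != (a << b) + (a << c), which is why LSHIFT_EXPR above
   distributes only in operand 1.  */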
1219 
1220 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1221    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1222    are the same kind of constant and the same machine mode.  Return zero if
1223    combining the constants is not allowed in the current operating mode.  */
1224 
1225 static tree
1226 const_binop (enum tree_code code, tree arg1, tree arg2)
1227 {
1228   /* Sanity check for the recursive cases.  */
1229   if (!arg1 || !arg2)
1230     return NULL_TREE;
1231 
1232   STRIP_NOPS (arg1);
1233   STRIP_NOPS (arg2);
1234 
1235   if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1236     {
1237       if (code == POINTER_PLUS_EXPR)
1238 	return int_const_binop (PLUS_EXPR,
1239 				arg1, fold_convert (TREE_TYPE (arg1), arg2));
1240 
1241       return int_const_binop (code, arg1, arg2);
1242     }
1243 
1244   if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1245     {
1246       machine_mode mode;
1247       REAL_VALUE_TYPE d1;
1248       REAL_VALUE_TYPE d2;
1249       REAL_VALUE_TYPE value;
1250       REAL_VALUE_TYPE result;
1251       bool inexact;
1252       tree t, type;
1253 
1254       /* The following codes are handled by real_arithmetic.  */
1255       switch (code)
1256 	{
1257 	case PLUS_EXPR:
1258 	case MINUS_EXPR:
1259 	case MULT_EXPR:
1260 	case RDIV_EXPR:
1261 	case MIN_EXPR:
1262 	case MAX_EXPR:
1263 	  break;
1264 
1265 	default:
1266 	  return NULL_TREE;
1267 	}
1268 
1269       d1 = TREE_REAL_CST (arg1);
1270       d2 = TREE_REAL_CST (arg2);
1271 
1272       type = TREE_TYPE (arg1);
1273       mode = TYPE_MODE (type);
1274 
1275       /* Don't perform operation if we honor signaling NaNs and
1276 	 either operand is a signaling NaN.  */
1277       if (HONOR_SNANS (mode)
1278 	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
1279 	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
1280 	return NULL_TREE;
1281 
1282       /* Don't perform operation if it would raise a division
1283 	 by zero exception.  */
1284       if (code == RDIV_EXPR
1285 	  && real_equal (&d2, &dconst0)
1286 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1287 	return NULL_TREE;
1288 
1289       /* If either operand is a NaN, just return it.  Otherwise, set up
1290 	 for floating-point trap; we return an overflow.  */
1291       if (REAL_VALUE_ISNAN (d1))
1292       {
1293 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1294 	   is off.  */
1295 	d1.signalling = 0;
1296 	t = build_real (type, d1);
1297 	return t;
1298       }
1299       else if (REAL_VALUE_ISNAN (d2))
1300       {
1301 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1302 	   is off.  */
1303 	d2.signalling = 0;
1304 	t = build_real (type, d2);
1305 	return t;
1306       }
1307 
1308       inexact = real_arithmetic (&value, code, &d1, &d2);
1309       real_convert (&result, mode, &value);
1310 
1311       /* Don't constant fold this floating point operation if
1312 	 the result has overflowed and flag_trapping_math is set.  */
1313       if (flag_trapping_math
1314 	  && MODE_HAS_INFINITIES (mode)
1315 	  && REAL_VALUE_ISINF (result)
1316 	  && !REAL_VALUE_ISINF (d1)
1317 	  && !REAL_VALUE_ISINF (d2))
1318 	return NULL_TREE;
1319 
1320       /* Don't constant fold this floating point operation if the
1321 	 result may depend upon the run-time rounding mode and
1322 	 flag_rounding_math is set, or if GCC's software emulation
1323 	 is unable to accurately represent the result.  */
1324       if ((flag_rounding_math
1325 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1326 	  && (inexact || !real_identical (&result, &value)))
1327 	return NULL_TREE;
1328 
1329       t = build_real (type, result);
1330 
1331       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1332       return t;
1333     }
1334 
1335   if (TREE_CODE (arg1) == FIXED_CST)
1336     {
1337       FIXED_VALUE_TYPE f1;
1338       FIXED_VALUE_TYPE f2;
1339       FIXED_VALUE_TYPE result;
1340       tree t, type;
1341       int sat_p;
1342       bool overflow_p;
1343 
1344       /* The following codes are handled by fixed_arithmetic.  */
1345       switch (code)
1346         {
1347 	case PLUS_EXPR:
1348 	case MINUS_EXPR:
1349 	case MULT_EXPR:
1350 	case TRUNC_DIV_EXPR:
1351 	  if (TREE_CODE (arg2) != FIXED_CST)
1352 	    return NULL_TREE;
1353 	  f2 = TREE_FIXED_CST (arg2);
1354 	  break;
1355 
1356 	case LSHIFT_EXPR:
1357 	case RSHIFT_EXPR:
1358 	  {
1359 	    if (TREE_CODE (arg2) != INTEGER_CST)
1360 	      return NULL_TREE;
1361 	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1362 	    f2.data.high = w2.elt (1);
1363 	    f2.data.low = w2.ulow ();
1364 	    f2.mode = SImode;
1365 	  }
1366 	  break;
1367 
1368         default:
1369 	  return NULL_TREE;
1370         }
1371 
1372       f1 = TREE_FIXED_CST (arg1);
1373       type = TREE_TYPE (arg1);
1374       sat_p = TYPE_SATURATING (type);
1375       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1376       t = build_fixed (type, result);
1377       /* Propagate overflow flags.  */
1378       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1379 	TREE_OVERFLOW (t) = 1;
1380       return t;
1381     }
1382 
1383   if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1384     {
1385       tree type = TREE_TYPE (arg1);
1386       tree r1 = TREE_REALPART (arg1);
1387       tree i1 = TREE_IMAGPART (arg1);
1388       tree r2 = TREE_REALPART (arg2);
1389       tree i2 = TREE_IMAGPART (arg2);
1390       tree real, imag;
1391 
1392       switch (code)
1393 	{
1394 	case PLUS_EXPR:
1395 	case MINUS_EXPR:
1396 	  real = const_binop (code, r1, r2);
1397 	  imag = const_binop (code, i1, i2);
1398 	  break;
1399 
1400 	case MULT_EXPR:
1401 	  if (COMPLEX_FLOAT_TYPE_P (type))
1402 	    return do_mpc_arg2 (arg1, arg2, type,
1403 				/* do_nonfinite= */ folding_initializer,
1404 				mpc_mul);
1405 
1406 	  real = const_binop (MINUS_EXPR,
1407 			      const_binop (MULT_EXPR, r1, r2),
1408 			      const_binop (MULT_EXPR, i1, i2));
1409 	  imag = const_binop (PLUS_EXPR,
1410 			      const_binop (MULT_EXPR, r1, i2),
1411 			      const_binop (MULT_EXPR, i1, r2));
1412 	  break;
1413 
1414 	case RDIV_EXPR:
1415 	  if (COMPLEX_FLOAT_TYPE_P (type))
1416 	    return do_mpc_arg2 (arg1, arg2, type,
1417                                 /* do_nonfinite= */ folding_initializer,
1418 				mpc_div);
1419 	  /* Fall through.  */
1420 	case TRUNC_DIV_EXPR:
1421 	case CEIL_DIV_EXPR:
1422 	case FLOOR_DIV_EXPR:
1423 	case ROUND_DIV_EXPR:
1424 	  if (flag_complex_method == 0)
1425 	  {
1426 	    /* Keep this algorithm in sync with
1427 	       tree-complex.c:expand_complex_div_straight().
1428 
1429 	       Expand complex division to scalars, straightforward algorithm.
1430 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1431 	       t = br*br + bi*bi
1432 	    */
1433 	    tree magsquared
1434 	      = const_binop (PLUS_EXPR,
1435 			     const_binop (MULT_EXPR, r2, r2),
1436 			     const_binop (MULT_EXPR, i2, i2));
1437 	    tree t1
1438 	      = const_binop (PLUS_EXPR,
1439 			     const_binop (MULT_EXPR, r1, r2),
1440 			     const_binop (MULT_EXPR, i1, i2));
1441 	    tree t2
1442 	      = const_binop (MINUS_EXPR,
1443 			     const_binop (MULT_EXPR, i1, r2),
1444 			     const_binop (MULT_EXPR, r1, i2));
1445 
1446 	    real = const_binop (code, t1, magsquared);
1447 	    imag = const_binop (code, t2, magsquared);
1448 	  }
1449 	  else
1450 	  {
1451 	    /* Keep this algorithm in sync with
1452                tree-complex.c:expand_complex_div_wide().
1453 
1454 	       Expand complex division to scalars, modified algorithm to minimize
1455 	       overflow with wide input ranges.  */
1456 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1457 					fold_abs_const (r2, TREE_TYPE (type)),
1458 					fold_abs_const (i2, TREE_TYPE (type)));
1459 
1460 	    if (integer_nonzerop (compare))
1461 	      {
1462 		/* In the TRUE branch, we compute
1463 		   ratio = br/bi;
1464 		   div = (br * ratio) + bi;
1465 		   tr = (ar * ratio) + ai;
1466 		   ti = (ai * ratio) - ar;
1467 		   tr = tr / div;
1468 		   ti = ti / div;  */
1469 		tree ratio = const_binop (code, r2, i2);
1470 		tree div = const_binop (PLUS_EXPR, i2,
1471 					const_binop (MULT_EXPR, r2, ratio));
1472 		real = const_binop (MULT_EXPR, r1, ratio);
1473 		real = const_binop (PLUS_EXPR, real, i1);
1474 		real = const_binop (code, real, div);
1475 
1476 		imag = const_binop (MULT_EXPR, i1, ratio);
1477 		imag = const_binop (MINUS_EXPR, imag, r1);
1478 		imag = const_binop (code, imag, div);
1479 	      }
1480 	    else
1481 	      {
1482 		/* In the FALSE branch, we compute
1483 		   ratio = bi/br;
1484 		   div = (bi * ratio) + br;
1485 		   tr = (ai * ratio) + ar;
1486 		   ti = ai - (ar * ratio);
1487 		   tr = tr / div;
1488 		   ti = ti / div;  */
1489 		tree ratio = const_binop (code, i2, r2);
1490 		tree div = const_binop (PLUS_EXPR, r2,
1491                                         const_binop (MULT_EXPR, i2, ratio));
1492 
1493 		real = const_binop (MULT_EXPR, i1, ratio);
1494 		real = const_binop (PLUS_EXPR, real, r1);
1495 		real = const_binop (code, real, div);
1496 
1497 		imag = const_binop (MULT_EXPR, r1, ratio);
1498 		imag = const_binop (MINUS_EXPR, i1, imag);
1499 		imag = const_binop (code, imag, div);
1500 	      }
1501 	  }
1502 	  break;
1503 
1504 	default:
1505 	  return NULL_TREE;
1506 	}
1507 
1508       if (real && imag)
1509 	return build_complex (type, real, imag);
1510     }
1511 
1512   if (TREE_CODE (arg1) == VECTOR_CST
1513       && TREE_CODE (arg2) == VECTOR_CST
1514       && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1515 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1516     {
1517       tree type = TREE_TYPE (arg1);
1518       bool step_ok_p;
1519       if (VECTOR_CST_STEPPED_P (arg1)
1520 	  && VECTOR_CST_STEPPED_P (arg2))
1521 	/* We can operate directly on the encoding if:
1522 
1523 	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1524 	    implies
1525 	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1526 
1527 	   Addition and subtraction are the supported operators
1528 	   for which this is true.  */
1529 	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1530       else if (VECTOR_CST_STEPPED_P (arg1))
1531 	/* We can operate directly on stepped encodings if:
1532 
1533 	     a3 - a2 == a2 - a1
1534 	   implies:
1535 	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1536 
1537 	   which is true if (x -> x op c) distributes over addition.  */
1538 	step_ok_p = distributes_over_addition_p (code, 1);
1539       else
1540 	/* Similarly in reverse.  */
1541 	step_ok_p = distributes_over_addition_p (code, 2);
1542       tree_vector_builder elts;
1543       if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1544 	return NULL_TREE;
1545       unsigned int count = elts.encoded_nelts ();
1546       for (unsigned int i = 0; i < count; ++i)
1547 	{
1548 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1549 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1550 
1551 	  tree elt = const_binop (code, elem1, elem2);
1552 
1553 	  /* It is possible that const_binop cannot handle the given
1554 	     code and returns NULL_TREE.  */
1555 	  if (elt == NULL_TREE)
1556 	    return NULL_TREE;
1557 	  elts.quick_push (elt);
1558 	}
1559 
1560       return elts.build ();
1561     }
1562 
1563   /* Shifts allow a scalar offset for a vector.  */
1564   if (TREE_CODE (arg1) == VECTOR_CST
1565       && TREE_CODE (arg2) == INTEGER_CST)
1566     {
1567       tree type = TREE_TYPE (arg1);
1568       bool step_ok_p = distributes_over_addition_p (code, 1);
1569       tree_vector_builder elts;
1570       if (!elts.new_unary_operation (type, arg1, step_ok_p))
1571 	return NULL_TREE;
1572       unsigned int count = elts.encoded_nelts ();
1573       for (unsigned int i = 0; i < count; ++i)
1574 	{
1575 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1576 
1577 	  tree elt = const_binop (code, elem1, arg2);
1578 
1579 	  /* It is possible that const_binop cannot handle the given
1580 	     code and returns NULL_TREE.  */
1581 	  if (elt == NULL_TREE)
1582 	    return NULL_TREE;
1583 	  elts.quick_push (elt);
1584 	}
1585 
1586       return elts.build ();
1587     }
1588   return NULL_TREE;
1589 }
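
/* As an example of the final, scalar-offset case: folding a LSHIFT_EXPR
   of the VECTOR_CST { 1, 2, 3, 4 } by the INTEGER_CST 1 shifts each
   element individually, producing { 2, 4, 6, 8 }.  */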
1590 
1591 /* Overload that adds a TYPE parameter to be able to dispatch
1592    to fold_relational_const.  */
1593 
1594 tree
1595 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1596 {
1597   if (TREE_CODE_CLASS (code) == tcc_comparison)
1598     return fold_relational_const (code, type, arg1, arg2);
1599 
1600   /* ???  Until we make the const_binop worker take the type of the
1601      result as an argument, put those cases that need it here.  */
1602   switch (code)
1603     {
1604     case VEC_SERIES_EXPR:
1605       if (CONSTANT_CLASS_P (arg1)
1606 	  && CONSTANT_CLASS_P (arg2))
1607 	return build_vec_series (type, arg1, arg2);
1608       return NULL_TREE;
1609 
1610     case COMPLEX_EXPR:
1611       if ((TREE_CODE (arg1) == REAL_CST
1612 	   && TREE_CODE (arg2) == REAL_CST)
1613 	  || (TREE_CODE (arg1) == INTEGER_CST
1614 	      && TREE_CODE (arg2) == INTEGER_CST))
1615 	return build_complex (type, arg1, arg2);
1616       return NULL_TREE;
1617 
1618     case POINTER_DIFF_EXPR:
1619       if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1620 	{
1621 	  poly_offset_int res = (wi::to_poly_offset (arg1)
1622 				 - wi::to_poly_offset (arg2));
1623 	  return force_fit_type (type, res, 1,
1624 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1625 	}
1626       return NULL_TREE;
1627 
1628     case VEC_PACK_TRUNC_EXPR:
1629     case VEC_PACK_FIX_TRUNC_EXPR:
1630     case VEC_PACK_FLOAT_EXPR:
1631       {
1632 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1633 
1634 	if (TREE_CODE (arg1) != VECTOR_CST
1635 	    || TREE_CODE (arg2) != VECTOR_CST)
1636 	  return NULL_TREE;
1637 
1638 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1639 	  return NULL_TREE;
1640 
1641 	out_nelts = in_nelts * 2;
1642 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1643 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1644 
1645 	tree_vector_builder elts (type, out_nelts, 1);
1646 	for (i = 0; i < out_nelts; i++)
1647 	  {
1648 	    tree elt = (i < in_nelts
1649 			? VECTOR_CST_ELT (arg1, i)
1650 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1651 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1652 				      ? NOP_EXPR
1653 				      : code == VEC_PACK_FLOAT_EXPR
1654 				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1655 				      TREE_TYPE (type), elt);
1656 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1657 	      return NULL_TREE;
1658 	    elts.quick_push (elt);
1659 	  }
1660 
1661 	return elts.build ();
1662       }
1663 
1664     case VEC_WIDEN_MULT_LO_EXPR:
1665     case VEC_WIDEN_MULT_HI_EXPR:
1666     case VEC_WIDEN_MULT_EVEN_EXPR:
1667     case VEC_WIDEN_MULT_ODD_EXPR:
1668       {
1669 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1670 
1671 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1672 	  return NULL_TREE;
1673 
1674 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1675 	  return NULL_TREE;
1676 	out_nelts = in_nelts / 2;
1677 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1678 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1679 
1680 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1681 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1682 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1683 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1684 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1685 	  scale = 1, ofs = 0;
1686 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1687 	  scale = 1, ofs = 1;
1688 
1689 	tree_vector_builder elts (type, out_nelts, 1);
1690 	for (out = 0; out < out_nelts; out++)
1691 	  {
1692 	    unsigned int in = (out << scale) + ofs;
1693 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1694 					  VECTOR_CST_ELT (arg1, in));
1695 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1696 					  VECTOR_CST_ELT (arg2, in));
1697 
1698 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1699 	      return NULL_TREE;
1700 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1701 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1702 	      return NULL_TREE;
1703 	    elts.quick_push (elt);
1704 	  }
1705 
1706 	return elts.build ();
1707       }
1708 
1709     default:;
1710     }
1711 
1712   if (TREE_CODE_CLASS (code) != tcc_binary)
1713     return NULL_TREE;
1714 
1715   /* Make sure type and arg0 have the same saturating flag.  */
1716   gcc_checking_assert (TYPE_SATURATING (type)
1717 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1718 
1719   return const_binop (code, arg1, arg2);
1720 }
1721 
1722 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1723    Return zero if computing the constant is not possible.  */
1724 
1725 tree
1726 const_unop (enum tree_code code, tree type, tree arg0)
1727 {
1728   /* Don't perform the operation, other than NEGATE and ABS, if
1729      flag_signaling_nans is on and the operand is a signaling NaN.  */
1730   if (TREE_CODE (arg0) == REAL_CST
1731       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1732       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1733       && code != NEGATE_EXPR
1734       && code != ABS_EXPR
1735       && code != ABSU_EXPR)
1736     return NULL_TREE;
1737 
1738   switch (code)
1739     {
1740     CASE_CONVERT:
1741     case FLOAT_EXPR:
1742     case FIX_TRUNC_EXPR:
1743     case FIXED_CONVERT_EXPR:
1744       return fold_convert_const (code, type, arg0);
1745 
1746     case ADDR_SPACE_CONVERT_EXPR:
1747       /* If the source address is 0, and the source address space
1748 	 cannot have a valid object at 0, fold to dest type null.  */
1749       if (integer_zerop (arg0)
1750 	  && !(targetm.addr_space.zero_address_valid
1751 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1752 	return fold_convert_const (code, type, arg0);
1753       break;
1754 
1755     case VIEW_CONVERT_EXPR:
1756       return fold_view_convert_expr (type, arg0);
1757 
1758     case NEGATE_EXPR:
1759       {
1760 	/* Can't call fold_negate_const directly here as that doesn't
1761 	   handle all cases and we might not be able to negate some
1762 	   constants.  */
1763 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1764 	if (tem && CONSTANT_CLASS_P (tem))
1765 	  return tem;
1766 	break;
1767       }
1768 
1769     case ABS_EXPR:
1770     case ABSU_EXPR:
1771       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1772 	return fold_abs_const (arg0, type);
1773       break;
1774 
1775     case CONJ_EXPR:
1776       if (TREE_CODE (arg0) == COMPLEX_CST)
1777 	{
1778 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1779 					  TREE_TYPE (type));
1780 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1781 	}
1782       break;
1783 
1784     case BIT_NOT_EXPR:
1785       if (TREE_CODE (arg0) == INTEGER_CST)
1786 	return fold_not_const (arg0, type);
1787       else if (POLY_INT_CST_P (arg0))
1788 	return wide_int_to_tree (type, -poly_int_cst_value (arg0) - 1);
1789       /* Perform BIT_NOT_EXPR on each element individually.  */
1790       else if (TREE_CODE (arg0) == VECTOR_CST)
1791 	{
1792 	  tree elem;
1793 
1794 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1795 	  tree_vector_builder elements;
1796 	  elements.new_unary_operation (type, arg0, true);
1797 	  unsigned int i, count = elements.encoded_nelts ();
1798 	  for (i = 0; i < count; ++i)
1799 	    {
1800 	      elem = VECTOR_CST_ELT (arg0, i);
1801 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1802 	      if (elem == NULL_TREE)
1803 		break;
1804 	      elements.quick_push (elem);
1805 	    }
1806 	  if (i == count)
1807 	    return elements.build ();
1808 	}
1809       break;
1810 
1811     case TRUTH_NOT_EXPR:
1812       if (TREE_CODE (arg0) == INTEGER_CST)
1813 	return constant_boolean_node (integer_zerop (arg0), type);
1814       break;
1815 
1816     case REALPART_EXPR:
1817       if (TREE_CODE (arg0) == COMPLEX_CST)
1818 	return fold_convert (type, TREE_REALPART (arg0));
1819       break;
1820 
1821     case IMAGPART_EXPR:
1822       if (TREE_CODE (arg0) == COMPLEX_CST)
1823 	return fold_convert (type, TREE_IMAGPART (arg0));
1824       break;
1825 
1826     case VEC_UNPACK_LO_EXPR:
1827     case VEC_UNPACK_HI_EXPR:
1828     case VEC_UNPACK_FLOAT_LO_EXPR:
1829     case VEC_UNPACK_FLOAT_HI_EXPR:
1830     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1831     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1832       {
1833 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1834 	enum tree_code subcode;
1835 
1836 	if (TREE_CODE (arg0) != VECTOR_CST)
1837 	  return NULL_TREE;
1838 
1839 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1840 	  return NULL_TREE;
1841 	out_nelts = in_nelts / 2;
1842 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1843 
1844 	unsigned int offset = 0;
1845 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1846 				   || code == VEC_UNPACK_FLOAT_LO_EXPR
1847 				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1848 	  offset = out_nelts;
1849 
1850 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1851 	  subcode = NOP_EXPR;
1852 	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1853 		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1854 	  subcode = FLOAT_EXPR;
1855 	else
1856 	  subcode = FIX_TRUNC_EXPR;
1857 
1858 	tree_vector_builder elts (type, out_nelts, 1);
1859 	for (i = 0; i < out_nelts; i++)
1860 	  {
1861 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1862 					   VECTOR_CST_ELT (arg0, i + offset));
1863 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1864 	      return NULL_TREE;
1865 	    elts.quick_push (elt);
1866 	  }
1867 
1868 	return elts.build ();
1869       }
1870 
1871     case VEC_DUPLICATE_EXPR:
1872       if (CONSTANT_CLASS_P (arg0))
1873 	return build_vector_from_val (type, arg0);
1874       return NULL_TREE;
1875 
1876     default:
1877       break;
1878     }
1879 
1880   return NULL_TREE;
1881 }
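
/* Illustrative example (not part of the original source), assuming the
   usual global tree nodes are initialized:

     tree t = const_unop (BIT_NOT_EXPR, integer_type_node,
			  build_int_cst (integer_type_node, 5));

   T is the INTEGER_CST -6, since ~x == -1 - x in two's complement.
   A non-constant operand makes const_unop return NULL_TREE instead.  */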
1882 
1883 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1884    indicates which particular sizetype to create.  */
1885 
1886 tree
1887 size_int_kind (poly_int64 number, enum size_type_kind kind)
1888 {
1889   return build_int_cst (sizetype_tab[(int) kind], number);
1890 }
1891 
1892 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1893    is a tree code.  The type of the result is taken from the operands.
1894    Both must be equivalent integer types, a la int_binop_types_match_p.
1895    If the operands are constant, so is the result.  */
1896 
1897 tree
1898 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1899 {
1900   tree type = TREE_TYPE (arg0);
1901 
1902   if (arg0 == error_mark_node || arg1 == error_mark_node)
1903     return error_mark_node;
1904 
1905   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1906                                        TREE_TYPE (arg1)));
1907 
1908   /* Handle the special case of two poly_int constants faster.  */
1909   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1910     {
1911       /* And some specific cases even faster than that.  */
1912       if (code == PLUS_EXPR)
1913 	{
1914 	  if (integer_zerop (arg0)
1915 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1916 	    return arg1;
1917 	  if (integer_zerop (arg1)
1918 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1919 	    return arg0;
1920 	}
1921       else if (code == MINUS_EXPR)
1922 	{
1923 	  if (integer_zerop (arg1)
1924 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1925 	    return arg0;
1926 	}
1927       else if (code == MULT_EXPR)
1928 	{
1929 	  if (integer_onep (arg0)
1930 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1931 	    return arg1;
1932 	}
1933 
1934       /* Handle general case of two integer constants.  For sizetype
1935          constant calculations we always want to know about overflow,
1936 	 even in the unsigned case.  */
1937       tree res = int_const_binop (code, arg0, arg1, -1);
1938       if (res != NULL_TREE)
1939 	return res;
1940     }
1941 
1942   return fold_build2_loc (loc, code, type, arg0, arg1);
1943 }
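
/* Illustrative example (not part of the original source): the fast
   paths above mean that

     size_binop (PLUS_EXPR, size_int (0), size_int (8))

   hands back the size_int (8) operand directly, while two nonzero
   sizetype constants go through int_const_binop, which also tracks
   overflow for sizetype arithmetic.  */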
1944 
1945 /* Given two values, either both of sizetype or both of bitsizetype,
1946    compute the difference between the two values.  Return the value
1947    in signed type corresponding to the type of the operands.  */
1948 
1949 tree
1950 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1951 {
1952   tree type = TREE_TYPE (arg0);
1953   tree ctype;
1954 
1955   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1956 				       TREE_TYPE (arg1)));
1957 
1958   /* If the type is already signed, just do the simple thing.  */
1959   if (!TYPE_UNSIGNED (type))
1960     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1961 
1962   if (type == sizetype)
1963     ctype = ssizetype;
1964   else if (type == bitsizetype)
1965     ctype = sbitsizetype;
1966   else
1967     ctype = signed_type_for (type);
1968 
1969   /* If either operand is not a constant, do the conversions to the signed
1970      type and subtract.  The hardware will do the right thing with any
1971      overflow in the subtraction.  */
1972   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1973     return size_binop_loc (loc, MINUS_EXPR,
1974 			   fold_convert_loc (loc, ctype, arg0),
1975 			   fold_convert_loc (loc, ctype, arg1));
1976 
1977   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1978      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1979      overflow) and negate (which can't either).  Special-case a result
1980      of zero while we're here.  */
1981   if (tree_int_cst_equal (arg0, arg1))
1982     return build_int_cst (ctype, 0);
1983   else if (tree_int_cst_lt (arg1, arg0))
1984     return fold_convert_loc (loc, ctype,
1985 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1986   else
1987     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1988 			   fold_convert_loc (loc, ctype,
1989 					     size_binop_loc (loc,
1990 							     MINUS_EXPR,
1991 							     arg1, arg0)));
1992 }
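
/* Illustrative note (not part of the original source): because sizetype
   is unsigned, size_diffop of the sizetype constants 4 and 12 takes the
   final branch above: it computes 12 - 4 in sizetype, converts to
   ssizetype and negates, yielding -8 rather than a wrapped-around
   unsigned value.  */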
1993 
1994 /* A subroutine of fold_convert_const handling conversions of an
1995    INTEGER_CST to another integer type.  */
1996 
1997 static tree
1998 fold_convert_const_int_from_int (tree type, const_tree arg1)
1999 {
2000   /* Given an integer constant, make new constant with new type,
2001      appropriately sign-extended or truncated.  Use widest_int
2002      so that any extension is done according to ARG1's type.  */
2003   return force_fit_type (type, wi::to_widest (arg1),
2004 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2005 			 TREE_OVERFLOW (arg1));
2006 }
2007 
2008 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2009    to an integer type.  */
2010 
2011 static tree
2012 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2013 {
2014   bool overflow = false;
2015   tree t;
2016 
2017   /* The following code implements the floating point to integer
2018      conversion rules required by the Java Language Specification,
2019      namely that IEEE NaNs are mapped to zero and values that overflow
2020      the target precision saturate, i.e. values greater than
2021      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2022      are mapped to INT_MIN.  These semantics are allowed by the
2023      C and C++ standards that simply state that the behavior of
2024      FP-to-integer conversion is unspecified upon overflow.  */
2025 
2026   wide_int val;
2027   REAL_VALUE_TYPE r;
2028   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2029 
2030   switch (code)
2031     {
2032     case FIX_TRUNC_EXPR:
2033       real_trunc (&r, VOIDmode, &x);
2034       break;
2035 
2036     default:
2037       gcc_unreachable ();
2038     }
2039 
2040   /* If R is NaN, return zero and show we have an overflow.  */
2041   if (REAL_VALUE_ISNAN (r))
2042     {
2043       overflow = true;
2044       val = wi::zero (TYPE_PRECISION (type));
2045     }
2046 
2047   /* See if R is less than the lower bound or greater than the
2048      upper bound.  */
2049 
2050   if (! overflow)
2051     {
2052       tree lt = TYPE_MIN_VALUE (type);
2053       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2054       if (real_less (&r, &l))
2055 	{
2056 	  overflow = true;
2057 	  val = wi::to_wide (lt);
2058 	}
2059     }
2060 
2061   if (! overflow)
2062     {
2063       tree ut = TYPE_MAX_VALUE (type);
2064       if (ut)
2065 	{
2066 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2067 	  if (real_less (&u, &r))
2068 	    {
2069 	      overflow = true;
2070 	      val = wi::to_wide (ut);
2071 	    }
2072 	}
2073     }
2074 
2075   if (! overflow)
2076     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2077 
2078   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2079   return t;
2080 }
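
/* Illustrative note (not part of the original source): under these
   saturating semantics, converting the REAL_CST 1e10 to a 32-bit
   signed integer type yields 2147483647 (the type's maximum) with
   TREE_OVERFLOW set, and converting a NaN yields 0, also flagged as
   overflow.  */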
2081 
2082 /* A subroutine of fold_convert_const handling conversions of a
2083    FIXED_CST to an integer type.  */
2084 
2085 static tree
2086 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2087 {
2088   tree t;
2089   double_int temp, temp_trunc;
2090   scalar_mode mode;
2091 
2092   /* Right shift FIXED_CST to temp by fbit.  */
2093   temp = TREE_FIXED_CST (arg1).data;
2094   mode = TREE_FIXED_CST (arg1).mode;
2095   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2096     {
2097       temp = temp.rshift (GET_MODE_FBIT (mode),
2098 			  HOST_BITS_PER_DOUBLE_INT,
2099 			  SIGNED_FIXED_POINT_MODE_P (mode));
2100 
2101       /* Left shift temp to temp_trunc by fbit.  */
2102       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2103 				HOST_BITS_PER_DOUBLE_INT,
2104 				SIGNED_FIXED_POINT_MODE_P (mode));
2105     }
2106   else
2107     {
2108       temp = double_int_zero;
2109       temp_trunc = double_int_zero;
2110     }
2111 
2112   /* If FIXED_CST is negative, we need to round the value toward 0:
2113      if the fractional bits are nonzero, add 1 to temp.  */
2114   if (SIGNED_FIXED_POINT_MODE_P (mode)
2115       && temp_trunc.is_negative ()
2116       && TREE_FIXED_CST (arg1).data != temp_trunc)
2117     temp += double_int_one;
2118 
2119   /* Given a fixed-point constant, make new constant with new type,
2120      appropriately sign-extended or truncated.  */
2121   t = force_fit_type (type, temp, -1,
2122 		      (temp.is_negative ()
2123 		       && (TYPE_UNSIGNED (type)
2124 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2125 		      | TREE_OVERFLOW (arg1));
2126 
2127   return t;
2128 }
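
/* Illustrative note (not part of the original source): the rounding
   adjustment above means a signed fixed-point constant of -2.5
   converts to the integer -2, not -3.  The arithmetic right shift
   alone would floor the value to -3, but the nonzero fractional bits
   add 1 back, rounding toward zero.  */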
2129 
2130 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2131    to another floating point type.  */
2132 
2133 static tree
2134 fold_convert_const_real_from_real (tree type, const_tree arg1)
2135 {
2136   REAL_VALUE_TYPE value;
2137   tree t;
2138 
2139   /* Don't perform the operation if flag_signaling_nans is on
2140      and the operand is a signaling NaN.  */
2141   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2142       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2143     return NULL_TREE;
2144 
2145   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2146   t = build_real (type, value);
2147 
2148   /* If converting an infinity or NAN to a representation that doesn't
2149      have one, set the overflow bit so that we can produce some kind of
2150      error message at the appropriate point if necessary.  It's not the
2151      most user-friendly message, but it's better than nothing.  */
2152   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2153       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2154     TREE_OVERFLOW (t) = 1;
2155   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2156 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2157     TREE_OVERFLOW (t) = 1;
2158   /* Regular overflow: the conversion produced an infinity in a mode
2159      that can't represent infinities.  */
2160   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2161 	   && REAL_VALUE_ISINF (value)
2162 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2163     TREE_OVERFLOW (t) = 1;
2164   else
2165     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2166   return t;
2167 }
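
/* Illustrative note (not part of the original source): converting the
   double constant 1e300 to float yields +Inf with TREE_OVERFLOW left
   clear, because SFmode does have infinities.  The overflow cases
   above only trigger for destination modes lacking Inf or NaN, such
   as the VAX floating-point formats.  */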
2168 
2169 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2170    to a floating point type.  */
2171 
2172 static tree
2173 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2174 {
2175   REAL_VALUE_TYPE value;
2176   tree t;
2177 
2178   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2179 			   &TREE_FIXED_CST (arg1));
2180   t = build_real (type, value);
2181 
2182   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2183   return t;
2184 }
2185 
2186 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2187    to another fixed-point type.  */
2188 
2189 static tree
2190 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2191 {
2192   FIXED_VALUE_TYPE value;
2193   tree t;
2194   bool overflow_p;
2195 
2196   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2197 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2198   t = build_fixed (type, value);
2199 
2200   /* Propagate overflow flags.  */
2201   if (overflow_p | TREE_OVERFLOW (arg1))
2202     TREE_OVERFLOW (t) = 1;
2203   return t;
2204 }
2205 
2206 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2207    to a fixed-point type.  */
2208 
2209 static tree
2210 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2211 {
2212   FIXED_VALUE_TYPE value;
2213   tree t;
2214   bool overflow_p;
2215   double_int di;
2216 
2217   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2218 
2219   di.low = TREE_INT_CST_ELT (arg1, 0);
2220   if (TREE_INT_CST_NUNITS (arg1) == 1)
2221     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2222   else
2223     di.high = TREE_INT_CST_ELT (arg1, 1);
2224 
2225   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2226 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2227 				       TYPE_SATURATING (type));
2228   t = build_fixed (type, value);
2229 
2230   /* Propagate overflow flags.  */
2231   if (overflow_p | TREE_OVERFLOW (arg1))
2232     TREE_OVERFLOW (t) = 1;
2233   return t;
2234 }
2235 
2236 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2237    to a fixed-point type.  */
2238 
2239 static tree
2240 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2241 {
2242   FIXED_VALUE_TYPE value;
2243   tree t;
2244   bool overflow_p;
2245 
2246   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2247 					&TREE_REAL_CST (arg1),
2248 					TYPE_SATURATING (type));
2249   t = build_fixed (type, value);
2250 
2251   /* Propagate overflow flags.  */
2252   if (overflow_p | TREE_OVERFLOW (arg1))
2253     TREE_OVERFLOW (t) = 1;
2254   return t;
2255 }
2256 
2257 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2258    type TYPE.  If no simplification can be done return NULL_TREE.  */
2259 
2260 static tree
2261 fold_convert_const (enum tree_code code, tree type, tree arg1)
2262 {
2263   tree arg_type = TREE_TYPE (arg1);
2264   if (arg_type == type)
2265     return arg1;
2266 
2267   /* We can't widen types, since the runtime value could overflow the
2268      original type before being extended to the new type.  */
2269   if (POLY_INT_CST_P (arg1)
2270       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2271       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2272     return build_poly_int_cst (type,
2273 			       poly_wide_int::from (poly_int_cst_value (arg1),
2274 						    TYPE_PRECISION (type),
2275 						    TYPE_SIGN (arg_type)));
2276 
2277   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2278       || TREE_CODE (type) == OFFSET_TYPE)
2279     {
2280       if (TREE_CODE (arg1) == INTEGER_CST)
2281 	return fold_convert_const_int_from_int (type, arg1);
2282       else if (TREE_CODE (arg1) == REAL_CST)
2283 	return fold_convert_const_int_from_real (code, type, arg1);
2284       else if (TREE_CODE (arg1) == FIXED_CST)
2285 	return fold_convert_const_int_from_fixed (type, arg1);
2286     }
2287   else if (TREE_CODE (type) == REAL_TYPE)
2288     {
2289       if (TREE_CODE (arg1) == INTEGER_CST)
2290 	return build_real_from_int_cst (type, arg1);
2291       else if (TREE_CODE (arg1) == REAL_CST)
2292 	return fold_convert_const_real_from_real (type, arg1);
2293       else if (TREE_CODE (arg1) == FIXED_CST)
2294 	return fold_convert_const_real_from_fixed (type, arg1);
2295     }
2296   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2297     {
2298       if (TREE_CODE (arg1) == FIXED_CST)
2299 	return fold_convert_const_fixed_from_fixed (type, arg1);
2300       else if (TREE_CODE (arg1) == INTEGER_CST)
2301 	return fold_convert_const_fixed_from_int (type, arg1);
2302       else if (TREE_CODE (arg1) == REAL_CST)
2303 	return fold_convert_const_fixed_from_real (type, arg1);
2304     }
2305   else if (TREE_CODE (type) == VECTOR_TYPE)
2306     {
2307       if (TREE_CODE (arg1) == VECTOR_CST
2308 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2309 	{
2310 	  tree elttype = TREE_TYPE (type);
2311 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2312 	  /* We can't handle steps directly when extending, since the
2313 	     values need to wrap at the original precision first.  */
2314 	  bool step_ok_p
2315 	    = (INTEGRAL_TYPE_P (elttype)
2316 	       && INTEGRAL_TYPE_P (arg1_elttype)
2317 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2318 	  tree_vector_builder v;
2319 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2320 	    return NULL_TREE;
2321 	  unsigned int len = v.encoded_nelts ();
2322 	  for (unsigned int i = 0; i < len; ++i)
2323 	    {
2324 	      tree elt = VECTOR_CST_ELT (arg1, i);
2325 	      tree cvt = fold_convert_const (code, elttype, elt);
2326 	      if (cvt == NULL_TREE)
2327 		return NULL_TREE;
2328 	      v.quick_push (cvt);
2329 	    }
2330 	  return v.build ();
2331 	}
2332     }
2333   return NULL_TREE;
2334 }
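
/* Illustrative example (not part of the original source), showing a
   typical truncating conversion through this routine:

     tree c = fold_convert_const (NOP_EXPR, unsigned_char_type_node,
				  build_int_cst (integer_type_node, 300));

   C is the INTEGER_CST 44 (300 mod 256).  Combinations not handled
   above simply return NULL_TREE.  */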
2335 
2336 /* Construct a vector of zero elements of vector type TYPE.  */
2337 
2338 static tree
2339 build_zero_vector (tree type)
2340 {
2341   tree t;
2342 
2343   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2344   return build_vector_from_val (type, t);
2345 }
2346 
2347 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2348 
2349 bool
2350 fold_convertible_p (const_tree type, const_tree arg)
2351 {
2352   tree orig = TREE_TYPE (arg);
2353 
2354   if (type == orig)
2355     return true;
2356 
2357   if (TREE_CODE (arg) == ERROR_MARK
2358       || TREE_CODE (type) == ERROR_MARK
2359       || TREE_CODE (orig) == ERROR_MARK)
2360     return false;
2361 
2362   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2363     return true;
2364 
2365   switch (TREE_CODE (type))
2366     {
2367     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2368     case POINTER_TYPE: case REFERENCE_TYPE:
2369     case OFFSET_TYPE:
2370       return (INTEGRAL_TYPE_P (orig)
2371 	      || (POINTER_TYPE_P (orig)
2372 		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2373 	      || TREE_CODE (orig) == OFFSET_TYPE);
2374 
2375     case REAL_TYPE:
2376     case FIXED_POINT_TYPE:
2377     case VECTOR_TYPE:
2378     case VOID_TYPE:
2379       return TREE_CODE (type) == TREE_CODE (orig);
2380 
2381     default:
2382       return false;
2383     }
2384 }
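
/* Illustrative note (not part of the original source): by the rules
   above an int expression is fold-convertible to an enum or pointer
   type, a pointer is fold-convertible to an integer type only of no
   greater precision, and aggregate (struct/union) types are never
   handled here.  */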
2385 
2386 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2387    simple conversions in preference to calling the front-end's convert.  */
2388 
2389 tree
2390 fold_convert_loc (location_t loc, tree type, tree arg)
2391 {
2392   tree orig = TREE_TYPE (arg);
2393   tree tem;
2394 
2395   if (type == orig)
2396     return arg;
2397 
2398   if (TREE_CODE (arg) == ERROR_MARK
2399       || TREE_CODE (type) == ERROR_MARK
2400       || TREE_CODE (orig) == ERROR_MARK)
2401     return error_mark_node;
2402 
2403   switch (TREE_CODE (type))
2404     {
2405     case POINTER_TYPE:
2406     case REFERENCE_TYPE:
2407       /* Handle conversions between pointers to different address spaces.  */
2408       if (POINTER_TYPE_P (orig)
2409 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2410 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2411 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2412       /* fall through */
2413 
2414     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2415     case OFFSET_TYPE:
2416       if (TREE_CODE (arg) == INTEGER_CST)
2417 	{
2418 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2419 	  if (tem != NULL_TREE)
2420 	    return tem;
2421 	}
2422       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2423 	  || TREE_CODE (orig) == OFFSET_TYPE)
2424 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2425       if (TREE_CODE (orig) == COMPLEX_TYPE)
2426 	return fold_convert_loc (loc, type,
2427 				 fold_build1_loc (loc, REALPART_EXPR,
2428 						  TREE_TYPE (orig), arg));
2429       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2430 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2431       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2432 
2433     case REAL_TYPE:
2434       if (TREE_CODE (arg) == INTEGER_CST)
2435 	{
2436 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2437 	  if (tem != NULL_TREE)
2438 	    return tem;
2439 	}
2440       else if (TREE_CODE (arg) == REAL_CST)
2441 	{
2442 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2443 	  if (tem != NULL_TREE)
2444 	    return tem;
2445 	}
2446       else if (TREE_CODE (arg) == FIXED_CST)
2447 	{
2448 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2449 	  if (tem != NULL_TREE)
2450 	    return tem;
2451 	}
2452 
2453       switch (TREE_CODE (orig))
2454 	{
2455 	case INTEGER_TYPE:
2456 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2457 	case POINTER_TYPE: case REFERENCE_TYPE:
2458 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2459 
2460 	case REAL_TYPE:
2461 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2462 
2463 	case FIXED_POINT_TYPE:
2464 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2465 
2466 	case COMPLEX_TYPE:
2467 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2468 	  return fold_convert_loc (loc, type, tem);
2469 
2470 	default:
2471 	  gcc_unreachable ();
2472 	}
2473 
2474     case FIXED_POINT_TYPE:
2475       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2476 	  || TREE_CODE (arg) == REAL_CST)
2477 	{
2478 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2479 	  if (tem != NULL_TREE)
2480 	    goto fold_convert_exit;
2481 	}
2482 
2483       switch (TREE_CODE (orig))
2484 	{
2485 	case FIXED_POINT_TYPE:
2486 	case INTEGER_TYPE:
2487 	case ENUMERAL_TYPE:
2488 	case BOOLEAN_TYPE:
2489 	case REAL_TYPE:
2490 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2491 
2492 	case COMPLEX_TYPE:
2493 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2494 	  return fold_convert_loc (loc, type, tem);
2495 
2496 	default:
2497 	  gcc_unreachable ();
2498 	}
2499 
2500     case COMPLEX_TYPE:
2501       switch (TREE_CODE (orig))
2502 	{
2503 	case INTEGER_TYPE:
2504 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2505 	case POINTER_TYPE: case REFERENCE_TYPE:
2506 	case REAL_TYPE:
2507 	case FIXED_POINT_TYPE:
2508 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2509 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2510 			      fold_convert_loc (loc, TREE_TYPE (type),
2511 					    integer_zero_node));
2512 	case COMPLEX_TYPE:
2513 	  {
2514 	    tree rpart, ipart;
2515 
2516 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2517 	      {
2518 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2519 				      TREE_OPERAND (arg, 0));
2520 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2521 				      TREE_OPERAND (arg, 1));
2522 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2523 	      }
2524 
2525 	    arg = save_expr (arg);
2526 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2527 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2528 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2529 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2530 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 	  }
2532 
2533 	default:
2534 	  gcc_unreachable ();
2535 	}
2536 
2537     case VECTOR_TYPE:
2538       if (integer_zerop (arg))
2539 	return build_zero_vector (type);
2540       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2541       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2542 		  || TREE_CODE (orig) == VECTOR_TYPE);
2543       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2544 
2545     case VOID_TYPE:
2546       tem = fold_ignored_result (arg);
2547       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2548 
2549     default:
2550       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2551 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2552       gcc_unreachable ();
2553     }
2554  fold_convert_exit:
2555   protected_set_expr_location_unshare (tem, loc);
2556   return tem;
2557 }
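
/* Illustrative example (not part of the original source): through the
   fold_convert wrapper,

     tree r = fold_convert (double_type_node,
			    build_int_cst (integer_type_node, 3));

   folds directly to the REAL_CST 3.0, whereas a non-constant integer
   argument would instead build FLOAT_EXPR <arg>.  */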
2558 
2559 /* Return false if expr can be assumed not to be an lvalue, true
2560    otherwise.  */
2561 
2562 static bool
2563 maybe_lvalue_p (const_tree x)
2564 {
2565   /* We only need to wrap lvalue tree codes.  */
2566   switch (TREE_CODE (x))
2567   {
2568   case VAR_DECL:
2569   case PARM_DECL:
2570   case RESULT_DECL:
2571   case LABEL_DECL:
2572   case FUNCTION_DECL:
2573   case SSA_NAME:
2574 
2575   case COMPONENT_REF:
2576   case MEM_REF:
2577   case INDIRECT_REF:
2578   case ARRAY_REF:
2579   case ARRAY_RANGE_REF:
2580   case BIT_FIELD_REF:
2581   case OBJ_TYPE_REF:
2582 
2583   case REALPART_EXPR:
2584   case IMAGPART_EXPR:
2585   case PREINCREMENT_EXPR:
2586   case PREDECREMENT_EXPR:
2587   case SAVE_EXPR:
2588   case TRY_CATCH_EXPR:
2589   case WITH_CLEANUP_EXPR:
2590   case COMPOUND_EXPR:
2591   case MODIFY_EXPR:
2592   case TARGET_EXPR:
2593   case COND_EXPR:
2594   case BIND_EXPR:
2595     break;
2596 
2597   default:
2598     /* Assume the worst for front-end tree codes.  */
2599     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2600       break;
2601     return false;
2602   }
2603 
2604   return true;
2605 }
2606 
2607 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2608 
2609 tree
2610 non_lvalue_loc (location_t loc, tree x)
2611 {
2612   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2613      us.  */
2614   if (in_gimple_form)
2615     return x;
2616 
2617   if (! maybe_lvalue_p (x))
2618     return x;
2619   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2620 }
2621 
2622 /* When pedantic, return an expr equal to X but certainly not valid as a
2623    pedantic lvalue.  Otherwise, return X.  */
2624 
2625 static tree
2626 pedantic_non_lvalue_loc (location_t loc, tree x)
2627 {
2628   return protected_set_expr_location_unshare (x, loc);
2629 }
2630 
2631 /* Given a tree comparison code, return the code that is the logical inverse.
2632    It is generally not safe to do this for floating-point comparisons, except
2633    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2634    ERROR_MARK in this case.  */
2635 
2636 enum tree_code
2637 invert_tree_comparison (enum tree_code code, bool honor_nans)
2638 {
2639   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2640       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2641     return ERROR_MARK;
2642 
2643   switch (code)
2644     {
2645     case EQ_EXPR:
2646       return NE_EXPR;
2647     case NE_EXPR:
2648       return EQ_EXPR;
2649     case GT_EXPR:
2650       return honor_nans ? UNLE_EXPR : LE_EXPR;
2651     case GE_EXPR:
2652       return honor_nans ? UNLT_EXPR : LT_EXPR;
2653     case LT_EXPR:
2654       return honor_nans ? UNGE_EXPR : GE_EXPR;
2655     case LE_EXPR:
2656       return honor_nans ? UNGT_EXPR : GT_EXPR;
2657     case LTGT_EXPR:
2658       return UNEQ_EXPR;
2659     case UNEQ_EXPR:
2660       return LTGT_EXPR;
2661     case UNGT_EXPR:
2662       return LE_EXPR;
2663     case UNGE_EXPR:
2664       return LT_EXPR;
2665     case UNLT_EXPR:
2666       return GE_EXPR;
2667     case UNLE_EXPR:
2668       return GT_EXPR;
2669     case ORDERED_EXPR:
2670       return UNORDERED_EXPR;
2671     case UNORDERED_EXPR:
2672       return ORDERED_EXPR;
2673     default:
2674       gcc_unreachable ();
2675     }
2676 }
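
/* Illustrative note (not part of the original source): with NaNs
   honored, the inverse of LT_EXPR is UNGE_EXPR rather than GE_EXPR,
   because !(x < y) must also hold when x or y is a NaN.  Under
   -ftrapping-math the rewrite could change which comparisons trap on
   unordered operands, hence the ERROR_MARK escape above.  */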
2677 
2678 /* Similar, but return the comparison that results if the operands are
2679    swapped.  This is safe for floating-point.  */
2680 
2681 enum tree_code
2682 swap_tree_comparison (enum tree_code code)
2683 {
2684   switch (code)
2685     {
2686     case EQ_EXPR:
2687     case NE_EXPR:
2688     case ORDERED_EXPR:
2689     case UNORDERED_EXPR:
2690     case LTGT_EXPR:
2691     case UNEQ_EXPR:
2692       return code;
2693     case GT_EXPR:
2694       return LT_EXPR;
2695     case GE_EXPR:
2696       return LE_EXPR;
2697     case LT_EXPR:
2698       return GT_EXPR;
2699     case LE_EXPR:
2700       return GE_EXPR;
2701     case UNGT_EXPR:
2702       return UNLT_EXPR;
2703     case UNGE_EXPR:
2704       return UNLE_EXPR;
2705     case UNLT_EXPR:
2706       return UNGT_EXPR;
2707     case UNLE_EXPR:
2708       return UNGE_EXPR;
2709     default:
2710       gcc_unreachable ();
2711     }
2712 }
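
/* Illustrative note (not part of the original source): swapping merely
   reverses operand order, so it is safe for floating point: a <= b
   becomes b >= a, UNGT becomes UNLT, and the symmetric codes (EQ, NE,
   ORDERED, UNORDERED, LTGT, UNEQ) map to themselves.  */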
2713 
2714 
2715 /* Convert a comparison tree code from an enum tree_code representation
2716    into a compcode bit-based encoding.  This function is the inverse of
2717    compcode_to_comparison.  */
2718 
2719 static enum comparison_code
2720 comparison_to_compcode (enum tree_code code)
2721 {
2722   switch (code)
2723     {
2724     case LT_EXPR:
2725       return COMPCODE_LT;
2726     case EQ_EXPR:
2727       return COMPCODE_EQ;
2728     case LE_EXPR:
2729       return COMPCODE_LE;
2730     case GT_EXPR:
2731       return COMPCODE_GT;
2732     case NE_EXPR:
2733       return COMPCODE_NE;
2734     case GE_EXPR:
2735       return COMPCODE_GE;
2736     case ORDERED_EXPR:
2737       return COMPCODE_ORD;
2738     case UNORDERED_EXPR:
2739       return COMPCODE_UNORD;
2740     case UNLT_EXPR:
2741       return COMPCODE_UNLT;
2742     case UNEQ_EXPR:
2743       return COMPCODE_UNEQ;
2744     case UNLE_EXPR:
2745       return COMPCODE_UNLE;
2746     case UNGT_EXPR:
2747       return COMPCODE_UNGT;
2748     case LTGT_EXPR:
2749       return COMPCODE_LTGT;
2750     case UNGE_EXPR:
2751       return COMPCODE_UNGE;
2752     default:
2753       gcc_unreachable ();
2754     }
2755 }
2756 
2757 /* Convert a compcode bit-based encoding of a comparison operator back
2758    to GCC's enum tree_code representation.  This function is the
2759    inverse of comparison_to_compcode.  */
2760 
2761 static enum tree_code
2762 compcode_to_comparison (enum comparison_code code)
2763 {
2764   switch (code)
2765     {
2766     case COMPCODE_LT:
2767       return LT_EXPR;
2768     case COMPCODE_EQ:
2769       return EQ_EXPR;
2770     case COMPCODE_LE:
2771       return LE_EXPR;
2772     case COMPCODE_GT:
2773       return GT_EXPR;
2774     case COMPCODE_NE:
2775       return NE_EXPR;
2776     case COMPCODE_GE:
2777       return GE_EXPR;
2778     case COMPCODE_ORD:
2779       return ORDERED_EXPR;
2780     case COMPCODE_UNORD:
2781       return UNORDERED_EXPR;
2782     case COMPCODE_UNLT:
2783       return UNLT_EXPR;
2784     case COMPCODE_UNEQ:
2785       return UNEQ_EXPR;
2786     case COMPCODE_UNLE:
2787       return UNLE_EXPR;
2788     case COMPCODE_UNGT:
2789       return UNGT_EXPR;
2790     case COMPCODE_LTGT:
2791       return LTGT_EXPR;
2792     case COMPCODE_UNGE:
2793       return UNGE_EXPR;
2794     default:
2795       gcc_unreachable ();
2796     }
2797 }
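
/* Illustrative note (not part of the original source): the compcode
   encoding is bitwise, built from the primitive bits LT, EQ, GT and
   UNORD, e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ).
   This is what lets combine_comparisons below merge two comparisons
   with plain & and |.  */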
2798 
2799 /* Return true if COND1 tests the opposite condition of COND2.  */
2800 
2801 bool
2802 inverse_conditions_p (const_tree cond1, const_tree cond2)
2803 {
2804   return (COMPARISON_CLASS_P (cond1)
2805 	  && COMPARISON_CLASS_P (cond2)
2806 	  && (invert_tree_comparison
2807 	      (TREE_CODE (cond1),
2808 	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2809 	  && operand_equal_p (TREE_OPERAND (cond1, 0),
2810 			      TREE_OPERAND (cond2, 0), 0)
2811 	  && operand_equal_p (TREE_OPERAND (cond1, 1),
2812 			      TREE_OPERAND (cond2, 1), 0));
2813 }
2814 
2815 /* Return a tree for the comparison which is the combination of
2816    doing the AND or OR (depending on CODE) of the two operations LCODE
2817    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2818    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2819    if this makes the transformation invalid.  */
2820 
2821 tree
2822 combine_comparisons (location_t loc,
2823 		     enum tree_code code, enum tree_code lcode,
2824 		     enum tree_code rcode, tree truth_type,
2825 		     tree ll_arg, tree lr_arg)
2826 {
2827   bool honor_nans = HONOR_NANS (ll_arg);
2828   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2829   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2830   int compcode;
2831 
2832   switch (code)
2833     {
2834     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2835       compcode = lcompcode & rcompcode;
2836       break;
2837 
2838     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2839       compcode = lcompcode | rcompcode;
2840       break;
2841 
2842     default:
2843       return NULL_TREE;
2844     }
2845 
2846   if (!honor_nans)
2847     {
2848       /* Eliminate unordered comparisons, as well as LTGT and ORD
2849 	 which are not used unless the mode has NaNs.  */
2850       compcode &= ~COMPCODE_UNORD;
2851       if (compcode == COMPCODE_LTGT)
2852 	compcode = COMPCODE_NE;
2853       else if (compcode == COMPCODE_ORD)
2854 	compcode = COMPCODE_TRUE;
2855     }
2856    else if (flag_trapping_math)
2857      {
2858 	/* Check that the original operation and the optimized ones will trap
2859 	   under the same condition.  */
2860 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2861 		     && (lcompcode != COMPCODE_EQ)
2862 		     && (lcompcode != COMPCODE_ORD);
2863 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2864 		     && (rcompcode != COMPCODE_EQ)
2865 		     && (rcompcode != COMPCODE_ORD);
2866 	bool trap = (compcode & COMPCODE_UNORD) == 0
2867 		    && (compcode != COMPCODE_EQ)
2868 		    && (compcode != COMPCODE_ORD);
2869 
2870         /* In a short-circuited boolean expression the LHS might be
2871 	   such that the RHS, if evaluated, will never trap.  For
2872 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2873 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2874 	   example, the expression above will never trap, hence
2875 	   optimizing it to x < y would be invalid).  */
2876         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2877             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2878           rtrap = false;
2879 
2880         /* If the comparison was short-circuited, and only the RHS
2881 	   trapped, we may now generate a spurious trap.  */
2882 	if (rtrap && !ltrap
2883 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2884 	  return NULL_TREE;
2885 
2886 	/* If we changed the conditions that cause a trap, we lose.  */
2887 	if ((ltrap || rtrap) != trap)
2888 	  return NULL_TREE;
2889       }
2890 
2891   if (compcode == COMPCODE_TRUE)
2892     return constant_boolean_node (true, truth_type);
2893   else if (compcode == COMPCODE_FALSE)
2894     return constant_boolean_node (false, truth_type);
2895   else
2896     {
2897       enum tree_code tcode;
2898 
2899       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2900       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2901     }
2902 }
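
/* Illustrative note (not part of the original source): for operands
   where NaNs need not be honored, combine_comparisons folds
   (a < b) || (a == b) into a <= b, since
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, and folds
   (a < b) && (a == b) into constant false, since
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE.  */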
2903 
2904 /* Return nonzero if two operands (typically of the same tree node)
2905    are necessarily equal. FLAGS modifies behavior as follows:
2906 
2907    If OEP_ONLY_CONST is set, only return nonzero for constants.
2908    This function tests whether the operands are indistinguishable;
2909    it does not test whether they are equal using C's == operation.
2910    The distinction is important for IEEE floating point, because
2911    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2912    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2913 
2914    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2915    even though it may hold multiple values during a function.
2916    This is because a GCC tree node guarantees that nothing else is
2917    executed between the evaluation of its "operands" (which may often
2918    be evaluated in arbitrary order).  Hence if the operands themselves
2919    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2920    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2921    unset means assuming isochronic (or instantaneous) tree equivalence.
2922    Unless comparing arbitrary expression trees, such as from different
2923    statements, this flag can usually be left unset.
2924 
2925    If OEP_PURE_SAME is set, then pure functions with identical arguments
2926    are considered the same.  It is used when the caller has other ways
2927    to ensure that global memory is unchanged in between.
2928 
2929    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2930    not values of expressions.
2931 
2932    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2933    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2934 
2935    If OEP_BITWISE is set, then require the values to be bitwise identical
2936    rather than simply numerically equal.  Do not take advantage of things
2937    like math-related flags or undefined behavior; only return true for
2938    values that are provably bitwise identical in all circumstances.
2939 
2940    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2941    any operand with side effect.  This is unnecessarily conservative in the
2942    case we know that arg0 and arg1 are in disjoint code paths (such as in
2943    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2944    addresses with TREE_CONSTANT flag set so we know that &var == &var
2945    even if var is volatile.  */
2946 
2947 int
2948 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2949 {
2950   /* When checking, verify at the outermost operand_equal_p call that
2951      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2952      hash value.  */
2953   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2954     {
2955       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2956 	{
2957 	  if (arg0 != arg1)
2958 	    {
2959 	      inchash::hash hstate0 (0), hstate1 (0);
2960 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2961 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2962 	      hashval_t h0 = hstate0.end ();
2963 	      hashval_t h1 = hstate1.end ();
2964 	      gcc_assert (h0 == h1);
2965 	    }
2966 	  return 1;
2967 	}
2968       else
2969 	return 0;
2970     }
2971 
2972   STRIP_ANY_LOCATION_WRAPPER (arg0);
2973   STRIP_ANY_LOCATION_WRAPPER (arg1);
2974 
2975   /* If either is ERROR_MARK, they aren't equal.  */
2976   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2977       || TREE_TYPE (arg0) == error_mark_node
2978       || TREE_TYPE (arg1) == error_mark_node)
2979     return 0;
2980 
2981   /* Similarly, if either does not have a type (like a template id),
2982      they aren't equal.  */
2983   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2984     return 0;
2985 
2986   /* Bitwise identity makes no sense if the values have different layouts.  */
2987   if ((flags & OEP_BITWISE)
2988       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2989     return 0;
2990 
2991   /* We cannot consider pointers to different address space equal.  */
2992   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2993       && POINTER_TYPE_P (TREE_TYPE (arg1))
2994       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2995 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2996     return 0;
2997 
2998   /* Check equality of integer constants before bailing out due to
2999      precision differences.  */
3000   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3001     {
3002       /* Address of INTEGER_CST is not defined; check that we did not forget
3003 	 to drop the OEP_ADDRESS_OF flags.  */
3004       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3005       return tree_int_cst_equal (arg0, arg1);
3006     }
3007 
3008   if (!(flags & OEP_ADDRESS_OF))
3009     {
3010       /* If both types don't have the same signedness, then we can't consider
3011 	 them equal.  We must check this before the STRIP_NOPS calls
3012 	 because they may change the signedness of the arguments.  As pointers
3013 	 strictly don't have a signedness, require either two pointers or
3014 	 two non-pointers as well.  */
3015       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3016 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
3017 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
3018 	return 0;
3019 
3020       /* If both types don't have the same precision, then it is not safe
3021 	 to strip NOPs.  */
3022       if (element_precision (TREE_TYPE (arg0))
3023 	  != element_precision (TREE_TYPE (arg1)))
3024 	return 0;
3025 
3026       STRIP_NOPS (arg0);
3027       STRIP_NOPS (arg1);
3028     }
3029 #if 0
3030   /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
3031      sanity check once the issue is solved.  */
3032   else
3033     /* Addresses of conversions and SSA_NAMEs (and many other things)
3034        are not defined.  Check that we did not forget to drop the
3035        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
3036     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3037 			 && TREE_CODE (arg0) != SSA_NAME);
3038 #endif
3039 
3040   /* In case both args are comparisons but with different comparison
3041      code, try to swap the comparison operands of one arg to produce
3042      a match and compare that variant.  */
3043   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3044       && COMPARISON_CLASS_P (arg0)
3045       && COMPARISON_CLASS_P (arg1))
3046     {
3047       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3048 
3049       if (TREE_CODE (arg0) == swap_code)
3050 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3051 			        TREE_OPERAND (arg1, 1), flags)
3052 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3053 				   TREE_OPERAND (arg1, 0), flags);
3054     }
3055 
3056   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3057     {
3058       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3059       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3060 	;
3061       else if (flags & OEP_ADDRESS_OF)
3062 	{
3063 	  /* If we are interested in comparing addresses ignore
3064 	     MEM_REF wrappings of the base that can appear just for
3065 	     TBAA reasons.  */
3066 	  if (TREE_CODE (arg0) == MEM_REF
3067 	      && DECL_P (arg1)
3068 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3069 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3070 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3071 	    return 1;
3072 	  else if (TREE_CODE (arg1) == MEM_REF
3073 		   && DECL_P (arg0)
3074 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3075 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3076 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3077 	    return 1;
3078 	  return 0;
3079 	}
3080       else
3081 	return 0;
3082     }
3083 
3084   /* When not checking addresses, this is needed for conversions and for
3085      COMPONENT_REF.  Might as well play it safe and always test this.  */
3086   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3087       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3088       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3089 	  && !(flags & OEP_ADDRESS_OF)))
3090     return 0;
3091 
3092   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3093      We don't care about side effects in that case because the SAVE_EXPR
3094      takes care of that for us. In all other cases, two expressions are
3095      equal if they have no side effects.  If we have two identical
3096      expressions with side effects that should be treated the same due
3097      to the only side effects being identical SAVE_EXPR's, that will
3098      be detected in the recursive calls below.
3099      If we are taking an invariant address of two identical objects
3100      they are necessarily equal as well.  */
3101   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3102       && (TREE_CODE (arg0) == SAVE_EXPR
3103 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3104 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3105     return 1;
3106 
3107   /* Next handle constant cases, those for which we can return 1 even
3108      if ONLY_CONST is set.  */
3109   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3110     switch (TREE_CODE (arg0))
3111       {
3112       case INTEGER_CST:
3113 	return tree_int_cst_equal (arg0, arg1);
3114 
3115       case FIXED_CST:
3116 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3117 				       TREE_FIXED_CST (arg1));
3118 
3119       case REAL_CST:
3120 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3121 	  return 1;
3122 
3123 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3124 	  {
3125 	    /* If we do not distinguish between signed and unsigned zero,
3126 	       consider them equal.  */
3127 	    if (real_zerop (arg0) && real_zerop (arg1))
3128 	      return 1;
3129 	  }
3130 	return 0;
3131 
3132       case VECTOR_CST:
3133 	{
3134 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3135 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3136 	    return 0;
3137 
3138 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3139 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3140 	    return 0;
3141 
3142 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3143 	  for (unsigned int i = 0; i < count; ++i)
3144 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3145 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3146 	      return 0;
3147 	  return 1;
3148 	}
3149 
3150       case COMPLEX_CST:
3151 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3152 				 flags)
3153 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3154 				    flags));
3155 
3156       case STRING_CST:
3157 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3158 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3159 			      TREE_STRING_POINTER (arg1),
3160 			      TREE_STRING_LENGTH (arg0)));
3161 
3162       case ADDR_EXPR:
3163 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3164 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3165 				flags | OEP_ADDRESS_OF
3166 				| OEP_MATCH_SIDE_EFFECTS);
3167       case CONSTRUCTOR:
3168 	/* In GIMPLE empty constructors are allowed in initializers of
3169 	   aggregates.  */
3170 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3171       default:
3172 	break;
3173       }
3174 
3175   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3176      two instances of undefined behavior will give identical results.  */
3177   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3178     return 0;
3179 
3180 /* Define macros to test an operand from arg0 and arg1 for equality and a
3181    variant that allows null and views null as being different from any
3182    non-null value.  In the latter case, if either is null, both
3183    must be; otherwise, do the normal comparison.  */
3184 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3185 				    TREE_OPERAND (arg1, N), flags)
3186 
3187 #define OP_SAME_WITH_NULL(N)				\
3188   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3189    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3190 
3191   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3192     {
3193     case tcc_unary:
3194       /* Two conversions are equal only if signedness and modes match.  */
3195       switch (TREE_CODE (arg0))
3196         {
3197 	CASE_CONVERT:
3198         case FIX_TRUNC_EXPR:
3199 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3200 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3201 	    return 0;
3202 	  break;
3203 	default:
3204 	  break;
3205 	}
3206 
3207       return OP_SAME (0);
3208 
3209 
3210     case tcc_comparison:
3211     case tcc_binary:
3212       if (OP_SAME (0) && OP_SAME (1))
3213 	return 1;
3214 
3215       /* For commutative ops, allow the other order.  */
3216       return (commutative_tree_code (TREE_CODE (arg0))
3217 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3218 				  TREE_OPERAND (arg1, 1), flags)
3219 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3220 				  TREE_OPERAND (arg1, 0), flags));
3221 
3222     case tcc_reference:
3223       /* If either of the pointer (or reference) expressions we are
3224 	 dereferencing contain a side effect, these cannot be equal,
3225 	 but their addresses can be.  */
3226       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3227 	  && (TREE_SIDE_EFFECTS (arg0)
3228 	      || TREE_SIDE_EFFECTS (arg1)))
3229 	return 0;
3230 
3231       switch (TREE_CODE (arg0))
3232 	{
3233 	case INDIRECT_REF:
3234 	  if (!(flags & OEP_ADDRESS_OF))
3235 	    {
3236 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3237 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3238 		return 0;
3239 	      /* Verify that the access types are compatible.  */
3240 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3241 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3242 		return 0;
3243 	    }
3244 	  flags &= ~OEP_ADDRESS_OF;
3245 	  return OP_SAME (0);
3246 
3247 	case IMAGPART_EXPR:
3248 	  /* Require the same offset.  */
3249 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3250 				TYPE_SIZE (TREE_TYPE (arg1)),
3251 				flags & ~OEP_ADDRESS_OF))
3252 	    return 0;
3253 
3254 	/* Fallthru.  */
3255 	case REALPART_EXPR:
3256 	case VIEW_CONVERT_EXPR:
3257 	  return OP_SAME (0);
3258 
3259 	case TARGET_MEM_REF:
3260 	case MEM_REF:
3261 	  if (!(flags & OEP_ADDRESS_OF))
3262 	    {
3263 	      /* Require equal access sizes.  */
3264 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3265 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3266 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3267 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3268 					   TYPE_SIZE (TREE_TYPE (arg1)),
3269 					   flags)))
3270 		return 0;
3271 	      /* Verify that access happens in similar types.  */
3272 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3273 		return 0;
3274 	      /* Verify that accesses are TBAA compatible.  */
3275 	      if (!alias_ptr_types_compatible_p
3276 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3277 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3278 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3279 		      != MR_DEPENDENCE_CLIQUE (arg1))
3280 		  || (MR_DEPENDENCE_BASE (arg0)
3281 		      != MR_DEPENDENCE_BASE (arg1)))
3282 		return 0;
3283 	     /* Verify that alignment is compatible.  */
3284 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3285 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3286 		return 0;
3287 	    }
3288 	  flags &= ~OEP_ADDRESS_OF;
3289 	  return (OP_SAME (0) && OP_SAME (1)
3290 		  /* TARGET_MEM_REFs require equal extra operands.  */
3291 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3292 		      || (OP_SAME_WITH_NULL (2)
3293 			  && OP_SAME_WITH_NULL (3)
3294 			  && OP_SAME_WITH_NULL (4))));
3295 
3296 	case ARRAY_REF:
3297 	case ARRAY_RANGE_REF:
3298 	  if (!OP_SAME (0))
3299 	    return 0;
3300 	  flags &= ~OEP_ADDRESS_OF;
3301 	  /* First compare the array index by value if it is constant, as we
3302 	     may have different types but the same value here.  */
3303 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3304 				       TREE_OPERAND (arg1, 1))
3305 		   || OP_SAME (1))
3306 		  && OP_SAME_WITH_NULL (2)
3307 		  && OP_SAME_WITH_NULL (3)
3308 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3309 		     we have to account for the offset of the ref.  */
3310 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3311 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3312 		      || (operand_equal_p (array_ref_low_bound
3313 					     (CONST_CAST_TREE (arg0)),
3314 					   array_ref_low_bound
3315 					     (CONST_CAST_TREE (arg1)), flags)
3316 			  && operand_equal_p (array_ref_element_size
3317 					        (CONST_CAST_TREE (arg0)),
3318 					      array_ref_element_size
3319 					        (CONST_CAST_TREE (arg1)),
3320 					      flags))));
3321 
3322 	case COMPONENT_REF:
3323 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3324 	     may be NULL when we're called to compare MEM_EXPRs.  */
3325 	  if (!OP_SAME_WITH_NULL (0)
3326 	      || !OP_SAME (1))
3327 	    return 0;
3328 	  flags &= ~OEP_ADDRESS_OF;
3329 	  return OP_SAME_WITH_NULL (2);
3330 
3331 	case BIT_FIELD_REF:
3332 	  if (!OP_SAME (0))
3333 	    return 0;
3334 	  flags &= ~OEP_ADDRESS_OF;
3335 	  return OP_SAME (1) && OP_SAME (2);
3336 
3337 	default:
3338 	  return 0;
3339 	}
3340 
3341     case tcc_expression:
3342       switch (TREE_CODE (arg0))
3343 	{
3344 	case ADDR_EXPR:
3345 	  /* Be sure we pass the right OEP_ADDRESS_OF flag.  */
3346 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3347 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3348 				  TREE_OPERAND (arg1, 0),
3349 				  flags | OEP_ADDRESS_OF);
3350 
3351 	case TRUTH_NOT_EXPR:
3352 	  return OP_SAME (0);
3353 
3354 	case TRUTH_ANDIF_EXPR:
3355 	case TRUTH_ORIF_EXPR:
3356 	  return OP_SAME (0) && OP_SAME (1);
3357 
3358 	case WIDEN_MULT_PLUS_EXPR:
3359 	case WIDEN_MULT_MINUS_EXPR:
3360 	  if (!OP_SAME (2))
3361 	    return 0;
3362 	  /* The multiplication operands are commutative.  */
3363 	  /* FALLTHRU */
3364 
3365 	case TRUTH_AND_EXPR:
3366 	case TRUTH_OR_EXPR:
3367 	case TRUTH_XOR_EXPR:
3368 	  if (OP_SAME (0) && OP_SAME (1))
3369 	    return 1;
3370 
3371 	  /* Otherwise take into account that this is a commutative operation.  */
3372 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3373 				   TREE_OPERAND (arg1, 1), flags)
3374 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3375 				      TREE_OPERAND (arg1, 0), flags));
3376 
3377 	case COND_EXPR:
3378 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3379 	    return 0;
3380 	  flags &= ~OEP_ADDRESS_OF;
3381 	  return OP_SAME (0);
3382 
3383 	case BIT_INSERT_EXPR:
3384 	  /* BIT_INSERT_EXPR has an implicit operand, the type precision
3385 	     of op1.  Check that it is the same for both trees.  */
3386 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3387 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3388 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3389 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3390 	    return false;
3391 	  /* FALLTHRU */
3392 
3393 	case VEC_COND_EXPR:
3394 	case DOT_PROD_EXPR:
3395 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3396 
3397 	case MODIFY_EXPR:
3398 	case INIT_EXPR:
3399 	case COMPOUND_EXPR:
3400 	case PREDECREMENT_EXPR:
3401 	case PREINCREMENT_EXPR:
3402 	case POSTDECREMENT_EXPR:
3403 	case POSTINCREMENT_EXPR:
3404 	  if (flags & OEP_LEXICOGRAPHIC)
3405 	    return OP_SAME (0) && OP_SAME (1);
3406 	  return 0;
3407 
3408 	case CLEANUP_POINT_EXPR:
3409 	case EXPR_STMT:
3410 	case SAVE_EXPR:
3411 	  if (flags & OEP_LEXICOGRAPHIC)
3412 	    return OP_SAME (0);
3413 	  return 0;
3414 
3415 	default:
3416 	  return 0;
3417 	}
3418 
3419     case tcc_vl_exp:
3420       switch (TREE_CODE (arg0))
3421 	{
3422 	case CALL_EXPR:
3423 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3424 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3425 	    /* If the CALL_EXPRs are not both internal or both normal
3426 	       function calls, then they are not equal.  */
3427 	    return 0;
3428 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3429 	    {
3430 	      /* If the CALL_EXPRs call different internal functions, then they
3431 		 are not equal.  */
3432 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3433 		return 0;
3434 	    }
3435 	  else
3436 	    {
3437 	      /* If the CALL_EXPRs call different functions, then they are not
3438 		 equal.  */
3439 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3440 				     flags))
3441 		return 0;
3442 	    }
3443 
3444 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3445 	  {
3446 	    unsigned int cef = call_expr_flags (arg0);
3447 	    if (flags & OEP_PURE_SAME)
3448 	      cef &= ECF_CONST | ECF_PURE;
3449 	    else
3450 	      cef &= ECF_CONST;
3451 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3452 	      return 0;
3453 	  }
3454 
3455 	  /* Now see if all the arguments are the same.  */
3456 	  {
3457 	    const_call_expr_arg_iterator iter0, iter1;
3458 	    const_tree a0, a1;
3459 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3460 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3461 		 a0 && a1;
3462 		 a0 = next_const_call_expr_arg (&iter0),
3463 		   a1 = next_const_call_expr_arg (&iter1))
3464 	      if (! operand_equal_p (a0, a1, flags))
3465 		return 0;
3466 
3467 	    /* If we get here and both argument lists are exhausted
3468 	       then the CALL_EXPRs are equal.  */
3469 	    return ! (a0 || a1);
3470 	  }
3471 	default:
3472 	  return 0;
3473 	}
3474 
3475     case tcc_declaration:
3476       /* Consider __builtin_sqrt equal to sqrt.  */
3477       return (TREE_CODE (arg0) == FUNCTION_DECL
3478 	      && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3479 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3480 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3481 
3482     case tcc_exceptional:
3483       if (TREE_CODE (arg0) == CONSTRUCTOR)
3484 	{
3485 	  /* In GIMPLE constructors are used only to build vectors from
3486 	     elements.  Individual elements in the constructor must be
3487 	     indexed in increasing order and form an initial sequence.
3488 
3489 	     We make no effort to compare constructors in GENERIC.
3490 	     (see sem_variable::equals in ipa-icf which can do so for
3491 	      constants).  */
3492 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3493 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3494 	    return 0;
3495 
3496 	  /* Be sure that the vectors constructed have the same representation.
3497 	     So far only the element precisions and modes have been tested
3498 	     to match.  Vectors may be BLKmode, so also check that the
3499 	     numbers of parts match.  */
3500 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3501 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3502 	    return 0;
3503 
3504 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3505 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3506 	  unsigned int len = vec_safe_length (v0);
3507 
3508 	  if (len != vec_safe_length (v1))
3509 	    return 0;
3510 
3511 	  for (unsigned int i = 0; i < len; i++)
3512 	    {
3513 	      constructor_elt *c0 = &(*v0)[i];
3514 	      constructor_elt *c1 = &(*v1)[i];
3515 
3516 	      if (!operand_equal_p (c0->value, c1->value, flags)
3517 		  /* In GIMPLE the indexes can be either NULL or matching i.
3518 		     Double check this so we won't get false
3519 		     positives for GENERIC.  */
3520 		  || (c0->index
3521 		      && (TREE_CODE (c0->index) != INTEGER_CST
3522 			  || !compare_tree_int (c0->index, i)))
3523 		  || (c1->index
3524 		      && (TREE_CODE (c1->index) != INTEGER_CST
3525 			  || !compare_tree_int (c1->index, i))))
3526 		return 0;
3527 	    }
3528 	  return 1;
3529 	}
3530       else if (TREE_CODE (arg0) == STATEMENT_LIST
3531 	       && (flags & OEP_LEXICOGRAPHIC))
3532 	{
3533 	  /* Compare the STATEMENT_LISTs.  */
3534 	  tree_stmt_iterator tsi1, tsi2;
3535 	  tree body1 = CONST_CAST_TREE (arg0);
3536 	  tree body2 = CONST_CAST_TREE (arg1);
3537 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3538 	       tsi_next (&tsi1), tsi_next (&tsi2))
3539 	    {
3540 	      /* The lists don't have the same number of statements.  */
3541 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3542 		return 0;
3543 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3544 		return 1;
3545 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3546 				    flags & (OEP_LEXICOGRAPHIC
3547 					     | OEP_NO_HASH_CHECK)))
3548 		return 0;
3549 	    }
3550 	}
3551       return 0;
3552 
3553     case tcc_statement:
3554       switch (TREE_CODE (arg0))
3555 	{
3556 	case RETURN_EXPR:
3557 	  if (flags & OEP_LEXICOGRAPHIC)
3558 	    return OP_SAME_WITH_NULL (0);
3559 	  return 0;
3560 	case DEBUG_BEGIN_STMT:
3561 	  if (flags & OEP_LEXICOGRAPHIC)
3562 	    return 1;
3563 	  return 0;
3564 	default:
3565 	  return 0;
3566 	 }
3567 
3568     default:
3569       return 0;
3570     }
3571 
3572 #undef OP_SAME
3573 #undef OP_SAME_WITH_NULL
3574 }
3575 
3576 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3577    with a different signedness or a narrower precision.  */
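
/* Illustrative example, not part of GCC: given a declaration
   "short s;", the trees for "s" and "(int) s" compare equal here,
   since the single widening conversion on the second argument is
   stripped before the inner values are compared.  */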
3578 
3579 static bool
3580 operand_equal_for_comparison_p (tree arg0, tree arg1)
3581 {
3582   if (operand_equal_p (arg0, arg1, 0))
3583     return true;
3584 
3585   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3586       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3587     return false;
3588 
3589   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3590      and see if the inner values are the same.  This removes any
3591      signedness comparison, which doesn't matter here.  */
3592   tree op0 = arg0;
3593   tree op1 = arg1;
3594   STRIP_NOPS (op0);
3595   STRIP_NOPS (op1);
3596   if (operand_equal_p (op0, op1, 0))
3597     return true;
3598 
3599   /* Discard a single widening conversion from ARG1 and see if the inner
3600      value is the same as ARG0.  */
3601   if (CONVERT_EXPR_P (arg1)
3602       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3603       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3604          < TYPE_PRECISION (TREE_TYPE (arg1))
3605       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3606     return true;
3607 
3608   return false;
3609 }
3610 
3611 /* See if ARG is an expression that is either a comparison or is performing
3612    arithmetic on comparisons.  The comparisons must only be comparing
3613    two different values, which will be stored in *CVAL1 and *CVAL2; if
3614    they are nonzero it means that some operands have already been found.
3615    No variables may be used anywhere else in the expression except in the
3616    comparisons.
3617 
3618    If this is true, return 1.  Otherwise, return zero.  */
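
/* Illustrative example, not part of GCC: for the GENERIC tree of
   "a < b && a > b" this returns 1 with *CVAL1 == a and *CVAL2 == b,
   while "a < b && a > c" returns 0 because three distinct values
   appear in the comparisons.  */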
3619 
3620 static int
3621 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3622 {
3623   enum tree_code code = TREE_CODE (arg);
3624   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3625 
3626   /* We can handle some of the tcc_expression cases here.  */
3627   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3628     tclass = tcc_unary;
3629   else if (tclass == tcc_expression
3630 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3631 	       || code == COMPOUND_EXPR))
3632     tclass = tcc_binary;
3633 
3634   switch (tclass)
3635     {
3636     case tcc_unary:
3637       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3638 
3639     case tcc_binary:
3640       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3641 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3642 
3643     case tcc_constant:
3644       return 1;
3645 
3646     case tcc_expression:
3647       if (code == COND_EXPR)
3648 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3649 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3650 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3651       return 0;
3652 
3653     case tcc_comparison:
3654       /* First see if we can handle the first operand, then the second.  For
3655 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3656 	 one side of the comparison is each of the values; test for the
3657 	 case where this isn't true by failing if the two operands
3658 	 are the same.  */
3659 
3660       if (operand_equal_p (TREE_OPERAND (arg, 0),
3661 			   TREE_OPERAND (arg, 1), 0))
3662 	return 0;
3663 
3664       if (*cval1 == 0)
3665 	*cval1 = TREE_OPERAND (arg, 0);
3666       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3667 	;
3668       else if (*cval2 == 0)
3669 	*cval2 = TREE_OPERAND (arg, 0);
3670       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3671 	;
3672       else
3673 	return 0;
3674 
3675       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3676 	;
3677       else if (*cval2 == 0)
3678 	*cval2 = TREE_OPERAND (arg, 1);
3679       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3680 	;
3681       else
3682 	return 0;
3683 
3684       return 1;
3685 
3686     default:
3687       return 0;
3688     }
3689 }
3690 
3691 /* ARG is a tree that is known to contain just arithmetic operations and
3692    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3693    any occurrence of OLD0 as an operand of a comparison and likewise for
3694    NEW1 and OLD1.  */
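
/* Illustrative example, not part of GCC: with OLD0 = a, NEW0 = x,
   OLD1 = b and NEW1 = y, the tree for "a < b ? a == b : a > b" is
   rewritten as "x < y ? x == y : x > y".  */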
3695 
3696 static tree
3697 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3698 	    tree old1, tree new1)
3699 {
3700   tree type = TREE_TYPE (arg);
3701   enum tree_code code = TREE_CODE (arg);
3702   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3703 
3704   /* We can handle some of the tcc_expression cases here.  */
3705   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3706     tclass = tcc_unary;
3707   else if (tclass == tcc_expression
3708 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3709     tclass = tcc_binary;
3710 
3711   switch (tclass)
3712     {
3713     case tcc_unary:
3714       return fold_build1_loc (loc, code, type,
3715 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3716 				      old0, new0, old1, new1));
3717 
3718     case tcc_binary:
3719       return fold_build2_loc (loc, code, type,
3720 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3721 				      old0, new0, old1, new1),
3722 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3723 				      old0, new0, old1, new1));
3724 
3725     case tcc_expression:
3726       switch (code)
3727 	{
3728 	case SAVE_EXPR:
3729 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3730 			     old1, new1);
3731 
3732 	case COMPOUND_EXPR:
3733 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3734 			     old1, new1);
3735 
3736 	case COND_EXPR:
3737 	  return fold_build3_loc (loc, code, type,
3738 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3739 					  old0, new0, old1, new1),
3740 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3741 					  old0, new0, old1, new1),
3742 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3743 					  old0, new0, old1, new1));
3744 	default:
3745 	  break;
3746 	}
3747       /* Fall through - ???  */
3748 
3749     case tcc_comparison:
3750       {
3751 	tree arg0 = TREE_OPERAND (arg, 0);
3752 	tree arg1 = TREE_OPERAND (arg, 1);
3753 
3754 	/* We need to check for both exact equality and tree equality.  The
3755 	   former will be true if the operand has a side-effect.  In that
3756 	   case, we know the operand occurred exactly once.  */
3757 
3758 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3759 	  arg0 = new0;
3760 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3761 	  arg0 = new1;
3762 
3763 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3764 	  arg1 = new0;
3765 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3766 	  arg1 = new1;
3767 
3768 	return fold_build2_loc (loc, code, type, arg0, arg1);
3769       }
3770 
3771     default:
3772       return arg;
3773     }
3774 }
3775 
3776 /* Return a tree for the case when the result of an expression is RESULT
3777    converted to TYPE and OMITTED was previously an operand of the expression
3778    but is now not needed (e.g., we folded OMITTED * 0).
3779 
3780    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3781    the conversion of RESULT to TYPE.  */
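
/* Illustrative example, not part of GCC: when "f () * 0" is folded
   and f has side effects, the result is COMPOUND_EXPR <f (), 0>,
   i.e. the C expression "(f (), 0)", so the call is still
   evaluated.  */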
3782 
3783 tree
3784 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3785 {
3786   tree t = fold_convert_loc (loc, type, result);
3787 
3788   /* If the resulting operand is an empty statement, just return the omitted
3789 	     statement cast to void.  */
3790   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3791     return build1_loc (loc, NOP_EXPR, void_type_node,
3792 		       fold_ignored_result (omitted));
3793 
3794   if (TREE_SIDE_EFFECTS (omitted))
3795     return build2_loc (loc, COMPOUND_EXPR, type,
3796 		       fold_ignored_result (omitted), t);
3797 
3798   return non_lvalue_loc (loc, t);
3799 }
3800 
3801 /* Return a tree for the case when the result of an expression is RESULT
3802    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3803    of the expression but are now not needed.
3804 
3805    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3806    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3807    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3808    just do the conversion of RESULT to TYPE.  */
3809 
3810 tree
3811 omit_two_operands_loc (location_t loc, tree type, tree result,
3812 		       tree omitted1, tree omitted2)
3813 {
3814   tree t = fold_convert_loc (loc, type, result);
3815 
3816   if (TREE_SIDE_EFFECTS (omitted2))
3817     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3818   if (TREE_SIDE_EFFECTS (omitted1))
3819     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3820 
3821   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3822 }
3823 
3824 
3825 /* Return a simplified tree node for the truth-negation of ARG.  This
3826    never alters ARG itself.  We assume that ARG is an operation that
3827    returns a truth value (0 or 1).
3828 
3829    FIXME: one would think we would fold the result, but it causes
3830    problems with the dominator optimizer.  */
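
/* Illustrative example, not part of GCC: "!(a < b)" is rewritten as
   "a >= b" for integral operands.  For floating-point operands under
   -ftrapping-math no inversion is attempted, since "a >= b" and
   "!(a < b)" differ when an operand is a NaN.  */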
3831 
3832 static tree
3833 fold_truth_not_expr (location_t loc, tree arg)
3834 {
3835   tree type = TREE_TYPE (arg);
3836   enum tree_code code = TREE_CODE (arg);
3837   location_t loc1, loc2;
3838 
3839   /* If this is a comparison, we can simply invert it, except for
3840      floating-point non-equality comparisons, in which case we just
3841      enclose a TRUTH_NOT_EXPR around what we have.  */
3842 
3843   if (TREE_CODE_CLASS (code) == tcc_comparison)
3844     {
3845       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3846       if (FLOAT_TYPE_P (op_type)
3847 	  && flag_trapping_math
3848 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3849 	  && code != NE_EXPR && code != EQ_EXPR)
3850 	return NULL_TREE;
3851 
3852       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3853       if (code == ERROR_MARK)
3854 	return NULL_TREE;
3855 
3856       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3857 			     TREE_OPERAND (arg, 1));
3858       if (TREE_NO_WARNING (arg))
3859 	TREE_NO_WARNING (ret) = 1;
3860       return ret;
3861     }
3862 
3863   switch (code)
3864     {
3865     case INTEGER_CST:
3866       return constant_boolean_node (integer_zerop (arg), type);
3867 
3868     case TRUTH_AND_EXPR:
3869       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3870       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3871       return build2_loc (loc, TRUTH_OR_EXPR, type,
3872 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3873 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3874 
3875     case TRUTH_OR_EXPR:
3876       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3877       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3878       return build2_loc (loc, TRUTH_AND_EXPR, type,
3879 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3880 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3881 
3882     case TRUTH_XOR_EXPR:
3883       /* Here we can invert either operand.  We invert the first operand
3884 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3885 	 result is the XOR of the first operand with the inside of the
3886 	 negation of the second operand.  */
3887 
3888       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3889 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3890 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3891       else
3892 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3893 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3894 			   TREE_OPERAND (arg, 1));
3895 
3896     case TRUTH_ANDIF_EXPR:
3897       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3898       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3899       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3900 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3901 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3902 
3903     case TRUTH_ORIF_EXPR:
3904       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3905       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3906       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3907 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3908 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3909 
3910     case TRUTH_NOT_EXPR:
3911       return TREE_OPERAND (arg, 0);
3912 
3913     case COND_EXPR:
3914       {
3915 	tree arg1 = TREE_OPERAND (arg, 1);
3916 	tree arg2 = TREE_OPERAND (arg, 2);
3917 
3918 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3919 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3920 
3921 	/* A COND_EXPR may have a throw as one operand, which
3922 	   then has void type.  Just leave void operands
3923 	   as they are.  */
3924 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3925 			   VOID_TYPE_P (TREE_TYPE (arg1))
3926 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3927 			   VOID_TYPE_P (TREE_TYPE (arg2))
3928 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3929       }
3930 
3931     case COMPOUND_EXPR:
3932       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3933       return build2_loc (loc, COMPOUND_EXPR, type,
3934 			 TREE_OPERAND (arg, 0),
3935 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3936 
3937     case NON_LVALUE_EXPR:
3938       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3939       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3940 
3941     CASE_CONVERT:
3942       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3943 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3944 
3945       /* fall through */
3946 
3947     case FLOAT_EXPR:
3948       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3949       return build1_loc (loc, TREE_CODE (arg), type,
3950 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3951 
3952     case BIT_AND_EXPR:
3953       if (!integer_onep (TREE_OPERAND (arg, 1)))
3954 	return NULL_TREE;
3955       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3956 
3957     case SAVE_EXPR:
3958       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3959 
3960     case CLEANUP_POINT_EXPR:
3961       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3962       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3963 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3964 
3965     default:
3966       return NULL_TREE;
3967     }
3968 }
3969 
3970 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3971    assume that ARG is an operation that returns a truth value (0 or 1
3972    for scalars, 0 or -1 for vectors).  Return the folded expression if
3973    folding is successful.  Otherwise, return NULL_TREE.  */
3974 
3975 static tree
3976 fold_invert_truthvalue (location_t loc, tree arg)
3977 {
3978   tree type = TREE_TYPE (arg);
3979   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3980 			      ? BIT_NOT_EXPR
3981 			      : TRUTH_NOT_EXPR,
3982 			 type, arg);
3983 }
3984 
3985 /* Return a simplified tree node for the truth-negation of ARG.  This
3986    never alters ARG itself.  We assume that ARG is an operation that
3987    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3988 
3989 tree
3990 invert_truthvalue_loc (location_t loc, tree arg)
3991 {
3992   if (TREE_CODE (arg) == ERROR_MARK)
3993     return arg;
3994 
3995   tree type = TREE_TYPE (arg);
3996   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3997 			       ? BIT_NOT_EXPR
3998 			       : TRUTH_NOT_EXPR,
3999 			  type, arg);
4000 }
4001 
4002 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4003    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
4004    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
4005    is the original memory reference used to preserve the alias set of
4006    the access.  */
4007 
4008 static tree
4009 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4010 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
4011 		    int unsignedp, int reversep)
4012 {
4013   tree result, bftype;
4014 
4015   /* Attempt not to lose the access path if possible.  */
4016   if (TREE_CODE (orig_inner) == COMPONENT_REF)
4017     {
4018       tree ninner = TREE_OPERAND (orig_inner, 0);
4019       machine_mode nmode;
4020       poly_int64 nbitsize, nbitpos;
4021       tree noffset;
4022       int nunsignedp, nreversep, nvolatilep = 0;
4023       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4024 				       &noffset, &nmode, &nunsignedp,
4025 				       &nreversep, &nvolatilep);
4026       if (base == inner
4027 	  && noffset == NULL_TREE
4028 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4029 	  && !reversep
4030 	  && !nreversep
4031 	  && !nvolatilep)
4032 	{
4033 	  inner = ninner;
4034 	  bitpos -= nbitpos;
4035 	}
4036     }
4037 
4038   alias_set_type iset = get_alias_set (orig_inner);
4039   if (iset == 0 && get_alias_set (inner) != iset)
4040     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4041 			 build_fold_addr_expr (inner),
4042 			 build_int_cst (ptr_type_node, 0));
4043 
4044   if (known_eq (bitpos, 0) && !reversep)
4045     {
4046       tree size = TYPE_SIZE (TREE_TYPE (inner));
4047       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4048 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
4049 	  && tree_fits_shwi_p (size)
4050 	  && tree_to_shwi (size) == bitsize)
4051 	return fold_convert_loc (loc, type, inner);
4052     }
4053 
4054   bftype = type;
4055   if (TYPE_PRECISION (bftype) != bitsize
4056       || TYPE_UNSIGNED (bftype) == !unsignedp)
4057     bftype = build_nonstandard_integer_type (bitsize, 0);
4058 
4059   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4060 		       bitsize_int (bitsize), bitsize_int (bitpos));
4061   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4062 
4063   if (bftype != type)
4064     result = fold_convert_loc (loc, type, result);
4065 
4066   return result;
4067 }
4068 
4069 /* Optimize a bit-field compare.
4070 
4071    There are two cases:  First is a compare against a constant and the
4072    second is a comparison of two items where the fields are at the same
4073    bit position relative to the start of a chunk (byte, halfword, word)
4074    large enough to contain it.  In these cases we can avoid the shift
4075    implicit in bitfield extractions.
4076 
4077    For constants, we emit a compare of the shifted constant with the
4078    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4079    compared.  For two fields at the same position, we do the ANDs with the
4080    similar mask and compare the result of the ANDs.
4081 
4082    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4083    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4084    are the left and right operands of the comparison, respectively.
4085 
4086    If the optimization described above can be done, we return the resulting
4087    tree.  Otherwise we return zero.  */
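
/* Illustrative example, not part of GCC: given

     struct S { unsigned int a : 3; unsigned int b : 5; } s;

   the test "s.a == 5" can be rewritten along the lines of

     (w & 0x07) == 0x05

   where w is a hypothetical word-sized load containing the bit-field
   (the bit numbering shown assumes a little-endian target), avoiding
   the shift that a plain bit-field extraction would need.  */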
4088 
4089 static tree
4090 optimize_bit_field_compare (location_t loc, enum tree_code code,
4091 			    tree compare_type, tree lhs, tree rhs)
4092 {
4093   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4094   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4095   tree type = TREE_TYPE (lhs);
4096   tree unsigned_type;
4097   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4098   machine_mode lmode, rmode;
4099   scalar_int_mode nmode;
4100   int lunsignedp, runsignedp;
4101   int lreversep, rreversep;
4102   int lvolatilep = 0, rvolatilep = 0;
4103   tree linner, rinner = NULL_TREE;
4104   tree mask;
4105   tree offset;
4106 
4107   /* Get all the information about the extractions being done.  If the bit size
4108      is the same as the size of the underlying object, we aren't doing an
4109      extraction at all and so can do nothing.  We also don't want to
4110      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4111      then will no longer be able to replace it.  */
4112   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4113 				&lunsignedp, &lreversep, &lvolatilep);
4114   if (linner == lhs
4115       || !known_size_p (plbitsize)
4116       || !plbitsize.is_constant (&lbitsize)
4117       || !plbitpos.is_constant (&lbitpos)
4118       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4119       || offset != 0
4120       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4121       || lvolatilep)
4122     return 0;
4123 
4124   if (const_p)
4125     rreversep = lreversep;
4126   else
4127    {
4128      /* If this is not a constant, we can only do something if bit positions,
4129 	sizes, signedness and storage order are the same.  */
4130      rinner
4131        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4132 			      &runsignedp, &rreversep, &rvolatilep);
4133 
4134      if (rinner == rhs
4135 	 || maybe_ne (lbitpos, rbitpos)
4136 	 || maybe_ne (lbitsize, rbitsize)
4137 	 || lunsignedp != runsignedp
4138 	 || lreversep != rreversep
4139 	 || offset != 0
4140 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4141 	 || rvolatilep)
4142        return 0;
4143    }
4144 
4145   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4146   poly_uint64 bitstart = 0;
4147   poly_uint64 bitend = 0;
4148   if (TREE_CODE (lhs) == COMPONENT_REF)
4149     {
4150       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4151       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4152 	return 0;
4153     }
4154 
4155   /* See if we can find a mode to refer to this field.  We should be able to,
4156      but fail if we can't.  */
4157   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4158 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4159 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4160 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4161 		      BITS_PER_WORD, false, &nmode))
4162     return 0;
4163 
4164   /* Set the unsigned type of the precision of this mode for the
4165      shifts below.  */
4166   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4167 
4168   /* Compute the bit position and size for the new reference and our offset
4169      within it. If the new reference is the same size as the original, we
4170      won't optimize anything, so return zero.  */
4171   nbitsize = GET_MODE_BITSIZE (nmode);
4172   nbitpos = lbitpos & ~ (nbitsize - 1);
4173   lbitpos -= nbitpos;
4174   if (nbitsize == lbitsize)
4175     return 0;
4176 
4177   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4178     lbitpos = nbitsize - lbitsize - lbitpos;
4179 
4180   /* Make the mask to be used against the extracted field.  */
4181   mask = build_int_cst_type (unsigned_type, -1);
4182   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4183   mask = const_binop (RSHIFT_EXPR, mask,
4184 		      size_int (nbitsize - lbitsize - lbitpos));
4185 
4186   if (! const_p)
4187     {
4188       if (nbitpos < 0)
4189 	return 0;
4190 
4191       /* If not comparing with constant, just rework the comparison
4192 	 and return.  */
4193       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4194 				    nbitsize, nbitpos, 1, lreversep);
4195       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4196       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4197 				    nbitsize, nbitpos, 1, rreversep);
4198       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4199       return fold_build2_loc (loc, code, compare_type, t1, t2);
4200     }
4201 
4202   /* Otherwise, we are handling the constant case.  See if the constant is too
4203      big for the field.  Warn and return the known constant result if so.  We do
4204      this not only for its own sake, but to avoid having to test for this
4205      error case below.  If we didn't, we might generate wrong code.
4206 
4207      For unsigned fields, the constant shifted right by the field length should
4208      be all zero.  For signed fields, the high-order bits should agree with
4209      the sign bit.  */
4210 
4211   if (lunsignedp)
4212     {
4213       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4214 	{
4215 	  warning (0, "comparison is always %d due to width of bit-field",
4216 		   code == NE_EXPR);
4217 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4218 	}
4219     }
4220   else
4221     {
4222       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4223       if (tem != 0 && tem != -1)
4224 	{
4225 	  warning (0, "comparison is always %d due to width of bit-field",
4226 		   code == NE_EXPR);
4227 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4228 	}
4229     }
4230 
4231   if (nbitpos < 0)
4232     return 0;
4233 
4234   /* Single-bit compares should always be against zero.  */
4235   if (lbitsize == 1 && ! integer_zerop (rhs))
4236     {
4237       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4238       rhs = build_int_cst (type, 0);
4239     }
4240 
4241   /* Make a new bitfield reference, shift the constant over the
4242      appropriate number of bits and mask it with the computed mask
4243      (in case this was a signed field).  If we changed it, make a new one.  */
4244   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4245 			    nbitsize, nbitpos, 1, lreversep);
4246 
4247   rhs = const_binop (BIT_AND_EXPR,
4248 		     const_binop (LSHIFT_EXPR,
4249 				  fold_convert_loc (loc, unsigned_type, rhs),
4250 				  size_int (lbitpos)),
4251 		     mask);
4252 
4253   lhs = build2_loc (loc, code, compare_type,
4254 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4255   return lhs;
4256 }
4257 
4258 /* Subroutine for fold_truth_andor_1: decode a field reference.
4259 
4260    If EXP is a comparison reference, we return the innermost reference.
4261 
4262    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4263    set to the starting bit number.
4264 
4265    If the innermost field can be completely contained in a mode-sized
4266    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4267 
4268    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4269    otherwise it is not changed.
4270 
4271    *PUNSIGNEDP is set to the signedness of the field.
4272 
4273    *PREVERSEP is set to the storage order of the field.
4274 
4275    *PMASK is set to the mask used.  This is either contained in a
4276    BIT_AND_EXPR or derived from the width of the field.
4277 
4278    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4279 
4280    Return 0 if this is not a component reference or is one that we can't
4281    do anything with.  */
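
/* Illustrative example, not part of GCC: for "s.f" where f is an
   8-bit unsigned field, the containing object is returned with
   *PBITSIZE == 8 and *PMASK == 0xff; for "s.f & 0x0f" the extra
   mask additionally appears in *PAND_MASK and is merged into
   *PMASK.  */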
4282 
4283 static tree
4284 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4285 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4286 			int *punsignedp, int *preversep, int *pvolatilep,
4287 			tree *pmask, tree *pand_mask)
4288 {
4289   tree exp = *exp_;
4290   tree outer_type = 0;
4291   tree and_mask = 0;
4292   tree mask, inner, offset;
4293   tree unsigned_type;
4294   unsigned int precision;
4295 
4296   /* All the optimizations using this function assume integer fields.
4297      There are problems with FP fields since the type_for_size call
4298      below can fail for, e.g., XFmode.  */
4299   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4300     return NULL_TREE;
4301 
4302   /* We are interested in the bare arrangement of bits, so strip everything
4303      that doesn't affect the machine mode.  However, record the type of the
4304      outermost expression if it may matter below.  */
4305   if (CONVERT_EXPR_P (exp)
4306       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4307     outer_type = TREE_TYPE (exp);
4308   STRIP_NOPS (exp);
4309 
4310   if (TREE_CODE (exp) == BIT_AND_EXPR)
4311     {
4312       and_mask = TREE_OPERAND (exp, 1);
4313       exp = TREE_OPERAND (exp, 0);
4314       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4315       if (TREE_CODE (and_mask) != INTEGER_CST)
4316 	return NULL_TREE;
4317     }
4318 
4319   poly_int64 poly_bitsize, poly_bitpos;
4320   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4321 			       pmode, punsignedp, preversep, pvolatilep);
4322   if ((inner == exp && and_mask == 0)
4323       || !poly_bitsize.is_constant (pbitsize)
4324       || !poly_bitpos.is_constant (pbitpos)
4325       || *pbitsize < 0
4326       || offset != 0
4327       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4328       /* Reject out-of-bound accesses (PR79731).  */
4329       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4330 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4331 			       *pbitpos + *pbitsize) < 0))
4332     return NULL_TREE;
4333 
4334   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4335   if (unsigned_type == NULL_TREE)
4336     return NULL_TREE;
4337 
4338   *exp_ = exp;
4339 
4340   /* If the number of bits in the reference is the same as the bitsize of
4341      the outer type, then the outer type gives the signedness. Otherwise
4342      (in case of a small bitfield) the signedness is unchanged.  */
4343   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4344     *punsignedp = TYPE_UNSIGNED (outer_type);
4345 
4346   /* Compute the mask to access the bitfield.  */
4347   precision = TYPE_PRECISION (unsigned_type);
4348 
4349   mask = build_int_cst_type (unsigned_type, -1);
4350 
4351   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4352   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4353 
4354   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4355   if (and_mask != 0)
4356     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4357 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4358 
4359   *pmask = mask;
4360   *pand_mask = and_mask;
4361   return inner;
4362 }
4363 
4364 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4365    bit positions and the type of MASK is signed.  */
4366 
4367 static int
4368 all_ones_mask_p (const_tree mask, unsigned int size)
4369 {
4370   tree type = TREE_TYPE (mask);
4371   unsigned int precision = TYPE_PRECISION (type);
4372 
4373   /* If this function returns true when the type of the mask is
4374      UNSIGNED, then there will be errors.  In particular see
4375      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4376      any documentation paper trail as to why this is so.  But the pre
4377      wide-int code worked with that restriction and it has been preserved
4378      here.  */
4379   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4380     return false;
4381 
4382   return wi::mask (size, false, precision) == wi::to_wide (mask);
4383 }
4384 
4385 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4386    represents the sign bit of EXP's type.  If EXP represents a sign
4387    or zero extension, also test VAL against the unextended type.
4388    The return value is the (sub)expression whose sign bit is VAL,
4389    or NULL_TREE otherwise.  */
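
/* Illustrative example, not part of GCC: if EXP has type signed char,
   VAL == -128 (bit pattern 0x80) is exactly its sign bit, so EXP is
   returned; any other VAL yields NULL_TREE.  */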
4390 
4391 tree
4392 sign_bit_p (tree exp, const_tree val)
4393 {
4394   int width;
4395   tree t;
4396 
4397   /* Tree EXP must have an integral type.  */
4398   t = TREE_TYPE (exp);
4399   if (! INTEGRAL_TYPE_P (t))
4400     return NULL_TREE;
4401 
4402   /* Tree VAL must be an integer constant.  */
4403   if (TREE_CODE (val) != INTEGER_CST
4404       || TREE_OVERFLOW (val))
4405     return NULL_TREE;
4406 
4407   width = TYPE_PRECISION (t);
4408   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4409     return exp;
4410 
4411   /* Handle extension from a narrower type.  */
4412   if (TREE_CODE (exp) == NOP_EXPR
4413       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4414     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4415 
4416   return NULL_TREE;
4417 }
4418 
4419 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4420    to be evaluated unconditionally.  */
4421 
4422 static int
4423 simple_operand_p (const_tree exp)
4424 {
4425   /* Strip any conversions that don't change the machine mode.  */
4426   STRIP_NOPS (exp);
4427 
4428   return (CONSTANT_CLASS_P (exp)
4429   	  || TREE_CODE (exp) == SSA_NAME
4430 	  || (DECL_P (exp)
4431 	      && ! TREE_ADDRESSABLE (exp)
4432 	      && ! TREE_THIS_VOLATILE (exp)
4433 	      && ! DECL_NONLOCAL (exp)
4434 	      /* Don't regard global variables as simple.  They may be
4435 		 allocated in ways unknown to the compiler (shared memory,
4436 		 #pragma weak, etc).  */
4437 	      && ! TREE_PUBLIC (exp)
4438 	      && ! DECL_EXTERNAL (exp)
4439 	      /* Weakrefs are not safe to read, since they can be NULL.
4440 		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4441 		 have the DECL_WEAK flag set.  */
4442 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4443 	      /* Loading a static variable is unduly expensive, but global
4444 		 registers aren't expensive.  */
4445 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4446 }
4447 
4448 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4449    to be evaluated unconditionally.
4450    In addition to simple_operand_p, we assume that comparisons, conversions,
4451    and logical-not operations are simple if their operands are simple, too.  */
4452 
4453 static bool
4454 simple_operand_p_2 (tree exp)
4455 {
4456   enum tree_code code;
4457 
4458   if (TREE_SIDE_EFFECTS (exp)
4459       || tree_could_trap_p (exp))
4460     return false;
4461 
4462   while (CONVERT_EXPR_P (exp))
4463     exp = TREE_OPERAND (exp, 0);
4464 
4465   code = TREE_CODE (exp);
4466 
4467   if (TREE_CODE_CLASS (code) == tcc_comparison)
4468     return (simple_operand_p (TREE_OPERAND (exp, 0))
4469 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4470 
4471   if (code == TRUTH_NOT_EXPR)
4472       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4473 
4474   return simple_operand_p (exp);
4475 }
4476 
4477 
4478 /* The following functions are subroutines to fold_range_test and allow it to
4479    try to change a logical combination of comparisons into a range test.
4480 
4481    For example, both
4482 	X == 2 || X == 3 || X == 4 || X == 5
4483    and
4484 	X >= 2 && X <= 5
4485    are converted to
4486 	(unsigned) (X - 2) <= 3
4487 
4488    We describe each set of comparisons as being either inside or outside
4489    a range, using a variable named like IN_P, and then describe the
4490    range with a lower and upper bound.  If one of the bounds is omitted,
4491    it represents either the highest or lowest value of the type.
4492 
4493    In the comments below, we represent a range by two numbers in brackets
4494    preceded by a "+" to designate being inside that range, or a "-" to
4495    designate being outside that range, so the condition can be inverted by
4496    flipping the prefix.  An omitted bound is represented by a "-".  For
4497    example, "- [-, 10]" means being outside the range starting at the lowest
4498    possible value and ending at 10, in other words, being greater than 10.
4499    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4500    always false.
4501 
4502    We set up things so that the missing bounds are handled in a consistent
4503    manner so neither a missing bound nor "true" and "false" need to be
4504    handled using a special case.  */
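
/* Illustrative example, not part of GCC: the equivalence above can be
   checked directly in C; both functions are true exactly for
   x in {2, 3, 4, 5}:

     int f (int x) { return x >= 2 && x <= 5; }
     int g (int x) { return (unsigned) x - 2 <= 3; }

   Subtracting the low bound makes the range start at zero, and the
   unsigned comparison folds the "below the low bound" case into the
   "above the high bound" case by wraparound.  */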
4505 
4506 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4507    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4508    and UPPER1_P are nonzero if the respective argument is an upper bound
4509    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4510    must be specified for a comparison.  ARG1 will be converted to ARG0's
4511    type if both are specified.  */
4512 
4513 static tree
4514 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4515 	     tree arg1, int upper1_p)
4516 {
4517   tree tem;
4518   int result;
4519   int sgn0, sgn1;
4520 
4521   /* If neither arg represents infinity, do the normal operation.
4522      Else, if not a comparison, return infinity.  Else handle the special
4523      comparison rules. Note that most of the cases below won't occur, but
4524      are handled for consistency.  */
4525 
4526   if (arg0 != 0 && arg1 != 0)
4527     {
4528       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4529 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4530       STRIP_NOPS (tem);
4531       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4532     }
4533 
4534   if (TREE_CODE_CLASS (code) != tcc_comparison)
4535     return 0;
4536 
4537   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4538      for neither.  In real maths, we cannot assume open ended ranges are
4539      the same. But, this is computer arithmetic, where numbers are finite.
4540      We can therefore represent any unbounded bound by a value Z, with Z
4541      greater than any representable number.  This permits
4542      us to treat unbounded ranges as equal.  */
4543   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4544   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4545   switch (code)
4546     {
4547     case EQ_EXPR:
4548       result = sgn0 == sgn1;
4549       break;
4550     case NE_EXPR:
4551       result = sgn0 != sgn1;
4552       break;
4553     case LT_EXPR:
4554       result = sgn0 < sgn1;
4555       break;
4556     case LE_EXPR:
4557       result = sgn0 <= sgn1;
4558       break;
4559     case GT_EXPR:
4560       result = sgn0 > sgn1;
4561       break;
4562     case GE_EXPR:
4563       result = sgn0 >= sgn1;
4564       break;
4565     default:
4566       gcc_unreachable ();
4567     }
4568 
4569   return constant_boolean_node (result, type);
4570 }
4571 
4572 /* Helper routine for make_range.  Perform one step for it, return
4573    new expression if the loop should continue or NULL_TREE if it should
4574    stop.  */
4575 
4576 tree
4577 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4578 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4579 		 bool *strict_overflow_p)
4580 {
4581   tree arg0_type = TREE_TYPE (arg0);
4582   tree n_low, n_high, low = *p_low, high = *p_high;
4583   int in_p = *p_in_p, n_in_p;
4584 
4585   switch (code)
4586     {
4587     case TRUTH_NOT_EXPR:
4588       /* We can only do something if the range is testing for zero.  */
4589       if (low == NULL_TREE || high == NULL_TREE
4590 	  || ! integer_zerop (low) || ! integer_zerop (high))
4591 	return NULL_TREE;
4592       *p_in_p = ! in_p;
4593       return arg0;
4594 
4595     case EQ_EXPR: case NE_EXPR:
4596     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4597       /* We can only do something if the range is testing for zero
4598 	 and if the second operand is an integer constant.  Note that
4599 	 saying something is "in" the range we make is done by
4600 	 complementing IN_P, since IN_P is set in the initial case of
4601 	 being not equal to zero; "out" is leaving it alone.  */
4602       if (low == NULL_TREE || high == NULL_TREE
4603 	  || ! integer_zerop (low) || ! integer_zerop (high)
4604 	  || TREE_CODE (arg1) != INTEGER_CST)
4605 	return NULL_TREE;
4606 
4607       switch (code)
4608 	{
4609 	case NE_EXPR:  /* - [c, c]  */
4610 	  low = high = arg1;
4611 	  break;
4612 	case EQ_EXPR:  /* + [c, c]  */
4613 	  in_p = ! in_p, low = high = arg1;
4614 	  break;
4615 	case GT_EXPR:  /* - [-, c] */
4616 	  low = 0, high = arg1;
4617 	  break;
4618 	case GE_EXPR:  /* + [c, -] */
4619 	  in_p = ! in_p, low = arg1, high = 0;
4620 	  break;
4621 	case LT_EXPR:  /* - [c, -] */
4622 	  low = arg1, high = 0;
4623 	  break;
4624 	case LE_EXPR:  /* + [-, c] */
4625 	  in_p = ! in_p, low = 0, high = arg1;
4626 	  break;
4627 	default:
4628 	  gcc_unreachable ();
4629 	}
4630 
4631       /* If this is an unsigned comparison, we also know that EXP is
4632 	 greater than or equal to zero.  We base the range tests we make
4633 	 on that fact, so we record it here so we can parse existing
4634 	 range tests.  We test arg0_type since often the return type
4635 	 of, e.g. EQ_EXPR, is boolean.  */
4636       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4637 	{
4638 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4639 			      in_p, low, high, 1,
4640 			      build_int_cst (arg0_type, 0),
4641 			      NULL_TREE))
4642 	    return NULL_TREE;
4643 
4644 	  in_p = n_in_p, low = n_low, high = n_high;
4645 
4646 	  /* If the high bound is missing, but we have a nonzero low
4647 	     bound, reverse the range so it goes from zero to the low bound
4648 	     minus 1.  */
4649 	  if (high == 0 && low && ! integer_zerop (low))
4650 	    {
4651 	      in_p = ! in_p;
4652 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4653 				  build_int_cst (TREE_TYPE (low), 1), 0);
4654 	      low = build_int_cst (arg0_type, 0);
4655 	    }
4656 	}
4657 
4658       *p_low = low;
4659       *p_high = high;
4660       *p_in_p = in_p;
4661       return arg0;
4662 
4663     case NEGATE_EXPR:
4664       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4665 	 low and high are non-NULL, then normalize will DTRT.  */
4666       if (!TYPE_UNSIGNED (arg0_type)
4667 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4668 	{
4669 	  if (low == NULL_TREE)
4670 	    low = TYPE_MIN_VALUE (arg0_type);
4671 	  if (high == NULL_TREE)
4672 	    high = TYPE_MAX_VALUE (arg0_type);
4673 	}
4674 
4675       /* (-x) IN [a,b] -> x in [-b, -a]  */
4676       n_low = range_binop (MINUS_EXPR, exp_type,
4677 			   build_int_cst (exp_type, 0),
4678 			   0, high, 1);
4679       n_high = range_binop (MINUS_EXPR, exp_type,
4680 			    build_int_cst (exp_type, 0),
4681 			    0, low, 0);
4682       if (n_high != 0 && TREE_OVERFLOW (n_high))
4683 	return NULL_TREE;
4684       goto normalize;
4685 
4686     case BIT_NOT_EXPR:
4687       /* ~ X -> -X - 1  */
4688       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4689 			 build_int_cst (exp_type, 1));
4690 
4691     case PLUS_EXPR:
4692     case MINUS_EXPR:
4693       if (TREE_CODE (arg1) != INTEGER_CST)
4694 	return NULL_TREE;
4695 
4696       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4697 	 move a constant to the other side.  */
4698       if (!TYPE_UNSIGNED (arg0_type)
4699 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4700 	return NULL_TREE;
4701 
4702       /* If EXP is signed, any overflow in the computation is undefined,
4703 	 so we don't worry about it so long as our computations on
4704 	 the bounds don't overflow.  For unsigned, overflow is defined
4705 	 and this is exactly the right thing.  */
4706       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4707 			   arg0_type, low, 0, arg1, 0);
4708       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4709 			    arg0_type, high, 1, arg1, 0);
4710       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4711 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4712 	return NULL_TREE;
4713 
4714       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4715 	*strict_overflow_p = true;
4716 
4717       normalize:
4718 	/* Check for an unsigned range which has wrapped around the maximum
4719 	   value thus making n_high < n_low, and normalize it.  */
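	/* Illustrative example, not part of GCC: in an 8-bit unsigned
	   type, the wrapped range + [250, 5] is normalized to the
	   equivalent - [6, 249].  */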
4720 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4721 	  {
4722 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4723 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4724 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4725 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4726 
4727 	    /* If the range is of the form +/- [ x+1, x ], we won't
4728 	       be able to normalize it.  But then, it represents the
4729 	       whole range or the empty set, so make it
4730 	       +/- [ -, - ].  */
4731 	    if (tree_int_cst_equal (n_low, low)
4732 		&& tree_int_cst_equal (n_high, high))
4733 	      low = high = 0;
4734 	    else
4735 	      in_p = ! in_p;
4736 	  }
4737 	else
4738 	  low = n_low, high = n_high;
4739 
4740 	*p_low = low;
4741 	*p_high = high;
4742 	*p_in_p = in_p;
4743 	return arg0;
4744 
4745     CASE_CONVERT:
4746     case NON_LVALUE_EXPR:
4747       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4748 	return NULL_TREE;
4749 
4750       if (! INTEGRAL_TYPE_P (arg0_type)
4751 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4752 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4753 	return NULL_TREE;
4754 
4755       n_low = low, n_high = high;
4756 
4757       if (n_low != 0)
4758 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4759 
4760       if (n_high != 0)
4761 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4762 
4763       /* If we're converting arg0 from an unsigned type to exp's
4764 	 signed type, we will be doing the comparison as unsigned.
4765 	 The tests above have already verified that LOW and HIGH
4766 	 are both positive.
4767 
4768 	 So we have to ensure that we will handle large unsigned
4769 	 values the same way that the current signed bounds treat
4770 	 negative values.  */
4771 
4772       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4773 	{
4774 	  tree high_positive;
4775 	  tree equiv_type;
4776 	  /* For fixed-point modes, we need to pass the saturating flag
4777 	     as the 2nd parameter.  */
4778 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4779 	    equiv_type
4780 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4781 						TYPE_SATURATING (arg0_type));
4782 	  else
4783 	    equiv_type
4784 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4785 
4786 	  /* A range without an upper bound is, naturally, unbounded.
4787 	     Since convert would have cropped a very large value, use
4788 	     the max value for the destination type.  */
4789 	  high_positive
4790 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4791 	      : TYPE_MAX_VALUE (arg0_type);
4792 
4793 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4794 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4795 					     fold_convert_loc (loc, arg0_type,
4796 							       high_positive),
4797 					     build_int_cst (arg0_type, 1));
4798 
4799 	  /* If the low bound is specified, "and" the range with the
4800 	     range for which the original unsigned value will be
4801 	     positive.  */
4802 	  if (low != 0)
4803 	    {
4804 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4805 				  1, fold_convert_loc (loc, arg0_type,
4806 						       integer_zero_node),
4807 				  high_positive))
4808 		return NULL_TREE;
4809 
4810 	      in_p = (n_in_p == in_p);
4811 	    }
4812 	  else
4813 	    {
4814 	      /* Otherwise, "or" the range with the range of the input
4815 		 that will be interpreted as negative.  */
4816 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4817 				  1, fold_convert_loc (loc, arg0_type,
4818 						       integer_zero_node),
4819 				  high_positive))
4820 		return NULL_TREE;
4821 
4822 	      in_p = (in_p != n_in_p);
4823 	    }
4824 	}
4825 
4826       *p_low = n_low;
4827       *p_high = n_high;
4828       *p_in_p = in_p;
4829       return arg0;
4830 
4831     default:
4832       return NULL_TREE;
4833     }
4834 }
4835 
4836 /* Given EXP, a logical expression, set the range it is testing into
4837    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4838    actually being tested.  *PLOW and *PHIGH will be made of the same
4839    type as the returned expression.  If EXP is not a comparison, we
4840    will most likely not be returning a useful value and range.  Set
4841    *STRICT_OVERFLOW_P to true if the return value is only valid
4842    because signed overflow is undefined; otherwise, do not change
4843    *STRICT_OVERFLOW_P.  */
4844 
4845 tree
4846 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4847 	    bool *strict_overflow_p)
4848 {
4849   enum tree_code code;
4850   tree arg0, arg1 = NULL_TREE;
4851   tree exp_type, nexp;
4852   int in_p;
4853   tree low, high;
4854   location_t loc = EXPR_LOCATION (exp);
4855 
4856   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4857      and see if we can refine the range.  Some of the cases below may not
4858      happen, but it doesn't seem worth worrying about this.  We keep
4859      looping as long as make_range_step manages to refine the range;
4860      once it no longer can, we leave the loop.  */
4861 
4862   in_p = 0;
4863   low = high = build_int_cst (TREE_TYPE (exp), 0);
4864 
4865   while (1)
4866     {
4867       code = TREE_CODE (exp);
4868       exp_type = TREE_TYPE (exp);
4869       arg0 = NULL_TREE;
4870 
4871       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4872 	{
4873 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4874 	    arg0 = TREE_OPERAND (exp, 0);
4875 	  if (TREE_CODE_CLASS (code) == tcc_binary
4876 	      || TREE_CODE_CLASS (code) == tcc_comparison
4877 	      || (TREE_CODE_CLASS (code) == tcc_expression
4878 		  && TREE_OPERAND_LENGTH (exp) > 1))
4879 	    arg1 = TREE_OPERAND (exp, 1);
4880 	}
4881       if (arg0 == NULL_TREE)
4882 	break;
4883 
4884       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4885 			      &high, &in_p, strict_overflow_p);
4886       if (nexp == NULL_TREE)
4887 	break;
4888       exp = nexp;
4889     }
4890 
4891   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4892   if (TREE_CODE (exp) == INTEGER_CST)
4893     {
4894       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4895 						 exp, 0, low, 0))
4896 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4897 						    exp, 1, high, 1)));
4898       low = high = 0;
4899       exp = 0;
4900     }
4901 
4902   *pin_p = in_p, *plow = low, *phigh = high;
4903   return exp;
4904 }
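
/* Two small examples of the resulting encoding (illustrative only):
   for EXP = "x > 5" on an int x, the function returns x with *PIN_P = 1,
   *PLOW = 6 and *PHIGH = NULL_TREE, i.e. the range + [6, -]; for a bare
   boolean EXP = x that no step can refine, the initial guess stands and
   x is returned with the range - [0, 0], i.e. "x != 0".  */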
4905 
4906 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4907    a bitwise check, i.e. when
4908      LOW  == 0xXX...X00...0
4909      HIGH == 0xXX...X11...1
4910    Return the corresponding mask in MASK and the stem in VALUE.  */
4911 
4912 static bool
4913 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4914 		  tree *value)
4915 {
4916   if (TREE_CODE (low) != INTEGER_CST
4917       || TREE_CODE (high) != INTEGER_CST)
4918     return false;
4919 
4920   unsigned prec = TYPE_PRECISION (type);
4921   wide_int lo = wi::to_wide (low, prec);
4922   wide_int hi = wi::to_wide (high, prec);
4923 
4924   wide_int end_mask = lo ^ hi;
4925   if ((end_mask & (end_mask + 1)) != 0
4926       || (lo & end_mask) != 0)
4927     return false;
4928 
4929   wide_int stem_mask = ~end_mask;
4930   wide_int stem = lo & stem_mask;
4931   if (stem != (hi & stem_mask))
4932     return false;
4933 
4934   *mask = wide_int_to_tree (type, stem_mask);
4935   *value = wide_int_to_tree (type, stem);
4936 
4937   return true;
4938 }
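
/* A worked example (constants chosen for exposition): for LOW == 32
   (0x20) and HIGH == 63 (0x3f),

     end_mask  = 0x20 ^ 0x3f      == 0x1f  - low-order bits that vary
     end_mask & (end_mask + 1)    == 0     - and they are contiguous
     stem_mask = ~0x1f
     stem      = 0x20 & stem_mask == 0x20  - ditto for 0x3f

   so the range test "32 <= x && x <= 63" can be rewritten as the single
   bitwise check "(x & ~0x1f) == 0x20".  */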
4939 
4940 /* Helper routine for build_range_check and match.pd.  Return the type in
4941    which to perform the check, or NULL_TREE if it shouldn't be optimized.  */
4942 
4943 tree
4944 range_check_type (tree etype)
4945 {
4946   /* First make sure that arithmetic in this type is valid, then make sure
4947      that it wraps around.  */
4948   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4949     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
4950 
4951   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
4952     {
4953       tree utype, minv, maxv;
4954 
4955       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4956 	 for the type in question, as we rely on this here.  */
4957       utype = unsigned_type_for (etype);
4958       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4959       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4960 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4961       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4962 
4963       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4964 				      minv, 1, maxv, 1)))
4965 	etype = utype;
4966       else
4967 	return NULL_TREE;
4968     }
4969   else if (POINTER_TYPE_P (etype))
4970     etype = unsigned_type_for (etype);
4971   return etype;
4972 }
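
/* As an example (a sketch assuming 32-bit int): for ETYPE == int the
   routine verifies that (unsigned) INT_MAX + 1 == (unsigned) INT_MIN,
   i.e. that both sides equal 0x80000000, so unsigned int is returned
   and the range check can rely on wrap-around arithmetic.  A signed
   type whose unsigned counterpart did not wrap this way would yield
   NULL_TREE instead.  */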
4973 
4974 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4975    type, TYPE, return an expression to test if EXP is in (or out of, depending
4976    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4977 
4978 tree
4979 build_range_check (location_t loc, tree type, tree exp, int in_p,
4980 		   tree low, tree high)
4981 {
4982   tree etype = TREE_TYPE (exp), mask, value;
4983 
4984   /* Disable this optimization for function pointer expressions
4985      on targets that require function pointer canonicalization.  */
4986   if (targetm.have_canonicalize_funcptr_for_compare ()
4987       && POINTER_TYPE_P (etype)
4988       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4989     return NULL_TREE;
4990 
4991   if (! in_p)
4992     {
4993       value = build_range_check (loc, type, exp, 1, low, high);
4994       if (value != 0)
4995         return invert_truthvalue_loc (loc, value);
4996 
4997       return 0;
4998     }
4999 
5000   if (low == 0 && high == 0)
5001     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5002 
5003   if (low == 0)
5004     return fold_build2_loc (loc, LE_EXPR, type, exp,
5005 			    fold_convert_loc (loc, etype, high));
5006 
5007   if (high == 0)
5008     return fold_build2_loc (loc, GE_EXPR, type, exp,
5009 			    fold_convert_loc (loc, etype, low));
5010 
5011   if (operand_equal_p (low, high, 0))
5012     return fold_build2_loc (loc, EQ_EXPR, type, exp,
5013 			    fold_convert_loc (loc, etype, low));
5014 
5015   if (TREE_CODE (exp) == BIT_AND_EXPR
5016       && maskable_range_p (low, high, etype, &mask, &value))
5017     return fold_build2_loc (loc, EQ_EXPR, type,
5018 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
5019 					     exp, mask),
5020 			    value);
5021 
5022   if (integer_zerop (low))
5023     {
5024       if (! TYPE_UNSIGNED (etype))
5025 	{
5026 	  etype = unsigned_type_for (etype);
5027 	  high = fold_convert_loc (loc, etype, high);
5028 	  exp = fold_convert_loc (loc, etype, exp);
5029 	}
5030       return build_range_check (loc, type, exp, 1, 0, high);
5031     }
5032 
5033   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
5034   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5035     {
5036       int prec = TYPE_PRECISION (etype);
5037 
5038       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5039 	{
5040 	  if (TYPE_UNSIGNED (etype))
5041 	    {
5042 	      tree signed_etype = signed_type_for (etype);
5043 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5044 		etype
5045 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5046 	      else
5047 		etype = signed_etype;
5048 	      exp = fold_convert_loc (loc, etype, exp);
5049 	    }
5050 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5051 				  build_int_cst (etype, 0));
5052 	}
5053     }
5054 
5055   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5056      This requires wrap-around arithmetic for the type of the expression.  */
5057   etype = range_check_type (etype);
5058   if (etype == NULL_TREE)
5059     return NULL_TREE;
5060 
5061   high = fold_convert_loc (loc, etype, high);
5062   low = fold_convert_loc (loc, etype, low);
5063   exp = fold_convert_loc (loc, etype, exp);
5064 
5065   value = const_binop (MINUS_EXPR, high, low);
5066 
5067   if (value != 0 && !TREE_OVERFLOW (value))
5068     return build_range_check (loc, type,
5069 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5070 			      1, build_int_cst (etype, 0), value);
5071 
5072   return 0;
5073 }
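
/* The final transformation above, concretely (illustrative values):
   "c >= 5 && c <= 9" on an int c becomes roughly

     (unsigned) c - 5 <= 4

   since subtracting the low bound in the wrap-around type maps [5, 9]
   onto [0, 4] and everything outside it onto values greater than 4.  */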
5074 
5075 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5076 
5077 static tree
5078 range_predecessor (tree val)
5079 {
5080   tree type = TREE_TYPE (val);
5081 
5082   if (INTEGRAL_TYPE_P (type)
5083       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5084     return 0;
5085   else
5086     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5087 			build_int_cst (TREE_TYPE (val), 1), 0);
5088 }
5089 
5090 /* Return the successor of VAL in its type, handling the infinite case.  */
5091 
5092 static tree
5093 range_successor (tree val)
5094 {
5095   tree type = TREE_TYPE (val);
5096 
5097   if (INTEGRAL_TYPE_P (type)
5098       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5099     return 0;
5100   else
5101     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5102 			build_int_cst (TREE_TYPE (val), 1), 0);
5103 }
5104 
5105 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5106    can, 0 if we can't.  Set the output range into the specified parameters.  */
5107 
5108 bool
5109 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5110 	      tree high0, int in1_p, tree low1, tree high1)
5111 {
5112   int no_overlap;
5113   int subset;
5114   int temp;
5115   tree tem;
5116   int in_p;
5117   tree low, high;
5118   int lowequal = ((low0 == 0 && low1 == 0)
5119 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5120 						low0, 0, low1, 0)));
5121   int highequal = ((high0 == 0 && high1 == 0)
5122 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5123 						 high0, 1, high1, 1)));
5124 
5125   /* Make range 0 be the range that starts first, or that ends last if they
5126      start at the same value.  Swap the two ranges if range 0 isn't.  */
5127   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5128 				 low0, 0, low1, 0))
5129       || (lowequal
5130 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5131 					high1, 1, high0, 1))))
5132     {
5133       temp = in0_p, in0_p = in1_p, in1_p = temp;
5134       tem = low0, low0 = low1, low1 = tem;
5135       tem = high0, high0 = high1, high1 = tem;
5136     }
5137 
5138   /* If the second range is a singleton != high1 where high1 is the
5139      maximum value of its type, try first merging with the < high1 range.  */
5140   if (low1
5141       && high1
5142       && TREE_CODE (low1) == INTEGER_CST
5143       && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5144 	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5145 	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5146 			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5147       && operand_equal_p (low1, high1, 0))
5148     {
5149       if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5150 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5151 			   !in1_p, NULL_TREE, range_predecessor (low1)))
5152 	return true;
5153       /* Similarly, if the second range is != low1 where low1 is the
5154 	 minimum value of its type, try first merging with the > low1 range.  */
5155       if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5156 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5157 			   !in1_p, range_successor (low1), NULL_TREE))
5158 	return true;
5159     }
5160 
5161   /* Now flag two cases, whether the ranges are disjoint or whether the
5162      second range is totally subsumed in the first.  Note that the tests
5163      below are simplified by the ones above.  */
5164   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5165 					  high0, 1, low1, 0));
5166   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5167 				      high1, 1, high0, 1));
5168 
5169   /* We now have four cases, depending on whether we are including or
5170      excluding the two ranges.  */
5171   if (in0_p && in1_p)
5172     {
5173       /* If they don't overlap, the result is false.  If the second range
5174 	 is a subset it is the result.  Otherwise, the range is from the start
5175 	 of the second to the end of the first.  */
5176       if (no_overlap)
5177 	in_p = 0, low = high = 0;
5178       else if (subset)
5179 	in_p = 1, low = low1, high = high1;
5180       else
5181 	in_p = 1, low = low1, high = high0;
5182     }
5183 
5184   else if (in0_p && ! in1_p)
5185     {
5186       /* If they don't overlap, the result is the first range.  If they are
5187 	 equal, the result is false.  If the second range is a subset of the
5188 	 first, and the ranges begin at the same place, we go from just after
5189 	 the end of the second range to the end of the first.  If the second
5190 	 range is not a subset of the first, or if it is a subset and both
5191 	 ranges end at the same place, the range starts at the start of the
5192 	 first range and ends just before the second range.
5193 	 Otherwise, we can't describe this as a single range.  */
5194       if (no_overlap)
5195 	in_p = 1, low = low0, high = high0;
5196       else if (lowequal && highequal)
5197 	in_p = 0, low = high = 0;
5198       else if (subset && lowequal)
5199 	{
5200 	  low = range_successor (high1);
5201 	  high = high0;
5202 	  in_p = 1;
5203 	  if (low == 0)
5204 	    {
5205 	      /* We are in the weird situation where high0 > high1 but
5206 		 high1 has no successor.  Punt.  */
5207 	      return 0;
5208 	    }
5209 	}
5210       else if (! subset || highequal)
5211 	{
5212 	  low = low0;
5213 	  high = range_predecessor (low1);
5214 	  in_p = 1;
5215 	  if (high == 0)
5216 	    {
5217 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5218 	      return 0;
5219 	    }
5220 	}
5221       else
5222 	return 0;
5223     }
5224 
5225   else if (! in0_p && in1_p)
5226     {
5227       /* If they don't overlap, the result is the second range.  If the second
5228 	 is a subset of the first, the result is false.  Otherwise,
5229 	 the range starts just after the first range and ends at the
5230 	 end of the second.  */
5231       if (no_overlap)
5232 	in_p = 1, low = low1, high = high1;
5233       else if (subset || highequal)
5234 	in_p = 0, low = high = 0;
5235       else
5236 	{
5237 	  low = range_successor (high0);
5238 	  high = high1;
5239 	  in_p = 1;
5240 	  if (low == 0)
5241 	    {
5242 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5243 	      return 0;
5244 	    }
5245 	}
5246     }
5247 
5248   else
5249     {
5250       /* The case where we are excluding both ranges.  Here the complex case
5251 	 is if they don't overlap.  In that case, the only time we have a
5252 	 range is if they are adjacent.  If the second is a subset of the
5253 	 first, the result is the first.  Otherwise, the range to exclude
5254 	 starts at the beginning of the first range and ends at the end of the
5255 	 second.  */
5256       if (no_overlap)
5257 	{
5258 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5259 					 range_successor (high0),
5260 					 1, low1, 0)))
5261 	    in_p = 0, low = low0, high = high1;
5262 	  else
5263 	    {
5264 	      /* Canonicalize - [min, x] into - [-, x].  */
5265 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5266 		switch (TREE_CODE (TREE_TYPE (low0)))
5267 		  {
5268 		  case ENUMERAL_TYPE:
5269 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5270 				  GET_MODE_BITSIZE
5271 				    (TYPE_MODE (TREE_TYPE (low0)))))
5272 		      break;
5273 		    /* FALLTHROUGH */
5274 		  case INTEGER_TYPE:
5275 		    if (tree_int_cst_equal (low0,
5276 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5277 		      low0 = 0;
5278 		    break;
5279 		  case POINTER_TYPE:
5280 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5281 			&& integer_zerop (low0))
5282 		      low0 = 0;
5283 		    break;
5284 		  default:
5285 		    break;
5286 		  }
5287 
5288 	      /* Canonicalize - [x, max] into - [x, -].  */
5289 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5290 		switch (TREE_CODE (TREE_TYPE (high1)))
5291 		  {
5292 		  case ENUMERAL_TYPE:
5293 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5294 				  GET_MODE_BITSIZE
5295 				    (TYPE_MODE (TREE_TYPE (high1)))))
5296 		      break;
5297 		    /* FALLTHROUGH */
5298 		  case INTEGER_TYPE:
5299 		    if (tree_int_cst_equal (high1,
5300 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5301 		      high1 = 0;
5302 		    break;
5303 		  case POINTER_TYPE:
5304 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5305 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5306 						       high1, 1,
5307 						       build_int_cst (TREE_TYPE (high1), 1),
5308 						       1)))
5309 		      high1 = 0;
5310 		    break;
5311 		  default:
5312 		    break;
5313 		  }
5314 
5315 	      /* The ranges might also be adjacent between the maximum and
5316 	         minimum values of the given type.  For
5317 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5318 	         return + [x + 1, y - 1].  */
5319 	      if (low0 == 0 && high1 == 0)
5320 	        {
5321 		  low = range_successor (high0);
5322 		  high = range_predecessor (low1);
5323 		  if (low == 0 || high == 0)
5324 		    return 0;
5325 
5326 		  in_p = 1;
5327 		}
5328 	      else
5329 		return 0;
5330 	    }
5331 	}
5332       else if (subset)
5333 	in_p = 0, low = low0, high = high0;
5334       else
5335 	in_p = 0, low = low0, high = high1;
5336     }
5337 
5338   *pin_p = in_p, *plow = low, *phigh = high;
5339   return 1;
5340 }
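
/* Two small examples of the merge (illustrative values):
   + [0, 9] combined with + [5, 15] yields + [5, 9]: neither range is
   disjoint from nor a subset of the other, so the result runs from the
   start of the second range to the end of the first.  By contrast,
   + [0, 9] combined with - [3, 5] is not expressible as a single range
   and the function returns false.  */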
5341 
5342 
5343 /* Subroutine of fold, looking inside expressions of the form
5344    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5345    of the COND_EXPR.  This function is being used also to optimize
5346    A op B ? C : A, by reversing the comparison first.
5347 
5348    Return a folded expression whose code is not a COND_EXPR
5349    anymore, or NULL_TREE if no folding opportunity is found.  */
5350 
5351 static tree
5352 fold_cond_expr_with_comparison (location_t loc, tree type,
5353 				tree arg0, tree arg1, tree arg2)
5354 {
5355   enum tree_code comp_code = TREE_CODE (arg0);
5356   tree arg00 = TREE_OPERAND (arg0, 0);
5357   tree arg01 = TREE_OPERAND (arg0, 1);
5358   tree arg1_type = TREE_TYPE (arg1);
5359   tree tem;
5360 
5361   STRIP_NOPS (arg1);
5362   STRIP_NOPS (arg2);
5363 
5364   /* If we have A op 0 ? A : -A, consider applying the following
5365      transformations:
5366 
5367      A == 0? A : -A    same as -A
5368      A != 0? A : -A    same as A
5369      A >= 0? A : -A    same as abs (A)
5370      A > 0?  A : -A    same as abs (A)
5371      A <= 0? A : -A    same as -abs (A)
5372      A < 0?  A : -A    same as -abs (A)
5373 
5374      None of these transformations work for modes with signed
5375      zeros.  If A is +/-0, the first two transformations will
5376      change the sign of the result (from +0 to -0, or vice
5377      versa).  The last four will fix the sign of the result,
5378      even though the original expressions could be positive or
5379      negative, depending on the sign of A.
5380 
5381      Note that all these transformations are correct if A is
5382      NaN, since the two alternatives (A and -A) are also NaNs.  */
5383   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5384       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5385 	  ? real_zerop (arg01)
5386 	  : integer_zerop (arg01))
5387       && ((TREE_CODE (arg2) == NEGATE_EXPR
5388 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5389 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5390 	        have already been folded to Y-X; check for that.  */
5391 	  || (TREE_CODE (arg1) == MINUS_EXPR
5392 	      && TREE_CODE (arg2) == MINUS_EXPR
5393 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5394 				  TREE_OPERAND (arg2, 1), 0)
5395 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5396 				  TREE_OPERAND (arg2, 0), 0))))
5397     switch (comp_code)
5398       {
5399       case EQ_EXPR:
5400       case UNEQ_EXPR:
5401 	tem = fold_convert_loc (loc, arg1_type, arg1);
5402 	return fold_convert_loc (loc, type, negate_expr (tem));
5403       case NE_EXPR:
5404       case LTGT_EXPR:
5405 	return fold_convert_loc (loc, type, arg1);
5406       case UNGE_EXPR:
5407       case UNGT_EXPR:
5408 	if (flag_trapping_math)
5409 	  break;
5410 	/* Fall through.  */
5411       case GE_EXPR:
5412       case GT_EXPR:
5413 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5414 	  break;
5415 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5416 	return fold_convert_loc (loc, type, tem);
5417       case UNLE_EXPR:
5418       case UNLT_EXPR:
5419 	if (flag_trapping_math)
5420 	  break;
5421 	/* FALLTHRU */
5422       case LE_EXPR:
5423       case LT_EXPR:
5424 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5425 	  break;
5426 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5427 	return negate_expr (fold_convert_loc (loc, type, tem));
5428       default:
5429 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5430 	break;
5431       }
5432 
5433   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5434      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5435      both transformations are correct when A is NaN: A != 0
5436      is then true, and A == 0 is false.  */
5437 
5438   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5439       && integer_zerop (arg01) && integer_zerop (arg2))
5440     {
5441       if (comp_code == NE_EXPR)
5442 	return fold_convert_loc (loc, type, arg1);
5443       else if (comp_code == EQ_EXPR)
5444 	return build_zero_cst (type);
5445     }
5446 
5447   /* Try some transformations of A op B ? A : B.
5448 
5449      A == B? A : B    same as B
5450      A != B? A : B    same as A
5451      A >= B? A : B    same as max (A, B)
5452      A > B?  A : B    same as max (B, A)
5453      A <= B? A : B    same as min (A, B)
5454      A < B?  A : B    same as min (B, A)
5455 
5456      As above, these transformations don't work in the presence
5457      of signed zeros.  For example, if A and B are zeros of
5458      opposite sign, the first two transformations will change
5459      the sign of the result.  In the last four, the original
5460      expressions give different results for (A=+0, B=-0) and
5461      (A=-0, B=+0), but the transformed expressions do not.
5462 
5463      The first two transformations are correct if either A or B
5464      is a NaN.  In the first transformation, the condition will
5465      be false, and B will indeed be chosen.  In the case of the
5466      second transformation, the condition A != B will be true,
5467      and A will be chosen.
5468 
5469      The conversions to max() and min() are not correct if B is
5470      a number and A is not.  The conditions in the original
5471      expressions will be false, so all four give B.  The min()
5472      and max() versions would give a NaN instead.  */
5473   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5474       && operand_equal_for_comparison_p (arg01, arg2)
5475       /* Avoid these transformations if the COND_EXPR may be used
5476 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5477       && (in_gimple_form
5478 	  || VECTOR_TYPE_P (type)
5479 	  || (! lang_GNU_CXX ()
5480 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5481 	  || ! maybe_lvalue_p (arg1)
5482 	  || ! maybe_lvalue_p (arg2)))
5483     {
5484       tree comp_op0 = arg00;
5485       tree comp_op1 = arg01;
5486       tree comp_type = TREE_TYPE (comp_op0);
5487 
5488       switch (comp_code)
5489 	{
5490 	case EQ_EXPR:
5491 	  return fold_convert_loc (loc, type, arg2);
5492 	case NE_EXPR:
5493 	  return fold_convert_loc (loc, type, arg1);
5494 	case LE_EXPR:
5495 	case LT_EXPR:
5496 	case UNLE_EXPR:
5497 	case UNLT_EXPR:
5498 	  /* In C++ a ?: expression can be an lvalue, so put the
5499 	     operand which will be used if they are equal first,
5500 	     so that we can convert this back to the
5501 	     corresponding COND_EXPR.  */
5502 	  if (!HONOR_NANS (arg1))
5503 	    {
5504 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5505 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5506 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5507 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5508 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5509 				   comp_op1, comp_op0);
5510 	      return fold_convert_loc (loc, type, tem);
5511 	    }
5512 	  break;
5513 	case GE_EXPR:
5514 	case GT_EXPR:
5515 	case UNGE_EXPR:
5516 	case UNGT_EXPR:
5517 	  if (!HONOR_NANS (arg1))
5518 	    {
5519 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5520 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5521 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5522 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5523 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5524 				   comp_op1, comp_op0);
5525 	      return fold_convert_loc (loc, type, tem);
5526 	    }
5527 	  break;
5528 	case UNEQ_EXPR:
5529 	  if (!HONOR_NANS (arg1))
5530 	    return fold_convert_loc (loc, type, arg2);
5531 	  break;
5532 	case LTGT_EXPR:
5533 	  if (!HONOR_NANS (arg1))
5534 	    return fold_convert_loc (loc, type, arg1);
5535 	  break;
5536 	default:
5537 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5538 	  break;
5539 	}
5540     }
5541 
5542   return NULL_TREE;
5543 }
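
/* At the source level the cases above amount to, for instance:

     x >= 0 ? x : -x    folds to  ABS_EXPR <x>
     a < b ? a : b      folds to  MIN_EXPR <b, a>
     a > b ? a : b      folds to  MAX_EXPR <b, a>

   always subject to the signed-zero and NaN caveats spelled out in the
   comments above.  */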
5544 
5545 
5546 
5547 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5548 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5549   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5550 		false) >= 2)
5551 #endif
5552 
5553 /* EXP is some logical combination of boolean tests.  See if we can
5554    merge it into some range test.  Return the new tree if so.  */
5555 
5556 static tree
5557 fold_range_test (location_t loc, enum tree_code code, tree type,
5558 		 tree op0, tree op1)
5559 {
5560   int or_op = (code == TRUTH_ORIF_EXPR
5561 	       || code == TRUTH_OR_EXPR);
5562   int in0_p, in1_p, in_p;
5563   tree low0, low1, low, high0, high1, high;
5564   bool strict_overflow_p = false;
5565   tree tem, lhs, rhs;
5566   const char * const warnmsg = G_("assuming signed overflow does not occur "
5567 				  "when simplifying range test");
5568 
5569   if (!INTEGRAL_TYPE_P (type))
5570     return 0;
5571 
5572   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5573   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5574 
5575   /* If this is an OR operation, invert both sides; we will invert
5576      again at the end.  */
5577   if (or_op)
5578     in0_p = ! in0_p, in1_p = ! in1_p;
5579 
5580   /* If both expressions are the same, if we can merge the ranges, and we
5581      can build the range test, return it or its inverted form.  If one of the
5582      ranges is always true or always false, consider it to be the same
5583      expression as the other.  */
5584   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5585       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5586 		       in1_p, low1, high1)
5587       && (tem = (build_range_check (loc, type,
5588 				    lhs != 0 ? lhs
5589 				    : rhs != 0 ? rhs : integer_zero_node,
5590 				    in_p, low, high))) != 0)
5591     {
5592       if (strict_overflow_p)
5593 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5594       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5595     }
5596 
5597   /* On machines where branches are expensive, if this is a
5598      short-circuited branch and the underlying object on both sides
5599      is the same, make a non-short-circuit operation.  */
5600   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5601   if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
5602     logical_op_non_short_circuit
5603       = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
5604   if (logical_op_non_short_circuit
5605       && !flag_sanitize_coverage
5606       && lhs != 0 && rhs != 0
5607       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5608       && operand_equal_p (lhs, rhs, 0))
5609     {
5610       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5611 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5612 	 which case we can't do this.  */
5613       if (simple_operand_p (lhs))
5614 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5615 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5616 			   type, op0, op1);
5617 
5618       else if (!lang_hooks.decls.global_bindings_p ()
5619 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5620 	{
5621 	  tree common = save_expr (lhs);
5622 
5623 	  if ((lhs = build_range_check (loc, type, common,
5624 					or_op ? ! in0_p : in0_p,
5625 					low0, high0)) != 0
5626 	      && (rhs = build_range_check (loc, type, common,
5627 					   or_op ? ! in1_p : in1_p,
5628 					   low1, high1)) != 0)
5629 	    {
5630 	      if (strict_overflow_p)
5631 		fold_overflow_warning (warnmsg,
5632 				       WARN_STRICT_OVERFLOW_COMPARISON);
5633 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5634 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5635 				 type, lhs, rhs);
5636 	    }
5637 	}
5638     }
5639 
5640   return 0;
5641 }
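
/* The classic instance of this folding (illustrative): the two
   comparisons in

     ch >= '0' && ch <= '9'

   yield ranges that merge_ranges combines into + ['0', '9'], for which
   build_range_check emits a single test roughly of the form

     (unsigned) (ch - '0') <= 9

   so only one branch remains.  */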
5642 
5643 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5644    bit value.  Arrange things so the extra bits will be set to zero if and
5645    only if C is sign-extended to its full width.  If MASK is nonzero,
5646    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5647 
5648 static tree
5649 unextend (tree c, int p, int unsignedp, tree mask)
5650 {
5651   tree type = TREE_TYPE (c);
5652   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5653   tree temp;
5654 
5655   if (p == modesize || unsignedp)
5656     return c;
5657 
5658   /* We work by getting just the sign bit into the low-order bit, then
5659      into the high-order bit, then sign-extend.  We then XOR that value
5660      with C.  */
5661   temp = build_int_cst (TREE_TYPE (c),
5662 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5663 
5664   /* We must use a signed type in order to get an arithmetic right shift.
5665      However, we must also avoid introducing accidental overflows, so that
5666      a subsequent call to integer_zerop will work.  Hence we must
5667      do the type conversion here.  At this point, the constant is either
5668      zero or one, and the conversion to a signed type can never overflow.
5669      We could get an overflow if this conversion is done anywhere else.  */
5670   if (TYPE_UNSIGNED (type))
5671     temp = fold_convert (signed_type_for (type), temp);
5672 
5673   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5674   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5675   if (mask != 0)
5676     temp = const_binop (BIT_AND_EXPR, temp,
5677 			fold_convert (TREE_TYPE (c), mask));
5678   /* If necessary, convert the type back to match the type of C.  */
5679   if (TYPE_UNSIGNED (type))
5680     temp = fold_convert (type, temp);
5681 
5682   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5683 }
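
/* A worked example (modesize == 8, P == 4, values for exposition):
   for C == 0xfa, i.e. the 4-bit value 0b1010 sign-extended to 8 bits,

     temp = bit P-1 of C                 == 1
     temp << (modesize - 1)              == 0x80
     arithmetic >> (modesize - P - 1)    == 0xf0
     C ^ temp                            == 0x0a

   so the extra bits come out zero precisely because C was sign-extended;
   had they disagreed with the sign bit, the XOR would have left some of
   them set.  */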
5684 
5685 /* For an expression that has the form
5686      (A && B) || ~B
5687    or
5688      (A || B) && ~B,
5689    we can drop one of the inner expressions and simplify to
5690      A || ~B
5691    or
5692      A && ~B
5693    LOC is the location of the resulting expression.  OP is the inner
5694    logical operation; the left-hand side in the examples above, while CMPOP
5695    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5696    removing a condition that guards another, as in
5697      (A != NULL && A->...) || A == NULL
5698    which we must not transform.  If RHS_ONLY is true, only eliminate the
5699    right-most operand of the inner logical operation.  */
5700 
5701 static tree
5702 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5703 				 bool rhs_only)
5704 {
5705   tree type = TREE_TYPE (cmpop);
5706   enum tree_code code = TREE_CODE (cmpop);
5707   enum tree_code truthop_code = TREE_CODE (op);
5708   tree lhs = TREE_OPERAND (op, 0);
5709   tree rhs = TREE_OPERAND (op, 1);
5710   tree orig_lhs = lhs, orig_rhs = rhs;
5711   enum tree_code rhs_code = TREE_CODE (rhs);
5712   enum tree_code lhs_code = TREE_CODE (lhs);
5713   enum tree_code inv_code;
5714 
5715   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5716     return NULL_TREE;
5717 
5718   if (TREE_CODE_CLASS (code) != tcc_comparison)
5719     return NULL_TREE;
5720 
5721   if (rhs_code == truthop_code)
5722     {
5723       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5724       if (newrhs != NULL_TREE)
5725 	{
5726 	  rhs = newrhs;
5727 	  rhs_code = TREE_CODE (rhs);
5728 	}
5729     }
5730   if (lhs_code == truthop_code && !rhs_only)
5731     {
5732       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5733       if (newlhs != NULL_TREE)
5734 	{
5735 	  lhs = newlhs;
5736 	  lhs_code = TREE_CODE (lhs);
5737 	}
5738     }
5739 
5740   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5741   if (inv_code == rhs_code
5742       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5743       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5744     return lhs;
5745   if (!rhs_only && inv_code == lhs_code
5746       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5747       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5748     return rhs;
5749   if (rhs != orig_rhs || lhs != orig_lhs)
5750     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5751 			    lhs, rhs);
5752   return NULL_TREE;
5753 }
5754 
5755 /* Find ways of folding logical expressions of LHS and RHS:
5756    Try to merge two comparisons to the same innermost item.
5757    Look for range tests like "ch >= '0' && ch <= '9'".
5758    Look for combinations of simple terms on machines with expensive branches
5759    and evaluate the RHS unconditionally.
5760 
5761    For example, if we have p->a == 2 && p->b == 4 and we can make an
5762    object large enough to span both A and B, we can do this with a comparison
5763    against the object ANDed with a mask.
5764 
5765    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5766    operations to do this with one comparison.
5767 
5768    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5769    function and the one above.
5770 
5771    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5772    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5773 
5774    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5775    two operands.
5776 
5777    We return the simplified tree or 0 if no optimization is possible.  */
5778 
5779 static tree
5780 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5781 		    tree lhs, tree rhs)
5782 {
5783   /* If this is the "or" of two comparisons, we can do something if
5784      the comparisons are NE_EXPR.  If this is the "and", we can do something
5785      if the comparisons are EQ_EXPR.  I.e.,
5786 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5787 
5788      WANTED_CODE is this operation code.  For single bit fields, we can
5789      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5790      comparison for one-bit fields.  */
5791 
5792   enum tree_code wanted_code;
5793   enum tree_code lcode, rcode;
5794   tree ll_arg, lr_arg, rl_arg, rr_arg;
5795   tree ll_inner, lr_inner, rl_inner, rr_inner;
5796   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5797   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5798   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5799   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5800   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5801   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5802   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5803   scalar_int_mode lnmode, rnmode;
5804   tree ll_mask, lr_mask, rl_mask, rr_mask;
5805   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5806   tree l_const, r_const;
5807   tree lntype, rntype, result;
5808   HOST_WIDE_INT first_bit, end_bit;
5809   int volatilep;
5810 
5811   /* Start by getting the comparison codes.  Fail if anything is volatile.
5812      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5813      it were surrounded with a NE_EXPR.  */
5814 
5815   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5816     return 0;
5817 
5818   lcode = TREE_CODE (lhs);
5819   rcode = TREE_CODE (rhs);
5820 
5821   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5822     {
5823       lhs = build2 (NE_EXPR, truth_type, lhs,
5824 		    build_int_cst (TREE_TYPE (lhs), 0));
5825       lcode = NE_EXPR;
5826     }
5827 
5828   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5829     {
5830       rhs = build2 (NE_EXPR, truth_type, rhs,
5831 		    build_int_cst (TREE_TYPE (rhs), 0));
5832       rcode = NE_EXPR;
5833     }
5834 
5835   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5836       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5837     return 0;
5838 
5839   ll_arg = TREE_OPERAND (lhs, 0);
5840   lr_arg = TREE_OPERAND (lhs, 1);
5841   rl_arg = TREE_OPERAND (rhs, 0);
5842   rr_arg = TREE_OPERAND (rhs, 1);
5843 
5844   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5845   if (simple_operand_p (ll_arg)
5846       && simple_operand_p (lr_arg))
5847     {
5848       if (operand_equal_p (ll_arg, rl_arg, 0)
5849           && operand_equal_p (lr_arg, rr_arg, 0))
5850 	{
5851           result = combine_comparisons (loc, code, lcode, rcode,
5852 					truth_type, ll_arg, lr_arg);
5853 	  if (result)
5854 	    return result;
5855 	}
5856       else if (operand_equal_p (ll_arg, rr_arg, 0)
5857                && operand_equal_p (lr_arg, rl_arg, 0))
5858 	{
5859           result = combine_comparisons (loc, code, lcode,
5860 					swap_tree_comparison (rcode),
5861 					truth_type, ll_arg, lr_arg);
5862 	  if (result)
5863 	    return result;
5864 	}
5865     }
5866 
5867   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5868 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5869 
5870   /* If the RHS can be evaluated unconditionally and its operands are
5871      simple, it wins to evaluate the RHS unconditionally on machines
5872      with expensive branches.  In this case, this isn't a comparison
5873      that can be merged.  */
5874 
5875   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5876 		   false) >= 2
5877       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5878       && simple_operand_p (rl_arg)
5879       && simple_operand_p (rr_arg))
5880     {
5881       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5882       if (code == TRUTH_OR_EXPR
5883 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5884 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5885 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5886 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5887 	return build2_loc (loc, NE_EXPR, truth_type,
5888 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5889 				   ll_arg, rl_arg),
5890 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5891 
5892       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5893       if (code == TRUTH_AND_EXPR
5894 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5895 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5896 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5897 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5898 	return build2_loc (loc, EQ_EXPR, truth_type,
5899 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5900 				   ll_arg, rl_arg),
5901 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5902     }
5903 
5904   /* See if the comparisons can be merged.  Then get all the parameters for
5905      each side.  */
5906 
5907   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5908       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5909     return 0;
5910 
5911   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5912   volatilep = 0;
5913   ll_inner = decode_field_reference (loc, &ll_arg,
5914 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5915 				     &ll_unsignedp, &ll_reversep, &volatilep,
5916 				     &ll_mask, &ll_and_mask);
5917   lr_inner = decode_field_reference (loc, &lr_arg,
5918 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5919 				     &lr_unsignedp, &lr_reversep, &volatilep,
5920 				     &lr_mask, &lr_and_mask);
5921   rl_inner = decode_field_reference (loc, &rl_arg,
5922 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5923 				     &rl_unsignedp, &rl_reversep, &volatilep,
5924 				     &rl_mask, &rl_and_mask);
5925   rr_inner = decode_field_reference (loc, &rr_arg,
5926 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5927 				     &rr_unsignedp, &rr_reversep, &volatilep,
5928 				     &rr_mask, &rr_and_mask);
5929 
5930   /* The inner operation on the lhs of each comparison must be the same
5931      if we are to be able to do anything.
5932      Then see if we have constants.  If not, the same must be true for
5933      the rhs's.  */
5934   if (volatilep
5935       || ll_reversep != rl_reversep
5936       || ll_inner == 0 || rl_inner == 0
5937       || ! operand_equal_p (ll_inner, rl_inner, 0))
5938     return 0;
5939 
5940   if (TREE_CODE (lr_arg) == INTEGER_CST
5941       && TREE_CODE (rr_arg) == INTEGER_CST)
5942     {
5943       l_const = lr_arg, r_const = rr_arg;
5944       lr_reversep = ll_reversep;
5945     }
5946   else if (lr_reversep != rr_reversep
5947 	   || lr_inner == 0 || rr_inner == 0
5948 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5949     return 0;
5950   else
5951     l_const = r_const = 0;
5952 
5953   /* If either comparison code is not correct for our logical operation,
5954      fail.  However, we can convert a one-bit comparison against zero into
5955      the opposite comparison against that bit being set in the field.  */
5956 
5957   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5958   if (lcode != wanted_code)
5959     {
5960       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5961 	{
5962 	  /* Make the left operand unsigned, since we are only interested
5963 	     in the value of one bit.  Otherwise we are doing the wrong
5964 	     thing below.  */
5965 	  ll_unsignedp = 1;
5966 	  l_const = ll_mask;
5967 	}
5968       else
5969 	return 0;
5970     }
5971 
5972   /* This is analogous to the code for l_const above.  */
5973   if (rcode != wanted_code)
5974     {
5975       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5976 	{
5977 	  rl_unsignedp = 1;
5978 	  r_const = rl_mask;
5979 	}
5980       else
5981 	return 0;
5982     }
5983 
5984   /* See if we can find a mode that contains both fields being compared on
5985      the left.  If we can't, fail.  Otherwise, update all constants and masks
5986      to be relative to a field of that size.  */
5987   first_bit = MIN (ll_bitpos, rl_bitpos);
5988   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5989   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5990 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5991 		      volatilep, &lnmode))
5992     return 0;
5993 
5994   lnbitsize = GET_MODE_BITSIZE (lnmode);
5995   lnbitpos = first_bit & ~ (lnbitsize - 1);
5996   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5997   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5998 
5999   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6000     {
6001       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6002       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6003     }
6004 
6005   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6006 			 size_int (xll_bitpos));
6007   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6008 			 size_int (xrl_bitpos));
6009 
6010   if (l_const)
6011     {
6012       l_const = fold_convert_loc (loc, lntype, l_const);
6013       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6014       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6015       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6016 					fold_build1_loc (loc, BIT_NOT_EXPR,
6017 						     lntype, ll_mask))))
6018 	{
6019 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6020 
6021 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6022 	}
6023     }
6024   if (r_const)
6025     {
6026       r_const = fold_convert_loc (loc, lntype, r_const);
6027       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6028       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6029       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6030 					fold_build1_loc (loc, BIT_NOT_EXPR,
6031 						     lntype, rl_mask))))
6032 	{
6033 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6034 
6035 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6036 	}
6037     }
6038 
6039   /* If the right sides are not constant, do the same for them.  Also,
6040      disallow this optimization if a size, signedness or storage order
6041      mismatch occurs between the left and right sides.  */
6042   if (l_const == 0)
6043     {
6044       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6045 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6046 	  || ll_reversep != lr_reversep
6047 	  /* Make sure the two fields on the right
6048 	     correspond to the left without being swapped.  */
6049 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6050 	return 0;
6051 
6052       first_bit = MIN (lr_bitpos, rr_bitpos);
6053       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6054       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6055 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6056 			  volatilep, &rnmode))
6057 	return 0;
6058 
6059       rnbitsize = GET_MODE_BITSIZE (rnmode);
6060       rnbitpos = first_bit & ~ (rnbitsize - 1);
6061       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6062       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6063 
6064       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6065 	{
6066 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6067 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6068 	}
6069 
6070       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6071 							    rntype, lr_mask),
6072 			     size_int (xlr_bitpos));
6073       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6074 							    rntype, rr_mask),
6075 			     size_int (xrr_bitpos));
6076 
6077       /* Make a mask that corresponds to both fields being compared.
6078 	 Do this for both items being compared.  If the operands are the
6079 	 same size and the bits being compared are in the same position
6080 	 then we can do this by masking both and comparing the masked
6081 	 results.  */
6082       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6083       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6084       if (lnbitsize == rnbitsize
6085 	  && xll_bitpos == xlr_bitpos
6086 	  && lnbitpos >= 0
6087 	  && rnbitpos >= 0)
6088 	{
6089 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6090 				    lntype, lnbitsize, lnbitpos,
6091 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6092 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6093 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6094 
6095 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6096 				    rntype, rnbitsize, rnbitpos,
6097 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6098 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6099 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6100 
6101 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6102 	}
6103 
6104       /* There is still another way we can do something:  If both pairs of
6105 	 fields being compared are adjacent, we may be able to make a wider
6106 	 field containing them both.
6107 
6108 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6109 	 the mask must be shifted to account for the shift done by
6110 	 make_bit_field_ref.  */
6111       if (((ll_bitsize + ll_bitpos == rl_bitpos
6112 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6113 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6114 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6115 	  && ll_bitpos >= 0
6116 	  && rl_bitpos >= 0
6117 	  && lr_bitpos >= 0
6118 	  && rr_bitpos >= 0)
6119 	{
6120 	  tree type;
6121 
6122 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6123 				    ll_bitsize + rl_bitsize,
6124 				    MIN (ll_bitpos, rl_bitpos),
6125 				    ll_unsignedp, ll_reversep);
6126 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6127 				    lr_bitsize + rr_bitsize,
6128 				    MIN (lr_bitpos, rr_bitpos),
6129 				    lr_unsignedp, lr_reversep);
6130 
6131 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6132 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6133 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6134 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6135 
6136 	  /* Convert to the smaller type before masking out unwanted bits.  */
6137 	  type = lntype;
6138 	  if (lntype != rntype)
6139 	    {
6140 	      if (lnbitsize > rnbitsize)
6141 		{
6142 		  lhs = fold_convert_loc (loc, rntype, lhs);
6143 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6144 		  type = rntype;
6145 		}
6146 	      else if (lnbitsize < rnbitsize)
6147 		{
6148 		  rhs = fold_convert_loc (loc, lntype, rhs);
6149 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6150 		  type = lntype;
6151 		}
6152 	    }
6153 
6154 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6155 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6156 
6157 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6158 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6159 
6160 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6161 	}
6162 
6163       return 0;
6164     }
6165 
6166   /* Handle the case of comparisons with constants.  If there is something in
6167      common between the masks, those bits of the constants must be the same.
6168      If not, the condition is always false.  Test for this to avoid generating
6169      incorrect code below.  */
6170   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6171   if (! integer_zerop (result)
6172       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6173 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6174     {
6175       if (wanted_code == NE_EXPR)
6176 	{
6177 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6178 	  return constant_boolean_node (true, truth_type);
6179 	}
6180       else
6181 	{
6182 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6183 	  return constant_boolean_node (false, truth_type);
6184 	}
6185     }
6186 
6187   if (lnbitpos < 0)
6188     return 0;
6189 
6190   /* Construct the expression we will return.  First get the component
6191      reference we will make.  Unless the mask is all ones the width of
6192      that field, perform the mask operation.  Then compare with the
6193      merged constant.  */
6194   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6195 			       lntype, lnbitsize, lnbitpos,
6196 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6197 
6198   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6199   if (! all_ones_mask_p (ll_mask, lnbitsize))
6200     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6201 
6202   return build2_loc (loc, wanted_code, truth_type, result,
6203 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6204 }
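
/* A sketch of the payoff (hypothetical layout, for exposition): given
   adjacent bit-fields

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can be folded into a single load,
   mask and compare of the containing byte, conceptually

     (*(unsigned char *) p & 0xff) == 0x42

   with the exact mask, shifts and merged constant depending on the
   endianness and on the mode chosen by get_best_mode.  */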
6205 
6206 /* T is an integer expression that is being multiplied by, divided by, or
6207    reduced modulo a constant C (CODE says which operation and what kind of
6208    divide or modulus).  See if we can eliminate that operation by folding it with
6209    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6210    should be used for the computation if wider than our type.
6211 
6212    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6213    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6214    expression would not overflow or that overflow is undefined for the type
6215    in the language in question.
6216 
6217    If we return a non-null expression, it is an equivalent form of the
6218    original computation, but need not be in the original type.
6219 
6220    We set *STRICT_OVERFLOW_P to true if the return value depends on
6221    signed overflow being undefined.  Otherwise we do not change
6222    *STRICT_OVERFLOW_P.  */
6223 
6224 static tree
6225 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6226 		bool *strict_overflow_p)
6227 {
6228   /* To avoid exponential search depth, refuse to allow recursion past
6229      three levels.  Beyond that (1) it's highly unlikely that we'll find
6230      something interesting and (2) we've probably processed it before
6231      when we built the inner expression.  */
6232 
6233   static int depth;
6234   tree ret;
6235 
6236   if (depth > 3)
6237     return NULL;
6238 
6239   depth++;
6240   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6241   depth--;
6242 
6243   return ret;
6244 }
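
/* For example (illustrative, matching the LSHIFT_EXPR case below): for
   an unsigned x, the expression (x << 3) / 4 is treated as (x * 8) / 4,
   which extract_muldiv folds to x * 2; the recursion depth cap above
   merely bounds how far such rewrites may chase nested expressions.  */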
6245 
6246 static tree
6247 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6248 		  bool *strict_overflow_p)
6249 {
6250   tree type = TREE_TYPE (t);
6251   enum tree_code tcode = TREE_CODE (t);
6252   tree ctype = (wide_type != 0
6253 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6254 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6255 		? wide_type : type);
6256   tree t1, t2;
6257   int same_p = tcode == code;
6258   tree op0 = NULL_TREE, op1 = NULL_TREE;
6259   bool sub_strict_overflow_p;
6260 
6261   /* Don't deal with constants of zero here; they confuse the code below.  */
6262   if (integer_zerop (c))
6263     return NULL_TREE;
6264 
6265   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6266     op0 = TREE_OPERAND (t, 0);
6267 
6268   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6269     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6270 
6271   /* Note that we need not handle conditional operations here since fold
6272      already handles those cases.  So just do arithmetic here.  */
6273   switch (tcode)
6274     {
6275     case INTEGER_CST:
6276       /* For a constant, we can always simplify if we are a multiply
6277 	 or (for divide and modulus) if it is a multiple of our constant.  */
6278       if (code == MULT_EXPR
6279 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6280 				TYPE_SIGN (type)))
6281 	{
6282 	  tree tem = const_binop (code, fold_convert (ctype, t),
6283 				  fold_convert (ctype, c));
6284 	  /* If the multiplication overflowed, we lost information on it.
6285 	     See PR68142 and PR69845.  */
6286 	  if (TREE_OVERFLOW (tem))
6287 	    return NULL_TREE;
6288 	  return tem;
6289 	}
6290       break;
6291 
6292     CASE_CONVERT: case NON_LVALUE_EXPR:
6293       /* If op0 is an expression ...  */
6294       if ((COMPARISON_CLASS_P (op0)
6295 	   || UNARY_CLASS_P (op0)
6296 	   || BINARY_CLASS_P (op0)
6297 	   || VL_EXP_CLASS_P (op0)
6298 	   || EXPRESSION_CLASS_P (op0))
6299 	  /* ... and has wrapping overflow, and its type is smaller
6300 	     than ctype, then we cannot pass through as widening.  */
6301 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6302 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6303 	       && (TYPE_PRECISION (ctype)
6304 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6305 	      /* ... or this is a truncation (t is narrower than op0),
6306 		 then we cannot pass through this narrowing.  */
6307 	      || (TYPE_PRECISION (type)
6308 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6309 	      /* ... or signedness changes for division or modulus,
6310 		 then we cannot pass through this conversion.  */
6311 	      || (code != MULT_EXPR
6312 		  && (TYPE_UNSIGNED (ctype)
6313 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6314 	      /* ... or has undefined overflow while the converted to
6315 		 type has not, we cannot do the operation in the inner type
6316 		 as that would introduce undefined overflow.  */
6317 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6318 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6319 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6320 	break;
6321 
6322       /* Pass the constant down and see if we can make a simplification.  If
6323 	 we can, replace this expression with the inner simplification for
6324 	 possible later conversion to our or some other type.  */
6325       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6326 	  && TREE_CODE (t2) == INTEGER_CST
6327 	  && !TREE_OVERFLOW (t2)
6328 	  && (t1 = extract_muldiv (op0, t2, code,
6329 				   code == MULT_EXPR ? ctype : NULL_TREE,
6330 				   strict_overflow_p)) != 0)
6331 	return t1;
6332       break;
6333 
6334     case ABS_EXPR:
6335       /* If widening the type changes it from signed to unsigned, then we
6336          must avoid building ABS_EXPR itself as unsigned.  */
6337       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6338         {
6339           tree cstype = (*signed_type_for) (ctype);
6340           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6341 	      != 0)
6342             {
6343               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6344               return fold_convert (ctype, t1);
6345             }
6346           break;
6347         }
6348       /* If the constant is negative, we cannot simplify this.  */
6349       if (tree_int_cst_sgn (c) == -1)
6350         break;
6351       /* FALLTHROUGH */
6352     case NEGATE_EXPR:
6353       /* For division and modulus, type can't be unsigned, as e.g.
6354 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6355 	 For signed types, even with wrapping overflow, this is fine.  */
6356       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6357 	break;
6358       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6359 	  != 0)
6360 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6361       break;
6362 
6363     case MIN_EXPR:  case MAX_EXPR:
6364       /* If widening the type changes the signedness, then we can't perform
6365 	 this optimization as that changes the result.  */
6366       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6367 	break;
6368 
6369       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6370       sub_strict_overflow_p = false;
6371       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6372 				&sub_strict_overflow_p)) != 0
6373 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6374 				   &sub_strict_overflow_p)) != 0)
6375 	{
6376 	  if (tree_int_cst_sgn (c) < 0)
6377 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6378 	  if (sub_strict_overflow_p)
6379 	    *strict_overflow_p = true;
6380 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6381 			      fold_convert (ctype, t2));
6382 	}
6383       break;
6384 
6385     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6386       /* If the second operand is constant, this is a multiplication
6387 	 or floor division, by a power of two, so we can treat it that
6388 	 way unless the multiplier or divisor overflows.  Signed
6389 	 left-shift overflow is implementation-defined rather than
6390 	 undefined in C90, so do not convert signed left shift into
6391 	 multiplication.  */
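      /* For example, for unsigned X, (X << 3) * 8 is rewritten here as
	 (X * 8) * 8, which the MULT_EXPR case then folds to X * 64.  */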
6392       if (TREE_CODE (op1) == INTEGER_CST
6393 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6394 	  /* const_binop may not detect overflow correctly,
6395 	     so check for it explicitly here.  */
6396 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6397 			wi::to_wide (op1))
6398 	  && (t1 = fold_convert (ctype,
6399 				 const_binop (LSHIFT_EXPR, size_one_node,
6400 					      op1))) != 0
6401 	  && !TREE_OVERFLOW (t1))
6402 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6403 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6404 				       ctype,
6405 				       fold_convert (ctype, op0),
6406 				       t1),
6407 			       c, code, wide_type, strict_overflow_p);
6408       break;
6409 
6410     case PLUS_EXPR:  case MINUS_EXPR:
6411       /* See if we can eliminate the operation on both sides.  If we can, we
6412 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6413 	 cases where we can do anything are if the second operand is a
6414 	 constant.  */
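      /* For example, for unsigned X and Y, (X * 2 + Y * 2) * 3 becomes
	 X * 6 + Y * 6 here, while (X + 4) * 3 is distributed further
	 below into X * 3 + 12.  */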
6415       sub_strict_overflow_p = false;
6416       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6417       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6418       if (t1 != 0 && t2 != 0
6419 	  && TYPE_OVERFLOW_WRAPS (ctype)
6420 	  && (code == MULT_EXPR
6421 	      /* If not multiplication, we can only do this if both operands
6422 		 are divisible by c.  */
6423 	      || (multiple_of_p (ctype, op0, c)
6424 	          && multiple_of_p (ctype, op1, c))))
6425 	{
6426 	  if (sub_strict_overflow_p)
6427 	    *strict_overflow_p = true;
6428 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6429 			      fold_convert (ctype, t2));
6430 	}
6431 
6432       /* If this was a subtraction, negate OP1 and make the operation
6433 	 an addition.  This simplifies the logic below.  */
6434       if (tcode == MINUS_EXPR)
6435 	{
6436 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6437 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6438 	  if (TREE_CODE (op0) == INTEGER_CST)
6439 	    {
6440 	      std::swap (op0, op1);
6441 	      std::swap (t1, t2);
6442 	    }
6443 	}
6444 
6445       if (TREE_CODE (op1) != INTEGER_CST)
6446 	break;
6447 
6448       /* If either OP1 or C are negative, this optimization is not safe for
6449 	 some of the division and remainder types while for others we need
6450 	 to change the code.  */
6451       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6452 	{
6453 	  if (code == CEIL_DIV_EXPR)
6454 	    code = FLOOR_DIV_EXPR;
6455 	  else if (code == FLOOR_DIV_EXPR)
6456 	    code = CEIL_DIV_EXPR;
6457 	  else if (code != MULT_EXPR
6458 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6459 	    break;
6460 	}
6461 
6462       /* If it's a multiply or a division/modulus operation of a multiple
6463          of our constant, do the operation and verify it doesn't overflow.  */
6464       if (code == MULT_EXPR
6465 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6466 				TYPE_SIGN (type)))
6467 	{
6468 	  op1 = const_binop (code, fold_convert (ctype, op1),
6469 			     fold_convert (ctype, c));
6470 	  /* We allow the constant to overflow with wrapping semantics.  */
6471 	  if (op1 == 0
6472 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6473 	    break;
6474 	}
6475       else
6476 	break;
6477 
6478       /* If we have an unsigned type, we cannot widen the operation since it
6479 	 will change the result if the original computation overflowed.  */
6480       if (TYPE_UNSIGNED (ctype) && ctype != type)
6481 	break;
6482 
6483       /* The last case is if we are a multiply.  In that case, we can
6484 	 apply the distributive law to commute the multiply and addition
6485 	 if the multiplication of the constants doesn't overflow
6486 	 and overflow is defined.  With undefined overflow
6487 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6488 	 But fold_plusminus_mult_expr would factor back any power-of-two
6489 	 value so do not distribute in the first place in this case.  */
6490       if (code == MULT_EXPR
6491 	  && TYPE_OVERFLOW_WRAPS (ctype)
6492 	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6493 	return fold_build2 (tcode, ctype,
6494 			    fold_build2 (code, ctype,
6495 					 fold_convert (ctype, op0),
6496 					 fold_convert (ctype, c)),
6497 			    op1);
6498 
6499       break;
6500 
6501     case MULT_EXPR:
6502       /* We have a special case here if we are doing something like
6503 	 (X * 8) % 4 since we know that's zero.  */
6504       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6505 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6506 	  /* If the multiplication can overflow we cannot optimize this.  */
6507 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6508 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6509 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6510 				TYPE_SIGN (type)))
6511 	{
6512 	  *strict_overflow_p = true;
6513 	  return omit_one_operand (type, integer_zero_node, op0);
6514 	}
6515 
6516       /* ... fall through ...  */
6517 
6518     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6519     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6520       /* If we can extract our operation from the LHS, do so and return a
6521 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6522 	 do something only if the second operand is a constant.  */
6523       if (same_p
6524 	  && TYPE_OVERFLOW_WRAPS (ctype)
6525 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6526 				   strict_overflow_p)) != 0)
6527 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6528 			    fold_convert (ctype, op1));
6529       else if (tcode == MULT_EXPR && code == MULT_EXPR
6530 	       && TYPE_OVERFLOW_WRAPS (ctype)
6531 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6532 					strict_overflow_p)) != 0)
6533 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6534 			    fold_convert (ctype, t1));
6535       else if (TREE_CODE (op1) != INTEGER_CST)
6536 	return 0;
6537 
6538       /* If these are the same operation types, we can associate them
6539 	 assuming no overflow.  */
6540       if (tcode == code)
6541 	{
6542 	  bool overflow_p = false;
6543 	  wi::overflow_type overflow_mul;
6544 	  signop sign = TYPE_SIGN (ctype);
6545 	  unsigned prec = TYPE_PRECISION (ctype);
6546 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6547 				  wi::to_wide (c, prec),
6548 				  sign, &overflow_mul);
6549 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6550 	  if (overflow_mul
6551 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6552 	    overflow_p = true;
6553 	  if (!overflow_p)
6554 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6555 				wide_int_to_tree (ctype, mul));
6556 	}
6557 
6558       /* If these operations "cancel" each other, we have the main
6559 	 optimizations of this pass, which occur when either constant is a
6560 	 multiple of the other, in which case we replace this with either an
6561 	 operation of CODE or TCODE.
6562 
6563 	 If we have an unsigned type, we cannot do this since it will change
6564 	 the result if the original computation overflowed.  */
6565       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6566 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6567 	      || (tcode == MULT_EXPR
6568 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6569 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6570 		  && code != MULT_EXPR)))
6571 	{
6572 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6573 				 TYPE_SIGN (type)))
6574 	    {
6575 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6576 		*strict_overflow_p = true;
6577 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6578 				  fold_convert (ctype,
6579 						const_binop (TRUNC_DIV_EXPR,
6580 							     op1, c)));
6581 	    }
6582 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6583 				      TYPE_SIGN (type)))
6584 	    {
6585 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6586 		*strict_overflow_p = true;
6587 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6588 				  fold_convert (ctype,
6589 						const_binop (TRUNC_DIV_EXPR,
6590 							     c, op1)));
6591 	    }
6592 	}
6593       break;
6594 
6595     default:
6596       break;
6597     }
6598 
6599   return 0;
6600 }
6601 
6602 /* Return a node which has the indicated constant VALUE (either 0 or
6603    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6604    and is of the indicated TYPE.  */
6605 
6606 tree
6607 constant_boolean_node (bool value, tree type)
6608 {
6609   if (type == integer_type_node)
6610     return value ? integer_one_node : integer_zero_node;
6611   else if (type == boolean_type_node)
6612     return value ? boolean_true_node : boolean_false_node;
6613   else if (TREE_CODE (type) == VECTOR_TYPE)
6614     return build_vector_from_val (type,
6615 				  build_int_cst (TREE_TYPE (type),
6616 						 value ? -1 : 0));
6617   else
6618     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6619 }
6620 
6621 
6622 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6623    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6624    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6625    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6626    COND is the first argument to CODE; otherwise (as in the example
6627    given here), it is the second argument.  TYPE is the type of the
6628    original expression.  Return NULL_TREE if no simplification is
6629    possible.  */
6630 
6631 static tree
6632 fold_binary_op_with_conditional_arg (location_t loc,
6633 				     enum tree_code code,
6634 				     tree type, tree op0, tree op1,
6635 				     tree cond, tree arg, int cond_first_p)
6636 {
6637   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6638   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6639   tree test, true_value, false_value;
6640   tree lhs = NULL_TREE;
6641   tree rhs = NULL_TREE;
6642   enum tree_code cond_code = COND_EXPR;
6643 
6644   /* Do not move possibly trapping operations into the conditional as this
6645      pessimizes code and causes gimplification issues when applied late.  */
6646   if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6647 			      ANY_INTEGRAL_TYPE_P (type)
6648 			      && TYPE_OVERFLOW_TRAPS (type), op1))
6649     return NULL_TREE;
6650 
6651   if (TREE_CODE (cond) == COND_EXPR
6652       || TREE_CODE (cond) == VEC_COND_EXPR)
6653     {
6654       test = TREE_OPERAND (cond, 0);
6655       true_value = TREE_OPERAND (cond, 1);
6656       false_value = TREE_OPERAND (cond, 2);
6657       /* If this operand is an expression that throws, then it does
6658 	 not make sense to try to perform a logical or arithmetic
6659 	 operation involving it.  */
6660       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6661 	lhs = true_value;
6662       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6663 	rhs = false_value;
6664     }
6665   else if (!(TREE_CODE (type) != VECTOR_TYPE
6666 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6667     {
6668       tree testtype = TREE_TYPE (cond);
6669       test = cond;
6670       true_value = constant_boolean_node (true, testtype);
6671       false_value = constant_boolean_node (false, testtype);
6672     }
6673   else
6674     /* Detect the case of mixing vector and scalar types - bail out.  */
6675     return NULL_TREE;
6676 
6677   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6678     cond_code = VEC_COND_EXPR;
6679 
6680   /* This transformation is only worthwhile if we don't have to wrap ARG
6681      in a SAVE_EXPR and the operation can be simplified without recursing
6682      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6683   if (!TREE_CONSTANT (arg)
6684       && (TREE_SIDE_EFFECTS (arg)
6685 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6686 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6687     return NULL_TREE;
6688 
6689   arg = fold_convert_loc (loc, arg_type, arg);
6690   if (lhs == 0)
6691     {
6692       true_value = fold_convert_loc (loc, cond_type, true_value);
6693       if (cond_first_p)
6694 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6695       else
6696 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6697     }
6698   if (rhs == 0)
6699     {
6700       false_value = fold_convert_loc (loc, cond_type, false_value);
6701       if (cond_first_p)
6702 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6703       else
6704 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6705     }
6706 
6707   /* Check that we have simplified at least one of the branches.  */
6708   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6709     return NULL_TREE;
6710 
6711   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6712 }
6713 
6714 
6715 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6716 
6717    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6718    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6719    ADDEND is the same as X.
6720 
6721    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6722    and finite.  The problematic cases are when X is zero, and its mode
6723    has signed zeros.  In the case of rounding towards -infinity,
6724    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6725    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6726 
6727 bool
6728 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6729 {
6730   if (!real_zerop (addend))
6731     return false;
6732 
6733   /* Don't allow the fold with -fsignaling-nans.  */
6734   if (HONOR_SNANS (element_mode (type)))
6735     return false;
6736 
6737   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6738   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6739     return true;
6740 
6741   /* In a vector or complex, we would need to check the sign of all zeros.  */
6742   if (TREE_CODE (addend) != REAL_CST)
6743     return false;
6744 
6745   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6746   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6747     negate = !negate;
6748 
6749   /* The mode has signed zeros, and we have to honor their sign.
6750      In this situation, there is only one case we can return true for.
6751      X - 0 is the same as X unless rounding towards -infinity is
6752      supported.  */
6753   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6754 }
6755 
6756 /* Subroutine of match.pd that optimizes comparisons of a division by
6757    a nonzero integer constant against an integer constant, i.e.
6758    X/C1 op C2.
6759 
6760    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6761    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
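/* For example, for unsigned X, X / 4 == 2 holds exactly when
   8 <= X && X <= 11, so *LO is set to 8 and *HI to 11.  */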
6762 
6763 enum tree_code
6764 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6765 		  tree *hi, bool *neg_overflow)
6766 {
6767   tree prod, tmp, type = TREE_TYPE (c1);
6768   signop sign = TYPE_SIGN (type);
6769   wi::overflow_type overflow;
6770 
6771   /* We have to do this the hard way to detect unsigned overflow.
6772      prod = int_const_binop (MULT_EXPR, c1, c2);  */
6773   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6774   prod = force_fit_type (type, val, -1, overflow);
6775   *neg_overflow = false;
6776 
6777   if (sign == UNSIGNED)
6778     {
6779       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6780       *lo = prod;
6781 
6782       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6783       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6784       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6785     }
6786   else if (tree_int_cst_sgn (c1) >= 0)
6787     {
6788       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6789       switch (tree_int_cst_sgn (c2))
6790 	{
6791 	case -1:
6792 	  *neg_overflow = true;
6793 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6794 	  *hi = prod;
6795 	  break;
6796 
6797 	case 0:
6798 	  *lo = fold_negate_const (tmp, type);
6799 	  *hi = tmp;
6800 	  break;
6801 
6802 	case 1:
6803 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6804 	  *lo = prod;
6805 	  break;
6806 
6807 	default:
6808 	  gcc_unreachable ();
6809 	}
6810     }
6811   else
6812     {
6813       /* A negative divisor reverses the relational operators.  */
6814       code = swap_tree_comparison (code);
6815 
6816       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6817       switch (tree_int_cst_sgn (c2))
6818 	{
6819 	case -1:
6820 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6821 	  *lo = prod;
6822 	  break;
6823 
6824 	case 0:
6825 	  *hi = fold_negate_const (tmp, type);
6826 	  *lo = tmp;
6827 	  break;
6828 
6829 	case 1:
6830 	  *neg_overflow = true;
6831 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6832 	  *hi = prod;
6833 	  break;
6834 
6835 	default:
6836 	  gcc_unreachable ();
6837 	}
6838     }
6839 
6840   if (code != EQ_EXPR && code != NE_EXPR)
6841     return code;
6842 
6843   if (TREE_OVERFLOW (*lo)
6844       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6845     *lo = NULL_TREE;
6846   if (TREE_OVERFLOW (*hi)
6847       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6848     *hi = NULL_TREE;
6849 
6850   return code;
6851 }
6852 
6853 
6854 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6855    equality/inequality test, then return a simplified form of the test
6856    using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
6857    desired result type.  */
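/* For example, for 32-bit int X, (X & 0x80000000) != 0 folds to
   X < 0 and (X & 0x80000000) == 0 folds to X >= 0.  */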
6858 
6859 static tree
6860 fold_single_bit_test_into_sign_test (location_t loc,
6861 				     enum tree_code code, tree arg0, tree arg1,
6862 				     tree result_type)
6863 {
6864   /* If this is testing a single bit, we can optimize the test.  */
6865   if ((code == NE_EXPR || code == EQ_EXPR)
6866       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6867       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6868     {
6869       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6870 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6871       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6872 
6873       if (arg00 != NULL_TREE
6874 	  /* This is only a win if casting to a signed type is cheap,
6875 	     i.e. when arg00's type is not a partial mode.  */
6876 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
6877 	{
6878 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6879 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6880 			      result_type,
6881 			      fold_convert_loc (loc, stype, arg00),
6882 			      build_int_cst (stype, 0));
6883 	}
6884     }
6885 
6886   return NULL_TREE;
6887 }
6888 
6889 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6890    equality/inequality test, then return a simplified form of
6891    the test using shifts and logical operations.  Otherwise return
6892    NULL.  RESULT_TYPE is the desired result type.  */
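/* For example, (X & 8) != 0 becomes (X >> 3) & 1 and (X & 8) == 0
   becomes ((X >> 3) ^ 1) & 1, converted to RESULT_TYPE.  */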
6893 
6894 tree
6895 fold_single_bit_test (location_t loc, enum tree_code code,
6896 		      tree arg0, tree arg1, tree result_type)
6897 {
6898   /* If this is testing a single bit, we can optimize the test.  */
6899   if ((code == NE_EXPR || code == EQ_EXPR)
6900       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6901       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6902     {
6903       tree inner = TREE_OPERAND (arg0, 0);
6904       tree type = TREE_TYPE (arg0);
6905       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6906       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6907       int ops_unsigned;
6908       tree signed_type, unsigned_type, intermediate_type;
6909       tree tem, one;
6910 
6911       /* First, see if we can fold the single bit test into a sign-bit
6912 	 test.  */
6913       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6914 						 result_type);
6915       if (tem)
6916 	return tem;
6917 
6918       /* Otherwise we have (A & C) != 0 where C is a single bit,
6919 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6920 	 Similarly for (A & C) == 0.  */
6921 
6922       /* If INNER is a right shift of a constant and it plus BITNUM does
6923 	 not overflow, adjust BITNUM and INNER.  */
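      /* E.g. testing bit 1 of X >> 2 is the same as testing bit 3 of X.  */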
6924       if (TREE_CODE (inner) == RSHIFT_EXPR
6925 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6926 	  && bitnum < TYPE_PRECISION (type)
6927 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6928 			TYPE_PRECISION (type) - bitnum))
6929 	{
6930 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6931 	  inner = TREE_OPERAND (inner, 0);
6932 	}
6933 
6934       /* If we are going to be able to omit the AND below, we must do our
6935 	 operations as unsigned.  If we must use the AND, we have a choice.
6936 	 Normally unsigned is faster, but for some machines signed is.  */
6937       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6938 		      && !flag_syntax_only) ? 0 : 1;
6939 
6940       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6941       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6942       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6943       inner = fold_convert_loc (loc, intermediate_type, inner);
6944 
6945       if (bitnum != 0)
6946 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6947 			inner, size_int (bitnum));
6948 
6949       one = build_int_cst (intermediate_type, 1);
6950 
6951       if (code == EQ_EXPR)
6952 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6953 
6954       /* Put the AND last so it can combine with more things.  */
6955       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6956 
6957       /* Make sure to return the proper type.  */
6958       inner = fold_convert_loc (loc, result_type, inner);
6959 
6960       return inner;
6961     }
6962   return NULL_TREE;
6963 }
6964 
6965 /* Test whether it is preferable to swap two operands, ARG0 and
6966    ARG1, for example because ARG0 is an integer constant and ARG1
6967    isn't.  */
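/* For example, given ARG0 = 5 and ARG1 = X this returns true, which
   lets callers canonicalize 5 + X as X + 5.  */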
6968 
6969 bool
6970 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6971 {
6972   if (CONSTANT_CLASS_P (arg1))
6973     return 0;
6974   if (CONSTANT_CLASS_P (arg0))
6975     return 1;
6976 
6977   STRIP_NOPS (arg0);
6978   STRIP_NOPS (arg1);
6979 
6980   if (TREE_CONSTANT (arg1))
6981     return 0;
6982   if (TREE_CONSTANT (arg0))
6983     return 1;
6984 
6985   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6986      for commutative and comparison operators.  Ensuring a canonical
6987      form allows the optimizers to find additional redundancies without
6988      having to explicitly check for both orderings.  */
6989   if (TREE_CODE (arg0) == SSA_NAME
6990       && TREE_CODE (arg1) == SSA_NAME
6991       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6992     return 1;
6993 
6994   /* Put SSA_NAMEs last.  */
6995   if (TREE_CODE (arg1) == SSA_NAME)
6996     return 0;
6997   if (TREE_CODE (arg0) == SSA_NAME)
6998     return 1;
6999 
7000   /* Put variables last.  */
7001   if (DECL_P (arg1))
7002     return 0;
7003   if (DECL_P (arg0))
7004     return 1;
7005 
7006   return 0;
7007 }
7008 
7009 
7010 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7011    means A >= Y && A != MAX, but in this case we know that
7012    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7013 
7014 static tree
7015 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7016 {
7017   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7018 
7019   if (TREE_CODE (bound) == LT_EXPR)
7020     a = TREE_OPERAND (bound, 0);
7021   else if (TREE_CODE (bound) == GT_EXPR)
7022     a = TREE_OPERAND (bound, 1);
7023   else
7024     return NULL_TREE;
7025 
7026   typea = TREE_TYPE (a);
7027   if (!INTEGRAL_TYPE_P (typea)
7028       && !POINTER_TYPE_P (typea))
7029     return NULL_TREE;
7030 
7031   if (TREE_CODE (ineq) == LT_EXPR)
7032     {
7033       a1 = TREE_OPERAND (ineq, 1);
7034       y = TREE_OPERAND (ineq, 0);
7035     }
7036   else if (TREE_CODE (ineq) == GT_EXPR)
7037     {
7038       a1 = TREE_OPERAND (ineq, 0);
7039       y = TREE_OPERAND (ineq, 1);
7040     }
7041   else
7042     return NULL_TREE;
7043 
7044   if (TREE_TYPE (a1) != typea)
7045     return NULL_TREE;
7046 
7047   if (POINTER_TYPE_P (typea))
7048     {
7049       /* Convert the pointers to integers before taking the difference.  */
7050       tree ta = fold_convert_loc (loc, ssizetype, a);
7051       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7052       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7053     }
7054   else
7055     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7056 
7057   if (!diff || !integer_onep (diff))
7058     return NULL_TREE;
7059 
7060   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7061 }
7062 
7063 /* Fold a sum or difference of at least one multiplication.
7064    Returns the folded tree or NULL if no simplification could be made.  */
7065 
7066 static tree
7067 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7068 			  tree arg0, tree arg1)
7069 {
7070   tree arg00, arg01, arg10, arg11;
7071   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7072 
7073   /* (A * C) +- (B * C) -> (A+-B) * C.
7074      (A * C) +- A -> A * (C+-1).
7075      We are most concerned about the case where C is a constant,
7076      but other combinations show up during loop reduction.  Since
7077      it is not difficult, try all four possibilities.  */
7078 
7079   if (TREE_CODE (arg0) == MULT_EXPR)
7080     {
7081       arg00 = TREE_OPERAND (arg0, 0);
7082       arg01 = TREE_OPERAND (arg0, 1);
7083     }
7084   else if (TREE_CODE (arg0) == INTEGER_CST)
7085     {
7086       arg00 = build_one_cst (type);
7087       arg01 = arg0;
7088     }
7089   else
7090     {
7091       /* We cannot generate constant 1 for fract.  */
7092       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7093 	return NULL_TREE;
7094       arg00 = arg0;
7095       arg01 = build_one_cst (type);
7096     }
7097   if (TREE_CODE (arg1) == MULT_EXPR)
7098     {
7099       arg10 = TREE_OPERAND (arg1, 0);
7100       arg11 = TREE_OPERAND (arg1, 1);
7101     }
7102   else if (TREE_CODE (arg1) == INTEGER_CST)
7103     {
7104       arg10 = build_one_cst (type);
7105       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7106 	 the purpose of this canonicalization.  */
7107       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7108 	  && negate_expr_p (arg1)
7109 	  && code == PLUS_EXPR)
7110 	{
7111 	  arg11 = negate_expr (arg1);
7112 	  code = MINUS_EXPR;
7113 	}
7114       else
7115 	arg11 = arg1;
7116     }
7117   else
7118     {
7119       /* We cannot generate constant 1 for fract.  */
7120       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7121 	return NULL_TREE;
7122       arg10 = arg1;
7123       arg11 = build_one_cst (type);
7124     }
7125   same = NULL_TREE;
7126 
7127   /* Prefer factoring a common non-constant.  */
7128   if (operand_equal_p (arg00, arg10, 0))
7129     same = arg00, alt0 = arg01, alt1 = arg11;
7130   else if (operand_equal_p (arg01, arg11, 0))
7131     same = arg01, alt0 = arg00, alt1 = arg10;
7132   else if (operand_equal_p (arg00, arg11, 0))
7133     same = arg00, alt0 = arg01, alt1 = arg10;
7134   else if (operand_equal_p (arg01, arg10, 0))
7135     same = arg01, alt0 = arg00, alt1 = arg11;
7136 
7137   /* No identical multiplicands; see if we can find a common
7138      power-of-two factor in non-power-of-two multiplies.  This
7139      can help in multi-dimensional array access.  */
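  /* For example, i * 12 + j * 4 is rewritten as (i * 3 + j) * 4.  */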
7140   else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7141     {
7142       HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7143       HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7144       HOST_WIDE_INT tmp;
7145       bool swap = false;
7146       tree maybe_same;
7147 
7148       /* Move min of absolute values to int11.  */
7149       if (absu_hwi (int01) < absu_hwi (int11))
7150         {
7151 	  tmp = int01, int01 = int11, int11 = tmp;
7152 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7153 	  maybe_same = arg01;
7154 	  swap = true;
7155 	}
7156       else
7157 	maybe_same = arg11;
7158 
7159       const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7160       if (factor > 1
7161 	  && pow2p_hwi (factor)
7162 	  && (int01 & (factor - 1)) == 0
7163 	  /* The remainder should not be a constant, otherwise we
7164 	     would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7165 	     would increase the number of multiplications needed.  */
7166 	  && TREE_CODE (arg10) != INTEGER_CST)
7167         {
7168 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7169 			      build_int_cst (TREE_TYPE (arg00),
7170 					     int01 / int11));
7171 	  alt1 = arg10;
7172 	  same = maybe_same;
7173 	  if (swap)
7174 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7175 	}
7176     }
7177 
7178   if (!same)
7179     return NULL_TREE;
7180 
7181   if (! ANY_INTEGRAL_TYPE_P (type)
7182       || TYPE_OVERFLOW_WRAPS (type)
7183       /* We are neither factoring zero nor minus one.  */
7184       || TREE_CODE (same) == INTEGER_CST)
7185     return fold_build2_loc (loc, MULT_EXPR, type,
7186 			fold_build2_loc (loc, code, type,
7187 				     fold_convert_loc (loc, type, alt0),
7188 				     fold_convert_loc (loc, type, alt1)),
7189 			fold_convert_loc (loc, type, same));
7190 
7191   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7192      same may be minus one and thus the multiplication may overflow.  Perform
7193      the sum operation in an unsigned type.  */
7194   tree utype = unsigned_type_for (type);
7195   tree tem = fold_build2_loc (loc, code, utype,
7196 			      fold_convert_loc (loc, utype, alt0),
7197 			      fold_convert_loc (loc, utype, alt1));
7198   /* If the sum evaluated to a constant that is not -INF, the
7199      multiplication cannot overflow.  */
7200   if (TREE_CODE (tem) == INTEGER_CST
7201       && (wi::to_wide (tem)
7202 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7203     return fold_build2_loc (loc, MULT_EXPR, type,
7204 			    fold_convert (type, tem), same);
7205 
7206   /* Do not resort to unsigned multiplication because
7207      we lose the no-overflow property of the expression.  */
7208   return NULL_TREE;
7209 }
7210 
7211 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7212    specified by EXPR into the buffer PTR of length LEN bytes.
7213    Return the number of bytes placed in the buffer, or zero
7214    upon failure.  */
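/* For example, on a little-endian target the 32-bit INTEGER_CST
   0x01020304 is encoded as the bytes 04 03 02 01.  */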
7215 
7216 static int
7217 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7218 {
7219   tree type = TREE_TYPE (expr);
7220   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7221   int byte, offset, word, words;
7222   unsigned char value;
7223 
7224   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7225     return 0;
7226   if (off == -1)
7227     off = 0;
7228 
7229   if (ptr == NULL)
7230     /* Dry run.  */
7231     return MIN (len, total_bytes - off);
7232 
7233   words = total_bytes / UNITS_PER_WORD;
7234 
7235   for (byte = 0; byte < total_bytes; byte++)
7236     {
7237       int bitpos = byte * BITS_PER_UNIT;
7238       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7239 	 number of bytes.  */
7240       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7241 
7242       if (total_bytes > UNITS_PER_WORD)
7243 	{
7244 	  word = byte / UNITS_PER_WORD;
7245 	  if (WORDS_BIG_ENDIAN)
7246 	    word = (words - 1) - word;
7247 	  offset = word * UNITS_PER_WORD;
7248 	  if (BYTES_BIG_ENDIAN)
7249 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7250 	  else
7251 	    offset += byte % UNITS_PER_WORD;
7252 	}
7253       else
7254 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7255       if (offset >= off && offset - off < len)
7256 	ptr[offset - off] = value;
7257     }
7258   return MIN (len, total_bytes - off);
7259 }
7260 
7261 
7262 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7263    specified by EXPR into the buffer PTR of length LEN bytes.
7264    Return the number of bytes placed in the buffer, or zero
7265    upon failure.  */
7266 
7267 static int
7268 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7269 {
7270   tree type = TREE_TYPE (expr);
7271   scalar_mode mode = SCALAR_TYPE_MODE (type);
7272   int total_bytes = GET_MODE_SIZE (mode);
7273   FIXED_VALUE_TYPE value;
7274   tree i_value, i_type;
7275 
7276   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7277     return 0;
7278 
7279   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7280 
7281   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7282     return 0;
7283 
7284   value = TREE_FIXED_CST (expr);
7285   i_value = double_int_to_tree (i_type, value.data);
7286 
7287   return native_encode_int (i_value, ptr, len, off);
7288 }
7289 
7290 
7291 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7292    specified by EXPR into the buffer PTR of length LEN bytes.
7293    Return the number of bytes placed in the buffer, or zero
7294    upon failure.  */
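/* For example, on a little-endian target with IEEE single-precision
   floats, the SFmode REAL_CST 1.0 is encoded as the bytes 00 00 80 3f.  */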
7295 
7296 static int
7297 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7298 {
7299   tree type = TREE_TYPE (expr);
7300   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7301   int byte, offset, word, words, bitpos;
7302   unsigned char value;
7303 
7304   /* There are always 32 bits in each long, no matter the size of
7305      the host's long.  We handle floating point representations with
7306      up to 192 bits.  */
7307   long tmp[6];
7308 
7309   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7310     return 0;
7311   if (off == -1)
7312     off = 0;
7313 
7314   if (ptr == NULL)
7315     /* Dry run.  */
7316     return MIN (len, total_bytes - off);
7317 
7318   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7319 
7320   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7321 
7322   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7323        bitpos += BITS_PER_UNIT)
7324     {
7325       byte = (bitpos / BITS_PER_UNIT) & 3;
7326       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7327 
7328       if (UNITS_PER_WORD < 4)
7329 	{
7330 	  word = byte / UNITS_PER_WORD;
7331 	  if (WORDS_BIG_ENDIAN)
7332 	    word = (words - 1) - word;
7333 	  offset = word * UNITS_PER_WORD;
7334 	  if (BYTES_BIG_ENDIAN)
7335 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7336 	  else
7337 	    offset += byte % UNITS_PER_WORD;
7338 	}
7339       else
7340 	{
7341 	  offset = byte;
7342 	  if (BYTES_BIG_ENDIAN)
7343 	    {
7344 	      /* Reverse bytes within each long, or within the entire float
7345 		 if it's smaller than a long (for HFmode).  */
7346 	      offset = MIN (3, total_bytes - 1) - offset;
7347 	      gcc_assert (offset >= 0);
7348 	    }
7349 	}
7350       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7351       if (offset >= off
7352 	  && offset - off < len)
7353 	ptr[offset - off] = value;
7354     }
7355   return MIN (len, total_bytes - off);
7356 }
7357 
7358 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7359    specified by EXPR into the buffer PTR of length LEN bytes.
7360    Return the number of bytes placed in the buffer, or zero
7361    upon failure.  */
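/* The real part is encoded into the first half of the buffer and the
   imaginary part into the second half.  */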
7362 
7363 static int
7364 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7365 {
7366   int rsize, isize;
7367   tree part;
7368 
7369   part = TREE_REALPART (expr);
7370   rsize = native_encode_expr (part, ptr, len, off);
7371   if (off == -1 && rsize == 0)
7372     return 0;
7373   part = TREE_IMAGPART (expr);
7374   if (off != -1)
7375     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7376   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7377 			      len - rsize, off);
7378   if (off == -1 && isize != rsize)
7379     return 0;
7380   return rsize + isize;
7381 }
7382 
7383 
7384 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7385    specified by EXPR into the buffer PTR of length LEN bytes.
7386    Return the number of bytes placed in the buffer, or zero
7387    upon failure.  */
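/* The elements are encoded one after another in target layout order,
   each taking the size of the vector's element mode.  */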
7388 
7389 static int
7390 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7391 {
7392   unsigned HOST_WIDE_INT i, count;
7393   int size, offset;
7394   tree itype, elem;
7395 
7396   offset = 0;
7397   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7398     return 0;
7399   itype = TREE_TYPE (TREE_TYPE (expr));
7400   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7401   for (i = 0; i < count; i++)
7402     {
7403       if (off >= size)
7404 	{
7405 	  off -= size;
7406 	  continue;
7407 	}
7408       elem = VECTOR_CST_ELT (expr, i);
7409       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7410 				    len - offset, off);
7411       if ((off == -1 && res != size) || res == 0)
7412 	return 0;
7413       offset += res;
7414       if (offset >= len)
7415 	return (off == -1 && i < count - 1) ? 0 : offset;
7416       if (off != -1)
7417 	off = 0;
7418     }
7419   return offset;
7420 }
7421 
7422 
7423 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7424    specified by EXPR into the buffer PTR of length LEN bytes.
7425    Return the number of bytes placed in the buffer, or zero
7426    upon failure.  */
7427 
7428 static int
7429 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7430 {
7431   tree type = TREE_TYPE (expr);
7432 
7433   /* Wide-char strings are encoded in target byte order, so encoding
7434      them natively is trivial.  */
7435   if (BITS_PER_UNIT != CHAR_BIT
7436       || TREE_CODE (type) != ARRAY_TYPE
7437       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7438       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7439     return 0;
7440 
7441   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7442   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7443     return 0;
7444   if (off == -1)
7445     off = 0;
7446   if (ptr == NULL)
7447     /* Dry run.  */;
7448   else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7449     {
7450       int written = 0;
7451       if (off < TREE_STRING_LENGTH (expr))
7452 	{
7453 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7454 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7455 	}
7456       memset (ptr + written, 0,
7457 	      MIN (total_bytes - written, len - written));
7458     }
7459   else
7460     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7461   return MIN (total_bytes - off, len);
7462 }
7463 
7464 
7465 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7466    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7467    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7468    anything, just do a dry run.  If OFF is not -1 then start
7469    the encoding at byte offset OFF and encode at most LEN bytes.
7470    Return the number of bytes placed in the buffer, or zero upon failure.  */
7471 
7472 int
7473 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7474 {
7475   /* We don't support starting at a negative offset, and -1 is special.  */
7476   if (off < -1)
7477     return 0;
7478 
7479   switch (TREE_CODE (expr))
7480     {
7481     case INTEGER_CST:
7482       return native_encode_int (expr, ptr, len, off);
7483 
7484     case REAL_CST:
7485       return native_encode_real (expr, ptr, len, off);
7486 
7487     case FIXED_CST:
7488       return native_encode_fixed (expr, ptr, len, off);
7489 
7490     case COMPLEX_CST:
7491       return native_encode_complex (expr, ptr, len, off);
7492 
7493     case VECTOR_CST:
7494       return native_encode_vector (expr, ptr, len, off);
7495 
7496     case STRING_CST:
7497       return native_encode_string (expr, ptr, len, off);
7498 
7499     default:
7500       return 0;
7501     }
7502 }
7503 
7504 
7505 /* Subroutine of native_interpret_expr.  Interpret the contents of
7506    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7507    If the buffer cannot be interpreted, return NULL_TREE.  */
7508 
7509 static tree
7510 native_interpret_int (tree type, const unsigned char *ptr, int len)
7511 {
7512   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7513 
7514   if (total_bytes > len
7515       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7516     return NULL_TREE;
7517 
7518   wide_int result = wi::from_buffer (ptr, total_bytes);
7519 
7520   return wide_int_to_tree (type, result);
7521 }
7522 
7523 
7524 /* Subroutine of native_interpret_expr.  Interpret the contents of
7525    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7526    If the buffer cannot be interpreted, return NULL_TREE.  */
7527 
7528 static tree
7529 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7530 {
7531   scalar_mode mode = SCALAR_TYPE_MODE (type);
7532   int total_bytes = GET_MODE_SIZE (mode);
7533   double_int result;
7534   FIXED_VALUE_TYPE fixed_value;
7535 
7536   if (total_bytes > len
7537       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7538     return NULL_TREE;
7539 
7540   result = double_int::from_buffer (ptr, total_bytes);
7541   fixed_value = fixed_from_double_int (result, mode);
7542 
7543   return build_fixed (type, fixed_value);
7544 }
7545 
7546 
7547 /* Subroutine of native_interpret_expr.  Interpret the contents of
7548    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7549    If the buffer cannot be interpreted, return NULL_TREE.  */
7550 
7551 static tree
7552 native_interpret_real (tree type, const unsigned char *ptr, int len)
7553 {
7554   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7555   int total_bytes = GET_MODE_SIZE (mode);
7556   unsigned char value;
7557   /* There are always 32 bits in each long, no matter the size of
7558      the host's long.  We handle floating point representations with
7559      up to 192 bits.  */
7560   REAL_VALUE_TYPE r;
7561   long tmp[6];
7562 
7563   if (total_bytes > len || total_bytes > 24)
7564     return NULL_TREE;
7565   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7566 
7567   memset (tmp, 0, sizeof (tmp));
7568   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7569        bitpos += BITS_PER_UNIT)
7570     {
7571       /* Both OFFSET and BYTE index within a long;
7572 	 bitpos indexes the whole float.  */
7573       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7574       if (UNITS_PER_WORD < 4)
7575 	{
7576 	  int word = byte / UNITS_PER_WORD;
7577 	  if (WORDS_BIG_ENDIAN)
7578 	    word = (words - 1) - word;
7579 	  offset = word * UNITS_PER_WORD;
7580 	  if (BYTES_BIG_ENDIAN)
7581 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7582 	  else
7583 	    offset += byte % UNITS_PER_WORD;
7584 	}
7585       else
7586 	{
7587 	  offset = byte;
7588 	  if (BYTES_BIG_ENDIAN)
7589 	    {
7590 	      /* Reverse bytes within each long, or within the entire float
7591 		 if it's smaller than a long (for HFmode).  */
7592 	      offset = MIN (3, total_bytes - 1) - offset;
7593 	      gcc_assert (offset >= 0);
7594 	    }
7595 	}
7596       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7597 
7598       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7599     }
7600 
7601   real_from_target (&r, tmp, mode);
7602   return build_real (type, r);
7603 }
7604 
7605 
7606 /* Subroutine of native_interpret_expr.  Interpret the contents of
7607    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7608    If the buffer cannot be interpreted, return NULL_TREE.  */
7609 
7610 static tree
7611 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7612 {
7613   tree etype, rpart, ipart;
7614   int size;
7615 
7616   etype = TREE_TYPE (type);
7617   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7618   if (size * 2 > len)
7619     return NULL_TREE;
7620   rpart = native_interpret_expr (etype, ptr, size);
7621   if (!rpart)
7622     return NULL_TREE;
7623   ipart = native_interpret_expr (etype, ptr+size, size);
7624   if (!ipart)
7625     return NULL_TREE;
7626   return build_complex (type, rpart, ipart);
7627 }
7628 
7629 
7630 /* Subroutine of native_interpret_expr.  Interpret the contents of
7631    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7632    If the buffer cannot be interpreted, return NULL_TREE.  */
7633 
7634 static tree
7635 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7636 {
7637   tree etype, elem;
7638   unsigned int i, size;
7639   unsigned HOST_WIDE_INT count;
7640 
7641   etype = TREE_TYPE (type);
7642   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7643   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7644       || size * count > len)
7645     return NULL_TREE;
7646 
7647   tree_vector_builder elements (type, count, 1);
7648   for (i = 0; i < count; ++i)
7649     {
7650       elem = native_interpret_expr (etype, ptr+(i*size), size);
7651       if (!elem)
7652 	return NULL_TREE;
7653       elements.quick_push (elem);
7654     }
7655   return elements.build ();
7656 }
7657 
7658 
7659 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7660    the buffer PTR of length LEN as a constant of type TYPE.  For
7661    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7662    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7663    return NULL_TREE.  */
7664 
7665 tree
7666 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7667 {
7668   switch (TREE_CODE (type))
7669     {
7670     case INTEGER_TYPE:
7671     case ENUMERAL_TYPE:
7672     case BOOLEAN_TYPE:
7673     case POINTER_TYPE:
7674     case REFERENCE_TYPE:
7675       return native_interpret_int (type, ptr, len);
7676 
7677     case REAL_TYPE:
7678       return native_interpret_real (type, ptr, len);
7679 
7680     case FIXED_POINT_TYPE:
7681       return native_interpret_fixed (type, ptr, len);
7682 
7683     case COMPLEX_TYPE:
7684       return native_interpret_complex (type, ptr, len);
7685 
7686     case VECTOR_TYPE:
7687       return native_interpret_vector (type, ptr, len);
7688 
7689     default:
7690       return NULL_TREE;
7691     }
7692 }
7693 
7694 /* Returns true if we can interpret the contents of a native encoding
7695    as TYPE.  */
7696 
7697 static bool
7698 can_native_interpret_type_p (tree type)
7699 {
7700   switch (TREE_CODE (type))
7701     {
7702     case INTEGER_TYPE:
7703     case ENUMERAL_TYPE:
7704     case BOOLEAN_TYPE:
7705     case POINTER_TYPE:
7706     case REFERENCE_TYPE:
7707     case FIXED_POINT_TYPE:
7708     case REAL_TYPE:
7709     case COMPLEX_TYPE:
7710     case VECTOR_TYPE:
7711       return true;
7712     default:
7713       return false;
7714     }
7715 }
7716 
7717 
7718 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7719    TYPE at compile-time.  If we're unable to perform the conversion
7720    return NULL_TREE.  */
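/* For example, on a target with IEEE single-precision floats, a
   VIEW_CONVERT_EXPR of the 32-bit constant 0x3f800000 to float
   folds to 1.0.  */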
7721 
7722 static tree
7723 fold_view_convert_expr (tree type, tree expr)
7724 {
7725   /* We support up to 512-bit values (for V8DFmode).  */
7726   unsigned char buffer[64];
7727   int len;
7728 
7729   /* Check that the host and target are sane.  */
7730   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7731     return NULL_TREE;
7732 
7733   len = native_encode_expr (expr, buffer, sizeof (buffer));
7734   if (len == 0)
7735     return NULL_TREE;
7736 
7737   return native_interpret_expr (type, buffer, len);
7738 }
7739 
7740 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7741    to avoid confusing the gimplify process.  */
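/* For example, &*P folds back to P, as does the address of a
   MEM_REF of P with a zero offset.  */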
7742 
7743 tree
7744 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7745 {
7746   /* The size of the object is not relevant when talking about its address.  */
7747   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7748     t = TREE_OPERAND (t, 0);
7749 
7750   if (TREE_CODE (t) == INDIRECT_REF)
7751     {
7752       t = TREE_OPERAND (t, 0);
7753 
7754       if (TREE_TYPE (t) != ptrtype)
7755 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7756     }
7757   else if (TREE_CODE (t) == MEM_REF
7758 	   && integer_zerop (TREE_OPERAND (t, 1)))
7759     return TREE_OPERAND (t, 0);
7760   else if (TREE_CODE (t) == MEM_REF
7761 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7762     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7763 			TREE_OPERAND (t, 0),
7764 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7765   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7766     {
7767       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7768 
7769       if (TREE_TYPE (t) != ptrtype)
7770 	t = fold_convert_loc (loc, ptrtype, t);
7771     }
7772   else
7773     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7774 
7775   return t;
7776 }
7777 
7778 /* Build an expression for the address of T.  */
7779 
7780 tree
7781 build_fold_addr_expr_loc (location_t loc, tree t)
7782 {
7783   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7784 
7785   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7786 }
7787 
7788 /* Fold a unary expression of code CODE and type TYPE with operand
7789    OP0.  Return the folded expression if folding is successful.
7790    Otherwise, return NULL_TREE.  */
7791 
7792 tree
7793 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7794 {
7795   tree tem;
7796   tree arg0;
7797   enum tree_code_class kind = TREE_CODE_CLASS (code);
7798 
7799   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7800 	      && TREE_CODE_LENGTH (code) == 1);
7801 
7802   arg0 = op0;
7803   if (arg0)
7804     {
7805       if (CONVERT_EXPR_CODE_P (code)
7806 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7807 	{
7808 	  /* Don't use STRIP_NOPS, because signedness of argument type
7809 	     matters.  */
7810 	  STRIP_SIGN_NOPS (arg0);
7811 	}
7812       else
7813 	{
7814 	  /* Strip any conversions that don't change the mode.  This
7815 	     is safe for every expression, except for a comparison
7816 	     expression because its signedness is derived from its
7817 	     operands.
7818 
7819 	     Note that this is done as an internal manipulation within
7820 	     the constant folder, in order to find the simplest
7821 	     representation of the arguments so that their form can be
7822 	     studied.  In any case, the appropriate type conversions
7823 	     should be put back in the tree that will get out of the
7824 	     constant folder.  */
7825 	  STRIP_NOPS (arg0);
7826 	}
7827 
7828       if (CONSTANT_CLASS_P (arg0))
7829 	{
7830 	  tree tem = const_unop (code, type, arg0);
7831 	  if (tem)
7832 	    {
7833 	      if (TREE_TYPE (tem) != type)
7834 		tem = fold_convert_loc (loc, type, tem);
7835 	      return tem;
7836 	    }
7837 	}
7838     }
7839 
7840   tem = generic_simplify (loc, code, type, op0);
7841   if (tem)
7842     return tem;
7843 
7844   if (TREE_CODE_CLASS (code) == tcc_unary)
7845     {
7846       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7847 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7848 		       fold_build1_loc (loc, code, type,
7849 				    fold_convert_loc (loc, TREE_TYPE (op0),
7850 						      TREE_OPERAND (arg0, 1))));
7851       else if (TREE_CODE (arg0) == COND_EXPR)
7852 	{
7853 	  tree arg01 = TREE_OPERAND (arg0, 1);
7854 	  tree arg02 = TREE_OPERAND (arg0, 2);
7855 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7856 	    arg01 = fold_build1_loc (loc, code, type,
7857 				 fold_convert_loc (loc,
7858 						   TREE_TYPE (op0), arg01));
7859 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7860 	    arg02 = fold_build1_loc (loc, code, type,
7861 				 fold_convert_loc (loc,
7862 						   TREE_TYPE (op0), arg02));
7863 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7864 			     arg01, arg02);
7865 
7866 	  /* If this was a conversion, and all we did was to move it
7867 	     inside the COND_EXPR, bring it back out.  But leave it if
7868 	     it is a conversion from integer to integer and the
7869 	     result precision is no wider than a word since such a
7870 	     conversion is cheap and may be optimized away by combine,
7871 	     while it couldn't if it were outside the COND_EXPR.  Then return
7872 	     so we don't get into an infinite recursion loop taking the
7873 	     conversion out and then back in.  */
7874 
7875 	  if ((CONVERT_EXPR_CODE_P (code)
7876 	       || code == NON_LVALUE_EXPR)
7877 	      && TREE_CODE (tem) == COND_EXPR
7878 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7879 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7880 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7881 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7882 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7883 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7884 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7885 		     && (INTEGRAL_TYPE_P
7886 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7887 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7888 		  || flag_syntax_only))
7889 	    tem = build1_loc (loc, code, type,
7890 			      build3 (COND_EXPR,
7891 				      TREE_TYPE (TREE_OPERAND
7892 						 (TREE_OPERAND (tem, 1), 0)),
7893 				      TREE_OPERAND (tem, 0),
7894 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7895 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7896 						    0)));
7897 	  return tem;
7898 	}
7899    }
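  /* For instance, with the distribution above, a cast over a comma
     expression such as (long) (tmp = f (), tmp) becomes
     (tmp = f (), (long) tmp), and (long) (x == 0 ? a : b) becomes
     x == 0 ? (long) a : (long) b, so that each arm can be folded
     further.  */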
7900 
7901   switch (code)
7902     {
7903     case NON_LVALUE_EXPR:
7904       if (!maybe_lvalue_p (op0))
7905 	return fold_convert_loc (loc, type, op0);
7906       return NULL_TREE;
7907 
7908     CASE_CONVERT:
7909     case FLOAT_EXPR:
7910     case FIX_TRUNC_EXPR:
7911       if (COMPARISON_CLASS_P (op0))
7912 	{
7913 	  /* If we have (type) (a CMP b) and type is an integral type, return
7914 	     a new expression involving the new type.  Canonicalize
7915 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7916 	     non-integral type.
7917 	     Do not fold the result, as that would not simplify further;
7918 	     also, folding again results in recursion.  */
7919 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7920 	    return build2_loc (loc, TREE_CODE (op0), type,
7921 			       TREE_OPERAND (op0, 0),
7922 			       TREE_OPERAND (op0, 1));
7923 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7924 		   && TREE_CODE (type) != VECTOR_TYPE)
7925 	    return build3_loc (loc, COND_EXPR, type, op0,
7926 			       constant_boolean_node (true, type),
7927 			       constant_boolean_node (false, type));
7928 	}
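      /* For example, (_Bool) (a < b) becomes the comparison a < b
	 computed directly in boolean type, while (float) (a < b) is
	 canonicalized to a < b ? 1.0f : 0.0f.  */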
7929 
7930       /* Handle (T *)&A.B.C for A being of type T and B and C
7931 	 living at offset zero.  This occurs frequently in
7932 	 C++ upcasting and then accessing the base.  */
7933       if (TREE_CODE (op0) == ADDR_EXPR
7934 	  && POINTER_TYPE_P (type)
7935 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7936         {
7937 	  poly_int64 bitsize, bitpos;
7938 	  tree offset;
7939 	  machine_mode mode;
7940 	  int unsignedp, reversep, volatilep;
7941 	  tree base
7942 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7943 				   &offset, &mode, &unsignedp, &reversep,
7944 				   &volatilep);
7945 	  /* If the reference was to a (constant) zero offset, we can use
7946 	     the address of the base if it has the same base type
7947 	     as the result type and the pointer type is unqualified.  */
7948 	  if (!offset
7949 	      && known_eq (bitpos, 0)
7950 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7951 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7952 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7953 	    return fold_convert_loc (loc, type,
7954 				     build_fold_addr_expr_loc (loc, base));
7955         }
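      /* For instance, given struct A { struct B b; } a;, the member b
	 lives at offset zero, so (struct A *) &a.b folds to &a
	 (assuming an unqualified pointer type).  */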
7956 
7957       if (TREE_CODE (op0) == MODIFY_EXPR
7958 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7959 	  /* Detect assigning a bitfield.  */
7960 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7961 	       && DECL_BIT_FIELD
7962 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7963 	{
7964 	  /* Don't leave an assignment inside a conversion
7965 	     unless assigning a bitfield.  */
7966 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7967 	  /* First do the assignment, then return converted constant.  */
7968 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7969 	  TREE_NO_WARNING (tem) = 1;
7970 	  TREE_USED (tem) = 1;
7971 	  return tem;
7972 	}
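      /* For example, (long) (x = 5) is rewritten as the pair
	 (x = 5, (long) 5): the assignment still happens, but the value
	 of the whole expression becomes a converted constant.  */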
7973 
7974       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7975 	 constant (if x has signed type, the sign bit cannot be set
7976 	 in c).  This folds extension into the BIT_AND_EXPR.
7977 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7978 	 very likely don't have maximal range for their precision and this
7979 	 transformation effectively doesn't preserve non-maximal ranges.  */
7980       if (TREE_CODE (type) == INTEGER_TYPE
7981 	  && TREE_CODE (op0) == BIT_AND_EXPR
7982 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7983 	{
7984 	  tree and_expr = op0;
7985 	  tree and0 = TREE_OPERAND (and_expr, 0);
7986 	  tree and1 = TREE_OPERAND (and_expr, 1);
7987 	  int change = 0;
7988 
7989 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7990 	      || (TYPE_PRECISION (type)
7991 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7992 	    change = 1;
7993 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7994 		   <= HOST_BITS_PER_WIDE_INT
7995 		   && tree_fits_uhwi_p (and1))
7996 	    {
7997 	      unsigned HOST_WIDE_INT cst;
7998 
7999 	      cst = tree_to_uhwi (and1);
8000 	      cst &= HOST_WIDE_INT_M1U
8001 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8002 	      change = (cst == 0);
8003 	      if (change
8004 		  && !flag_syntax_only
8005 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8006 		      == ZERO_EXTEND))
8007 		{
8008 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
8009 		  and0 = fold_convert_loc (loc, uns, and0);
8010 		  and1 = fold_convert_loc (loc, uns, and1);
8011 		}
8012 	    }
8013 	  if (change)
8014 	    {
8015 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
8016 				    TREE_OVERFLOW (and1));
8017 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8018 				      fold_convert_loc (loc, type, and0), tem);
8019 	    }
8020 	}
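      /* For instance, assuming 16-bit short and 32-bit int,
	   (int) (s & 0x7f)  ->  (int) s & 0x7f
	 is valid because 0x7f leaves the sign bit of short clear, so
	 masking before or after the widening conversion agrees.  */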
8021 
8022       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8023 	 cast (T1)X will fold away.  We assume that this happens when X itself
8024 	 is a cast.  */
8025       if (POINTER_TYPE_P (type)
8026 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8027 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8028 	{
8029 	  tree arg00 = TREE_OPERAND (arg0, 0);
8030 	  tree arg01 = TREE_OPERAND (arg0, 1);
8031 
8032 	  return fold_build_pointer_plus_loc
8033 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
8034 	}
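      /* For example, (void *) ((char *) p p+ n) is rewritten as
	 (void *) p p+ n, since the stacked cast (void *) (char *) p
	 folds to (void *) p.  */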
8035 
8036       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8037 	 of the same precision, and X is an integer type not narrower than
8038 	 of the same precision, and X has an integer type not narrower
8039 	 than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8040 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8041 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8042 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8043 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8044 	{
8045 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8046 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8047 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8048 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8049 				fold_convert_loc (loc, type, tem));
8050 	}
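      /* For instance, with 32-bit int and unsigned int,
	   (unsigned int) ~(int) x  ->  ~(unsigned int) x
	 provided the type of x is at least as wide as the casts, so
	 that no extension is involved.  */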
8051 
8052       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8053 	 type of X and Y (integer types only).  */
8054       if (INTEGRAL_TYPE_P (type)
8055 	  && TREE_CODE (op0) == MULT_EXPR
8056 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8057 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8058 	{
8059 	  /* Be careful not to introduce new overflows.  */
8060 	  tree mult_type;
8061           if (TYPE_OVERFLOW_WRAPS (type))
8062 	    mult_type = type;
8063 	  else
8064 	    mult_type = unsigned_type_for (type);
8065 
8066 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8067 	    {
8068 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8069 				 fold_convert_loc (loc, mult_type,
8070 						   TREE_OPERAND (op0, 0)),
8071 				 fold_convert_loc (loc, mult_type,
8072 						   TREE_OPERAND (op0, 1)));
8073 	      return fold_convert_loc (loc, type, tem);
8074 	    }
8075 	}
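      /* For example, (short) (i * j) with 32-bit int operands may be
	 narrowed to a 16-bit multiplication; when short does not wrap,
	 the product is formed in unsigned short, as in
	   (short) ((unsigned short) i * (unsigned short) j),
	 so that no new overflow is introduced.  */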
8076 
8077       return NULL_TREE;
8078 
8079     case VIEW_CONVERT_EXPR:
8080       if (TREE_CODE (op0) == MEM_REF)
8081         {
8082 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8083 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8084 	  tem = fold_build2_loc (loc, MEM_REF, type,
8085 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8086 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8087 	  return tem;
8088 	}
8089 
8090       return NULL_TREE;
8091 
8092     case NEGATE_EXPR:
8093       tem = fold_negate_expr (loc, arg0);
8094       if (tem)
8095 	return fold_convert_loc (loc, type, tem);
8096       return NULL_TREE;
8097 
8098     case ABS_EXPR:
8099       /* Convert fabs((double)float) into (double)fabsf(float).  */
8100       if (TREE_CODE (arg0) == NOP_EXPR
8101 	  && TREE_CODE (type) == REAL_TYPE)
8102 	{
8103 	  tree targ0 = strip_float_extensions (arg0);
8104 	  if (targ0 != arg0)
8105 	    return fold_convert_loc (loc, type,
8106 				     fold_build1_loc (loc, ABS_EXPR,
8107 						  TREE_TYPE (targ0),
8108 						  targ0));
8109 	}
8110       return NULL_TREE;
8111 
8112     case BIT_NOT_EXPR:
8113       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8114       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8115 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8116 				    fold_convert_loc (loc, type,
8117 						      TREE_OPERAND (arg0, 0)))))
8118 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8119 				fold_convert_loc (loc, type,
8120 						  TREE_OPERAND (arg0, 1)));
8121       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8122 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8123 			       	     fold_convert_loc (loc, type,
8124 						       TREE_OPERAND (arg0, 1)))))
8125 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8126 			    fold_convert_loc (loc, type,
8127 					      TREE_OPERAND (arg0, 0)), tem);
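      /* A typical instance is ~(x ^ C) for a constant C: ~C folds to
	 a constant, so the whole expression becomes x ^ ~C.  */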
8128 
8129       return NULL_TREE;
8130 
8131     case TRUTH_NOT_EXPR:
8132       /* Note that the operand of this must be an int
8133 	 and its values must be 0 or 1.
8134 	 ("true" is a fixed value, perhaps depending on the language,
8135 	 but we don't yet handle values other than 1 correctly.)  */
8136       tem = fold_truth_not_expr (loc, arg0);
8137       if (!tem)
8138 	return NULL_TREE;
8139       return fold_convert_loc (loc, type, tem);
8140 
8141     case INDIRECT_REF:
8142       /* Fold *&X to X if X is an lvalue.  */
8143       if (TREE_CODE (op0) == ADDR_EXPR)
8144 	{
8145 	  tree op00 = TREE_OPERAND (op0, 0);
8146 	  if ((VAR_P (op00)
8147 	       || TREE_CODE (op00) == PARM_DECL
8148 	       || TREE_CODE (op00) == RESULT_DECL)
8149 	      && !TREE_READONLY (op00))
8150 	    return op00;
8151 	}
8152       return NULL_TREE;
8153 
8154     default:
8155       return NULL_TREE;
8156     } /* switch (code) */
8157 }
8158 
8159 
8160 /* If the operation was a conversion do _not_ mark a resulting constant
8161    with TREE_OVERFLOW if the original constant was not.  These conversions
8162    have implementation defined behavior and retaining the TREE_OVERFLOW
8163    flag here would confuse later passes such as VRP.  */
8164 tree
8165 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8166 				tree type, tree op0)
8167 {
8168   tree res = fold_unary_loc (loc, code, type, op0);
8169   if (res
8170       && TREE_CODE (res) == INTEGER_CST
8171       && TREE_CODE (op0) == INTEGER_CST
8172       && CONVERT_EXPR_CODE_P (code))
8173     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8174 
8175   return res;
8176 }
8177 
8178 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8179    operands OP0 and OP1.  LOC is the location of the resulting expression.
8180    ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8181    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8182    return NULL_TREE.  */
8183 static tree
8184 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8185 		  tree arg0, tree arg1, tree op0, tree op1)
8186 {
8187   tree tem;
8188 
8189   /* We only do these simplifications if we are optimizing.  */
8190   if (!optimize)
8191     return NULL_TREE;
8192 
8193   /* Check for things like (A || B) && (A || C).  We can convert this
8194      to A || (B && C).  Note that either operator can be any of the four
8195      truth and/or operations and the transformation will still be
8196      valid.  Also note that we only care about order for the
8197      ANDIF and ORIF operators.  If B contains side effects, this
8198      might change the truth-value of A.  */
8199   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8200       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8201 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8202 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8203 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8204       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8205     {
8206       tree a00 = TREE_OPERAND (arg0, 0);
8207       tree a01 = TREE_OPERAND (arg0, 1);
8208       tree a10 = TREE_OPERAND (arg1, 0);
8209       tree a11 = TREE_OPERAND (arg1, 1);
8210       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8211 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8212 			 && (code == TRUTH_AND_EXPR
8213 			     || code == TRUTH_OR_EXPR));
8214 
8215       if (operand_equal_p (a00, a10, 0))
8216 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8217 			    fold_build2_loc (loc, code, type, a01, a11));
8218       else if (commutative && operand_equal_p (a00, a11, 0))
8219 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8220 			    fold_build2_loc (loc, code, type, a01, a10));
8221       else if (commutative && operand_equal_p (a01, a10, 0))
8222 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8223 			    fold_build2_loc (loc, code, type, a00, a11));
8224 
8225       /* This case is tricky because we must either have commutative
8226 	 operators or else A10 must not have side-effects.  */
8227 
8228       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8229 	       && operand_equal_p (a01, a11, 0))
8230 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8231 			    fold_build2_loc (loc, code, type, a00, a10),
8232 			    a01);
8233     }
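  /* As a concrete instance, (a || b) && (a || c) with a
     side-effect-free b becomes a || (b && c), saving one evaluation
     of a.  */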
8234 
8235   /* See if we can build a range comparison.  */
8236   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8237     return tem;
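  /* fold_range_test can, for example, turn a pair of comparisons such
     as c >= 'a' && c <= 'z' into a single unsigned range check along
     the lines of (unsigned) (c - 'a') <= 'z' - 'a'.  */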
8238 
8239   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8240       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8241     {
8242       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8243       if (tem)
8244 	return fold_build2_loc (loc, code, type, tem, arg1);
8245     }
8246 
8247   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8248       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8249     {
8250       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8251       if (tem)
8252 	return fold_build2_loc (loc, code, type, arg0, tem);
8253     }
8254 
8255   /* Check for the possibility of merging component references.  If our
8256      lhs is another similar operation, try to merge its rhs with our
8257      rhs.  Then try to merge our lhs and rhs.  */
8258   if (TREE_CODE (arg0) == code
8259       && (tem = fold_truth_andor_1 (loc, code, type,
8260 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
8261     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8262 
8263   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8264     return tem;
8265 
8266   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8267   if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
8268     logical_op_non_short_circuit
8269       = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
8270   if (logical_op_non_short_circuit
8271       && !flag_sanitize_coverage
8272       && (code == TRUTH_AND_EXPR
8273           || code == TRUTH_ANDIF_EXPR
8274           || code == TRUTH_OR_EXPR
8275           || code == TRUTH_ORIF_EXPR))
8276     {
8277       enum tree_code ncode, icode;
8278 
8279       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8280 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8281       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8282 
8283       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8284 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8285 	 We don't want to pack more than two leaves into a non-IF AND/OR
8286 	 expression.  If the tree code of the left-hand operand isn't an
8287 	 AND/OR-IF code and isn't equal to IF-CODE, we don't want to add
8288 	 the right-hand operand.
8289 	 If the inner right-hand side of the left-hand operand has side
8290 	 effects, or isn't simple, then we can't add to it, as otherwise
8291 	 we might destroy the if-sequence.  */
8292       if (TREE_CODE (arg0) == icode
8293 	  && simple_operand_p_2 (arg1)
8294 	  /* Needed for sequence points to handle trappings, and
8295 	     side-effects.  */
8296 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8297 	{
8298 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8299 				 arg1);
8300 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8301 				  tem);
8302 	}
8303 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8304 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8305       else if (TREE_CODE (arg1) == icode
8306 	  && simple_operand_p_2 (arg0)
8307 	  /* Needed for sequence points to handle trappings, and
8308 	     side-effects.  */
8309 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8310 	{
8311 	  tem = fold_build2_loc (loc, ncode, type,
8312 				 arg0, TREE_OPERAND (arg1, 0));
8313 	  return fold_build2_loc (loc, icode, type, tem,
8314 				  TREE_OPERAND (arg1, 1));
8315 	}
8316       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8317 	 into (A OR B).
8318 	 For sequence point consistency, we need to check for trapping,
8319 	 and side-effects.  */
8320       else if (code == icode && simple_operand_p_2 (arg0)
8321                && simple_operand_p_2 (arg1))
8322 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8323     }
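  /* For instance, when both operands are simple and free of side
     effects and traps, a > 0 && b > 0 may be rewritten as the
     non-short-circuit TRUTH_AND_EXPR of the two tests, which
     evaluates both operands unconditionally.  */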
8324 
8325   return NULL_TREE;
8326 }
8327 
8328 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8329    by changing CODE to reduce the magnitude of constants involved in
8330    ARG0 of the comparison.
8331    Returns a canonicalized comparison tree if a simplification was
8332    possible, otherwise returns NULL_TREE.
8333    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8334    valid if signed overflow is undefined.  */
8335 
8336 static tree
8337 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8338 				 tree arg0, tree arg1,
8339 				 bool *strict_overflow_p)
8340 {
8341   enum tree_code code0 = TREE_CODE (arg0);
8342   tree t, cst0 = NULL_TREE;
8343   int sgn0;
8344 
8345   /* Match A +- CST code arg1.  We can change this only if overflow
8346      is undefined.  */
8347   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8348 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8349 	/* In principle pointers also have undefined overflow behavior,
8350 	   but that causes problems elsewhere.  */
8351 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8352 	&& (code0 == MINUS_EXPR
8353 	    || code0 == PLUS_EXPR)
8354 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8355     return NULL_TREE;
8356 
8357   /* Identify the constant in arg0 and its sign.  */
8358   cst0 = TREE_OPERAND (arg0, 1);
8359   sgn0 = tree_int_cst_sgn (cst0);
8360 
8361   /* Overflowed constants and zero will cause problems.  */
8362   if (integer_zerop (cst0)
8363       || TREE_OVERFLOW (cst0))
8364     return NULL_TREE;
8365 
8366   /* See if we can reduce the magnitude of the constant in
8367      arg0 by changing the comparison code.  */
8368   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8369   if (code == LT_EXPR
8370       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8371     code = LE_EXPR;
8372   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8373   else if (code == GT_EXPR
8374 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8375     code = GE_EXPR;
8376   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8377   else if (code == LE_EXPR
8378 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8379     code = LT_EXPR;
8380   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8381   else if (code == GE_EXPR
8382 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8383     code = GT_EXPR;
8384   else
8385     return NULL_TREE;
8386   *strict_overflow_p = true;
8387 
8388   /* Now build the constant reduced in magnitude.  But not if that
8389      would produce one outside of its type's range.  */
8390   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8391       && ((sgn0 == 1
8392 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8393 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8394 	  || (sgn0 == -1
8395 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8396 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8397     return NULL_TREE;
8398 
8399   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8400 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8401   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8402   t = fold_convert (TREE_TYPE (arg1), t);
8403 
8404   return fold_build2_loc (loc, code, type, t, arg1);
8405 }
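/* For example, when signed overflow is undefined, a - 10 < b is
   canonicalized here to a - 9 <= b, reducing the magnitude of the
   constant in the left-hand operand.  */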
8406 
8407 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8408    overflow further.  Try to decrease the magnitude of constants involved
8409    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8410    and put sole constants at the second argument position.
8411    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8412 
8413 static tree
8414 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8415 			       tree arg0, tree arg1)
8416 {
8417   tree t;
8418   bool strict_overflow_p;
8419   const char * const warnmsg = G_("assuming signed overflow does not occur "
8420 				  "when reducing constant in comparison");
8421 
8422   /* Try canonicalization by simplifying arg0.  */
8423   strict_overflow_p = false;
8424   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8425 				       &strict_overflow_p);
8426   if (t)
8427     {
8428       if (strict_overflow_p)
8429 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8430       return t;
8431     }
8432 
8433   /* Try canonicalization by simplifying arg1 using the swapped
8434      comparison.  */
8435   code = swap_tree_comparison (code);
8436   strict_overflow_p = false;
8437   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8438 				       &strict_overflow_p);
8439   if (t && strict_overflow_p)
8440     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8441   return t;
8442 }
8443 
8444 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8445    space.  This is used to avoid issuing overflow warnings for
8446    expressions like &p->x which cannot wrap.  */
8447 
8448 static bool
8449 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8450 {
8451   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8452     return true;
8453 
8454   if (maybe_lt (bitpos, 0))
8455     return true;
8456 
8457   poly_wide_int wi_offset;
8458   int precision = TYPE_PRECISION (TREE_TYPE (base));
8459   if (offset == NULL_TREE)
8460     wi_offset = wi::zero (precision);
8461   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8462     return true;
8463   else
8464     wi_offset = wi::to_poly_wide (offset);
8465 
8466   wi::overflow_type overflow;
8467   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8468 				  precision);
8469   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8470   if (overflow)
8471     return true;
8472 
8473   poly_uint64 total_hwi, size;
8474   if (!total.to_uhwi (&total_hwi)
8475       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8476 			   &size)
8477       || known_eq (size, 0U))
8478     return true;
8479 
8480   if (known_le (total_hwi, size))
8481     return false;
8482 
8483   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8484      array.  */
8485   if (TREE_CODE (base) == ADDR_EXPR
8486       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8487 			  &size)
8488       && maybe_ne (size, 0U)
8489       && known_le (total_hwi, size))
8490     return false;
8491 
8492   return true;
8493 }
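/* For instance, given struct S { int a, b; } *p, the address &p->b
   has a constant byte offset smaller than sizeof (struct S), so the
   check above reports that it cannot wrap.  */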
8494 
8495 /* Return a positive integer when the symbol DECL is known to have
8496    a nonzero address, zero when it's known not to (e.g., it's a weak
8497    symbol), and a negative integer when the symbol is not yet in the
8498    symbol table and so whether or not its address is zero is unknown.
8499    For function-local objects, always return a positive integer.  */
8500 static int
8501 maybe_nonzero_address (tree decl)
8502 {
8503   if (DECL_P (decl) && decl_in_symtab_p (decl))
8504     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8505       return symbol->nonzero_address ();
8506 
8507   /* Function local objects are never NULL.  */
8508   if (DECL_P (decl)
8509       && (DECL_CONTEXT (decl)
8510       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8511       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8512     return 1;
8513 
8514   return -1;
8515 }
8516 
8517 /* Subroutine of fold_binary.  This routine performs all of the
8518    transformations that are common to the equality/inequality
8519    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8520    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8521    fold_binary should call fold_binary.  Fold a comparison with
8522    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8523    the folded comparison or NULL_TREE.  */
8524 
8525 static tree
8526 fold_comparison (location_t loc, enum tree_code code, tree type,
8527 		 tree op0, tree op1)
8528 {
8529   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8530   tree arg0, arg1, tem;
8531 
8532   arg0 = op0;
8533   arg1 = op1;
8534 
8535   STRIP_SIGN_NOPS (arg0);
8536   STRIP_SIGN_NOPS (arg1);
8537 
8538   /* For comparisons of pointers we can decompose it to a compile time
8539      comparison of the base objects and the offsets into the object.
8540      This requires at least one operand being an ADDR_EXPR or a
8541      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8542   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8543       && (TREE_CODE (arg0) == ADDR_EXPR
8544 	  || TREE_CODE (arg1) == ADDR_EXPR
8545 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8546 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8547     {
8548       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8549       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8550       machine_mode mode;
8551       int volatilep, reversep, unsignedp;
8552       bool indirect_base0 = false, indirect_base1 = false;
8553 
8554       /* Get base and offset for the access.  Strip ADDR_EXPR for
8555 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8556 	 off the base object if possible.  indirect_baseN will be true
8557 	 if baseN is not an address but refers to the object itself.  */
8558       base0 = arg0;
8559       if (TREE_CODE (arg0) == ADDR_EXPR)
8560 	{
8561 	  base0
8562 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8563 				   &bitsize, &bitpos0, &offset0, &mode,
8564 				   &unsignedp, &reversep, &volatilep);
8565 	  if (TREE_CODE (base0) == INDIRECT_REF)
8566 	    base0 = TREE_OPERAND (base0, 0);
8567 	  else
8568 	    indirect_base0 = true;
8569 	}
8570       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8571 	{
8572 	  base0 = TREE_OPERAND (arg0, 0);
8573 	  STRIP_SIGN_NOPS (base0);
8574 	  if (TREE_CODE (base0) == ADDR_EXPR)
8575 	    {
8576 	      base0
8577 		= get_inner_reference (TREE_OPERAND (base0, 0),
8578 				       &bitsize, &bitpos0, &offset0, &mode,
8579 				       &unsignedp, &reversep, &volatilep);
8580 	      if (TREE_CODE (base0) == INDIRECT_REF)
8581 		base0 = TREE_OPERAND (base0, 0);
8582 	      else
8583 		indirect_base0 = true;
8584 	    }
8585 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8586 	    offset0 = TREE_OPERAND (arg0, 1);
8587 	  else
8588 	    offset0 = size_binop (PLUS_EXPR, offset0,
8589 				  TREE_OPERAND (arg0, 1));
8590 	  if (poly_int_tree_p (offset0))
8591 	    {
8592 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8593 					      TYPE_PRECISION (sizetype));
8594 	      tem <<= LOG2_BITS_PER_UNIT;
8595 	      tem += bitpos0;
8596 	      if (tem.to_shwi (&bitpos0))
8597 		offset0 = NULL_TREE;
8598 	    }
8599 	}
8600 
8601       base1 = arg1;
8602       if (TREE_CODE (arg1) == ADDR_EXPR)
8603 	{
8604 	  base1
8605 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8606 				   &bitsize, &bitpos1, &offset1, &mode,
8607 				   &unsignedp, &reversep, &volatilep);
8608 	  if (TREE_CODE (base1) == INDIRECT_REF)
8609 	    base1 = TREE_OPERAND (base1, 0);
8610 	  else
8611 	    indirect_base1 = true;
8612 	}
8613       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8614 	{
8615 	  base1 = TREE_OPERAND (arg1, 0);
8616 	  STRIP_SIGN_NOPS (base1);
8617 	  if (TREE_CODE (base1) == ADDR_EXPR)
8618 	    {
8619 	      base1
8620 		= get_inner_reference (TREE_OPERAND (base1, 0),
8621 				       &bitsize, &bitpos1, &offset1, &mode,
8622 				       &unsignedp, &reversep, &volatilep);
8623 	      if (TREE_CODE (base1) == INDIRECT_REF)
8624 		base1 = TREE_OPERAND (base1, 0);
8625 	      else
8626 		indirect_base1 = true;
8627 	    }
8628 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8629 	    offset1 = TREE_OPERAND (arg1, 1);
8630 	  else
8631 	    offset1 = size_binop (PLUS_EXPR, offset1,
8632 				  TREE_OPERAND (arg1, 1));
8633 	  if (poly_int_tree_p (offset1))
8634 	    {
8635 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8636 					      TYPE_PRECISION (sizetype));
8637 	      tem <<= LOG2_BITS_PER_UNIT;
8638 	      tem += bitpos1;
8639 	      if (tem.to_shwi (&bitpos1))
8640 		offset1 = NULL_TREE;
8641 	    }
8642 	}
8643 
8644       /* If we have equivalent bases we might be able to simplify.  */
8645       if (indirect_base0 == indirect_base1
8646 	  && operand_equal_p (base0, base1,
8647 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8648 	{
8649 	  /* We can fold this expression to a constant if the non-constant
8650 	     offset parts are equal.  */
8651 	  if ((offset0 == offset1
8652 	       || (offset0 && offset1
8653 		   && operand_equal_p (offset0, offset1, 0)))
8654 	      && (equality_code
8655 		  || (indirect_base0
8656 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8657 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8658 	    {
8659 	      if (!equality_code
8660 		  && maybe_ne (bitpos0, bitpos1)
8661 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8662 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8663 		fold_overflow_warning (("assuming pointer wraparound does not "
8664 					"occur when comparing P +- C1 with "
8665 					"P +- C2"),
8666 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8667 
8668 	      switch (code)
8669 		{
8670 		case EQ_EXPR:
8671 		  if (known_eq (bitpos0, bitpos1))
8672 		    return constant_boolean_node (true, type);
8673 		  if (known_ne (bitpos0, bitpos1))
8674 		    return constant_boolean_node (false, type);
8675 		  break;
8676 		case NE_EXPR:
8677 		  if (known_ne (bitpos0, bitpos1))
8678 		    return constant_boolean_node (true, type);
8679 		  if (known_eq (bitpos0, bitpos1))
8680 		    return constant_boolean_node (false, type);
8681 		  break;
8682 		case LT_EXPR:
8683 		  if (known_lt (bitpos0, bitpos1))
8684 		    return constant_boolean_node (true, type);
8685 		  if (known_ge (bitpos0, bitpos1))
8686 		    return constant_boolean_node (false, type);
8687 		  break;
8688 		case LE_EXPR:
8689 		  if (known_le (bitpos0, bitpos1))
8690 		    return constant_boolean_node (true, type);
8691 		  if (known_gt (bitpos0, bitpos1))
8692 		    return constant_boolean_node (false, type);
8693 		  break;
8694 		case GE_EXPR:
8695 		  if (known_ge (bitpos0, bitpos1))
8696 		    return constant_boolean_node (true, type);
8697 		  if (known_lt (bitpos0, bitpos1))
8698 		    return constant_boolean_node (false, type);
8699 		  break;
8700 		case GT_EXPR:
8701 		  if (known_gt (bitpos0, bitpos1))
8702 		    return constant_boolean_node (true, type);
8703 		  if (known_le (bitpos0, bitpos1))
8704 		    return constant_boolean_node (false, type);
8705 		  break;
8706 		default:;
8707 		}
8708 	    }
8709 	  /* We can simplify the comparison to a comparison of the variable
8710 	     offset parts if the constant offset parts are equal.
8711 	     Be careful to use signed sizetype here because otherwise we
8712 	     mess with array offsets in the wrong way.  This is possible
8713 	     because pointer arithmetic is restricted to remain within an
8714 	     object and overflow on pointer differences is undefined as of
8715 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8716 	  else if (known_eq (bitpos0, bitpos1)
8717 		   && (equality_code
8718 		       || (indirect_base0
8719 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8720 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8721 	    {
8722 	      /* By converting to signed sizetype we cover middle-end pointer
8723 	         arithmetic which operates on unsigned pointer types of size
8724 	         type size and ARRAY_REF offsets which are properly sign or
8725 	         zero extended from their type in case it is narrower than
8726 	         sizetype.  */
8727 	      if (offset0 == NULL_TREE)
8728 		offset0 = build_int_cst (ssizetype, 0);
8729 	      else
8730 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8731 	      if (offset1 == NULL_TREE)
8732 		offset1 = build_int_cst (ssizetype, 0);
8733 	      else
8734 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8735 
8736 	      if (!equality_code
8737 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8738 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8739 		fold_overflow_warning (("assuming pointer wraparound does not "
8740 					"occur when comparing P +- C1 with "
8741 					"P +- C2"),
8742 				       WARN_STRICT_OVERFLOW_COMPARISON);
8743 
8744 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8745 	    }
8746 	}
8747       /* For equal offsets we can simplify to a comparison of the
8748 	 base addresses.  */
8749       else if (known_eq (bitpos0, bitpos1)
8750 	       && (indirect_base0
8751 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8752 	       && (indirect_base1
8753 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8754 	       && ((offset0 == offset1)
8755 		   || (offset0 && offset1
8756 		       && operand_equal_p (offset0, offset1, 0))))
8757 	{
8758 	  if (indirect_base0)
8759 	    base0 = build_fold_addr_expr_loc (loc, base0);
8760 	  if (indirect_base1)
8761 	    base1 = build_fold_addr_expr_loc (loc, base1);
8762 	  return fold_build2_loc (loc, code, type, base0, base1);
8763 	}
8764       /* Comparison between an ordinary (non-weak) symbol and a null
8765 	 pointer can be eliminated since such symbols must have a
8766 	 non-null address.  In C, relational expressions between pointers
8767 	 to objects and null pointers are undefined.  The results
8768 	 below follow the C++ rules with the additional property that
8769 	 every object pointer compares greater than a null pointer.
8770       */
8771       else if (((DECL_P (base0)
8772 		 && maybe_nonzero_address (base0) > 0
8773 		 /* Avoid folding references to struct members at offset 0 to
8774 		    prevent tests like '&ptr->firstmember == 0' from getting
8775 		    eliminated.  When ptr is null, although the -> expression
8776 		    is strictly speaking invalid, GCC retains it as a matter
8777 		    of QoI.  See PR c/44555. */
8778 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8779 		|| CONSTANT_CLASS_P (base0))
8780 	       && indirect_base0
8781 	       /* The caller guarantees that when one of the arguments is
8782 		  constant (i.e., null in this case) it is second.  */
8783 	       && integer_zerop (arg1))
8784 	{
8785 	  switch (code)
8786 	    {
8787 	    case EQ_EXPR:
8788 	    case LE_EXPR:
8789 	    case LT_EXPR:
8790 	      return constant_boolean_node (false, type);
8791 	    case GE_EXPR:
8792 	    case GT_EXPR:
8793 	    case NE_EXPR:
8794 	      return constant_boolean_node (true, type);
8795 	    default:
8796 	      gcc_unreachable ();
8797 	    }
8798 	}
8799     }
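  /* Two instances of the pointer cases above: given
     struct S { int i; int j; } s;, the test &s.i < &s.j folds to true
     by comparing the member bit positions, and &s.j == 0 folds to
     false because s has a known non-null address.  */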
8800 
8801   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8802      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8803      the resulting offset is smaller in absolute value than the
8804      original one and has the same sign.  */
8805   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8806       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8807       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8808       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8809 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8810       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8811       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8812 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8813     {
8814       tree const1 = TREE_OPERAND (arg0, 1);
8815       tree const2 = TREE_OPERAND (arg1, 1);
8816       tree variable1 = TREE_OPERAND (arg0, 0);
8817       tree variable2 = TREE_OPERAND (arg1, 0);
8818       tree cst;
8819       const char * const warnmsg = G_("assuming signed overflow does not "
8820 				      "occur when combining constants around "
8821 				      "a comparison");
8822 
8823       /* Put the constant on the side where it doesn't overflow and is
8824 	 of lower absolute value and of the same sign as before.  */
8825       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8826 			     ? MINUS_EXPR : PLUS_EXPR,
8827 			     const2, const1);
8828       if (!TREE_OVERFLOW (cst)
8829 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8830 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8831 	{
8832 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8833 	  return fold_build2_loc (loc, code, type,
8834 				  variable1,
8835 				  fold_build2_loc (loc, TREE_CODE (arg1),
8836 						   TREE_TYPE (arg1),
8837 						   variable2, cst));
8838 	}
8839 
8840       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8841 			     ? MINUS_EXPR : PLUS_EXPR,
8842 			     const1, const2);
8843       if (!TREE_OVERFLOW (cst)
8844 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8845 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8846 	{
8847 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8848 	  return fold_build2_loc (loc, code, type,
8849 				  fold_build2_loc (loc, TREE_CODE (arg0),
8850 						   TREE_TYPE (arg0),
8851 						   variable1, cst),
8852 				  variable2);
8853 	}
8854     }
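  /* For example, with signed x and y and undefined overflow,
     x + 2 < y + 5 is rewritten as x < y + 3: the combined constant 3
     is smaller in magnitude than 5 and has the same sign.  */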
8855 
8856   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8857   if (tem)
8858     return tem;
8859 
8860   /* If we are comparing an expression that just has comparisons
8861      of two integer values, arithmetic expressions of those comparisons,
8862      and constants, we can simplify it.  There are only three cases
8863      to check: the two values can either be equal, the first can be
8864      greater, or the second can be greater.  Fold the expression for
8865      those three values.  Since each value must be 0 or 1, we have
8866      eight possibilities, each of which corresponds to the constant 0
8867      or 1 or one of the six possible comparisons.
8868 
8869      This handles common cases like (a > b) == 0 but also handles
8870      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8871      occur in macroized code.  */
8872 
8873   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8874     {
8875       tree cval1 = 0, cval2 = 0;
8876 
8877       if (twoval_comparison_p (arg0, &cval1, &cval2)
8878 	  /* Don't handle degenerate cases here; they should already
8879 	     have been handled anyway.  */
8880 	  && cval1 != 0 && cval2 != 0
8881 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8882 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8883 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8884 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8885 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8886 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8887 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8888 	{
8889 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8890 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8891 
8892 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8893 	     was the same as ARG1.  */
8894 
8895 	  tree high_result
8896 		= fold_build2_loc (loc, code, type,
8897 			       eval_subst (loc, arg0, cval1, maxval,
8898 					   cval2, minval),
8899 			       arg1);
8900 	  tree equal_result
8901 		= fold_build2_loc (loc, code, type,
8902 			       eval_subst (loc, arg0, cval1, maxval,
8903 					   cval2, maxval),
8904 			       arg1);
8905 	  tree low_result
8906 		= fold_build2_loc (loc, code, type,
8907 			       eval_subst (loc, arg0, cval1, minval,
8908 					   cval2, maxval),
8909 			       arg1);
8910 
8911 	  /* All three of these results should be 0 or 1.  Confirm they are.
8912 	     Then use those values to select the proper code to use.  */
8913 
8914 	  if (TREE_CODE (high_result) == INTEGER_CST
8915 	      && TREE_CODE (equal_result) == INTEGER_CST
8916 	      && TREE_CODE (low_result) == INTEGER_CST)
8917 	    {
8918 	      /* Make a 3-bit mask with the high-order bit being the
8919 		 value for `>', the next for '=', and the low for '<'.  */
8920 		 value for `>', the next for `=', and the low for `<'.  */
8921 		      + (integer_onep (equal_result) * 2)
8922 		      + integer_onep (low_result))
8923 		{
8924 		case 0:
8925 		  /* Always false.  */
8926 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8927 		case 1:
8928 		  code = LT_EXPR;
8929 		  break;
8930 		case 2:
8931 		  code = EQ_EXPR;
8932 		  break;
8933 		case 3:
8934 		  code = LE_EXPR;
8935 		  break;
8936 		case 4:
8937 		  code = GT_EXPR;
8938 		  break;
8939 		case 5:
8940 		  code = NE_EXPR;
8941 		  break;
8942 		case 6:
8943 		  code = GE_EXPR;
8944 		  break;
8945 		case 7:
8946 		  /* Always true.  */
8947 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8948 		}
8949 
8950 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8951 	    }
8952 	}
8953     }
8954 
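  /* For example, for ((x > y) - (y > x)) > 0 the three substitutions
     yield 1, 0 and 0, i.e. mask value 4, so the whole expression
     folds to x > y.  */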
8955   return NULL_TREE;
8956 }
8957 
8958 
8959 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8960    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8961    argument EXPR represents the expression "z" of type TYPE.  */
8962 
8963 static tree
8964 fold_mult_zconjz (location_t loc, tree type, tree expr)
8965 {
8966   tree itype = TREE_TYPE (type);
8967   tree rpart, ipart, tem;
8968 
8969   if (TREE_CODE (expr) == COMPLEX_EXPR)
8970     {
8971       rpart = TREE_OPERAND (expr, 0);
8972       ipart = TREE_OPERAND (expr, 1);
8973     }
8974   else if (TREE_CODE (expr) == COMPLEX_CST)
8975     {
8976       rpart = TREE_REALPART (expr);
8977       ipart = TREE_IMAGPART (expr);
8978     }
8979   else
8980     {
8981       expr = save_expr (expr);
8982       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8983       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8984     }
8985 
8986   rpart = save_expr (rpart);
8987   ipart = save_expr (ipart);
8988   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8989 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8990 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8991   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8992 			  build_zero_cst (itype));
8993 }
8994 
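/* This relies on the identity (a + bi) * (a - bi) = a*a + b*b + 0i,
   so the imaginary part of the product is a literal zero.  */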
8995 
8996 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8997    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8998    true if successful.  */
8999 
9000 static bool
9001 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9002 {
9003   unsigned HOST_WIDE_INT i, nunits;
9004 
9005   if (TREE_CODE (arg) == VECTOR_CST
9006       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9007     {
9008       for (i = 0; i < nunits; ++i)
9009 	elts[i] = VECTOR_CST_ELT (arg, i);
9010     }
9011   else if (TREE_CODE (arg) == CONSTRUCTOR)
9012     {
9013       constructor_elt *elt;
9014 
9015       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9016 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9017 	  return false;
9018 	else
9019 	  elts[i] = elt->value;
9020     }
9021   else
9022     return false;
9023   for (; i < nelts; i++)
9024     elts[i]
9025       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9026   return true;
9027 }
9028 
9029 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9030    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9031    NULL_TREE otherwise.  */
9032 
9033 static tree
9034 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9035 {
9036   unsigned int i;
9037   unsigned HOST_WIDE_INT nelts;
9038   bool need_ctor = false;
9039 
9040   if (!sel.length ().is_constant (&nelts))
9041     return NULL_TREE;
9042   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9043 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9044 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9045   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9046       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9047     return NULL_TREE;
9048 
9049   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9050   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9051       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9052     return NULL_TREE;
9053 
9054   tree_vector_builder out_elts (type, nelts, 1);
9055   for (i = 0; i < nelts; i++)
9056     {
9057       HOST_WIDE_INT index;
9058       if (!sel[i].is_constant (&index))
9059 	return NULL_TREE;
9060       if (!CONSTANT_CLASS_P (in_elts[index]))
9061 	need_ctor = true;
9062       out_elts.quick_push (unshare_expr (in_elts[index]));
9063     }
9064 
9065   if (need_ctor)
9066     {
9067       vec<constructor_elt, va_gc> *v;
9068       vec_alloc (v, nelts);
9069       for (i = 0; i < nelts; i++)
9070 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9071       return build_constructor (type, v);
9072     }
9073   else
9074     return out_elts.build ();
9075 }
9076 
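/* For example, permuting the four-element vectors { a, b, c, d } and
   { e, f, g, h } with the selector { 0, 5, 2, 7 } yields
   { a, f, c, h }: selector indices below the element count pick from
   the first vector, the rest from the second.  */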
9077 /* Try to fold a pointer difference of type TYPE between two address
9078    expressions of array references AREF0 and AREF1 using location LOC.
9079    Return a simplified expression for the difference or NULL_TREE.  */
9080 
9081 static tree
9082 fold_addr_of_array_ref_difference (location_t loc, tree type,
9083 				   tree aref0, tree aref1,
9084 				   bool use_pointer_diff)
9085 {
9086   tree base0 = TREE_OPERAND (aref0, 0);
9087   tree base1 = TREE_OPERAND (aref1, 0);
9088   tree base_offset = build_int_cst (type, 0);
9089 
9090   /* If the bases are array references as well, recurse.  If the bases
9091      are pointer indirections compute the difference of the pointers.
9092      If the bases are equal, we are set.  */
9093   if ((TREE_CODE (base0) == ARRAY_REF
9094        && TREE_CODE (base1) == ARRAY_REF
9095        && (base_offset
9096 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9097 						use_pointer_diff)))
9098       || (INDIRECT_REF_P (base0)
9099 	  && INDIRECT_REF_P (base1)
9100 	  && (base_offset
9101 	        = use_pointer_diff
9102 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9103 				     TREE_OPERAND (base0, 0),
9104 				     TREE_OPERAND (base1, 0))
9105 		  : fold_binary_loc (loc, MINUS_EXPR, type,
9106 				     fold_convert (type,
9107 						   TREE_OPERAND (base0, 0)),
9108 				     fold_convert (type,
9109 						   TREE_OPERAND (base1, 0)))))
9110       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9111     {
9112       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9113       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9114       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9115       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9116       return fold_build2_loc (loc, PLUS_EXPR, type,
9117 			      base_offset,
9118 			      fold_build2_loc (loc, MULT_EXPR, type,
9119 					       diff, esz));
9120     }
9121   return NULL_TREE;
9122 }
9123 
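/* For example, for int a[], the difference &a[i] - &a[j] reduces via
   this helper to (i - j) * sizeof (int), i.e. the distance in bytes
   between the two elements.  */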
9124 /* If the real or vector real constant CST of type TYPE has an exact
9125    inverse, return it, else return NULL.  */
9126 
9127 tree
9128 exact_inverse (tree type, tree cst)
9129 {
9130   REAL_VALUE_TYPE r;
9131   tree unit_type;
9132   machine_mode mode;
9133 
9134   switch (TREE_CODE (cst))
9135     {
9136     case REAL_CST:
9137       r = TREE_REAL_CST (cst);
9138 
9139       if (exact_real_inverse (TYPE_MODE (type), &r))
9140 	return build_real (type, r);
9141 
9142       return NULL_TREE;
9143 
9144     case VECTOR_CST:
9145       {
9146 	unit_type = TREE_TYPE (type);
9147 	mode = TYPE_MODE (unit_type);
9148 
9149 	tree_vector_builder elts;
9150 	if (!elts.new_unary_operation (type, cst, false))
9151 	  return NULL_TREE;
9152 	unsigned int count = elts.encoded_nelts ();
9153 	for (unsigned int i = 0; i < count; ++i)
9154 	  {
9155 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9156 	    if (!exact_real_inverse (mode, &r))
9157 	      return NULL_TREE;
9158 	    elts.quick_push (build_real (unit_type, r));
9159 	  }
9160 
9161 	return elts.build ();
9162       }
9163 
9164     default:
9165       return NULL_TREE;
9166     }
9167 }
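/* For instance, 4.0 has the exact binary inverse 0.25, which lets
   callers rewrite x / 4.0 as x * 0.25, whereas 1.0 / 3.0 is not
   exactly representable and NULL_TREE is returned.  */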
9168 
9169 /* Mask out the tz least significant bits of X of type TYPE where
9170    tz is the number of trailing zeroes in Y.  */
9171 static wide_int
9172 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9173 {
9174   int tz = wi::ctz (y);
9175   if (tz > 0)
9176     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9177   return x;
9178 }
9179 
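/* For example, if Y is 24 (binary 11000, three trailing zeroes), the
   result is X with its three least significant bits cleared,
   i.e. X & ~7.  */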
9180 /* Return true when T is an address and is known to be nonzero.
9181    For floating point we further ensure that T is not denormal.
9182    Similar logic is present in nonzero_address in rtlanal.h.
9183 
9184    If the return value is based on the assumption that signed overflow
9185    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9186    change *STRICT_OVERFLOW_P.  */
9187 
9188 static bool
9189 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9190 {
9191   tree type = TREE_TYPE (t);
9192   enum tree_code code;
9193 
9194   /* Doing something useful for floating point would need more work.  */
9195   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9196     return false;
9197 
9198   code = TREE_CODE (t);
9199   switch (TREE_CODE_CLASS (code))
9200     {
9201     case tcc_unary:
9202       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9203 					      strict_overflow_p);
9204     case tcc_binary:
9205     case tcc_comparison:
9206       return tree_binary_nonzero_warnv_p (code, type,
9207 					       TREE_OPERAND (t, 0),
9208 					       TREE_OPERAND (t, 1),
9209 					       strict_overflow_p);
9210     case tcc_constant:
9211     case tcc_declaration:
9212     case tcc_reference:
9213       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9214 
9215     default:
9216       break;
9217     }
9218 
9219   switch (code)
9220     {
9221     case TRUTH_NOT_EXPR:
9222       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9223 					      strict_overflow_p);
9224 
9225     case TRUTH_AND_EXPR:
9226     case TRUTH_OR_EXPR:
9227     case TRUTH_XOR_EXPR:
9228       return tree_binary_nonzero_warnv_p (code, type,
9229 					       TREE_OPERAND (t, 0),
9230 					       TREE_OPERAND (t, 1),
9231 					       strict_overflow_p);
9232 
9233     case COND_EXPR:
9234     case CONSTRUCTOR:
9235     case OBJ_TYPE_REF:
9236     case ASSERT_EXPR:
9237     case ADDR_EXPR:
9238     case WITH_SIZE_EXPR:
9239     case SSA_NAME:
9240       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9241 
9242     case COMPOUND_EXPR:
9243     case MODIFY_EXPR:
9244     case BIND_EXPR:
9245       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9246 					strict_overflow_p);
9247 
9248     case SAVE_EXPR:
9249       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9250 					strict_overflow_p);
9251 
9252     case CALL_EXPR:
9253       {
9254 	tree fndecl = get_callee_fndecl (t);
9255 	if (!fndecl) return false;
9256 	if (flag_delete_null_pointer_checks && !flag_check_new
9257 	    && DECL_IS_OPERATOR_NEW (fndecl)
9258 	    && !TREE_NOTHROW (fndecl))
9259 	  return true;
9260 	if (flag_delete_null_pointer_checks
9261 	    && lookup_attribute ("returns_nonnull",
9262 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9263 	  return true;
9264 	return alloca_call_p (t);
9265       }
9266 
9267     default:
9268       break;
9269     }
9270   return false;
9271 }
9272 
9273 /* Return true when T is an address and is known to be nonzero.
9274    Handle warnings about undefined signed overflow.  */
9275 
9276 bool
9277 tree_expr_nonzero_p (tree t)
9278 {
9279   bool ret, strict_overflow_p;
9280 
9281   strict_overflow_p = false;
9282   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9283   if (strict_overflow_p)
9284     fold_overflow_warning (("assuming signed overflow does not occur when "
9285 			    "determining that expression is always "
9286 			    "non-zero"),
9287 			   WARN_STRICT_OVERFLOW_MISC);
9288   return ret;
9289 }
9290 
9291 /* Return true if T is known not to be equal to an integer W.  */
9292 
9293 bool
9294 expr_not_equal_to (tree t, const wide_int &w)
9295 {
9296   wide_int min, max, nz;
9297   value_range_kind rtype;
9298   switch (TREE_CODE (t))
9299     {
9300     case INTEGER_CST:
9301       return wi::to_wide (t) != w;
9302 
9303     case SSA_NAME:
9304       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9305 	return false;
9306       rtype = get_range_info (t, &min, &max);
9307       if (rtype == VR_RANGE)
9308 	{
9309 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9310 	    return true;
9311 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9312 	    return true;
9313 	}
9314       else if (rtype == VR_ANTI_RANGE
9315 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9316 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9317 	return true;
9318       /* If T has some known zero bits and W has any of those bits set,
9319 	 then T is known not to be equal to W.  */
9320       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9321 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9322 	return true;
9323       return false;
9324 
9325     default:
9326       return false;
9327     }
9328 }
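/* For instance, an SSA name whose recorded value range is [0, 9] is
   known not to equal 42, and one whose nonzero bits are 0xff is known
   not to equal 0x100.  */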
9329 
9330 /* Fold a binary expression of code CODE and type TYPE with operands
9331    OP0 and OP1.  LOC is the location of the resulting expression.
9332    Return the folded expression if folding is successful.  Otherwise,
9333    return NULL_TREE.  */
9334 
9335 tree
9336 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9337 		 tree op0, tree op1)
9338 {
9339   enum tree_code_class kind = TREE_CODE_CLASS (code);
9340   tree arg0, arg1, tem;
9341   tree t1 = NULL_TREE;
9342   bool strict_overflow_p;
9343   unsigned int prec;
9344 
9345   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9346 	      && TREE_CODE_LENGTH (code) == 2
9347 	      && op0 != NULL_TREE
9348 	      && op1 != NULL_TREE);
9349 
9350   arg0 = op0;
9351   arg1 = op1;
9352 
9353   /* Strip any conversions that don't change the mode.  This is
9354      safe for every expression, except for a comparison expression
9355      because its signedness is derived from its operands.  So, in
9356      the latter case, only strip conversions that don't change the
9357      signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9358      arguments preserved.
9359 
9360      Note that this is done as an internal manipulation within the
9361      constant folder, in order to find the simplest representation
9362      of the arguments so that their form can be studied.  In any
9363      case, the appropriate type conversions should be put back in
9364      the tree that will get out of the constant folder.  */
9365 
9366   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9367     {
9368       STRIP_SIGN_NOPS (arg0);
9369       STRIP_SIGN_NOPS (arg1);
9370     }
9371   else
9372     {
9373       STRIP_NOPS (arg0);
9374       STRIP_NOPS (arg1);
9375     }
9376 
9377   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9378      constant but we can't do arithmetic on them.  */
9379   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9380     {
9381       tem = const_binop (code, type, arg0, arg1);
9382       if (tem != NULL_TREE)
9383 	{
9384 	  if (TREE_TYPE (tem) != type)
9385 	    tem = fold_convert_loc (loc, type, tem);
9386 	  return tem;
9387 	}
9388     }
9389 
9390   /* If this is a commutative operation, and ARG0 is a constant, move it
9391      to ARG1 to reduce the number of tests below.  */
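  /* E.g. 1 + X is rebuilt as X + 1, so later code only needs to test
     for a constant in the second operand.  */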
9392   if (commutative_tree_code (code)
9393       && tree_swap_operands_p (arg0, arg1))
9394     return fold_build2_loc (loc, code, type, op1, op0);
9395 
9396   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9397      to ARG1 to reduce the number of tests below.  */
9398   if (kind == tcc_comparison
9399       && tree_swap_operands_p (arg0, arg1))
9400     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9401 
9402   tem = generic_simplify (loc, code, type, op0, op1);
9403   if (tem)
9404     return tem;
9405 
9406   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9407 
9408      First check for cases where an arithmetic operation is applied to a
9409      compound, conditional, or comparison operation.  Push the arithmetic
9410      operation inside the compound or conditional to see if any folding
9411      can then be done.  Convert comparison to conditional for this purpose.
9412      This also optimizes non-constant cases that used to be done in
9413      expand_expr.
9414 
9415      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9416      where one of the operands is a truth value and the other is a truth
9417      value or a BIT_AND_EXPR with the constant 1.  In that case, the
9418      code below would make the expression more complex.  Change it to a
9419      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9420      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9421 
9422   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9423        || code == EQ_EXPR || code == NE_EXPR)
9424       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9425       && ((truth_value_p (TREE_CODE (arg0))
9426 	   && (truth_value_p (TREE_CODE (arg1))
9427 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9428 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9429 	  || (truth_value_p (TREE_CODE (arg1))
9430 	      && (truth_value_p (TREE_CODE (arg0))
9431 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9432 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9433     {
9434       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9435 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9436 			 : TRUTH_XOR_EXPR,
9437 			 boolean_type_node,
9438 			 fold_convert_loc (loc, boolean_type_node, arg0),
9439 			 fold_convert_loc (loc, boolean_type_node, arg1));
9440 
9441       if (code == EQ_EXPR)
9442 	tem = invert_truthvalue_loc (loc, tem);
9443 
9444       return fold_convert_loc (loc, type, tem);
9445     }
9446 
9447   if (TREE_CODE_CLASS (code) == tcc_binary
9448       || TREE_CODE_CLASS (code) == tcc_comparison)
9449     {
9450       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9451 	{
9452 	  tem = fold_build2_loc (loc, code, type,
9453 			     fold_convert_loc (loc, TREE_TYPE (op0),
9454 					       TREE_OPERAND (arg0, 1)), op1);
9455 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9456 			     tem);
9457 	}
9458       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9459 	{
9460 	  tem = fold_build2_loc (loc, code, type, op0,
9461 			     fold_convert_loc (loc, TREE_TYPE (op1),
9462 					       TREE_OPERAND (arg1, 1)));
9463 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9464 			     tem);
9465 	}
9466 
9467       if (TREE_CODE (arg0) == COND_EXPR
9468 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9469 	  || COMPARISON_CLASS_P (arg0))
9470 	{
9471 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9472 						     arg0, arg1,
9473 						     /*cond_first_p=*/1);
9474 	  if (tem != NULL_TREE)
9475 	    return tem;
9476 	}
9477 
9478       if (TREE_CODE (arg1) == COND_EXPR
9479 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9480 	  || COMPARISON_CLASS_P (arg1))
9481 	{
9482 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9483 						     arg1, arg0,
9484 					             /*cond_first_p=*/0);
9485 	  if (tem != NULL_TREE)
9486 	    return tem;
9487 	}
9488     }
9489 
9490   switch (code)
9491     {
9492     case MEM_REF:
9493       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9494       if (TREE_CODE (arg0) == ADDR_EXPR
9495 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9496 	{
9497 	  tree iref = TREE_OPERAND (arg0, 0);
9498 	  return fold_build2 (MEM_REF, type,
9499 			      TREE_OPERAND (iref, 0),
9500 			      int_const_binop (PLUS_EXPR, arg1,
9501 					       TREE_OPERAND (iref, 1)));
9502 	}
9503 
9504       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9505       if (TREE_CODE (arg0) == ADDR_EXPR
9506 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9507 	{
9508 	  tree base;
9509 	  poly_int64 coffset;
9510 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9511 						&coffset);
9512 	  if (!base)
9513 	    return NULL_TREE;
9514 	  return fold_build2 (MEM_REF, type,
9515 			      build_fold_addr_expr (base),
9516 			      int_const_binop (PLUS_EXPR, arg1,
9517 					       size_int (coffset)));
9518 	}
9519 
9520       return NULL_TREE;
9521 
9522     case POINTER_PLUS_EXPR:
9523       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9524       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9525 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9526         return fold_convert_loc (loc, type,
9527 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9528 					      fold_convert_loc (loc, sizetype,
9529 								arg1),
9530 					      fold_convert_loc (loc, sizetype,
9531 								arg0)));
9532 
9533       return NULL_TREE;
9534 
9535     case PLUS_EXPR:
9536       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9537 	{
9538 	  /* X + (X / CST) * -CST is X % CST.  */
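	  /* E.g. X + (X / 16) * -16 folds to X % 16.  */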
9539 	  if (TREE_CODE (arg1) == MULT_EXPR
9540 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9541 	      && operand_equal_p (arg0,
9542 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9543 	    {
9544 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9545 	      tree cst1 = TREE_OPERAND (arg1, 1);
9546 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9547 				      cst1, cst0);
9548 	      if (sum && integer_zerop (sum))
9549 		return fold_convert_loc (loc, type,
9550 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9551 						      TREE_TYPE (arg0), arg0,
9552 						      cst0));
9553 	    }
9554 	}
9555 
9556       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9557 	 one.  Make sure the type is not saturating and has the signedness of
9558 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9559 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9560       if ((TREE_CODE (arg0) == MULT_EXPR
9561 	   || TREE_CODE (arg1) == MULT_EXPR)
9562 	  && !TYPE_SATURATING (type)
9563 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9564 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9565 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9566         {
9567 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9568 	  if (tem)
9569 	    return tem;
9570 	}
9571 
9572       if (! FLOAT_TYPE_P (type))
9573 	{
9574 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9575 	     (plus (plus (mult) (mult)) (foo)) so that we can
9576 	     take advantage of the factoring cases below.  */
9577 	  if (ANY_INTEGRAL_TYPE_P (type)
9578 	      && TYPE_OVERFLOW_WRAPS (type)
9579 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9580 		    || TREE_CODE (arg0) == MINUS_EXPR)
9581 		   && TREE_CODE (arg1) == MULT_EXPR)
9582 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9583 		       || TREE_CODE (arg1) == MINUS_EXPR)
9584 		      && TREE_CODE (arg0) == MULT_EXPR)))
9585 	    {
9586 	      tree parg0, parg1, parg, marg;
9587 	      enum tree_code pcode;
9588 
9589 	      if (TREE_CODE (arg1) == MULT_EXPR)
9590 		parg = arg0, marg = arg1;
9591 	      else
9592 		parg = arg1, marg = arg0;
9593 	      pcode = TREE_CODE (parg);
9594 	      parg0 = TREE_OPERAND (parg, 0);
9595 	      parg1 = TREE_OPERAND (parg, 1);
9596 	      STRIP_NOPS (parg0);
9597 	      STRIP_NOPS (parg1);
9598 
9599 	      if (TREE_CODE (parg0) == MULT_EXPR
9600 		  && TREE_CODE (parg1) != MULT_EXPR)
9601 		return fold_build2_loc (loc, pcode, type,
9602 				    fold_build2_loc (loc, PLUS_EXPR, type,
9603 						 fold_convert_loc (loc, type,
9604 								   parg0),
9605 						 fold_convert_loc (loc, type,
9606 								   marg)),
9607 				    fold_convert_loc (loc, type, parg1));
9608 	      if (TREE_CODE (parg0) != MULT_EXPR
9609 		  && TREE_CODE (parg1) == MULT_EXPR)
9610 		return
9611 		  fold_build2_loc (loc, PLUS_EXPR, type,
9612 			       fold_convert_loc (loc, type, parg0),
9613 			       fold_build2_loc (loc, pcode, type,
9614 					    fold_convert_loc (loc, type, marg),
9615 					    fold_convert_loc (loc, type,
9616 							      parg1)));
9617 	    }
9618 	}
9619       else
9620 	{
9621 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9622 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9623 	     if signed zeros are involved.  */
9624 	  if (!HONOR_SNANS (element_mode (arg0))
9625               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9626 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9627 	    {
9628 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9629 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9630 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9631 	      bool arg0rz = false, arg0iz = false;
9632 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9633 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9634 		{
9635 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9636 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9637 		  if (arg0rz && arg1i && real_zerop (arg1i))
9638 		    {
9639 		      tree rp = arg1r ? arg1r
9640 				  : build1 (REALPART_EXPR, rtype, arg1);
9641 		      tree ip = arg0i ? arg0i
9642 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9643 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9644 		    }
9645 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9646 		    {
9647 		      tree rp = arg0r ? arg0r
9648 				  : build1 (REALPART_EXPR, rtype, arg0);
9649 		      tree ip = arg1i ? arg1i
9650 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9651 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9652 		    }
9653 		}
9654 	    }
9655 
9656           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9657              We associate floats only if the user has specified
9658              -fassociative-math.  */
9659           if (flag_associative_math
9660               && TREE_CODE (arg1) == PLUS_EXPR
9661               && TREE_CODE (arg0) != MULT_EXPR)
9662             {
9663               tree tree10 = TREE_OPERAND (arg1, 0);
9664               tree tree11 = TREE_OPERAND (arg1, 1);
9665               if (TREE_CODE (tree11) == MULT_EXPR
9666 		  && TREE_CODE (tree10) == MULT_EXPR)
9667                 {
9668                   tree tree0;
9669                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9670                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9671                 }
9672             }
9673           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9674              We associate floats only if the user has specified
9675              -fassociative-math.  */
9676           if (flag_associative_math
9677               && TREE_CODE (arg0) == PLUS_EXPR
9678               && TREE_CODE (arg1) != MULT_EXPR)
9679             {
9680               tree tree00 = TREE_OPERAND (arg0, 0);
9681               tree tree01 = TREE_OPERAND (arg0, 1);
9682               if (TREE_CODE (tree01) == MULT_EXPR
9683 		  && TREE_CODE (tree00) == MULT_EXPR)
9684                 {
9685                   tree tree0;
9686                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9687                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9688                 }
9689             }
9690 	}
9691 
9692      bit_rotate:
9693       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the precision
9694 	 of A, is a rotate of A by C1 bits.  */
9695       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the precision
9696 	 of A, is a rotate of A by B bits.
9697 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9698 	 though in this case CODE must be | and not + or ^, otherwise
9699 	 it doesn't return A when B is 0.  */
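      /* E.g. for a 32-bit unsigned A, (A << 8) + (A >> 24) becomes A
	 rotated left by 8, and (A << B) | (A >> (-B & 31)) becomes A
	 rotated left by B.  */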
9700       {
9701 	enum tree_code code0, code1;
9702 	tree rtype;
9703 	code0 = TREE_CODE (arg0);
9704 	code1 = TREE_CODE (arg1);
9705 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9706 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9707 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9708 			        TREE_OPERAND (arg1, 0), 0)
9709 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9710 	        TYPE_UNSIGNED (rtype))
9711 	    /* Only create rotates in complete modes.  Other cases are not
9712 	       expanded properly.  */
9713 	    && (element_precision (rtype)
9714 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9715 	  {
9716 	    tree tree01, tree11;
9717 	    tree orig_tree01, orig_tree11;
9718 	    enum tree_code code01, code11;
9719 
9720 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9721 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9722 	    STRIP_NOPS (tree01);
9723 	    STRIP_NOPS (tree11);
9724 	    code01 = TREE_CODE (tree01);
9725 	    code11 = TREE_CODE (tree11);
9726 	    if (code11 != MINUS_EXPR
9727 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9728 	      {
9729 		std::swap (code0, code1);
9730 		std::swap (code01, code11);
9731 		std::swap (tree01, tree11);
9732 		std::swap (orig_tree01, orig_tree11);
9733 	      }
9734 	    if (code01 == INTEGER_CST
9735 		&& code11 == INTEGER_CST
9736 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9737 		    == element_precision (rtype)))
9738 	      {
9739 		tem = build2_loc (loc, LROTATE_EXPR,
9740 				  rtype, TREE_OPERAND (arg0, 0),
9741 				  code0 == LSHIFT_EXPR
9742 				  ? orig_tree01 : orig_tree11);
9743 		return fold_convert_loc (loc, type, tem);
9744 	      }
9745 	    else if (code11 == MINUS_EXPR)
9746 	      {
9747 		tree tree110, tree111;
9748 		tree110 = TREE_OPERAND (tree11, 0);
9749 		tree111 = TREE_OPERAND (tree11, 1);
9750 		STRIP_NOPS (tree110);
9751 		STRIP_NOPS (tree111);
9752 		if (TREE_CODE (tree110) == INTEGER_CST
9753 		    && compare_tree_int (tree110,
9754 					 element_precision (rtype)) == 0
9755 		    && operand_equal_p (tree01, tree111, 0))
9756 		  {
9757 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9758 					    ? LROTATE_EXPR : RROTATE_EXPR),
9759 				      rtype, TREE_OPERAND (arg0, 0),
9760 				      orig_tree01);
9761 		    return fold_convert_loc (loc, type, tem);
9762 		  }
9763 	      }
9764 	    else if (code == BIT_IOR_EXPR
9765 		     && code11 == BIT_AND_EXPR
9766 		     && pow2p_hwi (element_precision (rtype)))
9767 	      {
9768 		tree tree110, tree111;
9769 		tree110 = TREE_OPERAND (tree11, 0);
9770 		tree111 = TREE_OPERAND (tree11, 1);
9771 		STRIP_NOPS (tree110);
9772 		STRIP_NOPS (tree111);
9773 		if (TREE_CODE (tree110) == NEGATE_EXPR
9774 		    && TREE_CODE (tree111) == INTEGER_CST
9775 		    && compare_tree_int (tree111,
9776 					 element_precision (rtype) - 1) == 0
9777 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9778 		  {
9779 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9780 					    ? LROTATE_EXPR : RROTATE_EXPR),
9781 				      rtype, TREE_OPERAND (arg0, 0),
9782 				      orig_tree01);
9783 		    return fold_convert_loc (loc, type, tem);
9784 		  }
9785 	      }
9786 	  }
9787       }
9788 
9789     associate:
9790       /* In most languages, we can't associate operations on floats through
9791 	 parentheses.  Rather than remember where the parentheses were, we
9792 	 don't associate floats at all, unless the user has specified
9793 	 -fassociative-math.
9794 	 And, we need to make sure type is not saturating.  */
9795 
9796       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9797 	  && !TYPE_SATURATING (type))
9798 	{
9799 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9800 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9801 	  tree atype = type;
9802 	  bool ok = true;
9803 
9804 	  /* Split both trees into variables, constants, and literals.  Then
9805 	     associate each group together, the constants with literals,
9806 	     then the result with variables.  This increases the chances of
9807 	     literals being recombined later and of generating relocatable
9808 	     expressions for the sum of a constant and literal.  */
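	  /* For instance, (x + 3) + (y + 5) splits into the variables
	     x and y and the literals 3 and 5, so that the literals can
	     be combined into 8 before being re-attached to x + y.  */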
9809 	  var0 = split_tree (arg0, type, code,
9810 			     &minus_var0, &con0, &minus_con0,
9811 			     &lit0, &minus_lit0, 0);
9812 	  var1 = split_tree (arg1, type, code,
9813 			     &minus_var1, &con1, &minus_con1,
9814 			     &lit1, &minus_lit1, code == MINUS_EXPR);
9815 
9816 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9817 	  if (code == MINUS_EXPR)
9818 	    code = PLUS_EXPR;
9819 
9820 	  /* With undefined overflow prefer doing association in a type
9821 	     which wraps on overflow, if that is one of the operand types.  */
9822 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9823 	      && !TYPE_OVERFLOW_WRAPS (type))
9824 	    {
9825 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9826 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9827 		atype = TREE_TYPE (arg0);
9828 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9829 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9830 		atype = TREE_TYPE (arg1);
9831 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9832 	    }
9833 
9834 	  /* With undefined overflow we can only associate constants with one
9835 	     variable, and constants whose association doesn't overflow.  */
9836 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9837 	      && !TYPE_OVERFLOW_WRAPS (atype))
9838 	    {
9839 	      if ((var0 && var1) || (minus_var0 && minus_var1))
9840 		{
9841 		  /* ???  If split_tree would handle NEGATE_EXPR we could
9842 		     simply reject these cases and the allowed cases would
9843 		     be the var0/minus_var1 ones.  */
9844 		  tree tmp0 = var0 ? var0 : minus_var0;
9845 		  tree tmp1 = var1 ? var1 : minus_var1;
9846 		  bool one_neg = false;
9847 
9848 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9849 		    {
9850 		      tmp0 = TREE_OPERAND (tmp0, 0);
9851 		      one_neg = !one_neg;
9852 		    }
9853 		  if (CONVERT_EXPR_P (tmp0)
9854 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9855 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9856 			  <= TYPE_PRECISION (atype)))
9857 		    tmp0 = TREE_OPERAND (tmp0, 0);
9858 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9859 		    {
9860 		      tmp1 = TREE_OPERAND (tmp1, 0);
9861 		      one_neg = !one_neg;
9862 		    }
9863 		  if (CONVERT_EXPR_P (tmp1)
9864 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9865 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9866 			  <= TYPE_PRECISION (atype)))
9867 		    tmp1 = TREE_OPERAND (tmp1, 0);
9868 		  /* The only case we can still associate with two variables
9869 		     is if they cancel out.  */
9870 		  if (!one_neg
9871 		      || !operand_equal_p (tmp0, tmp1, 0))
9872 		    ok = false;
9873 		}
9874 	      else if ((var0 && minus_var1
9875 			&& ! operand_equal_p (var0, minus_var1, 0))
9876 		       || (minus_var0 && var1
9877 			   && ! operand_equal_p (minus_var0, var1, 0)))
9878 		ok = false;
9879 	    }
9880 
9881 	  /* Only do something if we found more than two objects.  Otherwise,
9882 	     nothing has changed and we risk infinite recursion.  */
9883 	  if (ok
9884 	      && ((var0 != 0) + (var1 != 0)
9885 		  + (minus_var0 != 0) + (minus_var1 != 0)
9886 		  + (con0 != 0) + (con1 != 0)
9887 		  + (minus_con0 != 0) + (minus_con1 != 0)
9888 		  + (lit0 != 0) + (lit1 != 0)
9889 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9890 	    {
9891 	      var0 = associate_trees (loc, var0, var1, code, atype);
9892 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9893 					    code, atype);
9894 	      con0 = associate_trees (loc, con0, con1, code, atype);
9895 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9896 					    code, atype);
9897 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9898 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9899 					    code, atype);
9900 
9901 	      if (minus_var0 && var0)
9902 		{
9903 		  var0 = associate_trees (loc, var0, minus_var0,
9904 					  MINUS_EXPR, atype);
9905 		  minus_var0 = 0;
9906 		}
9907 	      if (minus_con0 && con0)
9908 		{
9909 		  con0 = associate_trees (loc, con0, minus_con0,
9910 					  MINUS_EXPR, atype);
9911 		  minus_con0 = 0;
9912 		}
9913 
9914 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9915 		 greater than the positive part.  Otherwise, the multiplicative
9916 		 folding code (i.e. extract_muldiv) may be fooled in case
9917 		 unsigned constants are subtracted, like in the following
9918 		 example: ((X*2 + 4) - 8U)/2.  */
9919 	      if (minus_lit0 && lit0)
9920 		{
9921 		  if (TREE_CODE (lit0) == INTEGER_CST
9922 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9923 		      && tree_int_cst_lt (lit0, minus_lit0)
9924 		      /* But avoid ending up with only negated parts.  */
9925 		      && (var0 || con0))
9926 		    {
9927 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9928 						    MINUS_EXPR, atype);
9929 		      lit0 = 0;
9930 		    }
9931 		  else
9932 		    {
9933 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9934 					      MINUS_EXPR, atype);
9935 		      minus_lit0 = 0;
9936 		    }
9937 		}
9938 
9939 	      /* Don't introduce overflows through reassociation.  */
9940 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9941 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9942 		return NULL_TREE;
9943 
9944 	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9945 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9946 	      lit0 = 0;
9947 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9948 					    code, atype);
9949 	      minus_lit0 = 0;
9950 
9951 	      /* Eliminate minus_con0.  */
9952 	      if (minus_con0)
9953 		{
9954 		  if (con0)
9955 		    con0 = associate_trees (loc, con0, minus_con0,
9956 					    MINUS_EXPR, atype);
9957 		  else if (var0)
9958 		    var0 = associate_trees (loc, var0, minus_con0,
9959 					    MINUS_EXPR, atype);
9960 		  else
9961 		    gcc_unreachable ();
9962 		  minus_con0 = 0;
9963 		}
9964 
9965 	      /* Eliminate minus_var0.  */
9966 	      if (minus_var0)
9967 		{
9968 		  if (con0)
9969 		    con0 = associate_trees (loc, con0, minus_var0,
9970 					    MINUS_EXPR, atype);
9971 		  else
9972 		    gcc_unreachable ();
9973 		  minus_var0 = 0;
9974 		}
9975 
9976 	      return
9977 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9978 							      code, atype));
9979 	    }
9980 	}
9981 
9982       return NULL_TREE;
9983 
9984     case POINTER_DIFF_EXPR:
9985     case MINUS_EXPR:
9986       /* Fold &a[i] - &a[j] to i-j.  */
9987       if (TREE_CODE (arg0) == ADDR_EXPR
9988 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9989 	  && TREE_CODE (arg1) == ADDR_EXPR
9990 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9991         {
9992 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9993 							TREE_OPERAND (arg0, 0),
9994 							TREE_OPERAND (arg1, 0),
9995 							code
9996 							== POINTER_DIFF_EXPR);
9997 	  if (tem)
9998 	    return tem;
9999 	}
10000 
10001       /* The remaining transformations do not apply to pointers.  */
10002       if (code == POINTER_DIFF_EXPR)
10003 	return NULL_TREE;
10004 
10005       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10006       if (TREE_CODE (arg0) == NEGATE_EXPR
10007 	  && negate_expr_p (op1)
10008 	  /* If arg0 is e.g. unsigned int and type is int, then this could
10009 	     introduce UB, because if A is INT_MIN at runtime, the original
10010 	     expression can be well defined while the latter is not.
10011 	     See PR83269.  */
10012 	  && !(ANY_INTEGRAL_TYPE_P (type)
10013 	       && TYPE_OVERFLOW_UNDEFINED (type)
10014 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10015 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10016 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10017 			        fold_convert_loc (loc, type,
10018 						  TREE_OPERAND (arg0, 0)));
10019 
10020       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10021 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10022 	 signed zeros are involved.  */
10023       if (!HONOR_SNANS (element_mode (arg0))
10024 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10025 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10026         {
10027 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10028 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10029 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10030 	  bool arg0rz = false, arg0iz = false;
10031 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10032 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10033 	    {
10034 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10035 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10036 	      if (arg0rz && arg1i && real_zerop (arg1i))
10037 	        {
10038 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10039 					 arg1r ? arg1r
10040 					 : build1 (REALPART_EXPR, rtype, arg1));
10041 		  tree ip = arg0i ? arg0i
10042 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10043 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10044 		}
10045 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10046 	        {
10047 		  tree rp = arg0r ? arg0r
10048 		    : build1 (REALPART_EXPR, rtype, arg0);
10049 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10050 					 arg1i ? arg1i
10051 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10052 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10053 		}
10054 	    }
10055 	}
10056 
10057       /* A - B -> A + (-B) if B is easily negatable.  */
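      /* E.g. for integral X, X - 5 becomes X + -5.  */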
10058       if (negate_expr_p (op1)
10059 	  && ! TYPE_OVERFLOW_SANITIZED (type)
10060 	  && ((FLOAT_TYPE_P (type)
10061                /* Avoid this transformation if B is a positive REAL_CST.  */
10062 	       && (TREE_CODE (op1) != REAL_CST
10063 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10064 	      || INTEGRAL_TYPE_P (type)))
10065 	return fold_build2_loc (loc, PLUS_EXPR, type,
10066 				fold_convert_loc (loc, type, arg0),
10067 				negate_expr (op1));
10068 
10069       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10070 	 one.  Make sure the type is not saturating and has the signedness of
10071 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10072 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10073       if ((TREE_CODE (arg0) == MULT_EXPR
10074 	   || TREE_CODE (arg1) == MULT_EXPR)
10075 	  && !TYPE_SATURATING (type)
10076 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10077 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10078 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10079         {
10080 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10081 	  if (tem)
10082 	    return tem;
10083 	}
10084 
10085       goto associate;
10086 
10087     case MULT_EXPR:
10088       if (! FLOAT_TYPE_P (type))
10089 	{
10090 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10091 	  if (TREE_CODE (op1) == INTEGER_CST
10092 	      && tree_int_cst_sgn (op1) == -1
10093 	      && negate_expr_p (op0)
10094 	      && negate_expr_p (op1)
10095 	      && (tem = negate_expr (op1)) != op1
10096 	      && ! TREE_OVERFLOW (tem))
10097 	    return fold_build2_loc (loc, MULT_EXPR, type,
10098 				    fold_convert_loc (loc, type,
10099 						      negate_expr (op0)), tem);
10100 
10101 	  strict_overflow_p = false;
10102 	  if (TREE_CODE (arg1) == INTEGER_CST
10103 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10104 					&strict_overflow_p)) != 0)
10105 	    {
10106 	      if (strict_overflow_p)
10107 		fold_overflow_warning (("assuming signed overflow does not "
10108 					"occur when simplifying "
10109 					"multiplication"),
10110 				       WARN_STRICT_OVERFLOW_MISC);
10111 	      return fold_convert_loc (loc, type, tem);
10112 	    }
10113 
10114 	  /* Optimize z * conj(z) for integer complex numbers.  */
10115 	  if (TREE_CODE (arg0) == CONJ_EXPR
10116 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10117 	    return fold_mult_zconjz (loc, type, arg1);
10118 	  if (TREE_CODE (arg1) == CONJ_EXPR
10119 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10120 	    return fold_mult_zconjz (loc, type, arg0);
10121 	}
10122       else
10123 	{
10124 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10125 	     This is not the same for NaNs or if signed zeros are
10126 	     involved.  */
10127 	  if (!HONOR_NANS (arg0)
10128               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10129 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10130 	      && TREE_CODE (arg1) == COMPLEX_CST
10131 	      && real_zerop (TREE_REALPART (arg1)))
10132 	    {
10133 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10134 	      if (real_onep (TREE_IMAGPART (arg1)))
10135 		return
10136 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10137 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10138 							     rtype, arg0)),
10139 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10140 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10141 		return
10142 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10143 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10144 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10145 							     rtype, arg0)));
10146 	    }
10147 
10148 	  /* Optimize z * conj(z) for floating point complex numbers.
10149 	     Guarded by flag_unsafe_math_optimizations as non-finite
10150 	     imaginary components don't produce scalar results.  */
10151 	  if (flag_unsafe_math_optimizations
10152 	      && TREE_CODE (arg0) == CONJ_EXPR
10153 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10154 	    return fold_mult_zconjz (loc, type, arg1);
10155 	  if (flag_unsafe_math_optimizations
10156 	      && TREE_CODE (arg1) == CONJ_EXPR
10157 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10158 	    return fold_mult_zconjz (loc, type, arg0);
10159 	}
10160       goto associate;
10161 
10162     case BIT_IOR_EXPR:
10163       /* Canonicalize (X & C1) | C2.  */
10164       if (TREE_CODE (arg0) == BIT_AND_EXPR
10165 	  && TREE_CODE (arg1) == INTEGER_CST
10166 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10167 	{
10168 	  int width = TYPE_PRECISION (type), w;
10169 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10170 	  wide_int c2 = wi::to_wide (arg1);
10171 
10172 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
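	  /* E.g. (X & 0x0f) | 0xff becomes 0xff; X is retained only
	     for its side effects.  */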
10173 	  if ((c1 & c2) == c1)
10174 	    return omit_one_operand_loc (loc, type, arg1,
10175 					 TREE_OPERAND (arg0, 0));
10176 
10177 	  wide_int msk = wi::mask (width, false,
10178 				   TYPE_PRECISION (TREE_TYPE (arg1)));
10179 
10180 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
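	  /* E.g. (X & ~7) | 7 becomes X | 7.  */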
10181 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
10182 	    {
10183 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10184 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10185 	    }
10186 
10187 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10188 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10189 	     mode which allows further optimizations.  */
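	  /* E.g. (X & 0x7f) | 0x0f becomes (X & 0x70) | 0x0f.  */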
10190 	  c1 &= msk;
10191 	  c2 &= msk;
10192 	  wide_int c3 = wi::bit_and_not (c1, c2);
10193 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10194 	    {
10195 	      wide_int mask = wi::mask (w, false,
10196 					TYPE_PRECISION (type));
10197 	      if (((c1 | c2) & mask) == mask
10198 		  && wi::bit_and_not (c1, mask) == 0)
10199 		{
10200 		  c3 = mask;
10201 		  break;
10202 		}
10203 	    }
10204 
10205 	  if (c3 != c1)
10206 	    {
10207 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10208 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10209 				     wide_int_to_tree (type, c3));
10210 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10211 	    }
10212 	}
10213 
10214       /* See if this can be simplified into a rotate first.  If that
10215 	 is unsuccessful, continue in the association code.  */
10216       goto bit_rotate;
10217 
10218     case BIT_XOR_EXPR:
10219       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
10220       if (TREE_CODE (arg0) == BIT_AND_EXPR
10221 	  && INTEGRAL_TYPE_P (type)
10222 	  && integer_onep (TREE_OPERAND (arg0, 1))
10223 	  && integer_onep (arg1))
10224 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10225 				build_zero_cst (TREE_TYPE (arg0)));
10226 
10227       /* See if this can be simplified into a rotate first.  If that
10228 	 is unsuccessful, continue in the association code.  */
10229       goto bit_rotate;
10230 
10231     case BIT_AND_EXPR:
10232       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
10233       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10234 	  && INTEGRAL_TYPE_P (type)
10235 	  && integer_onep (TREE_OPERAND (arg0, 1))
10236 	  && integer_onep (arg1))
10237 	{
10238 	  tree tem2;
10239 	  tem = TREE_OPERAND (arg0, 0);
10240 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10241 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10242 				  tem, tem2);
10243 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10244 				  build_zero_cst (TREE_TYPE (tem)));
10245 	}
10246       /* Fold ~X & 1 as (X & 1) == 0.  */
10247       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10248 	  && INTEGRAL_TYPE_P (type)
10249 	  && integer_onep (arg1))
10250 	{
10251 	  tree tem2;
10252 	  tem = TREE_OPERAND (arg0, 0);
10253 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10254 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10255 				  tem, tem2);
10256 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10257 				  build_zero_cst (TREE_TYPE (tem)));
10258 	}
10259       /* Fold !X & 1 as X == 0.  */
10260       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10261 	  && integer_onep (arg1))
10262 	{
10263 	  tem = TREE_OPERAND (arg0, 0);
10264 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10265 				  build_zero_cst (TREE_TYPE (tem)));
10266 	}
10267 
10268       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10269          multiple of 1 << CST.  */
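      /* E.g. (X * 8) & -8 folds to X * 8, since X * 8 is always
	 a multiple of 8.  */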
10270       if (TREE_CODE (arg1) == INTEGER_CST)
10271 	{
10272 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10273 	  wide_int ncst1 = -cst1;
10274 	  if ((cst1 & ncst1) == ncst1
10275 	      && multiple_of_p (type, arg0,
10276 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10277 	    return fold_convert_loc (loc, type, arg0);
10278 	}
10279 
10280       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10281          bits from CST2.  */
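      /* E.g. (X * 4) & 3 folds to 0, and (X * 4) & 7 becomes
	 (X * 4) & 4, because the low two bits of X * 4 are known
	 to be zero.  */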
10282       if (TREE_CODE (arg1) == INTEGER_CST
10283 	  && TREE_CODE (arg0) == MULT_EXPR
10284 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10285 	{
10286 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10287 	  wide_int masked
10288 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10289 
10290 	  if (masked == 0)
10291 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10292 	                                  arg0, arg1);
10293 	  else if (masked != warg1)
10294 	    {
10295 	      /* Avoid the transform if arg1 is a mask of some
10296 	         mode which allows further optimizations.  */
10297 	      int pop = wi::popcount (warg1);
10298 	      if (!(pop >= BITS_PER_UNIT
10299 		    && pow2p_hwi (pop)
10300 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10301 		return fold_build2_loc (loc, code, type, op0,
10302 					wide_int_to_tree (type, masked));
10303 	    }
10304 	}
10305 
10306       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10307       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10308 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10309 	{
10310 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10311 
10312 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10313 	  if (mask == -1)
10314 	    return
10315 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10316 	}
10317 
10318       goto associate;
10319 
10320     case RDIV_EXPR:
10321       /* Don't touch a floating-point divide by zero unless the mode
10322 	 of the constant can represent infinity.  */
10323       if (TREE_CODE (arg1) == REAL_CST
10324 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10325 	  && real_zerop (arg1))
10326 	return NULL_TREE;
10327 
10328       /* (-A) / (-B) -> A / B  */
10329       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10330 	return fold_build2_loc (loc, RDIV_EXPR, type,
10331 			    TREE_OPERAND (arg0, 0),
10332 			    negate_expr (arg1));
10333       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10334 	return fold_build2_loc (loc, RDIV_EXPR, type,
10335 			    negate_expr (arg0),
10336 			    TREE_OPERAND (arg1, 0));
10337       return NULL_TREE;
10338 
10339     case TRUNC_DIV_EXPR:
10340       /* Fall through */
10341 
10342     case FLOOR_DIV_EXPR:
10343       /* Simplify A / (B << N) where A and B are positive and B is
10344 	 a power of 2, to A >> (N + log2(B)).  */
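      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */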
10345       strict_overflow_p = false;
10346       if (TREE_CODE (arg1) == LSHIFT_EXPR
10347 	  && (TYPE_UNSIGNED (type)
10348 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10349 	{
10350 	  tree sval = TREE_OPERAND (arg1, 0);
10351 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10352 	    {
10353 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10354 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10355 					 wi::exact_log2 (wi::to_wide (sval)));
10356 
10357 	      if (strict_overflow_p)
10358 		fold_overflow_warning (("assuming signed overflow does not "
10359 					"occur when simplifying A / (B << N)"),
10360 				       WARN_STRICT_OVERFLOW_MISC);
10361 
10362 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10363 					sh_cnt, pow2);
10364 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10365 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10366 	    }
10367 	}
10368 
10369       /* Fall through */
10370 
10371     case ROUND_DIV_EXPR:
10372     case CEIL_DIV_EXPR:
10373     case EXACT_DIV_EXPR:
10374       if (integer_zerop (arg1))
10375 	return NULL_TREE;
10376 
10377       /* Convert -A / -B to A / B when the type is signed and overflow is
10378 	 undefined.  */
10379       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10380 	  && TREE_CODE (op0) == NEGATE_EXPR
10381 	  && negate_expr_p (op1))
10382 	{
10383 	  if (INTEGRAL_TYPE_P (type))
10384 	    fold_overflow_warning (("assuming signed overflow does not occur "
10385 				    "when distributing negation across "
10386 				    "division"),
10387 				   WARN_STRICT_OVERFLOW_MISC);
10388 	  return fold_build2_loc (loc, code, type,
10389 				  fold_convert_loc (loc, type,
10390 						    TREE_OPERAND (arg0, 0)),
10391 				  negate_expr (op1));
10392 	}
10393       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10394 	  && TREE_CODE (arg1) == NEGATE_EXPR
10395 	  && negate_expr_p (op0))
10396 	{
10397 	  if (INTEGRAL_TYPE_P (type))
10398 	    fold_overflow_warning (("assuming signed overflow does not occur "
10399 				    "when distributing negation across "
10400 				    "division"),
10401 				   WARN_STRICT_OVERFLOW_MISC);
10402 	  return fold_build2_loc (loc, code, type,
10403 				  negate_expr (op0),
10404 				  fold_convert_loc (loc, type,
10405 						    TREE_OPERAND (arg1, 0)));
10406 	}
10407 
10408       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10409 	 operation, EXACT_DIV_EXPR.
10410 
10411 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10412 	 At one time the others generated faster code; it's unclear if they do
10413 	 after the last round of changes to the DIV code in expmed.c.  */
10414       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10415 	  && multiple_of_p (type, arg0, arg1))
10416 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10417 				fold_convert (type, arg0),
10418 				fold_convert (type, arg1));
10419 
10420       strict_overflow_p = false;
10421       if (TREE_CODE (arg1) == INTEGER_CST
10422 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10423 				    &strict_overflow_p)) != 0)
10424 	{
10425 	  if (strict_overflow_p)
10426 	    fold_overflow_warning (("assuming signed overflow does not occur "
10427 				    "when simplifying division"),
10428 				   WARN_STRICT_OVERFLOW_MISC);
10429 	  return fold_convert_loc (loc, type, tem);
10430 	}
10431 
10432       return NULL_TREE;
10433 
10434     case CEIL_MOD_EXPR:
10435     case FLOOR_MOD_EXPR:
10436     case ROUND_MOD_EXPR:
10437     case TRUNC_MOD_EXPR:
10438       strict_overflow_p = false;
10439       if (TREE_CODE (arg1) == INTEGER_CST
10440 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10441 				    &strict_overflow_p)) != 0)
10442 	{
10443 	  if (strict_overflow_p)
10444 	    fold_overflow_warning (("assuming signed overflow does not occur "
10445 				    "when simplifying modulus"),
10446 				   WARN_STRICT_OVERFLOW_MISC);
10447 	  return fold_convert_loc (loc, type, tem);
10448 	}
10449 
10450       return NULL_TREE;
10451 
10452     case LROTATE_EXPR:
10453     case RROTATE_EXPR:
10454     case RSHIFT_EXPR:
10455     case LSHIFT_EXPR:
10456       /* Since a negative shift count is not well-defined,
10457 	 don't try to compute it in the compiler.  */
10458       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10459 	return NULL_TREE;
10460 
10461       prec = element_precision (type);
10462 
10463       /* If we have a rotate of a bit operation with the rotate count and
10464 	 the second operand of the bit operation both constant,
10465 	 permute the two operations.  */
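      /* E.g. (X & C2) ror C1 becomes (X ror C1) & (C2 ror C1).  */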
10466       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10467 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10468 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10469 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10470 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10471 	{
10472 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10473 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10474 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10475 				  fold_build2_loc (loc, code, type,
10476 						   arg00, arg1),
10477 				  fold_build2_loc (loc, code, type,
10478 						   arg01, arg1));
10479 	}
10480 
10481       /* Two consecutive rotates adding up to some integer
10482 	 multiple of the precision of the type can be ignored.  */
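      /* E.g. for a 32-bit X, (X ror 10) ror 22 is just X.  */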
10483       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10484 	  && TREE_CODE (arg0) == RROTATE_EXPR
10485 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10486 	  && wi::umod_trunc (wi::to_wide (arg1)
10487 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
10488 			     prec) == 0)
10489 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10490 
10491       return NULL_TREE;
10492 
10493     case MIN_EXPR:
10494     case MAX_EXPR:
10495       goto associate;
10496 
10497     case TRUTH_ANDIF_EXPR:
10498       /* Note that the operands of this must be ints
10499 	 and their values must be 0 or 1.
10500 	 ("true" is a fixed value perhaps depending on the language.)  */
10501       /* If first arg is constant zero, return it.  */
10502       if (integer_zerop (arg0))
10503 	return fold_convert_loc (loc, type, arg0);
10504       /* FALLTHRU */
10505     case TRUTH_AND_EXPR:
10506       /* If either arg is constant true, drop it.  */
10507       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10508 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10509       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10510 	  /* Preserve sequence points.  */
10511 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10512 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10513       /* If second arg is constant zero, result is zero, but first arg
10514 	 must be evaluated.  */
10515       if (integer_zerop (arg1))
10516 	return omit_one_operand_loc (loc, type, arg1, arg0);
10517       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10518 	 case will be handled here.  */
10519       if (integer_zerop (arg0))
10520 	return omit_one_operand_loc (loc, type, arg0, arg1);
10521 
10522       /* !X && X is always false.  */
10523       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10524 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10525 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10526       /* X && !X is always false.  */
10527       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10528 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10529 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10530 
10531       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10532 	 means A >= Y && A != MAX, but in this case we know that
10533 	 A < X <= MAX.  */
10534 
10535       if (!TREE_SIDE_EFFECTS (arg0)
10536 	  && !TREE_SIDE_EFFECTS (arg1))
10537 	{
10538 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10539 	  if (tem && !operand_equal_p (tem, arg0, 0))
10540 	    return fold_build2_loc (loc, code, type, tem, arg1);
10541 
10542 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10543 	  if (tem && !operand_equal_p (tem, arg1, 0))
10544 	    return fold_build2_loc (loc, code, type, arg0, tem);
10545 	}
10546 
10547       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10548           != NULL_TREE)
10549         return tem;
10550 
10551       return NULL_TREE;
10552 
10553     case TRUTH_ORIF_EXPR:
10554       /* Note that the operands of this must be ints
10555 	 and their values must be 0 or 1.
10556 	 ("true" is a fixed value perhaps depending on the language.)  */
10557       /* If first arg is constant true, return it.  */
10558       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10559 	return fold_convert_loc (loc, type, arg0);
10560       /* FALLTHRU */
10561     case TRUTH_OR_EXPR:
10562       /* If either arg is constant zero, drop it.  */
10563       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10564 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10565       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10566 	  /* Preserve sequence points.  */
10567 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10568 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10569       /* If second arg is constant true, result is true, but we must
10570 	 evaluate first arg.  */
10571       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10572 	return omit_one_operand_loc (loc, type, arg1, arg0);
10573       /* Likewise for first arg, but note this only occurs here for
10574 	 TRUTH_OR_EXPR.  */
10575       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10576 	return omit_one_operand_loc (loc, type, arg0, arg1);
10577 
10578       /* !X || X is always true.  */
10579       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10580 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10581 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10582       /* X || !X is always true.  */
10583       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10584 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10585 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10586 
10587       /* (X && !Y) || (!X && Y) is X ^ Y */
10588       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10589 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10590         {
10591 	  tree a0, a1, l0, l1, n0, n1;
10592 
10593 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10594 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10595 
10596 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10597 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10598 
10599 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10600 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10601 
10602 	  if ((operand_equal_p (n0, a0, 0)
10603 	       && operand_equal_p (n1, a1, 0))
10604 	      || (operand_equal_p (n0, a1, 0)
10605 		  && operand_equal_p (n1, a0, 0)))
10606 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10607 	}
10608 
10609       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10610           != NULL_TREE)
10611         return tem;
10612 
10613       return NULL_TREE;
10614 
10615     case TRUTH_XOR_EXPR:
10616       /* If the second arg is constant zero, drop it.  */
10617       if (integer_zerop (arg1))
10618 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10619       /* If the second arg is constant true, this is a logical inversion.  */
10620       if (integer_onep (arg1))
10621 	{
10622 	  tem = invert_truthvalue_loc (loc, arg0);
10623 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10624 	}
10625       /* Identical arguments cancel to zero.  */
10626       if (operand_equal_p (arg0, arg1, 0))
10627 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10628 
10629       /* !X ^ X is always true.  */
10630       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10631 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10632 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10633 
10634       /* X ^ !X is always true.  */
10635       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10636 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10637 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10638 
10639       return NULL_TREE;
10640 
10641     case EQ_EXPR:
10642     case NE_EXPR:
10643       STRIP_NOPS (arg0);
10644       STRIP_NOPS (arg1);
10645 
10646       tem = fold_comparison (loc, code, type, op0, op1);
10647       if (tem != NULL_TREE)
10648 	return tem;
10649 
10650       /* bool_var != 1 becomes !bool_var. */
10651       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10652           && code == NE_EXPR)
10653         return fold_convert_loc (loc, type,
10654 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10655 						  TREE_TYPE (arg0), arg0));
10656 
10657       /* bool_var == 0 becomes !bool_var. */
10658       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10659           && code == EQ_EXPR)
10660         return fold_convert_loc (loc, type,
10661 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10662 						  TREE_TYPE (arg0), arg0));
10663 
10664       /* !exp != 0 becomes !exp */
10665       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10666 	  && code == NE_EXPR)
10667         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10668 
10669       /* If this is an EQ or NE comparison with zero and ARG0 is
10670 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10671 	 two operations, but the latter can be done in one less insn
10672 	 on machines that have only two-operand insns or on which a
10673 	 constant cannot be the first operand.  */
10674       if (TREE_CODE (arg0) == BIT_AND_EXPR
10675 	  && integer_zerop (arg1))
10676 	{
10677 	  tree arg00 = TREE_OPERAND (arg0, 0);
10678 	  tree arg01 = TREE_OPERAND (arg0, 1);
10679 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10680 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10681 	    {
10682 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10683 				      arg01, TREE_OPERAND (arg00, 1));
10684 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10685 				 build_int_cst (TREE_TYPE (arg0), 1));
10686 	      return fold_build2_loc (loc, code, type,
10687 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10688 				  arg1);
10689 	    }
10690 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10691 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10692 	    {
10693 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10694 				      arg00, TREE_OPERAND (arg01, 1));
10695 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10696 				 build_int_cst (TREE_TYPE (arg0), 1));
10697 	      return fold_build2_loc (loc, code, type,
10698 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10699 				  arg1);
10700 	    }
10701 	}
10702 
10703       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10704 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10705 	 a single bit.  */
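      /* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
	 4 << 3 does not overflow.  */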
10706       if (TREE_CODE (arg0) == BIT_AND_EXPR
10707 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10708 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10709 	     == INTEGER_CST
10710 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10711 	  && integer_zerop (arg1))
10712 	{
10713 	  tree itype = TREE_TYPE (arg0);
10714 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10715 	  prec = TYPE_PRECISION (itype);
10716 
10717 	  /* Check for a valid shift count.  */
10718 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
10719 	    {
10720 	      tree arg01 = TREE_OPERAND (arg0, 1);
10721 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10722 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10723 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10724 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10725 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10726 		{
10727 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10728 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10729 		  return fold_build2_loc (loc, code, type, tem,
10730 					  fold_convert_loc (loc, itype, arg1));
10731 		}
10732 	      /* Otherwise, for signed (arithmetic) shifts,
10733 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10734 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10735 	      else if (!TYPE_UNSIGNED (itype))
10736 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10737 				    arg000, build_int_cst (itype, 0));
10738 	      /* Otherwise, for unsigned (logical) shifts,
10739 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10740 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10741 	      else
10742 		return omit_one_operand_loc (loc, type,
10743 					 code == EQ_EXPR ? integer_one_node
10744 							 : integer_zero_node,
10745 					 arg000);
10746 	    }
10747 	}
10748 
10749       /* If this is a comparison of a field, we may be able to simplify it.  */
10750       if ((TREE_CODE (arg0) == COMPONENT_REF
10751 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10752 	  /* Handle the constant case even without -O
10753 	     to make sure the warnings are given.  */
10754 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10755 	{
10756 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10757 	  if (t1)
10758 	    return t1;
10759 	}
10760 
10761       /* Optimize comparisons of strlen vs zero to a compare of the
10762 	 first character of the string vs zero.  To wit,
10763 		strlen(ptr) == 0   =>  *ptr == 0
10764 		strlen(ptr) != 0   =>  *ptr != 0
10765 	 Other cases should reduce to one of these two (or a constant)
10766 	 due to the return value of strlen being unsigned.  */
10767       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
10768 	{
10769 	  tree fndecl = get_callee_fndecl (arg0);
10770 
10771 	  if (fndecl
10772 	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
10773 	      && call_expr_nargs (arg0) == 1
10774 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
10775 		  == POINTER_TYPE))
10776 	    {
10777 	      tree ptrtype
10778 		= build_pointer_type (build_qualified_type (char_type_node,
10779 							    TYPE_QUAL_CONST));
10780 	      tree ptr = fold_convert_loc (loc, ptrtype,
10781 					   CALL_EXPR_ARG (arg0, 0));
10782 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
10783 	      return fold_build2_loc (loc, code, type, iref,
10784 				      build_int_cst (TREE_TYPE (iref), 0));
10785 	    }
10786 	}
10787 
10788       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10789 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
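      /* E.g. for 32-bit int X, (X >> 31) != 0 folds to X < 0 and
	 (X >> 31) == 0 folds to X >= 0; an unsigned X is first
	 converted to the corresponding signed type.  */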
10790       if (TREE_CODE (arg0) == RSHIFT_EXPR
10791 	  && integer_zerop (arg1)
10792 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10793 	{
10794 	  tree arg00 = TREE_OPERAND (arg0, 0);
10795 	  tree arg01 = TREE_OPERAND (arg0, 1);
10796 	  tree itype = TREE_TYPE (arg00);
10797 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
10798 	    {
10799 	      if (TYPE_UNSIGNED (itype))
10800 		{
10801 		  itype = signed_type_for (itype);
10802 		  arg00 = fold_convert_loc (loc, itype, arg00);
10803 		}
10804 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10805 				  type, arg00, build_zero_cst (itype));
10806 	    }
10807 	}
10808 
10809       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10810 	 (X & C) == 0 when C is a single bit.  */
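      /* E.g. (~X & 8) == 0 holds exactly when bit 3 of X is set,
	 so it folds to (X & 8) != 0, dropping the BIT_NOT_EXPR.  */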
10811       if (TREE_CODE (arg0) == BIT_AND_EXPR
10812 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10813 	  && integer_zerop (arg1)
10814 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10815 	{
10816 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10817 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10818 				 TREE_OPERAND (arg0, 1));
10819 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10820 				  type, tem,
10821 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10822 						    arg1));
10823 	}
10824 
10825       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10826 	 constant C is a power of two, i.e. a single bit.  */
10827       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10828 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10829 	  && integer_zerop (arg1)
10830 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10831 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10832 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10833 	{
10834 	  tree arg00 = TREE_OPERAND (arg0, 0);
10835 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10836 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10837 	}
10838 
10839       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
10841       if (TREE_CODE (arg0) == BIT_AND_EXPR
10842 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10843 	  && integer_zerop (arg1)
10844 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10845 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10846 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10847 	{
10848 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10849 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10850 			     arg000, TREE_OPERAND (arg0, 1));
10851 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10852 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10853 	}
10854 
10855       if (integer_zerop (arg1)
10856 	  && tree_expr_nonzero_p (arg0))
10857         {
	  tree res = constant_boolean_node (code == NE_EXPR, type);
10859 	  return omit_one_operand_loc (loc, type, res, arg0);
10860 	}
10861 
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
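      /* E.g. (X & 7) == (Y & 7) folds to ((X ^ Y) & 7) == 0: the
	 masked bits agree exactly when the XOR clears them all.  */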
10863       if (TREE_CODE (arg0) == BIT_AND_EXPR
10864 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10865 	{
10866 	  tree arg00 = TREE_OPERAND (arg0, 0);
10867 	  tree arg01 = TREE_OPERAND (arg0, 1);
10868 	  tree arg10 = TREE_OPERAND (arg1, 0);
10869 	  tree arg11 = TREE_OPERAND (arg1, 1);
10870 	  tree itype = TREE_TYPE (arg0);
10871 
10872 	  if (operand_equal_p (arg01, arg11, 0))
10873 	    {
10874 	      tem = fold_convert_loc (loc, itype, arg10);
10875 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10876 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10877 	      return fold_build2_loc (loc, code, type, tem,
10878 				      build_zero_cst (itype));
10879 	    }
10880 	  if (operand_equal_p (arg01, arg10, 0))
10881 	    {
10882 	      tem = fold_convert_loc (loc, itype, arg11);
10883 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10884 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10885 	      return fold_build2_loc (loc, code, type, tem,
10886 				      build_zero_cst (itype));
10887 	    }
10888 	  if (operand_equal_p (arg00, arg11, 0))
10889 	    {
10890 	      tem = fold_convert_loc (loc, itype, arg10);
10891 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10892 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10893 	      return fold_build2_loc (loc, code, type, tem,
10894 				      build_zero_cst (itype));
10895 	    }
10896 	  if (operand_equal_p (arg00, arg10, 0))
10897 	    {
10898 	      tem = fold_convert_loc (loc, itype, arg11);
10899 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10900 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10901 	      return fold_build2_loc (loc, code, type, tem,
10902 				      build_zero_cst (itype));
10903 	    }
10904 	}
10905 
10906       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10907 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10908 	{
10909 	  tree arg00 = TREE_OPERAND (arg0, 0);
10910 	  tree arg01 = TREE_OPERAND (arg0, 1);
10911 	  tree arg10 = TREE_OPERAND (arg1, 0);
10912 	  tree arg11 = TREE_OPERAND (arg1, 1);
10913 	  tree itype = TREE_TYPE (arg0);
10914 
10915 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10916 	     operand_equal_p guarantees no side-effects so we don't need
10917 	     to use omit_one_operand on Z.  */
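	  /* E.g. (X ^ Z) == (Y ^ Z) folds to X == Y, since XOR with
	     the same Z is a bijection on the operand's value.  */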
10918 	  if (operand_equal_p (arg01, arg11, 0))
10919 	    return fold_build2_loc (loc, code, type, arg00,
10920 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10921 						      arg10));
10922 	  if (operand_equal_p (arg01, arg10, 0))
10923 	    return fold_build2_loc (loc, code, type, arg00,
10924 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10925 						      arg11));
10926 	  if (operand_equal_p (arg00, arg11, 0))
10927 	    return fold_build2_loc (loc, code, type, arg01,
10928 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10929 						      arg10));
10930 	  if (operand_equal_p (arg00, arg10, 0))
10931 	    return fold_build2_loc (loc, code, type, arg01,
10932 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10933 						      arg11));
10934 
10935 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
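	  /* E.g. (X ^ 5) == (Y ^ 3) folds to (X ^ 6) == Y, merging
	     the two constants as 5 ^ 3 == 6.  */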
10936 	  if (TREE_CODE (arg01) == INTEGER_CST
10937 	      && TREE_CODE (arg11) == INTEGER_CST)
10938 	    {
10939 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10940 				     fold_convert_loc (loc, itype, arg11));
10941 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10942 	      return fold_build2_loc (loc, code, type, tem,
10943 				      fold_convert_loc (loc, itype, arg10));
10944 	    }
10945 	}
10946 
10947       /* Attempt to simplify equality/inequality comparisons of complex
10948 	 values.  Only lower the comparison if the result is known or
10949 	 can be simplified to a single scalar comparison.  */
10950       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10951 	   || TREE_CODE (arg0) == COMPLEX_CST)
10952 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10953 	      || TREE_CODE (arg1) == COMPLEX_CST))
10954 	{
10955 	  tree real0, imag0, real1, imag1;
10956 	  tree rcond, icond;
10957 
10958 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
10959 	    {
10960 	      real0 = TREE_OPERAND (arg0, 0);
10961 	      imag0 = TREE_OPERAND (arg0, 1);
10962 	    }
10963 	  else
10964 	    {
10965 	      real0 = TREE_REALPART (arg0);
10966 	      imag0 = TREE_IMAGPART (arg0);
10967 	    }
10968 
10969 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
10970 	    {
10971 	      real1 = TREE_OPERAND (arg1, 0);
10972 	      imag1 = TREE_OPERAND (arg1, 1);
10973 	    }
10974 	  else
10975 	    {
10976 	      real1 = TREE_REALPART (arg1);
10977 	      imag1 = TREE_IMAGPART (arg1);
10978 	    }
10979 
10980 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
10981 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10982 	    {
10983 	      if (integer_zerop (rcond))
10984 		{
10985 		  if (code == EQ_EXPR)
10986 		    return omit_two_operands_loc (loc, type, boolean_false_node,
10987 					      imag0, imag1);
10988 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10989 		}
10990 	      else
10991 		{
10992 		  if (code == NE_EXPR)
10993 		    return omit_two_operands_loc (loc, type, boolean_true_node,
10994 					      imag0, imag1);
10995 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10996 		}
10997 	    }
10998 
10999 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11000 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11001 	    {
11002 	      if (integer_zerop (icond))
11003 		{
11004 		  if (code == EQ_EXPR)
11005 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11006 					      real0, real1);
11007 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11008 		}
11009 	      else
11010 		{
11011 		  if (code == NE_EXPR)
11012 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11013 					      real0, real1);
11014 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11015 		}
11016 	    }
11017 	}
11018 
11019       return NULL_TREE;
11020 
11021     case LT_EXPR:
11022     case GT_EXPR:
11023     case LE_EXPR:
11024     case GE_EXPR:
11025       tem = fold_comparison (loc, code, type, op0, op1);
11026       if (tem != NULL_TREE)
11027 	return tem;
11028 
11029       /* Transform comparisons of the form X +- C CMP X.  */
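      /* E.g. with a positive real constant c, (X - c) > X folds to
	 false outright, while (X + c) >= X folds to true only when
	 NaNs need not be honored.  */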
11030       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11031 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11032 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11033 	  && !HONOR_SNANS (arg0))
11034 	{
11035 	  tree arg01 = TREE_OPERAND (arg0, 1);
11036 	  enum tree_code code0 = TREE_CODE (arg0);
11037 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11038 
11039 	  /* (X - c) > X becomes false.  */
11040 	  if (code == GT_EXPR
11041 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11042 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11043 	    return constant_boolean_node (0, type);
11044 
11045 	  /* Likewise (X + c) < X becomes false.  */
11046 	  if (code == LT_EXPR
11047 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11048 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11049 	    return constant_boolean_node (0, type);
11050 
11051 	  /* Convert (X - c) <= X to true.  */
11052 	  if (!HONOR_NANS (arg1)
11053 	      && code == LE_EXPR
11054 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11055 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11056 	    return constant_boolean_node (1, type);
11057 
11058 	  /* Convert (X + c) >= X to true.  */
11059 	  if (!HONOR_NANS (arg1)
11060 	      && code == GE_EXPR
11061 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11062 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11063 	    return constant_boolean_node (1, type);
11064 	}
11065 
11066       /* If we are comparing an ABS_EXPR with a constant, we can
11067 	 convert all the cases into explicit comparisons, but they may
11068 	 well not be faster than doing the ABS and one comparison.
11069 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11070 	 and a comparison, and is probably faster.  */
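      /* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5, one range test
	 instead of expanding the absolute value.  */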
11071       if (code == LE_EXPR
11072 	  && TREE_CODE (arg1) == INTEGER_CST
11073 	  && TREE_CODE (arg0) == ABS_EXPR
11074 	  && ! TREE_SIDE_EFFECTS (arg0)
11075 	  && (tem = negate_expr (arg1)) != 0
11076 	  && TREE_CODE (tem) == INTEGER_CST
11077 	  && !TREE_OVERFLOW (tem))
11078 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11079 			    build2 (GE_EXPR, type,
11080 				    TREE_OPERAND (arg0, 0), tem),
11081 			    build2 (LE_EXPR, type,
11082 				    TREE_OPERAND (arg0, 0), arg1));
11083 
11084       /* Convert ABS_EXPR<x> >= 0 to true.  */
11085       strict_overflow_p = false;
11086       if (code == GE_EXPR
11087 	  && (integer_zerop (arg1)
11088 	      || (! HONOR_NANS (arg0)
11089 		  && real_zerop (arg1)))
11090 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11091 	{
11092 	  if (strict_overflow_p)
11093 	    fold_overflow_warning (("assuming signed overflow does not occur "
11094 				    "when simplifying comparison of "
11095 				    "absolute value and zero"),
11096 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11097 	  return omit_one_operand_loc (loc, type,
11098 				       constant_boolean_node (true, type),
11099 				       arg0);
11100 	}
11101 
11102       /* Convert ABS_EXPR<x> < 0 to false.  */
11103       strict_overflow_p = false;
11104       if (code == LT_EXPR
11105 	  && (integer_zerop (arg1) || real_zerop (arg1))
11106 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11107 	{
11108 	  if (strict_overflow_p)
11109 	    fold_overflow_warning (("assuming signed overflow does not occur "
11110 				    "when simplifying comparison of "
11111 				    "absolute value and zero"),
11112 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11113 	  return omit_one_operand_loc (loc, type,
11114 				       constant_boolean_node (false, type),
11115 				       arg0);
11116 	}
11117 
11118       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11119 	 and similarly for >= into !=.  */
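      /* E.g. for unsigned X, X < (1 << Y) folds to (X >> Y) == 0
	 and X >= (1 << Y) folds to (X >> Y) != 0.  */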
11120       if ((code == LT_EXPR || code == GE_EXPR)
11121 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11122 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11123 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11124 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11125 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11126 				   TREE_OPERAND (arg1, 1)),
11127 			   build_zero_cst (TREE_TYPE (arg0)));
11128 
11129       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
11130 	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y == 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is the number of bits in the signed shift type
	 minus 1, we can't optimize this.  E.g. (unsigned long long)
	 (1 << Y) for Y == 31 might be 0xffffffff80000000.  */
11136       if ((code == LT_EXPR || code == GE_EXPR)
11137 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11138 	  && CONVERT_EXPR_P (arg1)
11139 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11140 	  && (element_precision (TREE_TYPE (arg1))
11141 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11142 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11143 	      || (element_precision (TREE_TYPE (arg1))
11144 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11145 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11146 	{
11147 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11148 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11149 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11150 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11151 			     build_zero_cst (TREE_TYPE (arg0)));
11152 	}
11153 
11154       return NULL_TREE;
11155 
11156     case UNORDERED_EXPR:
11157     case ORDERED_EXPR:
11158     case UNLT_EXPR:
11159     case UNLE_EXPR:
11160     case UNGT_EXPR:
11161     case UNGE_EXPR:
11162     case UNEQ_EXPR:
11163     case LTGT_EXPR:
11164       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11165       {
11166 	tree targ0 = strip_float_extensions (arg0);
11167 	tree targ1 = strip_float_extensions (arg1);
11168 	tree newtype = TREE_TYPE (targ0);
11169 
11170 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11171 	  newtype = TREE_TYPE (targ1);
11172 
11173 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11174 	  return fold_build2_loc (loc, code, type,
11175 			      fold_convert_loc (loc, newtype, targ0),
11176 			      fold_convert_loc (loc, newtype, targ1));
11177       }
11178 
11179       return NULL_TREE;
11180 
11181     case COMPOUND_EXPR:
11182       /* When pedantic, a compound expression can be neither an lvalue
11183 	 nor an integer constant expression.  */
11184       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11185 	return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
11187       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11188 				 : fold_convert_loc (loc, type, arg1);
11189       return pedantic_non_lvalue_loc (loc, tem);
11190 
11191     case ASSERT_EXPR:
11192       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11193       gcc_unreachable ();
11194 
11195     default:
11196       return NULL_TREE;
11197     } /* switch (code) */
11198 }
11199 
11200 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11201    ((A & N) + B) & M -> (A + B) & M
11202    Similarly if (N & M) == 0,
11203    ((A | N) + B) & M -> (A + B) & M
11204    and for - instead of + (or unary - instead of +)
11205    and/or ^ instead of |.
11206    If B is constant and (B & M) == 0, fold into A & M.
11207 
   This function is a helper for match.pd patterns.  If any optimization
   is possible, return the non-NULL type in which the simplified
   operation should be performed; otherwise return NULL_TREE.
11211 
11212    ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11213    then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11214    Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11215    +/-.  */
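/* A worked example (constants invented for illustration): with M == 7,
   i.e. (1 << 3) - 1, and N == 15, (N & M) == M holds, so
   ((A & 15) + B) & 7 simplifies to (A + B) & 7; with N == 8,
   (N & M) == 0, so ((A | 8) + B) & 7 simplifies the same way.  */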
11216 tree
11217 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11218 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
11219 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
11220 		   tree *pmop)
11221 {
11222   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11223   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11224   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11225   if (~cst1 == 0
11226       || (cst1 & (cst1 + 1)) != 0
11227       || !INTEGRAL_TYPE_P (type)
11228       || (!TYPE_OVERFLOW_WRAPS (type)
11229 	  && TREE_CODE (type) != INTEGER_TYPE)
11230       || (wi::max_value (type) & cst1) != cst1)
11231     return NULL_TREE;
11232 
11233   enum tree_code codes[2] = { code00, code01 };
11234   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11235   int which = 0;
11236   wide_int cst0;
11237 
11238   /* Now we know that arg0 is (C + D) or (C - D) or -C and
11239      arg1 (M) is == (1LL << cst) - 1.
11240      Store C into PMOP[0] and D into PMOP[1].  */
11241   pmop[0] = arg00;
11242   pmop[1] = arg01;
11243   which = code != NEGATE_EXPR;
11244 
11245   for (; which >= 0; which--)
11246     switch (codes[which])
11247       {
11248       case BIT_AND_EXPR:
11249       case BIT_IOR_EXPR:
11250       case BIT_XOR_EXPR:
11251 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11252 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11253 	if (codes[which] == BIT_AND_EXPR)
11254 	  {
11255 	    if (cst0 != cst1)
11256 	      break;
11257 	  }
11258 	else if (cst0 != 0)
11259 	  break;
11260 	/* If C or D is of the form (A & N) where
11261 	   (N & M) == M, or of the form (A | N) or
11262 	   (A ^ N) where (N & M) == 0, replace it with A.  */
11263 	pmop[which] = arg0xx[2 * which];
11264 	break;
11265       case ERROR_MARK:
11266 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
11267 	  break;
11268 	/* If C or D is a N where (N & M) == 0, it can be
11269 	   omitted (replaced with 0).  */
11270 	if ((code == PLUS_EXPR
11271 	     || (code == MINUS_EXPR && which == 0))
11272 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
11273 	  pmop[which] = build_int_cst (type, 0);
11274 	/* Similarly, with C - N where (-N & M) == 0.  */
11275 	if (code == MINUS_EXPR
11276 	    && which == 1
11277 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
11278 	  pmop[which] = build_int_cst (type, 0);
11279 	break;
11280       default:
11281 	gcc_unreachable ();
11282       }
11283 
11284   /* Only build anything new if we optimized one or both arguments above.  */
11285   if (pmop[0] == arg00 && pmop[1] == arg01)
11286     return NULL_TREE;
11287 
11288   if (TYPE_OVERFLOW_WRAPS (type))
11289     return type;
11290   else
11291     return unsigned_type_for (type);
11292 }
11293 
/* Used by contains_label_p and contains_label_1.  */
11295 
11296 struct contains_label_data
11297 {
11298   hash_set<tree> *pset;
11299   bool inside_switch_p;
11300 };
11301 
11302 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11303    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11304    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11305 
11306 static tree
11307 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11308 {
11309   contains_label_data *d = (contains_label_data *) data;
11310   switch (TREE_CODE (*tp))
11311     {
11312     case LABEL_EXPR:
11313       return *tp;
11314 
11315     case CASE_LABEL_EXPR:
11316       if (!d->inside_switch_p)
11317 	return *tp;
11318       return NULL_TREE;
11319 
11320     case SWITCH_EXPR:
11321       if (!d->inside_switch_p)
11322 	{
11323 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11324 	    return *tp;
11325 	  d->inside_switch_p = true;
11326 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11327 	    return *tp;
11328 	  d->inside_switch_p = false;
11329 	  *walk_subtrees = 0;
11330 	}
11331       return NULL_TREE;
11332 
11333     case GOTO_EXPR:
11334       *walk_subtrees = 0;
11335       return NULL_TREE;
11336 
11337     default:
11338       return NULL_TREE;
11339     }
11340 }
11341 
11342 /* Return whether the sub-tree ST contains a label which is accessible from
11343    outside the sub-tree.  */
11344 
11345 static bool
11346 contains_label_p (tree st)
11347 {
11348   hash_set<tree> pset;
11349   contains_label_data data = { &pset, false };
11350   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11351 }
11352 
11353 /* Fold a ternary expression of code CODE and type TYPE with operands
11354    OP0, OP1, and OP2.  Return the folded expression if folding is
11355    successful.  Otherwise, return NULL_TREE.  */
11356 
11357 tree
11358 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11359 		  tree op0, tree op1, tree op2)
11360 {
11361   tree tem;
11362   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11363   enum tree_code_class kind = TREE_CODE_CLASS (code);
11364 
11365   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11366 	      && TREE_CODE_LENGTH (code) == 3);
11367 
11368   /* If this is a commutative operation, and OP0 is a constant, move it
11369      to OP1 to reduce the number of tests below.  */
11370   if (commutative_ternary_tree_code (code)
11371       && tree_swap_operands_p (op0, op1))
11372     return fold_build3_loc (loc, code, type, op1, op0, op2);
11373 
11374   tem = generic_simplify (loc, code, type, op0, op1, op2);
11375   if (tem)
11376     return tem;
11377 
11378   /* Strip any conversions that don't change the mode.  This is safe
11379      for every expression, except for a comparison expression because
11380      its signedness is derived from its operands.  So, in the latter
11381      case, only strip conversions that don't change the signedness.
11382 
11383      Note that this is done as an internal manipulation within the
11384      constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
11386      the appropriate type conversions should be put back in the tree
11387      that will get out of the constant folder.  */
11388   if (op0)
11389     {
11390       arg0 = op0;
11391       STRIP_NOPS (arg0);
11392     }
11393 
11394   if (op1)
11395     {
11396       arg1 = op1;
11397       STRIP_NOPS (arg1);
11398     }
11399 
11400   if (op2)
11401     {
11402       arg2 = op2;
11403       STRIP_NOPS (arg2);
11404     }
11405 
11406   switch (code)
11407     {
11408     case COMPONENT_REF:
11409       if (TREE_CODE (arg0) == CONSTRUCTOR
11410 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11411 	{
11412 	  unsigned HOST_WIDE_INT idx;
11413 	  tree field, value;
11414 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11415 	    if (field == arg1)
11416 	      return value;
11417 	}
11418       return NULL_TREE;
11419 
11420     case COND_EXPR:
11421     case VEC_COND_EXPR:
11422       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11423 	 so all simple results must be passed through pedantic_non_lvalue.  */
11424       if (TREE_CODE (arg0) == INTEGER_CST)
11425 	{
11426 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11427 	  tem = integer_zerop (arg0) ? op2 : op1;
11428 	  /* Only optimize constant conditions when the selected branch
11429 	     has the same type as the COND_EXPR.  This avoids optimizing
11430              away "c ? x : throw", where the throw has a void type.
             Also avoid throwing away an operand that contains a label.  */
11432           if ((!TREE_SIDE_EFFECTS (unused_op)
11433                || !contains_label_p (unused_op))
11434               && (! VOID_TYPE_P (TREE_TYPE (tem))
11435                   || VOID_TYPE_P (type)))
11436 	    return pedantic_non_lvalue_loc (loc, tem);
11437 	  return NULL_TREE;
11438 	}
11439       else if (TREE_CODE (arg0) == VECTOR_CST)
11440 	{
11441 	  unsigned HOST_WIDE_INT nelts;
11442 	  if ((TREE_CODE (arg1) == VECTOR_CST
11443 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11444 	      && (TREE_CODE (arg2) == VECTOR_CST
11445 		  || TREE_CODE (arg2) == CONSTRUCTOR)
11446 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11447 	    {
11448 	      vec_perm_builder sel (nelts, nelts, 1);
11449 	      for (unsigned int i = 0; i < nelts; i++)
11450 		{
11451 		  tree val = VECTOR_CST_ELT (arg0, i);
11452 		  if (integer_all_onesp (val))
11453 		    sel.quick_push (i);
11454 		  else if (integer_zerop (val))
11455 		    sel.quick_push (nelts + i);
11456 		  else /* Currently unreachable.  */
11457 		    return NULL_TREE;
11458 		}
11459 	      vec_perm_indices indices (sel, 2, nelts);
11460 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
11461 	      if (t != NULL_TREE)
11462 		return t;
11463 	    }
11464 	}
11465 
11466       /* If we have A op B ? A : C, we may be able to convert this to a
11467 	 simpler expression, depending on the operation and the values
11468 	 of B and C.  Signed zeros prevent all of these transformations,
11469 	 for reasons given above each one.
11470 
11471          Also try swapping the arguments and inverting the conditional.  */
11472       if (COMPARISON_CLASS_P (arg0)
11473 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11474 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11475 	{
11476 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11477 	  if (tem)
11478 	    return tem;
11479 	}
11480 
11481       if (COMPARISON_CLASS_P (arg0)
11482 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11483 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11484 	{
11485 	  location_t loc0 = expr_location_or (arg0, loc);
11486 	  tem = fold_invert_truthvalue (loc0, arg0);
11487 	  if (tem && COMPARISON_CLASS_P (tem))
11488 	    {
11489 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11490 	      if (tem)
11491 		return tem;
11492 	    }
11493 	}
11494 
11495       /* If the second operand is simpler than the third, swap them
11496 	 since that produces better jump optimization results.  */
11497       if (truth_value_p (TREE_CODE (arg0))
11498 	  && tree_swap_operands_p (op1, op2))
11499 	{
11500 	  location_t loc0 = expr_location_or (arg0, loc);
11501 	  /* See if this can be inverted.  If it can't, possibly because
11502 	     it was a floating-point inequality comparison, don't do
11503 	     anything.  */
11504 	  tem = fold_invert_truthvalue (loc0, arg0);
11505 	  if (tem)
11506 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11507 	}
11508 
11509       /* Convert A ? 1 : 0 to simply A.  */
11510       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11511 				 : (integer_onep (op1)
11512 				    && !VECTOR_TYPE_P (type)))
11513 	  && integer_zerop (op2)
11514 	  /* If we try to convert OP0 to our type, the
11515 	     call to fold will try to move the conversion inside
11516 	     a COND, which will recurse.  In that case, the COND_EXPR
11517 	     is probably the best choice, so leave it alone.  */
11518 	  && type == TREE_TYPE (arg0))
11519 	return pedantic_non_lvalue_loc (loc, arg0);
11520 
11521       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11522 	 over COND_EXPR in cases such as floating point comparisons.  */
11523       if (integer_zerop (op1)
11524 	  && code == COND_EXPR
11525 	  && integer_onep (op2)
11526 	  && !VECTOR_TYPE_P (type)
11527 	  && truth_value_p (TREE_CODE (arg0)))
11528 	return pedantic_non_lvalue_loc (loc,
11529 				    fold_convert_loc (loc, type,
11530 					      invert_truthvalue_loc (loc,
11531 								     arg0)));
11532 
11533       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
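      /* E.g. for 32-bit int A, "A < 0 ? INT_MIN : 0" folds to
	 A & INT_MIN, since INT_MIN is exactly the sign bit.  */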
11534       if (TREE_CODE (arg0) == LT_EXPR
11535 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11536 	  && integer_zerop (op2)
11537 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11538 	{
11539 	  /* sign_bit_p looks through both zero and sign extensions,
11540 	     but for this optimization only sign extensions are
11541 	     usable.  */
11542 	  tree tem2 = TREE_OPERAND (arg0, 0);
11543 	  while (tem != tem2)
11544 	    {
11545 	      if (TREE_CODE (tem2) != NOP_EXPR
11546 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11547 		{
11548 		  tem = NULL_TREE;
11549 		  break;
11550 		}
11551 	      tem2 = TREE_OPERAND (tem2, 0);
11552 	    }
11553 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11554 	     If <sign bit of A> has wider type than A, bits outside
11555 	     of A's precision in <sign bit of A> need to be checked.
11556 	     If they are all 0, this optimization needs to be done
	     in unsigned A's type; if they are all 1, in signed A's
	     type; otherwise this can't be done.  */
11559 	  if (tem
11560 	      && TYPE_PRECISION (TREE_TYPE (tem))
11561 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11562 	      && TYPE_PRECISION (TREE_TYPE (tem))
11563 		 < TYPE_PRECISION (type))
11564 	    {
11565 	      int inner_width, outer_width;
11566 	      tree tem_type;
11567 
11568 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11569 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11570 	      if (outer_width > TYPE_PRECISION (type))
11571 		outer_width = TYPE_PRECISION (type);
11572 
11573 	      wide_int mask = wi::shifted_mask
11574 		(inner_width, outer_width - inner_width, false,
11575 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11576 
11577 	      wide_int common = mask & wi::to_wide (arg1);
11578 	      if (common == mask)
11579 		{
11580 		  tem_type = signed_type_for (TREE_TYPE (tem));
11581 		  tem = fold_convert_loc (loc, tem_type, tem);
11582 		}
11583 	      else if (common == 0)
11584 		{
11585 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11586 		  tem = fold_convert_loc (loc, tem_type, tem);
11587 		}
11588 	      else
11589 		tem = NULL;
11590 	    }
11591 
11592 	  if (tem)
11593 	    return
11594 	      fold_convert_loc (loc, type,
11595 				fold_build2_loc (loc, BIT_AND_EXPR,
11596 					     TREE_TYPE (tem), tem,
11597 					     fold_convert_loc (loc,
11598 							       TREE_TYPE (tem),
11599 							       arg1)));
11600 	}
11601 
11602       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11603 	 already handled above.  */
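      /* E.g. "((A >> 4) & 1) ? 16 : 0" folds to A & 16: both sides
	 yield 16 when bit 4 of A is set and 0 otherwise.  */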
11604       if (TREE_CODE (arg0) == BIT_AND_EXPR
11605 	  && integer_onep (TREE_OPERAND (arg0, 1))
11606 	  && integer_zerop (op2)
11607 	  && integer_pow2p (arg1))
11608 	{
11609 	  tree tem = TREE_OPERAND (arg0, 0);
11610 	  STRIP_NOPS (tem);
11611 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11612 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11613               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11614 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11615 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11616 				    fold_convert_loc (loc, type,
11617 						      TREE_OPERAND (tem, 0)),
11618 				    op1);
11619 	}
11620 
11621       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11622 	 is probably obsolete because the first operand should be a
11623 	 truth value (that's why we have the two cases above), but let's
11624 	 leave it in until we can confirm this for all front-ends.  */
11625       if (integer_zerop (op2)
11626 	  && TREE_CODE (arg0) == NE_EXPR
11627 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11628 	  && integer_pow2p (arg1)
11629 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11630 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11631 			      arg1, OEP_ONLY_CONST)
11632 	  /* operand_equal_p compares just value, not precision, so e.g.
	     arg1 could be 8-bit -128 and be a power of two, but the
	     BIT_AND_EXPR's second operand 32-bit -128, which is not a
	     power of two (or vice versa).  */
11636 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11637 	return pedantic_non_lvalue_loc (loc,
11638 					fold_convert_loc (loc, type,
11639 							  TREE_OPERAND (arg0,
11640 									0)));
11641 
11642       /* Disable the transformations below for vectors, since
11643 	 fold_binary_op_with_conditional_arg may undo them immediately,
11644 	 yielding an infinite loop.  */
11645       if (code == VEC_COND_EXPR)
11646 	return NULL_TREE;
11647 
11648       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
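      /* E.g. "(X > 0) ? (Y > 0) : 0" folds to (X > 0) && (Y > 0);
	 the conversions below handle the B : 1, 0 : B and 1 : B
	 forms analogously.  */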
11649       if (integer_zerop (op2)
11650 	  && truth_value_p (TREE_CODE (arg0))
11651 	  && truth_value_p (TREE_CODE (arg1))
11652 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11653 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11654 							   : TRUTH_ANDIF_EXPR,
11655 				type, fold_convert_loc (loc, type, arg0), op1);
11656 
11657       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11659 	  && truth_value_p (TREE_CODE (arg0))
11660 	  && truth_value_p (TREE_CODE (arg1))
11661 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11662 	{
11663 	  location_t loc0 = expr_location_or (arg0, loc);
11664 	  /* Only perform transformation if ARG0 is easily inverted.  */
11665 	  tem = fold_invert_truthvalue (loc0, arg0);
11666 	  if (tem)
11667 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11668 					 ? BIT_IOR_EXPR
11669 					 : TRUTH_ORIF_EXPR,
11670 				    type, fold_convert_loc (loc, type, tem),
11671 				    op1);
11672 	}
11673 
11674       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11675       if (integer_zerop (arg1)
11676 	  && truth_value_p (TREE_CODE (arg0))
11677 	  && truth_value_p (TREE_CODE (op2))
11678 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11679 	{
11680 	  location_t loc0 = expr_location_or (arg0, loc);
11681 	  /* Only perform transformation if ARG0 is easily inverted.  */
11682 	  tem = fold_invert_truthvalue (loc0, arg0);
11683 	  if (tem)
11684 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11685 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11686 				    type, fold_convert_loc (loc, type, tem),
11687 				    op2);
11688 	}
11689 
11690       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11692 	  && truth_value_p (TREE_CODE (arg0))
11693 	  && truth_value_p (TREE_CODE (op2))
11694 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11695 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11696 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11697 				type, fold_convert_loc (loc, type, arg0), op2);
11698 
11699       return NULL_TREE;
11700 
11701     case CALL_EXPR:
11702       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11703 	 of fold_ternary on them.  */
11704       gcc_unreachable ();
11705 
11706     case BIT_FIELD_REF:
11707       if (TREE_CODE (arg0) == VECTOR_CST
11708 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11709 	      || (VECTOR_TYPE_P (type)
11710 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11711 	  && tree_fits_uhwi_p (op1)
11712 	  && tree_fits_uhwi_p (op2))
11713 	{
11714 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11715 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11716 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11717 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11718 
11719 	  if (n != 0
11720 	      && (idx % width) == 0
11721 	      && (n % width) == 0
11722 	      && known_le ((idx + n) / width,
11723 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11724 	    {
11725 	      idx = idx / width;
11726 	      n = n / width;
11727 
11728 	      if (TREE_CODE (arg0) == VECTOR_CST)
11729 		{
11730 		  if (n == 1)
11731 		    {
11732 		      tem = VECTOR_CST_ELT (arg0, idx);
11733 		      if (VECTOR_TYPE_P (type))
11734 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11735 		      return tem;
11736 		    }
11737 
11738 		  tree_vector_builder vals (type, n, 1);
11739 		  for (unsigned i = 0; i < n; ++i)
11740 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11741 		  return vals.build ();
11742 		}
11743 	    }
11744 	}
11745 
11746       /* On constants we can use native encode/interpret to constant
11747          fold (nearly) all BIT_FIELD_REFs.  */
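      /* E.g. a byte-aligned BIT_FIELD_REF of an INTEGER_CST can be
	 folded by serializing the constant into its target byte
	 representation and reinterpreting the selected bytes; which
	 byte is picked depends on the target's endianness.  */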
11748       if (CONSTANT_CLASS_P (arg0)
11749 	  && can_native_interpret_type_p (type)
11750 	  && BITS_PER_UNIT == 8
11751 	  && tree_fits_uhwi_p (op1)
11752 	  && tree_fits_uhwi_p (op2))
11753 	{
11754 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11755 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11756 	  /* Limit us to a reasonable amount of work.  To relax the
11757 	     other limitations we need bit-shifting of the buffer
11758 	     and rounding up the size.  */
11759 	  if (bitpos % BITS_PER_UNIT == 0
11760 	      && bitsize % BITS_PER_UNIT == 0
11761 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11762 	    {
11763 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11764 	      unsigned HOST_WIDE_INT len
11765 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11766 				      bitpos / BITS_PER_UNIT);
11767 	      if (len > 0
11768 		  && len * BITS_PER_UNIT >= bitsize)
11769 		{
11770 		  tree v = native_interpret_expr (type, b,
11771 						  bitsize / BITS_PER_UNIT);
11772 		  if (v)
11773 		    return v;
11774 		}
11775 	    }
11776 	}
11777 
11778       return NULL_TREE;
11779 
11780     case VEC_PERM_EXPR:
11781       if (TREE_CODE (arg2) == VECTOR_CST)
11782 	{
11783 	  /* Build a vector of integers from the tree mask.  */
11784 	  vec_perm_builder builder;
11785 	  if (!tree_to_vec_perm_builder (&builder, arg2))
11786 	    return NULL_TREE;
11787 
11788 	  /* Create a vec_perm_indices for the integer vector.  */
11789 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11790 	  bool single_arg = (op0 == op1);
11791 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11792 
11793 	  /* Check for cases that fold to OP0 or OP1 in their original
11794 	     element order.  */
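	  /* E.g. for 4 elements, the selector { 0, 1, 2, 3 } yields
	     OP0 unchanged and { 4, 5, 6, 7 } yields OP1.  */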
11795 	  if (sel.series_p (0, 1, 0, 1))
11796 	    return op0;
11797 	  if (sel.series_p (0, 1, nelts, 1))
11798 	    return op1;
11799 
11800 	  if (!single_arg)
11801 	    {
11802 	      if (sel.all_from_input_p (0))
11803 		op1 = op0;
11804 	      else if (sel.all_from_input_p (1))
11805 		{
11806 		  op0 = op1;
11807 		  sel.rotate_inputs (1);
11808 		}
11809 	    }
11810 
11811 	  if ((TREE_CODE (op0) == VECTOR_CST
11812 	       || TREE_CODE (op0) == CONSTRUCTOR)
11813 	      && (TREE_CODE (op1) == VECTOR_CST
11814 		  || TREE_CODE (op1) == CONSTRUCTOR))
11815 	    {
11816 	      tree t = fold_vec_perm (type, op0, op1, sel);
11817 	      if (t != NULL_TREE)
11818 		return t;
11819 	    }
11820 
11821 	  bool changed = (op0 == op1 && !single_arg);
11822 
11823 	  /* Generate a canonical form of the selector.  */
11824 	  if (arg2 == op2 && sel.encoding () != builder)
11825 	    {
11826 	      /* Some targets are deficient and fail to expand a single
11827 		 argument permutation while still allowing an equivalent
11828 		 2-argument version.  */
11829 	      if (sel.ninputs () == 2
11830 		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11831 		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11832 	      else
11833 		{
11834 		  vec_perm_indices sel2 (builder, 2, nelts);
11835 		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11836 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11837 		  else
11838 		    /* Not directly supported with either encoding,
11839 		       so use the preferred form.  */
11840 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11841 		}
11842 	      changed = true;
11843 	    }
11844 
11845 	  if (changed)
11846 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11847 	}
11848       return NULL_TREE;
11849 
11850     case BIT_INSERT_EXPR:
11851       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
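      /* E.g. in a 16-bit type, inserting the 8-bit value 0xaa into
	 0xf0f0 at bit position 0 masks out the low byte and ORs in
	 the new bits, yielding 0xf0aa.  */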
11852       if (TREE_CODE (arg0) == INTEGER_CST
11853 	  && TREE_CODE (arg1) == INTEGER_CST)
11854 	{
11855 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11856 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11857 	  wide_int tem = (wi::to_wide (arg0)
11858 			  & wi::shifted_mask (bitpos, bitsize, true,
11859 					      TYPE_PRECISION (type)));
11860 	  wide_int tem2
11861 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11862 				    bitsize), bitpos);
11863 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11864 	}
11865       else if (TREE_CODE (arg0) == VECTOR_CST
11866 	       && CONSTANT_CLASS_P (arg1)
11867 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11868 				      TREE_TYPE (arg1)))
11869 	{
11870 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11871 	  unsigned HOST_WIDE_INT elsize
11872 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11873 	  if (bitpos % elsize == 0)
11874 	    {
11875 	      unsigned k = bitpos / elsize;
11876 	      unsigned HOST_WIDE_INT nelts;
11877 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11878 		return arg0;
11879 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11880 		{
11881 		  tree_vector_builder elts (type, nelts, 1);
11882 		  elts.quick_grow (nelts);
11883 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11884 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11885 		  return elts.build ();
11886 		}
11887 	    }
11888 	}
11889       return NULL_TREE;
11890 
11891     default:
11892       return NULL_TREE;
11893     } /* switch (code) */
11894 }
11895 
11896 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11897    of an array (or vector).  */
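/* For illustration (a made-up constructor): given { [2 ... 4] = 7 },
   an ACCESS_INDEX of 3 falls inside the RANGE_EXPR and returns 7;
   an element with a NULL index takes the next index after the
   previous one.  */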
11898 
11899 tree
11900 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11901 {
11902   tree index_type = NULL_TREE;
11903   offset_int low_bound = 0;
11904 
11905   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11906     {
11907       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11908       if (domain_type && TYPE_MIN_VALUE (domain_type))
11909 	{
	  /* Static constructors for variably sized objects make no sense.  */
11911 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11912 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11913 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11914 	}
11915     }
11916 
11917   if (index_type)
11918     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11919 			    TYPE_SIGN (index_type));
11920 
11921   offset_int index = low_bound - 1;
11922   if (index_type)
11923     index = wi::ext (index, TYPE_PRECISION (index_type),
11924 		     TYPE_SIGN (index_type));
11925 
11926   offset_int max_index;
11927   unsigned HOST_WIDE_INT cnt;
11928   tree cfield, cval;
11929 
11930   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11931     {
      /* An array constructor might explicitly set the index, specify a
	 range, or leave the index NULL, meaning it is the next index
	 after the previous one.  */
11935       if (cfield)
11936 	{
11937 	  if (TREE_CODE (cfield) == INTEGER_CST)
11938 	    max_index = index = wi::to_offset (cfield);
11939 	  else
11940 	    {
11941 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11942 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11943 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11944 	    }
11945 	}
11946       else
11947 	{
11948 	  index += 1;
11949 	  if (index_type)
11950 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11951 			     TYPE_SIGN (index_type));
11952 	  max_index = index;
11953 	}
11954 
    /* Do we have a match?  */
11956     if (wi::cmpu (access_index, index) >= 0
11957 	&& wi::cmpu (access_index, max_index) <= 0)
11958       return cval;
11959   }
11960   return NULL_TREE;
11961 }
11962 
11963 /* Perform constant folding and related simplification of EXPR.
11964    The related simplifications include x*1 => x, x*0 => 0, etc.,
11965    and application of the associative law.
11966    NOP_EXPR conversions may be removed freely (as long as we
11967    are careful not to change the type of the overall expression).
11968    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11969    but we can constant-fold them if they have constant operands.  */
11970 
11971 #ifdef ENABLE_FOLD_CHECKING
11972 # define fold(x) fold_1 (x)
11973 static tree fold_1 (tree);
11974 static
11975 #endif
11976 tree
11977 fold (tree expr)
11978 {
11979   const tree t = expr;
11980   enum tree_code code = TREE_CODE (t);
11981   enum tree_code_class kind = TREE_CODE_CLASS (code);
11982   tree tem;
11983   location_t loc = EXPR_LOCATION (expr);
11984 
11985   /* Return right away if a constant.  */
11986   if (kind == tcc_constant)
11987     return t;
11988 
11989   /* CALL_EXPR-like objects with variable numbers of operands are
11990      treated specially.  */
11991   if (kind == tcc_vl_exp)
11992     {
11993       if (code == CALL_EXPR)
11994 	{
11995 	  tem = fold_call_expr (loc, expr, false);
11996 	  return tem ? tem : expr;
11997 	}
11998       return expr;
11999     }
12000 
12001   if (IS_EXPR_CODE_CLASS (kind))
12002     {
12003       tree type = TREE_TYPE (t);
12004       tree op0, op1, op2;
12005 
12006       switch (TREE_CODE_LENGTH (code))
12007 	{
12008 	case 1:
12009 	  op0 = TREE_OPERAND (t, 0);
12010 	  tem = fold_unary_loc (loc, code, type, op0);
12011 	  return tem ? tem : expr;
12012 	case 2:
12013 	  op0 = TREE_OPERAND (t, 0);
12014 	  op1 = TREE_OPERAND (t, 1);
12015 	  tem = fold_binary_loc (loc, code, type, op0, op1);
12016 	  return tem ? tem : expr;
12017 	case 3:
12018 	  op0 = TREE_OPERAND (t, 0);
12019 	  op1 = TREE_OPERAND (t, 1);
12020 	  op2 = TREE_OPERAND (t, 2);
12021 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12022 	  return tem ? tem : expr;
12023 	default:
12024 	  break;
12025 	}
12026     }
12027 
12028   switch (code)
12029     {
12030     case ARRAY_REF:
12031       {
12032 	tree op0 = TREE_OPERAND (t, 0);
12033 	tree op1 = TREE_OPERAND (t, 1);
12034 
12035 	if (TREE_CODE (op1) == INTEGER_CST
12036 	    && TREE_CODE (op0) == CONSTRUCTOR
12037 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12038 	  {
12039 	    tree val = get_array_ctor_element_at_index (op0,
12040 							wi::to_offset (op1));
12041 	    if (val)
12042 	      return val;
12043 	  }
12044 
12045 	return t;
12046       }
12047 
12048       /* Return a VECTOR_CST if possible.  */
12049     case CONSTRUCTOR:
12050       {
12051 	tree type = TREE_TYPE (t);
12052 	if (TREE_CODE (type) != VECTOR_TYPE)
12053 	  return t;
12054 
12055 	unsigned i;
12056 	tree val;
12057 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12058 	  if (! CONSTANT_CLASS_P (val))
12059 	    return t;
12060 
12061 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12062       }
12063 
12064     case CONST_DECL:
12065       return fold (DECL_INITIAL (t));
12066 
12067     default:
12068       return t;
12069     } /* switch (code) */
12070 }
12071 
12072 #ifdef ENABLE_FOLD_CHECKING
12073 #undef fold
12074 
12075 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12076 				hash_table<nofree_ptr_hash<const tree_node> > *);
12077 static void fold_check_failed (const_tree, const_tree);
12078 void print_fold_checksum (const_tree);
12079 
/* When configured with --enable-checking=fold, compute a digest of
   EXPR before and after the actual fold call to verify that fold did
   not accidentally change the original expr.  */
12083 
12084 tree
12085 fold (tree expr)
12086 {
12087   tree ret;
12088   struct md5_ctx ctx;
12089   unsigned char checksum_before[16], checksum_after[16];
12090   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12091 
12092   md5_init_ctx (&ctx);
12093   fold_checksum_tree (expr, &ctx, &ht);
12094   md5_finish_ctx (&ctx, checksum_before);
12095   ht.empty ();
12096 
12097   ret = fold_1 (expr);
12098 
12099   md5_init_ctx (&ctx);
12100   fold_checksum_tree (expr, &ctx, &ht);
12101   md5_finish_ctx (&ctx, checksum_after);
12102 
12103   if (memcmp (checksum_before, checksum_after, 16))
12104     fold_check_failed (expr, ret);
12105 
12106   return ret;
12107 }
12108 
12109 void
12110 print_fold_checksum (const_tree expr)
12111 {
12112   struct md5_ctx ctx;
12113   unsigned char checksum[16], cnt;
12114   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12115 
12116   md5_init_ctx (&ctx);
12117   fold_checksum_tree (expr, &ctx, &ht);
12118   md5_finish_ctx (&ctx, checksum);
12119   for (cnt = 0; cnt < 16; ++cnt)
12120     fprintf (stderr, "%02x", checksum[cnt]);
12121   putc ('\n', stderr);
12122 }
12123 
12124 static void
12125 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12126 {
12127   internal_error ("fold check: original tree changed by fold");
12128 }
12129 
12130 static void
12131 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12132 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12133 {
12134   const tree_node **slot;
12135   enum tree_code code;
12136   union tree_node *buf;
12137   int i, len;
12138 
12139  recursive_label:
12140   if (expr == NULL)
12141     return;
12142   slot = ht->find_slot (expr, INSERT);
12143   if (*slot != NULL)
12144     return;
12145   *slot = expr;
12146   code = TREE_CODE (expr);
12147   if (TREE_CODE_CLASS (code) == tcc_declaration
12148       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12149     {
12150       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12151       size_t sz = tree_size (expr);
12152       buf = XALLOCAVAR (union tree_node, sz);
12153       memcpy ((char *) buf, expr, sz);
12154       SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12155       buf->decl_with_vis.symtab_node = NULL;
12156       buf->base.nowarning_flag = 0;
12157       expr = (tree) buf;
12158     }
12159   else if (TREE_CODE_CLASS (code) == tcc_type
12160 	   && (TYPE_POINTER_TO (expr)
12161 	       || TYPE_REFERENCE_TO (expr)
12162 	       || TYPE_CACHED_VALUES_P (expr)
12163 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12164 	       || TYPE_NEXT_VARIANT (expr)
12165 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12166     {
12167       /* Allow these fields to be modified.  */
12168       tree tmp;
12169       size_t sz = tree_size (expr);
12170       buf = XALLOCAVAR (union tree_node, sz);
12171       memcpy ((char *) buf, expr, sz);
12172       expr = tmp = (tree) buf;
12173       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12174       TYPE_POINTER_TO (tmp) = NULL;
12175       TYPE_REFERENCE_TO (tmp) = NULL;
12176       TYPE_NEXT_VARIANT (tmp) = NULL;
12177       TYPE_ALIAS_SET (tmp) = -1;
12178       if (TYPE_CACHED_VALUES_P (tmp))
12179 	{
12180 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12181 	  TYPE_CACHED_VALUES (tmp) = NULL;
12182 	}
12183     }
12184   else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12185     {
12186       /* Allow TREE_NO_WARNING to be set.  Perhaps we shouldn't allow that
12187 	 and change builtins.c etc. instead - see PR89543.  */
12188       size_t sz = tree_size (expr);
12189       buf = XALLOCAVAR (union tree_node, sz);
12190       memcpy ((char *) buf, expr, sz);
12191       buf->base.nowarning_flag = 0;
12192       expr = (tree) buf;
12193     }
12194   md5_process_bytes (expr, tree_size (expr), ctx);
12195   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12196     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12197   if (TREE_CODE_CLASS (code) != tcc_type
12198       && TREE_CODE_CLASS (code) != tcc_declaration
12199       && code != TREE_LIST
12200       && code != SSA_NAME
12201       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12202     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12203   switch (TREE_CODE_CLASS (code))
12204     {
12205     case tcc_constant:
12206       switch (code)
12207 	{
12208 	case STRING_CST:
12209 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12210 			     TREE_STRING_LENGTH (expr), ctx);
12211 	  break;
12212 	case COMPLEX_CST:
12213 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12214 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12215 	  break;
12216 	case VECTOR_CST:
12217 	  len = vector_cst_encoded_nelts (expr);
12218 	  for (i = 0; i < len; ++i)
12219 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12220 	  break;
12221 	default:
12222 	  break;
12223 	}
12224       break;
12225     case tcc_exceptional:
12226       switch (code)
12227 	{
12228 	case TREE_LIST:
12229 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12230 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12231 	  expr = TREE_CHAIN (expr);
12232 	  goto recursive_label;
12233 	  break;
12234 	case TREE_VEC:
12235 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12236 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12237 	  break;
12238 	default:
12239 	  break;
12240 	}
12241       break;
12242     case tcc_expression:
12243     case tcc_reference:
12244     case tcc_comparison:
12245     case tcc_unary:
12246     case tcc_binary:
12247     case tcc_statement:
12248     case tcc_vl_exp:
12249       len = TREE_OPERAND_LENGTH (expr);
12250       for (i = 0; i < len; ++i)
12251 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12252       break;
12253     case tcc_declaration:
12254       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12255       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12256       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12257 	{
12258 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12259 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12260 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12261 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12262 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12263 	}
12264 
12265       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12266 	{
12267 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12268 	    {
12269 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12270 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12271 	    }
12272 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12273 	}
12274       break;
12275     case tcc_type:
12276       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12277         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12278       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12279       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12280       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12281       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12282       if (INTEGRAL_TYPE_P (expr)
12283           || SCALAR_FLOAT_TYPE_P (expr))
12284 	{
12285 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12286 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12287 	}
12288       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12289       if (TREE_CODE (expr) == RECORD_TYPE
12290 	  || TREE_CODE (expr) == UNION_TYPE
12291 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12292 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12293       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12294       break;
12295     default:
12296       break;
12297     }
12298 }
12299 
12300 /* Helper function for outputting the checksum of a tree T.  When
12301    debugging with gdb, you can "define mynext" to be "next" followed
12302    by "call debug_fold_checksum (op0)", then just trace down till the
12303    outputs differ.  */
12304 
12305 DEBUG_FUNCTION void
12306 debug_fold_checksum (const_tree t)
12307 {
12308   int i;
12309   unsigned char checksum[16];
12310   struct md5_ctx ctx;
12311   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12312 
12313   md5_init_ctx (&ctx);
12314   fold_checksum_tree (t, &ctx, &ht);
12315   md5_finish_ctx (&ctx, checksum);
12316   ht.empty ();
12317 
12318   for (i = 0; i < 16; i++)
12319     fprintf (stderr, "%d ", checksum[i]);
12320 
12321   fprintf (stderr, "\n");
12322 }
12323 
12324 #endif
12325 
12326 /* Fold a unary tree expression with code CODE of type TYPE with an
12327    operand OP0.  LOC is the location of the resulting expression.
12328    Return a folded expression if successful.  Otherwise, return a tree
12329    expression with code CODE of type TYPE with an operand OP0.  */
12330 
12331 tree
12332 fold_build1_loc (location_t loc,
12333 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12334 {
12335   tree tem;
12336 #ifdef ENABLE_FOLD_CHECKING
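  /* With fold checking enabled, digest OP0 before folding and again
     afterwards: fold_unary_loc must not modify its operands, so any
     difference between the two MD5 checksums is a folder bug and is
     reported through fold_check_failed.  */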
12337   unsigned char checksum_before[16], checksum_after[16];
12338   struct md5_ctx ctx;
12339   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12340 
12341   md5_init_ctx (&ctx);
12342   fold_checksum_tree (op0, &ctx, &ht);
12343   md5_finish_ctx (&ctx, checksum_before);
12344   ht.empty ();
12345 #endif
12346 
12347   tem = fold_unary_loc (loc, code, type, op0);
12348   if (!tem)
12349     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12350 
12351 #ifdef ENABLE_FOLD_CHECKING
12352   md5_init_ctx (&ctx);
12353   fold_checksum_tree (op0, &ctx, &ht);
12354   md5_finish_ctx (&ctx, checksum_after);
12355 
12356   if (memcmp (checksum_before, checksum_after, 16))
12357     fold_check_failed (op0, tem);
12358 #endif
12359   return tem;
12360 }
12361 
12362 /* Fold a binary tree expression with code CODE of type TYPE with
12363    operands OP0 and OP1.  LOC is the location of the resulting
12364    expression.  Return a folded expression if successful.  Otherwise,
12365    return a tree expression with code CODE of type TYPE with operands
12366    OP0 and OP1.  */
12367 
12368 tree
12369 fold_build2_loc (location_t loc,
12370 		      enum tree_code code, tree type, tree op0, tree op1
12371 		      MEM_STAT_DECL)
12372 {
12373   tree tem;
12374 #ifdef ENABLE_FOLD_CHECKING
12375   unsigned char checksum_before_op0[16],
12376                 checksum_before_op1[16],
12377 		checksum_after_op0[16],
12378 		checksum_after_op1[16];
12379   struct md5_ctx ctx;
12380   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12381 
12382   md5_init_ctx (&ctx);
12383   fold_checksum_tree (op0, &ctx, &ht);
12384   md5_finish_ctx (&ctx, checksum_before_op0);
12385   ht.empty ();
12386 
12387   md5_init_ctx (&ctx);
12388   fold_checksum_tree (op1, &ctx, &ht);
12389   md5_finish_ctx (&ctx, checksum_before_op1);
12390   ht.empty ();
12391 #endif
12392 
12393   tem = fold_binary_loc (loc, code, type, op0, op1);
12394   if (!tem)
12395     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12396 
12397 #ifdef ENABLE_FOLD_CHECKING
12398   md5_init_ctx (&ctx);
12399   fold_checksum_tree (op0, &ctx, &ht);
12400   md5_finish_ctx (&ctx, checksum_after_op0);
12401   ht.empty ();
12402 
12403   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12404     fold_check_failed (op0, tem);
12405 
12406   md5_init_ctx (&ctx);
12407   fold_checksum_tree (op1, &ctx, &ht);
12408   md5_finish_ctx (&ctx, checksum_after_op1);
12409 
12410   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12411     fold_check_failed (op1, tem);
12412 #endif
12413   return tem;
12414 }
12415 
12416 /* Fold a ternary tree expression with code CODE of type TYPE with
12417    operands OP0, OP1, and OP2.  Return a folded expression if
12418    successful.  Otherwise, return a tree expression with code CODE of
12419    type TYPE with operands OP0, OP1, and OP2.  */
12420 
12421 tree
12422 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12423 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12424 {
12425   tree tem;
12426 #ifdef ENABLE_FOLD_CHECKING
12427   unsigned char checksum_before_op0[16],
12428                 checksum_before_op1[16],
12429                 checksum_before_op2[16],
12430 		checksum_after_op0[16],
12431 		checksum_after_op1[16],
12432 		checksum_after_op2[16];
12433   struct md5_ctx ctx;
12434   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12435 
12436   md5_init_ctx (&ctx);
12437   fold_checksum_tree (op0, &ctx, &ht);
12438   md5_finish_ctx (&ctx, checksum_before_op0);
12439   ht.empty ();
12440 
12441   md5_init_ctx (&ctx);
12442   fold_checksum_tree (op1, &ctx, &ht);
12443   md5_finish_ctx (&ctx, checksum_before_op1);
12444   ht.empty ();
12445 
12446   md5_init_ctx (&ctx);
12447   fold_checksum_tree (op2, &ctx, &ht);
12448   md5_finish_ctx (&ctx, checksum_before_op2);
12449   ht.empty ();
12450 #endif
12451 
12452   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12453   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12454   if (!tem)
12455     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12456 
12457 #ifdef ENABLE_FOLD_CHECKING
12458   md5_init_ctx (&ctx);
12459   fold_checksum_tree (op0, &ctx, &ht);
12460   md5_finish_ctx (&ctx, checksum_after_op0);
12461   ht.empty ();
12462 
12463   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12464     fold_check_failed (op0, tem);
12465 
12466   md5_init_ctx (&ctx);
12467   fold_checksum_tree (op1, &ctx, &ht);
12468   md5_finish_ctx (&ctx, checksum_after_op1);
12469   ht.empty ();
12470 
12471   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12472     fold_check_failed (op1, tem);
12473 
12474   md5_init_ctx (&ctx);
12475   fold_checksum_tree (op2, &ctx, &ht);
12476   md5_finish_ctx (&ctx, checksum_after_op2);
12477 
12478   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12479     fold_check_failed (op2, tem);
12480 #endif
12481   return tem;
12482 }
12483 
12484 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12485    arguments in ARGARRAY, and a null static chain.
12486    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12487    of type TYPE from the given operands as constructed by build_call_array.  */
12488 
12489 tree
12490 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12491 			   int nargs, tree *argarray)
12492 {
12493   tree tem;
12494 #ifdef ENABLE_FOLD_CHECKING
12495   unsigned char checksum_before_fn[16],
12496                 checksum_before_arglist[16],
12497 		checksum_after_fn[16],
12498 		checksum_after_arglist[16];
12499   struct md5_ctx ctx;
12500   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12501   int i;
12502 
12503   md5_init_ctx (&ctx);
12504   fold_checksum_tree (fn, &ctx, &ht);
12505   md5_finish_ctx (&ctx, checksum_before_fn);
12506   ht.empty ();
12507 
12508   md5_init_ctx (&ctx);
12509   for (i = 0; i < nargs; i++)
12510     fold_checksum_tree (argarray[i], &ctx, &ht);
12511   md5_finish_ctx (&ctx, checksum_before_arglist);
12512   ht.empty ();
12513 #endif
12514 
12515   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12516   if (!tem)
12517     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12518 
12519 #ifdef ENABLE_FOLD_CHECKING
12520   md5_init_ctx (&ctx);
12521   fold_checksum_tree (fn, &ctx, &ht);
12522   md5_finish_ctx (&ctx, checksum_after_fn);
12523   ht.empty ();
12524 
12525   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12526     fold_check_failed (fn, tem);
12527 
12528   md5_init_ctx (&ctx);
12529   for (i = 0; i < nargs; i++)
12530     fold_checksum_tree (argarray[i], &ctx, &ht);
12531   md5_finish_ctx (&ctx, checksum_after_arglist);
12532 
12533   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12534     fold_check_failed (NULL_TREE, tem);
12535 #endif
12536   return tem;
12537 }
12538 
12539 /* Perform constant folding and related simplification of initializer
12540    expression EXPR.  These behave identically to "fold_buildN" but ignore
12541    potential run-time traps and exceptions that fold must preserve.  */
12542 
12543 #define START_FOLD_INIT \
12544   int saved_signaling_nans = flag_signaling_nans;\
12545   int saved_trapping_math = flag_trapping_math;\
12546   int saved_rounding_math = flag_rounding_math;\
12547   int saved_trapv = flag_trapv;\
12548   int saved_folding_initializer = folding_initializer;\
12549   flag_signaling_nans = 0;\
12550   flag_trapping_math = 0;\
12551   flag_rounding_math = 0;\
12552   flag_trapv = 0;\
12553   folding_initializer = 1;
12554 
12555 #define END_FOLD_INIT \
12556   flag_signaling_nans = saved_signaling_nans;\
12557   flag_trapping_math = saved_trapping_math;\
12558   flag_rounding_math = saved_rounding_math;\
12559   flag_trapv = saved_trapv;\
12560   folding_initializer = saved_folding_initializer;
12561 
12562 tree
12563 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12564 			     tree type, tree op)
12565 {
12566   tree result;
12567   START_FOLD_INIT;
12568 
12569   result = fold_build1_loc (loc, code, type, op);
12570 
12571   END_FOLD_INIT;
12572   return result;
12573 }
12574 
12575 tree
12576 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12577 			     tree type, tree op0, tree op1)
12578 {
12579   tree result;
12580   START_FOLD_INIT;
12581 
12582   result = fold_build2_loc (loc, code, type, op0, op1);
12583 
12584   END_FOLD_INIT;
12585   return result;
12586 }
12587 
12588 tree
12589 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12590 				       int nargs, tree *argarray)
12591 {
12592   tree result;
12593   START_FOLD_INIT;
12594 
12595   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12596 
12597   END_FOLD_INIT;
12598   return result;
12599 }
12600 
12601 #undef START_FOLD_INIT
12602 #undef END_FOLD_INIT
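
/* As a usage sketch (ONE and THREE standing for hypothetical REAL_CST
   trees holding 1.0 and 3.0):

     tree t = fold_build2_initializer_loc (input_location, RDIV_EXPR,
					   double_type_node, ONE, THREE);

   folds to a REAL_CST even under -frounding-math, where plain
   fold_build2_loc would have to keep the runtime division because the
   result is inexact.  */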
12603 
12604 /* Determine if first argument is a multiple of second argument.  Return 0 if
12605    it is not, or if we cannot easily determine it to be.
12606 
12607    An example of the sort of thing we care about (at this point; this routine
12608    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12609    fold cases do now) is discovering that
12610 
12611      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12612 
12613    is a multiple of
12614 
12615      SAVE_EXPR (J * 8)
12616 
12617    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12618 
12619    This code also handles discovering that
12620 
12621      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12622 
12623    is a multiple of 8 so we don't have to worry about dealing with a
12624    possible remainder.
12625 
12626    Note that we *look* inside a SAVE_EXPR only to determine how it was
12627    calculated; it is not safe for fold to do much of anything else with the
12628    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12629    at run time.  For example, the latter example above *cannot* be implemented
12630    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12631    evaluation time of the original SAVE_EXPR is not necessarily the same at
12632    the time the new expression is evaluated.  The only optimization of this
12633    sort that would be valid is changing
12634 
12635      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12636 
12637    divided by 8 to
12638 
12639      SAVE_EXPR (I) * SAVE_EXPR (J)
12640 
12641    (where the same SAVE_EXPR (J) is used in the original and the
12642    transformed version).  */
12643 
12644 int
12645 multiple_of_p (tree type, const_tree top, const_tree bottom)
12646 {
12647   gimple *stmt;
12648   tree t1, op1, op2;
12649 
12650   if (operand_equal_p (top, bottom, 0))
12651     return 1;
12652 
12653   if (TREE_CODE (type) != INTEGER_TYPE)
12654     return 0;
12655 
12656   switch (TREE_CODE (top))
12657     {
12658     case BIT_AND_EXPR:
12659       /* Bitwise and provides a power of two multiple.  If the mask is
12660 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12661       if (!integer_pow2p (bottom))
12662 	return 0;
12663       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12664 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12665 
12666     case MULT_EXPR:
12667       if (TREE_CODE (bottom) == INTEGER_CST)
12668 	{
12669 	  op1 = TREE_OPERAND (top, 0);
12670 	  op2 = TREE_OPERAND (top, 1);
12671 	  if (TREE_CODE (op1) == INTEGER_CST)
12672 	    std::swap (op1, op2);
12673 	  if (TREE_CODE (op2) == INTEGER_CST)
12674 	    {
12675 	      if (multiple_of_p (type, op2, bottom))
12676 		return 1;
12677 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
12678 	      if (multiple_of_p (type, bottom, op2))
12679 		{
12680 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12681 						 wi::to_widest (op2));
12682 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12683 		    {
12684 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12685 		      return multiple_of_p (type, op1, op2);
12686 		    }
12687 		}
12688 	      return multiple_of_p (type, op1, bottom);
12689 	    }
12690 	}
12691       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12692 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12693 
12694     case MINUS_EXPR:
12695       /* It is impossible to prove precisely whether op0 - op1 is a
12696 	 multiple of bottom, so be conservative and check that both op0
12697 	 and op1 are multiples of bottom.  Note we check the second
12698 	 operand first since it's usually simpler.  */
12699       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12700 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12701 
12702     case PLUS_EXPR:
12703       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12704 	 as op0 - 3 if the expression has unsigned type.  For example,
12705 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12706       op1 = TREE_OPERAND (top, 1);
12707       if (TYPE_UNSIGNED (type)
12708 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12709 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12710       return (multiple_of_p (type, op1, bottom)
12711 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12712 
12713     case LSHIFT_EXPR:
12714       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12715 	{
12716 	  op1 = TREE_OPERAND (top, 1);
12717 	  /* const_binop may not detect overflow correctly,
12718 	     so check for it explicitly here.  */
12719 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12720 			 wi::to_wide (op1))
12721 	      && (t1 = fold_convert (type,
12722 				     const_binop (LSHIFT_EXPR, size_one_node,
12723 						  op1))) != 0
12724 	      && !TREE_OVERFLOW (t1))
12725 	    return multiple_of_p (type, t1, bottom);
12726 	}
12727       return 0;
12728 
12729     case NOP_EXPR:
12730       /* Can't handle conversions from a non-integral or wider integral type.  */
12731       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12732 	  || (TYPE_PRECISION (type)
12733 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12734 	return 0;
12735 
12736       /* fall through */
12737 
12738     case SAVE_EXPR:
12739       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12740 
12741     case COND_EXPR:
12742       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12743 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12744 
12745     case INTEGER_CST:
12746       if (TREE_CODE (bottom) != INTEGER_CST
12747 	  || integer_zerop (bottom)
12748 	  || (TYPE_UNSIGNED (type)
12749 	      && (tree_int_cst_sgn (top) < 0
12750 		  || tree_int_cst_sgn (bottom) < 0)))
12751 	return 0;
12752       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12753 				SIGNED);
12754 
12755     case SSA_NAME:
12756       if (TREE_CODE (bottom) == INTEGER_CST
12757 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12758 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12759 	{
12760 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12761 
12762 	  /* Check for special cases to see if top is defined as a multiple
12763 	     of bottom:
12764 
12765 	       top = (X & ~(bottom - 1)) ; bottom is a power of 2
12766 
12767 	     or
12768 
12769 	       Y = X % bottom
12770 	       top = X - Y.  */
12771 	  if (code == BIT_AND_EXPR
12772 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12773 	      && TREE_CODE (op2) == INTEGER_CST
12774 	      && integer_pow2p (bottom)
12775 	      && wi::multiple_of_p (wi::to_widest (op2),
12776 				    wi::to_widest (bottom), UNSIGNED))
12777 	    return 1;
12778 
12779 	  op1 = gimple_assign_rhs1 (stmt);
12780 	  if (code == MINUS_EXPR
12781 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12782 	      && TREE_CODE (op2) == SSA_NAME
12783 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12784 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12785 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12786 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12787 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12788 	    return 1;
12789 	}
12790 
12791       /* fall through */
12792 
12793     default:
12794       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12795 	return multiple_p (wi::to_poly_widest (top),
12796 			   wi::to_poly_widest (bottom));
12797 
12798       return 0;
12799     }
12800 }
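
/* For example, with TOP the sizetype tree for (I * 8) + 24 (a
   hypothetical query), the PLUS_EXPR and MULT_EXPR cases above give

     multiple_of_p (sizetype, top, size_int (8))  == 1
     multiple_of_p (sizetype, top, size_int (16)) == 0

   since 24 is a multiple of 8, but neither I * 8 nor 24 is provably a
   multiple of 16.  */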
12801 
12802 #define tree_expr_nonnegative_warnv_p(X, Y) \
12803   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12804 
12805 #define RECURSE(X) \
12806   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
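
/* The #define above deliberately poisons direct recursive calls to
   tree_expr_nonnegative_warnv_p in the rest of this file: recursive
   queries must go through RECURSE, which parenthesizes the function
   name so the function-like macro does not expand, and increments
   DEPTH so that the recursion stays bounded.  */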
12807 
12808 /* Return true if CODE or TYPE is known to be non-negative. */
12809 
12810 static bool
12811 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12812 {
12813   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12814       && truth_value_p (code))
12815     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12816        have a signed:1 type (where the values are -1 and 0).  */
12817     return true;
12818   return false;
12819 }
12820 
12821 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12822    value is based on the assumption that signed overflow is undefined,
12823    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12824    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12825 
12826 bool
12827 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12828 				bool *strict_overflow_p, int depth)
12829 {
12830   if (TYPE_UNSIGNED (type))
12831     return true;
12832 
12833   switch (code)
12834     {
12835     case ABS_EXPR:
12836       /* We can't return 1 if flag_wrapv is set because
12837 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12838       if (!ANY_INTEGRAL_TYPE_P (type))
12839 	return true;
12840       if (TYPE_OVERFLOW_UNDEFINED (type))
12841 	{
12842 	  *strict_overflow_p = true;
12843 	  return true;
12844 	}
12845       break;
12846 
12847     case NON_LVALUE_EXPR:
12848     case FLOAT_EXPR:
12849     case FIX_TRUNC_EXPR:
12850       return RECURSE (op0);
12851 
12852     CASE_CONVERT:
12853       {
12854 	tree inner_type = TREE_TYPE (op0);
12855 	tree outer_type = type;
12856 
12857 	if (TREE_CODE (outer_type) == REAL_TYPE)
12858 	  {
12859 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12860 	      return RECURSE (op0);
12861 	    if (INTEGRAL_TYPE_P (inner_type))
12862 	      {
12863 		if (TYPE_UNSIGNED (inner_type))
12864 		  return true;
12865 		return RECURSE (op0);
12866 	      }
12867 	  }
12868 	else if (INTEGRAL_TYPE_P (outer_type))
12869 	  {
12870 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12871 	      return RECURSE (op0);
12872 	    if (INTEGRAL_TYPE_P (inner_type))
12873 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12874 		      && TYPE_UNSIGNED (inner_type);
12875 	  }
12876       }
12877       break;
12878 
12879     default:
12880       return tree_simple_nonnegative_warnv_p (code, type);
12881     }
12882 
12883   /* We don't know the sign of `t', so be conservative and return false.  */
12884   return false;
12885 }
12886 
12887 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12888    value is based on the assumption that signed overflow is undefined,
12889    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12890    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12891 
12892 bool
12893 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12894 				 tree op1, bool *strict_overflow_p,
12895 				 int depth)
12896 {
12897   if (TYPE_UNSIGNED (type))
12898     return true;
12899 
12900   switch (code)
12901     {
12902     case POINTER_PLUS_EXPR:
12903     case PLUS_EXPR:
12904       if (FLOAT_TYPE_P (type))
12905 	return RECURSE (op0) && RECURSE (op1);
12906 
12907       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12908 	 both unsigned and at least 2 bits shorter than the result.  */
12909       if (TREE_CODE (type) == INTEGER_TYPE
12910 	  && TREE_CODE (op0) == NOP_EXPR
12911 	  && TREE_CODE (op1) == NOP_EXPR)
12912 	{
12913 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12914 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12915 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12916 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12917 	    {
12918 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12919 				       TYPE_PRECISION (inner2)) + 1;
12920 	      return prec < TYPE_PRECISION (type);
12921 	    }
12922 	}
12923       break;
12924 
12925     case MULT_EXPR:
12926       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12927 	{
12928 	  /* x * x is always non-negative for floating point x, and for
12929 	     integers when signed overflow is undefined.  */
12930 	  if (operand_equal_p (op0, op1, 0)
12931 	      || (RECURSE (op0) && RECURSE (op1)))
12932 	    {
12933 	      if (ANY_INTEGRAL_TYPE_P (type)
12934 		  && TYPE_OVERFLOW_UNDEFINED (type))
12935 		*strict_overflow_p = true;
12936 	      return true;
12937 	    }
12938 	}
12939 
12940       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12941 	 both unsigned and their total bits is shorter than the result.  */
12942       if (TREE_CODE (type) == INTEGER_TYPE
12943 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12944 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12945 	{
12946 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12947 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12948 	    : TREE_TYPE (op0);
12949 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12950 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12951 	    : TREE_TYPE (op1);
12952 
12953 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12954 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12955 
12956 	  if (TREE_CODE (op0) == INTEGER_CST)
12957 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12958 
12959 	  if (TREE_CODE (op1) == INTEGER_CST)
12960 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12961 
12962 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12963 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12964 	    {
12965 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12966 		? tree_int_cst_min_precision (op0, UNSIGNED)
12967 		: TYPE_PRECISION (inner0);
12968 
12969 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12970 		? tree_int_cst_min_precision (op1, UNSIGNED)
12971 		: TYPE_PRECISION (inner1);
12972 
12973 	      return precision0 + precision1 < TYPE_PRECISION (type);
12974 	    }
12975 	}
12976       return false;
12977 
12978     case BIT_AND_EXPR:
12979     case MAX_EXPR:
12980       return RECURSE (op0) || RECURSE (op1);
12981 
12982     case BIT_IOR_EXPR:
12983     case BIT_XOR_EXPR:
12984     case MIN_EXPR:
12985     case RDIV_EXPR:
12986     case TRUNC_DIV_EXPR:
12987     case CEIL_DIV_EXPR:
12988     case FLOOR_DIV_EXPR:
12989     case ROUND_DIV_EXPR:
12990       return RECURSE (op0) && RECURSE (op1);
12991 
12992     case TRUNC_MOD_EXPR:
12993       return RECURSE (op0);
12994 
12995     case FLOOR_MOD_EXPR:
12996       return RECURSE (op1);
12997 
12998     case CEIL_MOD_EXPR:
12999     case ROUND_MOD_EXPR:
13000     default:
13001       return tree_simple_nonnegative_warnv_p (code, type);
13002     }
13003 
13004   /* We don't know the sign of `t', so be conservative and return false.  */
13005   return false;
13006 }
13007 
13008 /* Return true if T is known to be non-negative.  If the return
13009    value is based on the assumption that signed overflow is undefined,
13010    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13011    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13012 
13013 bool
13014 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13015 {
13016   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13017     return true;
13018 
13019   switch (TREE_CODE (t))
13020     {
13021     case INTEGER_CST:
13022       return tree_int_cst_sgn (t) >= 0;
13023 
13024     case REAL_CST:
13025       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13026 
13027     case FIXED_CST:
13028       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13029 
13030     case COND_EXPR:
13031       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13032 
13033     case SSA_NAME:
13034       /* Limit the depth of recursion to avoid quadratic behavior.
13035 	 This is expected to catch almost all occurrences in practice.
13036 	 If this code misses important cases that unbounded recursion
13037 	 would not, passes that need this information could be revised
13038 	 to provide it through dataflow propagation.  */
13039       return (!name_registered_for_update_p (t)
13040 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13041 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13042 						  strict_overflow_p, depth));
13043 
13044     default:
13045       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13046     }
13047 }
13048 
13049 /* Return true if T is known to be non-negative.  If the return
13050    value is based on the assumption that signed overflow is undefined,
13051    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13052    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13053 
13054 bool
13055 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13056 			       bool *strict_overflow_p, int depth)
13057 {
13058   switch (fn)
13059     {
13060     CASE_CFN_ACOS:
13061     CASE_CFN_ACOSH:
13062     CASE_CFN_CABS:
13063     CASE_CFN_COSH:
13064     CASE_CFN_ERFC:
13065     CASE_CFN_EXP:
13066     CASE_CFN_EXP10:
13067     CASE_CFN_EXP2:
13068     CASE_CFN_FABS:
13069     CASE_CFN_FDIM:
13070     CASE_CFN_HYPOT:
13071     CASE_CFN_POW10:
13072     CASE_CFN_FFS:
13073     CASE_CFN_PARITY:
13074     CASE_CFN_POPCOUNT:
13075     CASE_CFN_CLZ:
13076     CASE_CFN_CLRSB:
13077     case CFN_BUILT_IN_BSWAP32:
13078     case CFN_BUILT_IN_BSWAP64:
13079       /* Always true.  */
13080       return true;
13081 
13082     CASE_CFN_SQRT:
13083     CASE_CFN_SQRT_FN:
13084       /* sqrt(-0.0) is -0.0.  */
13085       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13086 	return true;
13087       return RECURSE (arg0);
13088 
13089     CASE_CFN_ASINH:
13090     CASE_CFN_ATAN:
13091     CASE_CFN_ATANH:
13092     CASE_CFN_CBRT:
13093     CASE_CFN_CEIL:
13094     CASE_CFN_CEIL_FN:
13095     CASE_CFN_ERF:
13096     CASE_CFN_EXPM1:
13097     CASE_CFN_FLOOR:
13098     CASE_CFN_FLOOR_FN:
13099     CASE_CFN_FMOD:
13100     CASE_CFN_FREXP:
13101     CASE_CFN_ICEIL:
13102     CASE_CFN_IFLOOR:
13103     CASE_CFN_IRINT:
13104     CASE_CFN_IROUND:
13105     CASE_CFN_LCEIL:
13106     CASE_CFN_LDEXP:
13107     CASE_CFN_LFLOOR:
13108     CASE_CFN_LLCEIL:
13109     CASE_CFN_LLFLOOR:
13110     CASE_CFN_LLRINT:
13111     CASE_CFN_LLROUND:
13112     CASE_CFN_LRINT:
13113     CASE_CFN_LROUND:
13114     CASE_CFN_MODF:
13115     CASE_CFN_NEARBYINT:
13116     CASE_CFN_NEARBYINT_FN:
13117     CASE_CFN_RINT:
13118     CASE_CFN_RINT_FN:
13119     CASE_CFN_ROUND:
13120     CASE_CFN_ROUND_FN:
13121     CASE_CFN_SCALB:
13122     CASE_CFN_SCALBLN:
13123     CASE_CFN_SCALBN:
13124     CASE_CFN_SIGNBIT:
13125     CASE_CFN_SIGNIFICAND:
13126     CASE_CFN_SINH:
13127     CASE_CFN_TANH:
13128     CASE_CFN_TRUNC:
13129     CASE_CFN_TRUNC_FN:
13130       /* True if the 1st argument is nonnegative.  */
13131       return RECURSE (arg0);
13132 
13133     CASE_CFN_FMAX:
13134     CASE_CFN_FMAX_FN:
13135       /* True if the 1st OR 2nd argument is nonnegative.  */
13136       return RECURSE (arg0) || RECURSE (arg1);
13137 
13138     CASE_CFN_FMIN:
13139     CASE_CFN_FMIN_FN:
13140       /* True if the 1st AND 2nd arguments are nonnegative.  */
13141       return RECURSE (arg0) && RECURSE (arg1);
13142 
13143     CASE_CFN_COPYSIGN:
13144     CASE_CFN_COPYSIGN_FN:
13145       /* True if the 2nd argument is nonnegative.  */
13146       return RECURSE (arg1);
13147 
13148     CASE_CFN_POWI:
13149       /* True if the 1st argument is nonnegative or the second
13150 	 argument is an even integer.  */
13151       if (TREE_CODE (arg1) == INTEGER_CST
13152 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13153 	return true;
13154       return RECURSE (arg0);
13155 
13156     CASE_CFN_POW:
13157       /* True if the 1st argument is nonnegative or the second
13158 	 argument is an even integer-valued real.  */
13159       if (TREE_CODE (arg1) == REAL_CST)
13160 	{
13161 	  REAL_VALUE_TYPE c;
13162 	  HOST_WIDE_INT n;
13163 
13164 	  c = TREE_REAL_CST (arg1);
13165 	  n = real_to_integer (&c);
13166 	  if ((n & 1) == 0)
13167 	    {
13168 	      REAL_VALUE_TYPE cint;
13169 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13170 	      if (real_identical (&c, &cint))
13171 		return true;
13172 	    }
13173 	}
13174       return RECURSE (arg0);
13175 
13176     default:
13177       break;
13178     }
13179   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13180 }
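
/* E.g. pow (x, 2.0) is known nonnegative regardless of its first
   argument via the CASE_CFN_POW case above, because 2.0 is an even
   integer-valued real.  */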
13181 
13182 /* Return true if T is known to be non-negative.  If the return
13183    value is based on the assumption that signed overflow is undefined,
13184    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13185    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13186 
13187 static bool
13188 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13189 {
13190   enum tree_code code = TREE_CODE (t);
13191   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13192     return true;
13193 
13194   switch (code)
13195     {
13196     case TARGET_EXPR:
13197       {
13198 	tree temp = TARGET_EXPR_SLOT (t);
13199 	t = TARGET_EXPR_INITIAL (t);
13200 
13201 	/* If the initializer is non-void, then it's a normal expression
13202 	   that will be assigned to the slot.  */
13203 	if (!VOID_TYPE_P (t))
13204 	  return RECURSE (t);
13205 
13206 	/* Otherwise, the initializer sets the slot in some way.  One common
13207 	   way is an assignment statement at the end of the initializer.  */
13208 	while (1)
13209 	  {
13210 	    if (TREE_CODE (t) == BIND_EXPR)
13211 	      t = expr_last (BIND_EXPR_BODY (t));
13212 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13213 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13214 	      t = expr_last (TREE_OPERAND (t, 0));
13215 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13216 	      t = expr_last (t);
13217 	    else
13218 	      break;
13219 	  }
13220 	if (TREE_CODE (t) == MODIFY_EXPR
13221 	    && TREE_OPERAND (t, 0) == temp)
13222 	  return RECURSE (TREE_OPERAND (t, 1));
13223 
13224 	return false;
13225       }
13226 
13227     case CALL_EXPR:
13228 	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13229 	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13230 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
13231 
13232 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13233 					      get_call_combined_fn (t),
13234 					      arg0,
13235 					      arg1,
13236 					      strict_overflow_p, depth);
13237       }
13238     case COMPOUND_EXPR:
13239     case MODIFY_EXPR:
13240       return RECURSE (TREE_OPERAND (t, 1));
13241 
13242     case BIND_EXPR:
13243       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13244 
13245     case SAVE_EXPR:
13246       return RECURSE (TREE_OPERAND (t, 0));
13247 
13248     default:
13249       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13250     }
13251 }
13252 
13253 #undef RECURSE
13254 #undef tree_expr_nonnegative_warnv_p
13255 
13256 /* Return true if T is known to be non-negative.  If the return
13257    value is based on the assumption that signed overflow is undefined,
13258    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13259    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13260 
13261 bool
13262 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13263 {
13264   enum tree_code code;
13265   if (t == error_mark_node)
13266     return false;
13267 
13268   code = TREE_CODE (t);
13269   switch (TREE_CODE_CLASS (code))
13270     {
13271     case tcc_binary:
13272     case tcc_comparison:
13273       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13274 					      TREE_TYPE (t),
13275 					      TREE_OPERAND (t, 0),
13276 					      TREE_OPERAND (t, 1),
13277 					      strict_overflow_p, depth);
13278 
13279     case tcc_unary:
13280       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13281 					     TREE_TYPE (t),
13282 					     TREE_OPERAND (t, 0),
13283 					     strict_overflow_p, depth);
13284 
13285     case tcc_constant:
13286     case tcc_declaration:
13287     case tcc_reference:
13288       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13289 
13290     default:
13291       break;
13292     }
13293 
13294   switch (code)
13295     {
13296     case TRUTH_AND_EXPR:
13297     case TRUTH_OR_EXPR:
13298     case TRUTH_XOR_EXPR:
13299       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13300 					      TREE_TYPE (t),
13301 					      TREE_OPERAND (t, 0),
13302 					      TREE_OPERAND (t, 1),
13303 					      strict_overflow_p, depth);
13304     case TRUTH_NOT_EXPR:
13305       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13306 					     TREE_TYPE (t),
13307 					     TREE_OPERAND (t, 0),
13308 					     strict_overflow_p, depth);
13309 
13310     case COND_EXPR:
13311     case CONSTRUCTOR:
13312     case OBJ_TYPE_REF:
13313     case ASSERT_EXPR:
13314     case ADDR_EXPR:
13315     case WITH_SIZE_EXPR:
13316     case SSA_NAME:
13317       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13318 
13319     default:
13320       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13321     }
13322 }
13323 
13324 /* Return true if `t' is known to be non-negative.  Handle warnings
13325    about undefined signed overflow.  */
13326 
13327 bool
13328 tree_expr_nonnegative_p (tree t)
13329 {
13330   bool ret, strict_overflow_p;
13331 
13332   strict_overflow_p = false;
13333   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13334   if (strict_overflow_p)
13335     fold_overflow_warning (("assuming signed overflow does not occur when "
13336 			    "determining that expression is always "
13337 			    "non-negative"),
13338 			   WARN_STRICT_OVERFLOW_MISC);
13339   return ret;
13340 }
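
/* E.g. for signed X with -fno-wrapv, ABS_EXPR <X> is nonnegative only
   on the assumption that negating INT_MIN does not wrap, so a true
   result sets the strict-overflow flag and is reported above through
   fold_overflow_warning when -Wstrict-overflow is enabled.  */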
13341 
13342 
13343 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13344    For floating point we further ensure that T is not denormal.
13345    Similar logic is present in nonzero_address in rtlanal.h.
13346 
13347    If the return value is based on the assumption that signed overflow
13348    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13349    change *STRICT_OVERFLOW_P.  */
13350 
13351 bool
13352 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13353 				 bool *strict_overflow_p)
13354 {
13355   switch (code)
13356     {
13357     case ABS_EXPR:
13358       return tree_expr_nonzero_warnv_p (op0,
13359 					strict_overflow_p);
13360 
13361     case NOP_EXPR:
13362       {
13363 	tree inner_type = TREE_TYPE (op0);
13364 	tree outer_type = type;
13365 
13366 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13367 		&& tree_expr_nonzero_warnv_p (op0,
13368 					      strict_overflow_p));
13369       }
13370       break;
13371 
13372     case NON_LVALUE_EXPR:
13373       return tree_expr_nonzero_warnv_p (op0,
13374 					strict_overflow_p);
13375 
13376     default:
13377       break;
13378   }
13379 
13380   return false;
13381 }
13382 
13383 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13384    For floating point we further ensure that T is not denormal.
13385    Similar logic is present in nonzero_address in rtlanal.h.
13386 
13387    If the return value is based on the assumption that signed overflow
13388    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13389    change *STRICT_OVERFLOW_P.  */
13390 
13391 bool
13392 tree_binary_nonzero_warnv_p (enum tree_code code,
13393 			     tree type,
13394 			     tree op0,
13395 			     tree op1, bool *strict_overflow_p)
13396 {
13397   bool sub_strict_overflow_p;
13398   switch (code)
13399     {
13400     case POINTER_PLUS_EXPR:
13401     case PLUS_EXPR:
13402       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13403 	{
13404 	  /* In the presence of negative values it is hard
13405 	     to say anything definite.  */
13406 	  sub_strict_overflow_p = false;
13407 	  if (!tree_expr_nonnegative_warnv_p (op0,
13408 					      &sub_strict_overflow_p)
13409 	      || !tree_expr_nonnegative_warnv_p (op1,
13410 						 &sub_strict_overflow_p))
13411 	    return false;
13412 	  /* One of the operands must be positive and the other non-negative.  */
13413 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13414 	     overflows, on a twos-complement machine the sum of two
13415 	     nonnegative numbers can never be zero.  */
13416 	  return (tree_expr_nonzero_warnv_p (op0,
13417 					     strict_overflow_p)
13418 		  || tree_expr_nonzero_warnv_p (op1,
13419 						strict_overflow_p));
13420 	}
13421       break;
13422 
13423     case MULT_EXPR:
13424       if (TYPE_OVERFLOW_UNDEFINED (type))
13425 	{
13426 	  if (tree_expr_nonzero_warnv_p (op0,
13427 					 strict_overflow_p)
13428 	      && tree_expr_nonzero_warnv_p (op1,
13429 					    strict_overflow_p))
13430 	    {
13431 	      *strict_overflow_p = true;
13432 	      return true;
13433 	    }
13434 	}
13435       break;
13436 
13437     case MIN_EXPR:
13438       sub_strict_overflow_p = false;
13439       if (tree_expr_nonzero_warnv_p (op0,
13440 				     &sub_strict_overflow_p)
13441 	  && tree_expr_nonzero_warnv_p (op1,
13442 					&sub_strict_overflow_p))
13443 	{
13444 	  if (sub_strict_overflow_p)
13445 	    *strict_overflow_p = true;
13446 	}
13447       break;
13448 
13449     case MAX_EXPR:
13450       sub_strict_overflow_p = false;
13451       if (tree_expr_nonzero_warnv_p (op0,
13452 				     &sub_strict_overflow_p))
13453 	{
13454 	  if (sub_strict_overflow_p)
13455 	    *strict_overflow_p = true;
13456 
13457 	  /* When both operands are nonzero, then MAX must be too.  */
13458 	  if (tree_expr_nonzero_warnv_p (op1,
13459 					 strict_overflow_p))
13460 	    return true;
13461 
13462 	  /* MAX where operand 0 is positive is positive.  */
13463 	  return tree_expr_nonnegative_warnv_p (op0,
13464 					       strict_overflow_p);
13465 	}
13466       /* MAX where operand 1 is positive is positive.  */
13467       else if (tree_expr_nonzero_warnv_p (op1,
13468 					  &sub_strict_overflow_p)
13469 	       && tree_expr_nonnegative_warnv_p (op1,
13470 						 &sub_strict_overflow_p))
13471 	{
13472 	  if (sub_strict_overflow_p)
13473 	    *strict_overflow_p = true;
13474 	  return true;
13475 	}
13476       break;
13477 
13478     case BIT_IOR_EXPR:
13479       return (tree_expr_nonzero_warnv_p (op1,
13480 					 strict_overflow_p)
13481 	      || tree_expr_nonzero_warnv_p (op0,
13482 					    strict_overflow_p));
13483 
13484     default:
13485       break;
13486   }
13487 
13488   return false;
13489 }
13490 
13491 /* Return true when T is an address and is known to be nonzero.
13492    For floating point we further ensure that T is not denormal.
13493    Similar logic is present in nonzero_address in rtlanal.h.
13494 
13495    If the return value is based on the assumption that signed overflow
13496    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13497    change *STRICT_OVERFLOW_P.  */
13498 
13499 bool
13500 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13501 {
13502   bool sub_strict_overflow_p;
13503   switch (TREE_CODE (t))
13504     {
13505     case INTEGER_CST:
13506       return !integer_zerop (t);
13507 
13508     case ADDR_EXPR:
13509       {
13510 	tree base = TREE_OPERAND (t, 0);
13511 
13512 	if (!DECL_P (base))
13513 	  base = get_base_address (base);
13514 
13515 	if (base && TREE_CODE (base) == TARGET_EXPR)
13516 	  base = TARGET_EXPR_SLOT (base);
13517 
13518 	if (!base)
13519 	  return false;
13520 
13521 	/* For objects in symbol table check if we know they are non-zero.
13522 	   Don't do anything for variables and functions before symtab is built;
13523 	   it is quite possible that they will be declared weak later.  */
13524 	int nonzero_addr = maybe_nonzero_address (base);
13525 	if (nonzero_addr >= 0)
13526 	  return nonzero_addr;
13527 
13528 	/* Constants are never weak.  */
13529 	if (CONSTANT_CLASS_P (base))
13530 	  return true;
13531 
13532 	return false;
13533       }
13534 
13535     case COND_EXPR:
13536       sub_strict_overflow_p = false;
13537       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13538 				     &sub_strict_overflow_p)
13539 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13540 					&sub_strict_overflow_p))
13541 	{
13542 	  if (sub_strict_overflow_p)
13543 	    *strict_overflow_p = true;
13544 	  return true;
13545 	}
13546       break;
13547 
13548     case SSA_NAME:
13549       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13550 	break;
13551       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13552 
13553     default:
13554       break;
13555     }
13556   return false;
13557 }
13558 
13559 #define integer_valued_real_p(X) \
13560   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13561 
13562 #define RECURSE(X) \
13563   ((integer_valued_real_p) (X, depth + 1))
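
/* As with tree_expr_nonnegative_warnv_p above, recursive queries must
   go through RECURSE so that DEPTH is incremented and the recursion
   stays bounded.  */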
13564 
13565 /* Return true if the floating point result of (CODE OP0) has an
13566    integer value.  We also allow +Inf, -Inf and NaN to be considered
13567    integer values. Return false for signaling NaN.
13568 
13569    DEPTH is the current nesting depth of the query.  */
13570 
13571 bool
13572 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13573 {
13574   switch (code)
13575     {
13576     case FLOAT_EXPR:
13577       return true;
13578 
13579     case ABS_EXPR:
13580       return RECURSE (op0);
13581 
13582     CASE_CONVERT:
13583       {
13584 	tree type = TREE_TYPE (op0);
13585 	if (TREE_CODE (type) == INTEGER_TYPE)
13586 	  return true;
13587 	if (TREE_CODE (type) == REAL_TYPE)
13588 	  return RECURSE (op0);
13589 	break;
13590       }
13591 
13592     default:
13593       break;
13594     }
13595   return false;
13596 }
13597 
13598 /* Return true if the floating point result of (CODE OP0 OP1) has an
13599    integer value.  We also allow +Inf, -Inf and NaN to be considered
13600    integer values. Return false for signaling NaN.
13601 
13602    DEPTH is the current nesting depth of the query.  */
13603 
13604 bool
13605 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13606 {
13607   switch (code)
13608     {
13609     case PLUS_EXPR:
13610     case MINUS_EXPR:
13611     case MULT_EXPR:
13612     case MIN_EXPR:
13613     case MAX_EXPR:
13614       return RECURSE (op0) && RECURSE (op1);
13615 
13616     default:
13617       break;
13618     }
13619   return false;
13620 }
13621 
13622 /* Return true if the floating point result of calling FNDECL with arguments
13623    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13624    considered integer values. Return false for signaling NaN.  If FNDECL
13625    takes fewer than 2 arguments, the remaining ARGn are null.
13626 
13627    DEPTH is the current nesting depth of the query.  */
13628 
13629 bool
13630 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13631 {
13632   switch (fn)
13633     {
13634     CASE_CFN_CEIL:
13635     CASE_CFN_CEIL_FN:
13636     CASE_CFN_FLOOR:
13637     CASE_CFN_FLOOR_FN:
13638     CASE_CFN_NEARBYINT:
13639     CASE_CFN_NEARBYINT_FN:
13640     CASE_CFN_RINT:
13641     CASE_CFN_RINT_FN:
13642     CASE_CFN_ROUND:
13643     CASE_CFN_ROUND_FN:
13644     CASE_CFN_TRUNC:
13645     CASE_CFN_TRUNC_FN:
13646       return true;
13647 
13648     CASE_CFN_FMIN:
13649     CASE_CFN_FMIN_FN:
13650     CASE_CFN_FMAX:
13651     CASE_CFN_FMAX_FN:
13652       return RECURSE (arg0) && RECURSE (arg1);
13653 
13654     default:
13655       break;
13656     }
13657   return false;
13658 }
13659 
13660 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13661    has an integer value.  We also allow +Inf, -Inf and NaN to be
13662    considered integer values. Return false for signaling NaN.
13663 
13664    DEPTH is the current nesting depth of the query.  */
13665 
13666 bool
13667 integer_valued_real_single_p (tree t, int depth)
13668 {
13669   switch (TREE_CODE (t))
13670     {
13671     case REAL_CST:
13672       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13673 
13674     case COND_EXPR:
13675       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13676 
13677     case SSA_NAME:
13678       /* Limit the depth of recursion to avoid quadratic behavior.
13679 	 This is expected to catch almost all occurrences in practice.
13680 	 If this code misses important cases that unbounded recursion
13681 	 would not, passes that need this information could be revised
13682 	 to provide it through dataflow propagation.  */
13683       return (!name_registered_for_update_p (t)
13684 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13685 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13686 						    depth));
13687 
13688     default:
13689       break;
13690     }
13691   return false;
13692 }
13693 
13694 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13695    has an integer value.  We also allow +Inf, -Inf and NaN to be
13696    considered integer values. Return false for signaling NaN.
13697 
13698    DEPTH is the current nesting depth of the query.  */
13699 
13700 static bool
13701 integer_valued_real_invalid_p (tree t, int depth)
13702 {
13703   switch (TREE_CODE (t))
13704     {
13705     case COMPOUND_EXPR:
13706     case MODIFY_EXPR:
13707     case BIND_EXPR:
13708       return RECURSE (TREE_OPERAND (t, 1));
13709 
13710     case SAVE_EXPR:
13711       return RECURSE (TREE_OPERAND (t, 0));
13712 
13713     default:
13714       break;
13715     }
13716   return false;
13717 }
13718 
13719 #undef RECURSE
13720 #undef integer_valued_real_p
13721 
13722 /* Return true if the floating point expression T has an integer value.
13723    We also allow +Inf, -Inf and NaN to be considered integer values.
13724    Return false for signaling NaN.
13725 
13726    DEPTH is the current nesting depth of the query.  */
13727 
13728 bool
13729 integer_valued_real_p (tree t, int depth)
13730 {
13731   if (t == error_mark_node)
13732     return false;
13733 
13734   STRIP_ANY_LOCATION_WRAPPER (t);
13735 
13736   tree_code code = TREE_CODE (t);
13737   switch (TREE_CODE_CLASS (code))
13738     {
13739     case tcc_binary:
13740     case tcc_comparison:
13741       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13742 					   TREE_OPERAND (t, 1), depth);
13743 
13744     case tcc_unary:
13745       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13746 
13747     case tcc_constant:
13748     case tcc_declaration:
13749     case tcc_reference:
13750       return integer_valued_real_single_p (t, depth);
13751 
13752     default:
13753       break;
13754     }
13755 
13756   switch (code)
13757     {
13758     case COND_EXPR:
13759     case SSA_NAME:
13760       return integer_valued_real_single_p (t, depth);
13761 
13762     case CALL_EXPR:
13763       {
13764 	tree arg0 = (call_expr_nargs (t) > 0
13765 		     ? CALL_EXPR_ARG (t, 0)
13766 		     : NULL_TREE);
13767 	tree arg1 = (call_expr_nargs (t) > 1
13768 		     ? CALL_EXPR_ARG (t, 1)
13769 		     : NULL_TREE);
13770 	return integer_valued_real_call_p (get_call_combined_fn (t),
13771 					   arg0, arg1, depth);
13772       }
13773 
13774     default:
13775       return integer_valued_real_invalid_p (t, depth);
13776     }
13777 }
13778 
13779 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13780    attempt to fold the expression to a constant without modifying TYPE,
13781    OP0 or OP1.
13782 
13783    If the expression could be simplified to a constant, then return
13784    the constant.  If the expression would not be simplified to a
13785    constant, then return NULL_TREE.  */
13786 
13787 tree
13788 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13789 {
13790   tree tem = fold_binary (code, type, op0, op1);
13791   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13792 }
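
/* For instance (a hypothetical call),

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, whereas X + 3 for a non-constant X folds to
   a non-constant tree, so NULL_TREE is returned instead.  */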
13793 
13794 /* Given the components of a unary expression CODE, TYPE and OP0,
13795    attempt to fold the expression to a constant without modifying
13796    TYPE or OP0.
13797 
13798    If the expression could be simplified to a constant, then return
13799    the constant.  If the expression would not be simplified to a
13800    constant, then return NULL_TREE.  */
13801 
13802 tree
13803 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13804 {
13805   tree tem = fold_unary (code, type, op0);
13806   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13807 }
13808 
13809 /* If EXP represents referencing an element in a constant string
13810    (either via pointer arithmetic or array indexing), return the
13811    tree representing the value accessed, otherwise return NULL.  */
13812 
13813 tree
13814 fold_read_from_constant_string (tree exp)
13815 {
13816   if ((TREE_CODE (exp) == INDIRECT_REF
13817        || TREE_CODE (exp) == ARRAY_REF)
13818       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13819     {
13820       tree exp1 = TREE_OPERAND (exp, 0);
13821       tree index;
13822       tree string;
13823       location_t loc = EXPR_LOCATION (exp);
13824 
13825       if (TREE_CODE (exp) == INDIRECT_REF)
13826 	string = string_constant (exp1, &index, NULL, NULL);
13827       else
13828 	{
13829 	  tree low_bound = array_ref_low_bound (exp);
13830 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13831 
13832 	  /* Optimize the special-case of a zero lower bound.
13833 
13834 	     We convert the low_bound to sizetype to avoid some problems
13835 	     with constant folding.  (E.g. suppose the lower bound is 1,
13836 	     and its mode is QI.  Without the conversion, (ARRAY
13837 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13838 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13839 	  if (! integer_zerop (low_bound))
13840 	    index = size_diffop_loc (loc, index,
13841 				 fold_convert_loc (loc, sizetype, low_bound));
13842 
13843 	  string = exp1;
13844 	}
13845 
13846       scalar_int_mode char_mode;
13847       if (string
13848 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13849 	  && TREE_CODE (string) == STRING_CST
13850 	  && TREE_CODE (index) == INTEGER_CST
13851 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13852 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13853 			  &char_mode)
13854 	  && GET_MODE_SIZE (char_mode) == 1)
13855 	return build_int_cst_type (TREE_TYPE (exp),
13856 				   (TREE_STRING_POINTER (string)
13857 				    [TREE_INT_CST_LOW (index)]));
13858     }
13859   return NULL;
13860 }
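
/* For example, if EXP is the tree for "abc"[1], all of the checks above
   succeed and the result is build_int_cst_type (TREE_TYPE (exp), 'b').  */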
13861 
13862 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13863    an integer constant, real, or fixed-point constant.
13864 
13865    TYPE is the type of the result.  */
13866 
13867 static tree
13868 fold_negate_const (tree arg0, tree type)
13869 {
13870   tree t = NULL_TREE;
13871 
13872   switch (TREE_CODE (arg0))
13873     {
13874     case REAL_CST:
13875       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13876       break;
13877 
13878     case FIXED_CST:
13879       {
13880         FIXED_VALUE_TYPE f;
13881         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13882 					    &(TREE_FIXED_CST (arg0)), NULL,
13883 					    TYPE_SATURATING (type));
13884 	t = build_fixed (type, f);
13885 	/* Propagate overflow flags.  */
13886 	if (overflow_p | TREE_OVERFLOW (arg0))
13887 	  TREE_OVERFLOW (t) = 1;
13888 	break;
13889       }
13890 
13891     default:
13892       if (poly_int_tree_p (arg0))
13893 	{
13894 	  wi::overflow_type overflow;
13895 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13896 	  t = force_fit_type (type, res, 1,
13897 			      (overflow && ! TYPE_UNSIGNED (type))
13898 			      || TREE_OVERFLOW (arg0));
13899 	  break;
13900 	}
13901 
13902       gcc_unreachable ();
13903     }
13904 
13905   return t;
13906 }
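
/* Note that negating the INTEGER_CST for INT_MIN in a signed type wraps
   back to INT_MIN; force_fit_type is then called with the overflow flag
   set, so the result carries TREE_OVERFLOW and callers can diagnose the
   overflow in constant expressions.  */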
13907 
13908 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13909    an integer constant or real constant.
13910 
13911    TYPE is the type of the result.  */
13912 
13913 tree
13914 fold_abs_const (tree arg0, tree type)
13915 {
13916   tree t = NULL_TREE;
13917 
13918   switch (TREE_CODE (arg0))
13919     {
13920     case INTEGER_CST:
13921       {
13922         /* If the value is unsigned or non-negative, then the absolute value
13923 	   is the same as the ordinary value.  */
13924 	wide_int val = wi::to_wide (arg0);
13925 	wi::overflow_type overflow = wi::OVF_NONE;
13926 	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
13927 	  ;
13928 
13929 	/* If the value is negative, then the absolute value is
13930 	   its negation.  */
13931 	else
13932 	  val = wi::neg (val, &overflow);
13933 
13934 	/* Force to the destination type, set TREE_OVERFLOW for signed
13935 	   TYPE only.  */
13936 	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
13937       }
13938     break;
13939 
13940     case REAL_CST:
13941       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13942 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13943       else
13944 	t = arg0;
13945       break;
13946 
13947     default:
13948       gcc_unreachable ();
13949     }
13950 
13951   return t;
13952 }
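
/* Illustrative example (editorial): for the INTEGER_CST -5 the
   negation branch above yields 5 with no overflow; for the most
   negative value of a signed type the negation overflows and
   force_fit_type sets TREE_OVERFLOW on the result.  */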
13953 
13954 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13955    constant.  TYPE is the type of the result.  */
13956 
13957 static tree
13958 fold_not_const (const_tree arg0, tree type)
13959 {
13960   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13961 
13962   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13963 }
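
/* Illustrative example (editorial): applied to the unsigned char
   constant 0, the fold above computes ~0 and truncates it to the
   8-bit precision of TYPE, producing 255.  */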
13964 
13965 /* Given CODE, a relational operator, the target type, TYPE and two
13966    constant operands OP0 and OP1, return the result of the
13967    relational operation.  If the result is not a compile time
13968    constant, then return NULL_TREE.  */
13969 
13970 static tree
13971 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13972 {
13973   int result, invert;
13974 
13975   /* From here on, the only cases we handle are when the result is
13976      known to be a constant.  */
13977 
13978   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13979     {
13980       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13981       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13982 
13983       /* Handle the cases where either operand is a NaN.  */
13984       if (real_isnan (c0) || real_isnan (c1))
13985 	{
13986 	  switch (code)
13987 	    {
13988 	    case EQ_EXPR:
13989 	    case ORDERED_EXPR:
13990 	      result = 0;
13991 	      break;
13992 
13993 	    case NE_EXPR:
13994 	    case UNORDERED_EXPR:
13995 	    case UNLT_EXPR:
13996 	    case UNLE_EXPR:
13997 	    case UNGT_EXPR:
13998 	    case UNGE_EXPR:
13999 	    case UNEQ_EXPR:
14000 	      result = 1;
14001 	      break;
14002 
14003 	    case LT_EXPR:
14004 	    case LE_EXPR:
14005 	    case GT_EXPR:
14006 	    case GE_EXPR:
14007 	    case LTGT_EXPR:
14008 	      if (flag_trapping_math)
14009 		return NULL_TREE;
14010 	      result = 0;
14011 	      break;
14012 
14013 	    default:
14014 	      gcc_unreachable ();
14015 	    }
14016 
14017 	  return constant_boolean_node (result, type);
14018 	}
14019 
14020       return constant_boolean_node (real_compare (code, c0, c1), type);
14021     }
14022 
14023   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14024     {
14025       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14026       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14027       return constant_boolean_node (fixed_compare (code, c0, c1), type);
14028     }
14029 
14030   /* Handle equality/inequality of complex constants.  */
14031   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14032     {
14033       tree rcond = fold_relational_const (code, type,
14034 					  TREE_REALPART (op0),
14035 					  TREE_REALPART (op1));
14036       tree icond = fold_relational_const (code, type,
14037 					  TREE_IMAGPART (op0),
14038 					  TREE_IMAGPART (op1));
14039       if (code == EQ_EXPR)
14040 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14041       else if (code == NE_EXPR)
14042 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14043       else
14044 	return NULL_TREE;
14045     }
14046 
14047   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14048     {
14049       if (!VECTOR_TYPE_P (type))
14050 	{
14051 	  /* Have vector comparison with scalar boolean result.  */
14052 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14053 		      && known_eq (VECTOR_CST_NELTS (op0),
14054 				   VECTOR_CST_NELTS (op1)));
14055 	  unsigned HOST_WIDE_INT nunits;
14056 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14057 	    return NULL_TREE;
14058 	  for (unsigned i = 0; i < nunits; i++)
14059 	    {
14060 	      tree elem0 = VECTOR_CST_ELT (op0, i);
14061 	      tree elem1 = VECTOR_CST_ELT (op1, i);
14062 	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14063 	      if (tmp == NULL_TREE)
14064 		return NULL_TREE;
14065 	      if (integer_zerop (tmp))
14066 		return constant_boolean_node (code == NE_EXPR, type);
14067 	    }
14068 	  return constant_boolean_node (code == EQ_EXPR, type);
14069 	}
14070       tree_vector_builder elts;
14071       if (!elts.new_binary_operation (type, op0, op1, false))
14072 	return NULL_TREE;
14073       unsigned int count = elts.encoded_nelts ();
14074       for (unsigned i = 0; i < count; i++)
14075 	{
14076 	  tree elem_type = TREE_TYPE (type);
14077 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14078 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14079 
14080 	  tree tem = fold_relational_const (code, elem_type,
14081 					    elem0, elem1);
14082 
14083 	  if (tem == NULL_TREE)
14084 	    return NULL_TREE;
14085 
14086 	  elts.quick_push (build_int_cst (elem_type,
14087 					  integer_zerop (tem) ? 0 : -1));
14088 	}
14089 
14090       return elts.build ();
14091     }
14092 
14093   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14094 
14095      To compute GT, swap the arguments and do LT.
14096      To compute GE, do LT and invert the result.
14097      To compute LE, swap the arguments, do LT and invert the result.
14098      To compute NE, do EQ and invert the result.
14099 
14100      Therefore, the code below must handle only EQ and LT.  */
14101 
14102   if (code == LE_EXPR || code == GT_EXPR)
14103     {
14104       std::swap (op0, op1);
14105       code = swap_tree_comparison (code);
14106     }
14107 
14108   /* Note that it is safe to invert for real values here because we
14109      have already handled the one case that matters.  */
14110 
14111   invert = 0;
14112   if (code == NE_EXPR || code == GE_EXPR)
14113     {
14114       invert = 1;
14115       code = invert_tree_comparison (code, false);
14116     }
14117 
14118   /* Compute a result for LT or EQ if args permit;
14119 	     otherwise return NULL_TREE.  */
14120   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14121     {
14122       if (code == EQ_EXPR)
14123 	result = tree_int_cst_equal (op0, op1);
14124       else
14125 	result = tree_int_cst_lt (op0, op1);
14126     }
14127   else
14128     return NULL_TREE;
14129 
14130   if (invert)
14131     result ^= 1;
14132   return constant_boolean_node (result, type);
14133 }
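
/* Illustrative example (editorial): GE_EXPR on two INTEGER_CSTs is
   handled above by rewriting it to LT_EXPR via invert_tree_comparison,
   computing tree_int_cst_lt (op0, op1) and flipping the result, per
   the EQ/LT-only scheme described before the swap.  */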
14134 
14135 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14136    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14137    itself.  */
14138 
14139 tree
14140 fold_build_cleanup_point_expr (tree type, tree expr)
14141 {
14142   /* If the expression does not have side effects then we don't have to wrap
14143      it with a cleanup point expression.  */
14144   if (!TREE_SIDE_EFFECTS (expr))
14145     return expr;
14146 
14147   /* If the expression is a return, check whether the expression inside
14148      the return, or the right-hand side of its modify expression, has
14149      side effects.  If either of them has none, we don't need to wrap the
14150      expression in a cleanup point expression.  Note we don't check the
14151      left-hand side of the modify because it should always be a return decl.  */
14152   if (TREE_CODE (expr) == RETURN_EXPR)
14153     {
14154       tree op = TREE_OPERAND (expr, 0);
14155       if (!op || !TREE_SIDE_EFFECTS (op))
14156         return expr;
14157       op = TREE_OPERAND (op, 1);
14158       if (!TREE_SIDE_EFFECTS (op))
14159         return expr;
14160     }
14161 
14162   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14163 }
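
/* Illustrative example (editorial): for a RETURN_EXPR whose
   MODIFY_EXPR right-hand side is a plain variable read, the checks
   above return EXPR unwrapped; if the right-hand side is a call, it
   has side effects and a CLEANUP_POINT_EXPR is built around EXPR.  */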
14164 
14165 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14166    of an indirection through OP0, or NULL_TREE if no simplification is
14167    possible.  */
14168 
14169 tree
14170 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14171 {
14172   tree sub = op0;
14173   tree subtype;
14174   poly_uint64 const_op01;
14175 
14176   STRIP_NOPS (sub);
14177   subtype = TREE_TYPE (sub);
14178   if (!POINTER_TYPE_P (subtype)
14179       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14180     return NULL_TREE;
14181 
14182   if (TREE_CODE (sub) == ADDR_EXPR)
14183     {
14184       tree op = TREE_OPERAND (sub, 0);
14185       tree optype = TREE_TYPE (op);
14186 
14187       /* *&CONST_DECL -> to the value of the const decl.  */
14188       if (TREE_CODE (op) == CONST_DECL)
14189 	return DECL_INITIAL (op);
14190       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14191       if (type == optype)
14192 	{
14193 	  tree fop = fold_read_from_constant_string (op);
14194 	  if (fop)
14195 	    return fop;
14196 	  else
14197 	    return op;
14198 	}
14199       /* *(foo *)&fooarray => fooarray[0] */
14200       else if (TREE_CODE (optype) == ARRAY_TYPE
14201 	       && type == TREE_TYPE (optype)
14202 	       && (!in_gimple_form
14203 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14204 	{
14205 	  tree type_domain = TYPE_DOMAIN (optype);
14206 	  tree min_val = size_zero_node;
14207 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14208 	    min_val = TYPE_MIN_VALUE (type_domain);
14209 	  if (in_gimple_form
14210 	      && TREE_CODE (min_val) != INTEGER_CST)
14211 	    return NULL_TREE;
14212 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14213 			     NULL_TREE, NULL_TREE);
14214 	}
14215       /* *(foo *)&complexfoo => __real__ complexfoo */
14216       else if (TREE_CODE (optype) == COMPLEX_TYPE
14217 	       && type == TREE_TYPE (optype))
14218 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14219       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14220       else if (VECTOR_TYPE_P (optype)
14221 	       && type == TREE_TYPE (optype))
14222 	{
14223 	  tree part_width = TYPE_SIZE (type);
14224 	  tree index = bitsize_int (0);
14225 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14226 				  index);
14227 	}
14228     }
14229 
14230   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14231       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14232     {
14233       tree op00 = TREE_OPERAND (sub, 0);
14234       tree op01 = TREE_OPERAND (sub, 1);
14235 
14236       STRIP_NOPS (op00);
14237       if (TREE_CODE (op00) == ADDR_EXPR)
14238 	{
14239 	  tree op00type;
14240 	  op00 = TREE_OPERAND (op00, 0);
14241 	  op00type = TREE_TYPE (op00);
14242 
14243 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14244 	  if (VECTOR_TYPE_P (op00type)
14245 	      && type == TREE_TYPE (op00type)
14246 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14247 		 but we want to treat offsets with MSB set as negative.
14248 		 For the code below negative offsets are invalid and
14249 		 TYPE_SIZE of the element is something unsigned, so
14250 		 check whether op01 fits into poly_int64, which implies
14251 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14252 		 then just use poly_uint64 because we want to treat the
14253 		 value as unsigned.  */
14254 	      && tree_fits_poly_int64_p (op01))
14255 	    {
14256 	      tree part_width = TYPE_SIZE (type);
14257 	      poly_uint64 max_offset
14258 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14259 		   * TYPE_VECTOR_SUBPARTS (op00type));
14260 	      if (known_lt (const_op01, max_offset))
14261 		{
14262 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14263 		  return fold_build3_loc (loc,
14264 					  BIT_FIELD_REF, type, op00,
14265 					  part_width, index);
14266 		}
14267 	    }
14268 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14269 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14270 		   && type == TREE_TYPE (op00type))
14271 	    {
14272 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14273 			    const_op01))
14274 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14275 	    }
14276 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14277 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14278 		   && type == TREE_TYPE (op00type))
14279 	    {
14280 	      tree type_domain = TYPE_DOMAIN (op00type);
14281 	      tree min_val = size_zero_node;
14282 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14283 		min_val = TYPE_MIN_VALUE (type_domain);
14284 	      poly_uint64 type_size, index;
14285 	      if (poly_int_tree_p (min_val)
14286 		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14287 		  && multiple_p (const_op01, type_size, &index))
14288 		{
14289 		  poly_offset_int off = index + wi::to_poly_offset (min_val);
14290 		  op01 = wide_int_to_tree (sizetype, off);
14291 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
14292 				     NULL_TREE, NULL_TREE);
14293 		}
14294 	    }
14295 	}
14296     }
14297 
14298   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14299   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14300       && type == TREE_TYPE (TREE_TYPE (subtype))
14301       && (!in_gimple_form
14302 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14303     {
14304       tree type_domain;
14305       tree min_val = size_zero_node;
14306       sub = build_fold_indirect_ref_loc (loc, sub);
14307       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14308       if (type_domain && TYPE_MIN_VALUE (type_domain))
14309 	min_val = TYPE_MIN_VALUE (type_domain);
14310       if (in_gimple_form
14311 	  && TREE_CODE (min_val) != INTEGER_CST)
14312 	return NULL_TREE;
14313       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14314 			 NULL_TREE);
14315     }
14316 
14317   return NULL_TREE;
14318 }
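
/* Illustrative worked example (editorial), assuming 4-byte int: the
   access ((int *) &iarr)[2] reaches this function as an indirection
   of &iarr p+ 8; the ARRAY_TYPE branch above divides the byte offset
   8 by TYPE_SIZE_UNIT (int) via multiple_p, giving index 2, so the
   whole expression folds to iarr[2].  */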
14319 
14320 /* Builds an expression for an indirection through T, simplifying some
14321    cases.  */
14322 
14323 tree
14324 build_fold_indirect_ref_loc (location_t loc, tree t)
14325 {
14326   tree type = TREE_TYPE (TREE_TYPE (t));
14327   tree sub = fold_indirect_ref_1 (loc, type, t);
14328 
14329   if (sub)
14330     return sub;
14331 
14332   return build1_loc (loc, INDIRECT_REF, type, t);
14333 }
14334 
14335 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14336 
14337 tree
14338 fold_indirect_ref_loc (location_t loc, tree t)
14339 {
14340   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14341 
14342   if (sub)
14343     return sub;
14344   else
14345     return t;
14346 }
14347 
14348 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14349    whose result is ignored.  The type of the returned tree need not be
14350    the same as the original expression.  */
14351 
14352 tree
14353 fold_ignored_result (tree t)
14354 {
14355   if (!TREE_SIDE_EFFECTS (t))
14356     return integer_zero_node;
14357 
14358   for (;;)
14359     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14360       {
14361       case tcc_unary:
14362 	t = TREE_OPERAND (t, 0);
14363 	break;
14364 
14365       case tcc_binary:
14366       case tcc_comparison:
14367 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14368 	  t = TREE_OPERAND (t, 0);
14369 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14370 	  t = TREE_OPERAND (t, 1);
14371 	else
14372 	  return t;
14373 	break;
14374 
14375       case tcc_expression:
14376 	switch (TREE_CODE (t))
14377 	  {
14378 	  case COMPOUND_EXPR:
14379 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14380 	      return t;
14381 	    t = TREE_OPERAND (t, 0);
14382 	    break;
14383 
14384 	  case COND_EXPR:
14385 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14386 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14387 	      return t;
14388 	    t = TREE_OPERAND (t, 0);
14389 	    break;
14390 
14391 	  default:
14392 	    return t;
14393 	  }
14394 	break;
14395 
14396       default:
14397 	return t;
14398       }
14399 }
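
/* Illustrative example (editorial): with its result ignored, the
   tree x + (y = 1) keeps only the operand with side effects; the
   tcc_binary case above steps to (y = 1), which is returned as the
   part that still has to be evaluated.  */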
14400 
14401 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14402 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */
14403 tree
14404 round_up_loc (location_t loc, tree value, unsigned int divisor)
14405 {
14406   tree div = NULL_TREE;
14407 
14408   if (divisor == 1)
14409     return value;
14410 
14411   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14412      have to do anything.  Only do this when we are not given a const,
14413      because in that case, this check is more expensive than just
14414      doing it.  */
14415   if (TREE_CODE (value) != INTEGER_CST)
14416     {
14417       div = build_int_cst (TREE_TYPE (value), divisor);
14418 
14419       if (multiple_of_p (TREE_TYPE (value), value, div))
14420 	return value;
14421     }
14422 
14423   /* If divisor is a power of two, simplify this to bit manipulation.  */
14424   if (pow2_or_zerop (divisor))
14425     {
14426       if (TREE_CODE (value) == INTEGER_CST)
14427 	{
14428 	  wide_int val = wi::to_wide (value);
14429 	  bool overflow_p;
14430 
14431 	  if ((val & (divisor - 1)) == 0)
14432 	    return value;
14433 
14434 	  overflow_p = TREE_OVERFLOW (value);
14435 	  val += divisor - 1;
14436 	  val &= (int) -divisor;
14437 	  if (val == 0)
14438 	    overflow_p = true;
14439 
14440 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14441 	}
14442       else
14443 	{
14444 	  tree t;
14445 
14446 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14447 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14448 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14449 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14450 	}
14451     }
14452   else
14453     {
14454       if (!div)
14455 	div = build_int_cst (TREE_TYPE (value), divisor);
14456       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14457       value = size_binop_loc (loc, MULT_EXPR, value, div);
14458     }
14459 
14460   return value;
14461 }
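
/* Illustrative worked example (editorial): rounding 13 up to a
   multiple of 8 takes the power-of-two path above and computes
   (13 + 7) & -8 == 16; for a non-power-of-two divisor such as 6 the
   fallback computes CEIL_DIV_EXPR then MULT_EXPR, i.e.
   ceil (13 / 6) * 6 == 18.  */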
14462 
14463 /* Likewise, but round down.  */
14464 
14465 tree
14466 round_down_loc (location_t loc, tree value, int divisor)
14467 {
14468   tree div = NULL_TREE;
14469 
14470   gcc_assert (divisor > 0);
14471   if (divisor == 1)
14472     return value;
14473 
14474   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14475      have to do anything.  Only do this when we are not given a const,
14476      because in that case, this check is more expensive than just
14477      doing it.  */
14478   if (TREE_CODE (value) != INTEGER_CST)
14479     {
14480       div = build_int_cst (TREE_TYPE (value), divisor);
14481 
14482       if (multiple_of_p (TREE_TYPE (value), value, div))
14483 	return value;
14484     }
14485 
14486   /* If divisor is a power of two, simplify this to bit manipulation.  */
14487   if (pow2_or_zerop (divisor))
14488     {
14489       tree t;
14490 
14491       t = build_int_cst (TREE_TYPE (value), -divisor);
14492       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14493     }
14494   else
14495     {
14496       if (!div)
14497 	div = build_int_cst (TREE_TYPE (value), divisor);
14498       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14499       value = size_binop_loc (loc, MULT_EXPR, value, div);
14500     }
14501 
14502   return value;
14503 }
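
/* Illustrative worked example (editorial): rounding 13 down to a
   multiple of 8 is just 13 & -8 == 8; with divisor 6 the fallback
   computes floor (13 / 6) * 6 == 12.  */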
14504 
14505 /* Returns the pointer to the base of the object addressed by EXP and
14506    extracts the information about the offset of the access, storing it
14507    in PBITPOS and POFFSET.  */
14508 
14509 static tree
14510 split_address_to_core_and_offset (tree exp,
14511 				  poly_int64_pod *pbitpos, tree *poffset)
14512 {
14513   tree core;
14514   machine_mode mode;
14515   int unsignedp, reversep, volatilep;
14516   poly_int64 bitsize;
14517   location_t loc = EXPR_LOCATION (exp);
14518 
14519   if (TREE_CODE (exp) == ADDR_EXPR)
14520     {
14521       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14522 				  poffset, &mode, &unsignedp, &reversep,
14523 				  &volatilep);
14524       core = build_fold_addr_expr_loc (loc, core);
14525     }
14526   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14527     {
14528       core = TREE_OPERAND (exp, 0);
14529       STRIP_NOPS (core);
14530       *pbitpos = 0;
14531       *poffset = TREE_OPERAND (exp, 1);
14532       if (poly_int_tree_p (*poffset))
14533 	{
14534 	  poly_offset_int tem
14535 	    = wi::sext (wi::to_poly_offset (*poffset),
14536 			TYPE_PRECISION (TREE_TYPE (*poffset)));
14537 	  tem <<= LOG2_BITS_PER_UNIT;
14538 	  if (tem.to_shwi (pbitpos))
14539 	    *poffset = NULL_TREE;
14540 	}
14541     }
14542   else
14543     {
14544       core = exp;
14545       *pbitpos = 0;
14546       *poffset = NULL_TREE;
14547     }
14548 
14549   return core;
14550 }
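
/* Illustrative example (editorial), assuming BITS_PER_UNIT == 8: for
   EXP == &s.f with field F at constant byte offset 4,
   get_inner_reference reports the position in bits, so *PBITPOS is
   set to 32, *POFFSET to NULL_TREE, and the returned core is &s.  */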
14551 
14552 /* Returns true if addresses of E1 and E2 differ by a constant, false
14553    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14554 
14555 bool
14556 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14557 {
14558   tree core1, core2;
14559   poly_int64 bitpos1, bitpos2;
14560   tree toffset1, toffset2, tdiff, type;
14561 
14562   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14563   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14564 
14565   poly_int64 bytepos1, bytepos2;
14566   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14567       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14568       || !operand_equal_p (core1, core2, 0))
14569     return false;
14570 
14571   if (toffset1 && toffset2)
14572     {
14573       type = TREE_TYPE (toffset1);
14574       if (type != TREE_TYPE (toffset2))
14575 	toffset2 = fold_convert (type, toffset2);
14576 
14577       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14578       if (!cst_and_fits_in_hwi (tdiff))
14579 	return false;
14580 
14581       *diff = int_cst_value (tdiff);
14582     }
14583   else if (toffset1 || toffset2)
14584     {
14585       /* If only one of the offsets is non-constant, the difference cannot
14586 	 be a constant.  */
14587       return false;
14588     }
14589   else
14590     *diff = 0;
14591 
14592   *diff += bytepos1 - bytepos2;
14593   return true;
14594 }
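
/* Illustrative example (editorial), assuming 4-byte elements: for
   E1 == &a[3] and E2 == &a[1] both cores are &a, the bit positions
   are 96 and 32, and *DIFF is set to the byte difference 8.  */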
14595 
14596 /* Return OFF converted to a pointer offset type suitable as offset for
14597    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14598 tree
14599 convert_to_ptrofftype_loc (location_t loc, tree off)
14600 {
14601   return fold_convert_loc (loc, sizetype, off);
14602 }
14603 
14604 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14605 tree
14606 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14607 {
14608   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14609 			  ptr, convert_to_ptrofftype_loc (loc, off));
14610 }
14611 
14612 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14613 tree
14614 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14615 {
14616   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14617 			  ptr, size_int (off));
14618 }
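
/* Illustrative usage (editorial): fold_build_pointer_plus_hwi_loc
   (loc, ptr, 4) builds ptr p+ 4 with the offset expressed in
   sizetype, the type POINTER_PLUS_EXPR requires for its second
   operand.  */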
14619 
14620 /* Return a pointer P to a NUL-terminated string containing the sequence
14621    of bytes corresponding to the representation of the object referred to
14622    by SRC (or a subsequence of such bytes within it if SRC is a reference
14623    to an initialized constant array plus some constant offset).
14624    If STRSIZE is non-null, store the number of bytes in the constant
14625    sequence including the terminating NUL byte.  *STRSIZE is equal to
14626    sizeof(A) - OFFSET where A is the array that stores the constant
14627    sequence that SRC points to and OFFSET is the byte offset of SRC from
14628    the beginning of A.  SRC need not point to a string or even an array
14629    of characters but may point to an object of any type.  */
14630 
14631 const char *
14632 c_getstr (tree src, unsigned HOST_WIDE_INT *strsize /* = NULL */)
14633 {
14634   /* The offset into the array A storing the string, and A's byte size.  */
14635   tree offset_node;
14636   tree mem_size;
14637 
14638   if (strsize)
14639     *strsize = 0;
14640 
14641   src = string_constant (src, &offset_node, &mem_size, NULL);
14642   if (!src)
14643     return NULL;
14644 
14645   unsigned HOST_WIDE_INT offset = 0;
14646   if (offset_node != NULL_TREE)
14647     {
14648       if (!tree_fits_uhwi_p (offset_node))
14649 	return NULL;
14650       else
14651 	offset = tree_to_uhwi (offset_node);
14652     }
14653 
14654   if (!tree_fits_uhwi_p (mem_size))
14655     return NULL;
14656 
14657   /* ARRAY_SIZE is the byte size of the array the constant sequence
14658      is stored in and equal to sizeof A.  INIT_BYTES is the number
14659      of bytes in the constant sequence used to initialize the array,
14660      including any embedded NULs as well as the terminating NUL (for
14661      strings), but not including any trailing zeros/NULs past
14662      the terminating one appended implicitly to a string literal to
14663      zero out the remainder of the array it's stored in.  For example,
14664      given:
14665        const char a[7] = "abc\0d";
14666        n = strlen (a + 1);
14667      ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
14668      (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
14669      is equal to strlen (A) + 1.  */
14670   const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
14671   unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
14672 
14673   /* Ideally this would turn into a gcc_checking_assert over time.  */
14674   if (init_bytes > array_size)
14675     init_bytes = array_size;
14676 
14677   const char *string = TREE_STRING_POINTER (src);
14678 
14683   if (init_bytes == 0 || offset >= array_size)
14684     return NULL;
14685 
14686   if (strsize)
14687     {
14688       /* Compute and store the number of characters from the beginning
14689 	 of the substring at OFFSET to the end, including the terminating
14690 	 nul.  Offsets past the initial length refer to null strings.  */
14691       if (offset < init_bytes)
14692 	*strsize = init_bytes - offset;
14693       else
14694 	*strsize = 1;
14695     }
14696   else
14697     {
14698       tree eltype = TREE_TYPE (TREE_TYPE (src));
14699       /* Support only properly NUL-terminated single byte strings.  */
14700       if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
14701 	return NULL;
14702       if (string[init_bytes - 1] != '\0')
14703 	return NULL;
14704     }
14705 
14706   return offset < init_bytes ? string + offset : "";
14707 }
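
/* Illustrative worked example (editorial), reusing the array from the
   comment above: for const char a[7] = "abc\0d" and SRC == a + 1,
   ARRAY_SIZE is 7, INIT_BYTES is 6 and OFFSET is 1, so the function
   returns a pointer to "bc" and stores 5 in *STRSIZE (the bytes from
   OFFSET up to and including the embedded terminating nul).  */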
14708 
14709 /* Given a tree T, compute which bits in T may be nonzero.  */
14710 
14711 wide_int
14712 tree_nonzero_bits (const_tree t)
14713 {
14714   switch (TREE_CODE (t))
14715     {
14716     case INTEGER_CST:
14717       return wi::to_wide (t);
14718     case SSA_NAME:
14719       return get_nonzero_bits (t);
14720     case NON_LVALUE_EXPR:
14721     case SAVE_EXPR:
14722       return tree_nonzero_bits (TREE_OPERAND (t, 0));
14723     case BIT_AND_EXPR:
14724       return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14725 			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
14726     case BIT_IOR_EXPR:
14727     case BIT_XOR_EXPR:
14728       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14729 			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
14730     case COND_EXPR:
14731       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
14732 			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
14733     CASE_CONVERT:
14734       return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14735 			     TYPE_PRECISION (TREE_TYPE (t)),
14736 			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
14737     case PLUS_EXPR:
14738       if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
14739 	{
14740 	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
14741 	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
14742 	  if (wi::bit_and (nzbits1, nzbits2) == 0)
14743 	    return wi::bit_or (nzbits1, nzbits2);
14744 	}
14745       break;
14746     case LSHIFT_EXPR:
14747       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
14748 	{
14749 	  tree type = TREE_TYPE (t);
14750 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
14751 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
14752 				       TYPE_PRECISION (type));
14753 	  return wi::neg_p (arg1)
14754 		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
14755 		 : wi::lshift (nzbits, arg1);
14756 	}
14757       break;
14758     case RSHIFT_EXPR:
14759       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
14760         {
14761 	  tree type = TREE_TYPE (t);
14762 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
14763 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
14764 				       TYPE_PRECISION (type));
14765 	  return wi::neg_p (arg1)
14766 		 ? wi::lshift (nzbits, -arg1)
14767 		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
14768         }
14769       break;
14770     default:
14771       break;
14772     }
14773 
14774   return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
14775 }
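
/* Illustrative example (editorial): for (x & 0xF0) the BIT_AND_EXPR
   case above intersects the all-ones fallback for the unknown operand
   X with 0xF0, so only bits 4-7 may be nonzero; (x & 0xF0) << 1 then
   reports bits 5-8 through the LSHIFT_EXPR case.  */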
14776 
14777 #if CHECKING_P
14778 
14779 namespace selftest {
14780 
14781 /* Helper functions for writing tests of folding trees.  */
14782 
14783 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14784 
14785 static void
14786 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14787 			     tree constant)
14788 {
14789   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14790 }
14791 
14792 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14793    wrapping WRAPPED_EXPR.  */
14794 
14795 static void
14796 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14797 				 tree wrapped_expr)
14798 {
14799   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14800   ASSERT_NE (wrapped_expr, result);
14801   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14802   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14803 }
14804 
14805 /* Verify that various arithmetic binary operations are folded
14806    correctly.  */
14807 
14808 static void
14809 test_arithmetic_folding ()
14810 {
14811   tree type = integer_type_node;
14812   tree x = create_tmp_var_raw (type, "x");
14813   tree zero = build_zero_cst (type);
14814   tree one = build_int_cst (type, 1);
14815 
14816   /* Addition.  */
14817   /* 1 <-- (0 + 1) */
14818   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14819 			       one);
14820   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14821 			       one);
14822 
14823   /* (nonlvalue)x <-- (x + 0) */
14824   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14825 				   x);
14826 
14827   /* Subtraction.  */
14828   /* 0 <-- (x - x) */
14829   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14830 			       zero);
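
  /* (nonlvalue)x <-- (x - 0) */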
14831   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14832 				   x);
14833 
14834   /* Multiplication.  */
14835   /* 0 <-- (x * 0) */
14836   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14837 			       zero);
14838 
14839   /* (nonlvalue)x <-- (x * 1) */
14840   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14841 				   x);
14842 }
14843 
14844 /* Verify that various binary operations on vectors are folded
14845    correctly.  */
14846 
14847 static void
14848 test_vector_folding ()
14849 {
14850   tree inner_type = integer_type_node;
14851   tree type = build_vector_type (inner_type, 4);
14852   tree zero = build_zero_cst (type);
14853   tree one = build_one_cst (type);
14854   tree index = build_index_vector (type, 0, 1);
14855 
14856   /* Verify equality tests that return a scalar boolean result.  */
14857   tree res_type = boolean_type_node;
14858   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14859   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14860   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14861   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14862   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
14863   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
14864 					       index, one)));
14865   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
14866 					       index, index)));
14867   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
14868 					      index, index)));
14869 }
14870 
14871 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
14872 
14873 static void
14874 test_vec_duplicate_folding ()
14875 {
14876   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14877   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14878   /* This will be 1 if VEC_MODE isn't a vector mode.  */
14879   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14880 
14881   tree type = build_vector_type (ssizetype, nunits);
14882   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14883   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14884   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14885 }
14886 
14887 /* Run all of the selftests within this file.  */
14888 
14889 void
14890 fold_const_c_tests ()
14891 {
14892   test_arithmetic_folding ();
14893   test_vector_folding ();
14894   test_vec_duplicate_folding ();
14895 }
14896 
14897 } // namespace selftest
14898 
14899 #endif /* CHECKING_P */
14900