/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
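
/* For example, COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ), so folding
   "a < b || a == b" into "a <= b" amounts to a bitwise OR of the two
   codes, and likewise COMPCODE_NE == (COMPCODE_TRUE & ~COMPCODE_EQ).  */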

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
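
/* For example (a sketch of a typical use, not a call site in this file):

     tree q = div_if_zero_remainder (build_int_cst (sizetype, 12),
				     build_int_cst (sizetype, 4));

   yields the constant 3, whereas 13 and 4 would yield NULL_TREE.  */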

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
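
/* A sketch of the intended defer/undefer pairing (the real call sites
   live elsewhere, e.g. in the loop iteration estimation code):

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     bool used = t != NULL_TREE && TREE_CONSTANT (t);
     fold_undefer_overflow_warnings (used, stmt, 0);

   so a warning queued via fold_overflow_warning is only emitted when
   the folded result is actually used.  */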

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
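
/* For instance, sin is odd (-sin(x) == sin(-x)) and appears above,
   while cos is even (cos(-x) == cos(x)) and does not.  The rint/lrint
   family is only safely odd when -frounding-math is off, hence the
   flag_rounding_math check.  */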

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
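
/* For a signed 32-bit type the only rejected value is INT_MIN
   (0x80000000), the lone value with just the sign bit set, since
   -INT_MIN is not representable in the same type.  */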

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* The product INT_MIN/n * n does not itself overflow, but negating
	 one of its operands makes it overflow if n is a (negative) power
	 of two, since the product is then INT_MIN.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
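
/* As an example of the MINUS_EXPR rule above: for signed int A and B,
   "A - B" is only treated as cheaply negatable (to "B - A") when signed
   overflow wraps, e.g. under -fwrapv; otherwise the transformation is
   refused for integral types.  */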

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
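
/* Unlike fold_negate_expr, this never fails: if T is some opaque
   expression that cannot be simplified, the result is simply a
   NEGATE_EXPR wrapped around T, converted back to T's original type.  */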

/* Split a tree IN into constant, literal, and variable parts that could
   be combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
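
/* For example, splitting "X + 5" with CODE == PLUS_EXPR returns the
   variable part X and sets *LITP to 5, while splitting "X - 5" returns
   X and sets *MINUS_LITP to 5; the remaining parts are null.  */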

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
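
/* For example, with CODE == PLUS_EXPR, combining T1 == -A with
   T2 == B + C is rewritten here as (B + C) - A rather than handed to
   fold_build2_loc, which could otherwise recurse without bound.  */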

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs.  */

static tree
int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    return int_const_binop_2 (code, arg1, arg2, overflowable);

  gcc_assert (NUM_POLY_INT_COEFFS != 1);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      poly_wide_int res;
      bool overflow;
      tree type = TREE_TYPE (arg1);
      signop sign = TYPE_SIGN (type);
      switch (code)
	{
	case PLUS_EXPR:
	  res = wi::add (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MINUS_EXPR:
	  res = wi::sub (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MULT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg1),
			   wi::to_wide (arg2), sign, &overflow);
	  else if (TREE_CODE (arg1) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg2),
			   wi::to_wide (arg1), sign, &overflow);
	  else
	    return NULL_TREE;
	  break;

	case LSHIFT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
	  else
	    return NULL_TREE;
	  break;

	case BIT_IOR_EXPR:
	  if (TREE_CODE (arg2) != INTEGER_CST
	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			     &res))
	    return NULL_TREE;
	  break;

	default:
	  return NULL_TREE;
	}
      return force_fit_type (type, res, overflowable,
			     (((sign == SIGNED || overflowable == -1)
			       && overflow)
			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
    }

  return NULL_TREE;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
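
/* A minimal sketch of a call (both constants must share a type):

     tree five = int_const_binop (PLUS_EXPR,
				  build_int_cst (integer_type_node, 2),
				  build_int_cst (integer_type_node, 3));

   Note that division or modulus by a zero constant folds to NULL_TREE
   rather than to a value.  */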

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
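
/* For instance, (A + B) << N == (A << N) + (B << N) in modulo
   arithmetic, so LSHIFT_EXPR distributes over addition in operand 1,
   but not in operand 2: A << (B + C) is generally not
   (A << B) + (A << C).  */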

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division-by-zero
	 exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d1.signalling = 0;
	t = build_real (type, d1);
	return t;
      }
      else if (REAL_VALUE_ISNAN (d2))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d2.signalling = 0;
	t = build_real (type, d2);
	return t;
      }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru. */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as an argument, put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
	{
	  offset_int res = wi::sub (wi::to_offset (arg1),
				    wi::to_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg1 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */
1716 
1717 tree
1718 const_unop (enum tree_code code, tree type, tree arg0)
1719 {
1720   /* Don't perform the operation, other than NEGATE and ABS, if
1721      flag_signaling_nans is on and the operand is a signaling NaN.  */
1722   if (TREE_CODE (arg0) == REAL_CST
1723       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1724       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1725       && code != NEGATE_EXPR
1726       && code != ABS_EXPR)
1727     return NULL_TREE;
1728 
1729   switch (code)
1730     {
1731     CASE_CONVERT:
1732     case FLOAT_EXPR:
1733     case FIX_TRUNC_EXPR:
1734     case FIXED_CONVERT_EXPR:
1735       return fold_convert_const (code, type, arg0);
1736 
1737     case ADDR_SPACE_CONVERT_EXPR:
1738       /* If the source address is 0, and the source address space
1739 	 cannot have a valid object at 0, fold to dest type null.  */
1740       if (integer_zerop (arg0)
1741 	  && !(targetm.addr_space.zero_address_valid
1742 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1743 	return fold_convert_const (code, type, arg0);
1744       break;
1745 
1746     case VIEW_CONVERT_EXPR:
1747       return fold_view_convert_expr (type, arg0);
1748 
1749     case NEGATE_EXPR:
1750       {
1751 	/* Can't call fold_negate_const directly here as that doesn't
1752 	   handle all cases and we might not be able to negate some
1753 	   constants.  */
1754 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1755 	if (tem && CONSTANT_CLASS_P (tem))
1756 	  return tem;
1757 	break;
1758       }
1759 
1760     case ABS_EXPR:
1761       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1762 	return fold_abs_const (arg0, type);
1763       break;
1764 
1765     case CONJ_EXPR:
1766       if (TREE_CODE (arg0) == COMPLEX_CST)
1767 	{
1768 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1769 					  TREE_TYPE (type));
1770 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1771 	}
1772       break;
1773 
1774     case BIT_NOT_EXPR:
1775       if (TREE_CODE (arg0) == INTEGER_CST)
1776 	return fold_not_const (arg0, type);
1777       else if (POLY_INT_CST_P (arg0))
1778 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1779       /* Perform BIT_NOT_EXPR on each element individually.  */
1780       else if (TREE_CODE (arg0) == VECTOR_CST)
1781 	{
1782 	  tree elem;
1783 
1784 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1785 	  tree_vector_builder elements;
1786 	  elements.new_unary_operation (type, arg0, true);
1787 	  unsigned int i, count = elements.encoded_nelts ();
1788 	  for (i = 0; i < count; ++i)
1789 	    {
1790 	      elem = VECTOR_CST_ELT (arg0, i);
1791 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1792 	      if (elem == NULL_TREE)
1793 		break;
1794 	      elements.quick_push (elem);
1795 	    }
1796 	  if (i == count)
1797 	    return elements.build ();
1798 	}
1799       break;
1800 
1801     case TRUTH_NOT_EXPR:
1802       if (TREE_CODE (arg0) == INTEGER_CST)
1803 	return constant_boolean_node (integer_zerop (arg0), type);
1804       break;
1805 
1806     case REALPART_EXPR:
1807       if (TREE_CODE (arg0) == COMPLEX_CST)
1808 	return fold_convert (type, TREE_REALPART (arg0));
1809       break;
1810 
1811     case IMAGPART_EXPR:
1812       if (TREE_CODE (arg0) == COMPLEX_CST)
1813 	return fold_convert (type, TREE_IMAGPART (arg0));
1814       break;
1815 
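    /* Lane selection sketch (added commentary): unpacking an 8-element
       vector into a 4-element wider result on a little-endian target
       reads elements 0-3 for the LO variants and elements 4-7 for the
       HI variants; big-endian targets swap the two halves, as the
       offset computation below encodes.  */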
1816     case VEC_UNPACK_LO_EXPR:
1817     case VEC_UNPACK_HI_EXPR:
1818     case VEC_UNPACK_FLOAT_LO_EXPR:
1819     case VEC_UNPACK_FLOAT_HI_EXPR:
1820       {
1821 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1822 	enum tree_code subcode;
1823 
1824 	if (TREE_CODE (arg0) != VECTOR_CST)
1825 	  return NULL_TREE;
1826 
1827 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1828 	  return NULL_TREE;
1829 	out_nelts = in_nelts / 2;
1830 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1831 
1832 	unsigned int offset = 0;
1833 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1834 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
1835 	  offset = out_nelts;
1836 
1837 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1838 	  subcode = NOP_EXPR;
1839 	else
1840 	  subcode = FLOAT_EXPR;
1841 
1842 	tree_vector_builder elts (type, out_nelts, 1);
1843 	for (i = 0; i < out_nelts; i++)
1844 	  {
1845 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1846 					   VECTOR_CST_ELT (arg0, i + offset));
1847 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1848 	      return NULL_TREE;
1849 	    elts.quick_push (elt);
1850 	  }
1851 
1852 	return elts.build ();
1853       }
1854 
1855     case VEC_DUPLICATE_EXPR:
1856       if (CONSTANT_CLASS_P (arg0))
1857 	return build_vector_from_val (type, arg0);
1858       return NULL_TREE;
1859 
1860     default:
1861       break;
1862     }
1863 
1864   return NULL_TREE;
1865 }
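
/* Usage sketch (added commentary): given a constant operand,
   const_unop folds the operation outright, e.g.

     const_unop (BIT_NOT_EXPR, integer_type_node,
		 build_int_cst (integer_type_node, 0))

   yields the all-ones INTEGER_CST -1, while inputs it cannot fold
   simply produce NULL_TREE for the caller to handle.  */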
1866 
1867 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1868    indicates which particular sizetype to create.  */
1869 
1870 tree
1871 size_int_kind (poly_int64 number, enum size_type_kind kind)
1872 {
1873   return build_int_cst (sizetype_tab[(int) kind], number);
1874 }
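
/* For instance (added commentary, relying on the size_int family of
   macros in tree.h): size_int (4) expands to
   size_int_kind (4, stk_sizetype) and yields an INTEGER_CST of type
   sizetype; bitsize_int selects bitsizetype the same way.  */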
1875 
1876 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1877    is a tree code.  The type of the result is taken from the operands.
1878    Both must be equivalent integer types, ala int_binop_types_match_p.
1879    If the operands are constant, so is the result.  */
1880 
1881 tree
1882 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1883 {
1884   tree type = TREE_TYPE (arg0);
1885 
1886   if (arg0 == error_mark_node || arg1 == error_mark_node)
1887     return error_mark_node;
1888 
1889   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1890                                        TREE_TYPE (arg1)));
1891 
1892   /* Handle the special case of two poly_int constants faster.  */
1893   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1894     {
1895       /* And some specific cases even faster than that.  */
1896       if (code == PLUS_EXPR)
1897 	{
1898 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1899 	    return arg1;
1900 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1901 	    return arg0;
1902 	}
1903       else if (code == MINUS_EXPR)
1904 	{
1905 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1906 	    return arg0;
1907 	}
1908       else if (code == MULT_EXPR)
1909 	{
1910 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1911 	    return arg1;
1912 	}
1913 
1914       /* Handle general case of two integer constants.  For sizetype
1915          constant calculations we always want to know about overflow,
1916 	 even in the unsigned case.  */
1917       tree res = int_const_binop_1 (code, arg0, arg1, -1);
1918       if (res != NULL_TREE)
1919 	return res;
1920     }
1921 
1922   return fold_build2_loc (loc, code, type, arg0, arg1);
1923 }
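
/* Usage sketch (added commentary): with both operands constant, e.g.

     size_binop (PLUS_EXPR, size_int (4), size_int (8))

   the sum folds immediately to size_int (12), with overflow tracked
   even though sizetype is unsigned; the identities above additionally
   let 0 + X, X + 0, X - 0 and 1 * X return X untouched.  */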
1924 
1925 /* Given two values, either both of sizetype or both of bitsizetype,
1926    compute the difference between the two values.  Return the value
1927    in signed type corresponding to the type of the operands.  */
1928 
1929 tree
1930 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1931 {
1932   tree type = TREE_TYPE (arg0);
1933   tree ctype;
1934 
1935   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1936 				       TREE_TYPE (arg1)));
1937 
1938   /* If the type is already signed, just do the simple thing.  */
1939   if (!TYPE_UNSIGNED (type))
1940     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1941 
1942   if (type == sizetype)
1943     ctype = ssizetype;
1944   else if (type == bitsizetype)
1945     ctype = sbitsizetype;
1946   else
1947     ctype = signed_type_for (type);
1948 
1949   /* If either operand is not a constant, do the conversions to the signed
1950      type and subtract.  The hardware will do the right thing with any
1951      overflow in the subtraction.  */
1952   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1953     return size_binop_loc (loc, MINUS_EXPR,
1954 			   fold_convert_loc (loc, ctype, arg0),
1955 			   fold_convert_loc (loc, ctype, arg1));
1956 
1957   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1958      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1959      overflow) and negate (which can't either).  Special-case a result
1960      of zero while we're here.  */
1961   if (tree_int_cst_equal (arg0, arg1))
1962     return build_int_cst (ctype, 0);
1963   else if (tree_int_cst_lt (arg1, arg0))
1964     return fold_convert_loc (loc, ctype,
1965 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1966   else
1967     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1968 			   fold_convert_loc (loc, ctype,
1969 					     size_binop_loc (loc,
1970 							     MINUS_EXPR,
1971 							     arg1, arg0)));
1972 }
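
/* Worked example (added commentary): for unsigned sizetype operands,

     size_diffop_loc (loc, size_int (2), size_int (5))

   computes 5 - 2 = 3, converts that to ssizetype and subtracts it
   from zero, yielding an ssizetype constant -3 without relying on
   unsigned wraparound.  */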
1973 
1974 /* A subroutine of fold_convert_const handling conversions of an
1975    INTEGER_CST to another integer type.  */
1976 
1977 static tree
1978 fold_convert_const_int_from_int (tree type, const_tree arg1)
1979 {
1980   /* Given an integer constant, make a new constant with the new type,
1981      appropriately sign-extended or truncated.  Use widest_int
1982      so that any extension is done according to ARG1's type.  */
1983   return force_fit_type (type, wi::to_widest (arg1),
1984 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1985 			 TREE_OVERFLOW (arg1));
1986 }
1987 
1988 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1989    to an integer type.  */
1990 
1991 static tree
1992 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1993 {
1994   bool overflow = false;
1995   tree t;
1996 
1997   /* The following code implements the floating point to integer
1998      conversion rules required by the Java Language Specification:
1999      IEEE NaNs are mapped to zero and values that overflow
2000      the target precision saturate, i.e. values greater than
2001      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2002      are mapped to INT_MIN.  These semantics are allowed by the
2003      C and C++ standards that simply state that the behavior of
2004      FP-to-integer conversion is unspecified upon overflow.  */
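
  /* For example (added commentary): under these rules folding
     (int) 1.0e30 yields INT_MAX and (int) -1.0e30 yields INT_MIN,
     both with TREE_OVERFLOW set, while (int) NaN yields 0, likewise
     flagged as overflowed.  */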
2005 
2006   wide_int val;
2007   REAL_VALUE_TYPE r;
2008   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2009 
2010   switch (code)
2011     {
2012     case FIX_TRUNC_EXPR:
2013       real_trunc (&r, VOIDmode, &x);
2014       break;
2015 
2016     default:
2017       gcc_unreachable ();
2018     }
2019 
2020   /* If R is NaN, return zero and show we have an overflow.  */
2021   if (REAL_VALUE_ISNAN (r))
2022     {
2023       overflow = true;
2024       val = wi::zero (TYPE_PRECISION (type));
2025     }
2026 
2027   /* See if R is less than the lower bound or greater than the
2028      upper bound.  */
2029 
2030   if (! overflow)
2031     {
2032       tree lt = TYPE_MIN_VALUE (type);
2033       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2034       if (real_less (&r, &l))
2035 	{
2036 	  overflow = true;
2037 	  val = wi::to_wide (lt);
2038 	}
2039     }
2040 
2041   if (! overflow)
2042     {
2043       tree ut = TYPE_MAX_VALUE (type);
2044       if (ut)
2045 	{
2046 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 	  if (real_less (&u, &r))
2048 	    {
2049 	      overflow = true;
2050 	      val = wi::to_wide (ut);
2051 	    }
2052 	}
2053     }
2054 
2055   if (! overflow)
2056     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2057 
2058   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2059   return t;
2060 }
2061 
2062 /* A subroutine of fold_convert_const handling conversions of a
2063    FIXED_CST to an integer type.  */
2064 
2065 static tree
2066 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2067 {
2068   tree t;
2069   double_int temp, temp_trunc;
2070   scalar_mode mode;
2071 
2072   /* Right shift FIXED_CST to temp by fbit.  */
2073   temp = TREE_FIXED_CST (arg1).data;
2074   mode = TREE_FIXED_CST (arg1).mode;
2075   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2076     {
2077       temp = temp.rshift (GET_MODE_FBIT (mode),
2078 			  HOST_BITS_PER_DOUBLE_INT,
2079 			  SIGNED_FIXED_POINT_MODE_P (mode));
2080 
2081       /* Left shift temp to temp_trunc by fbit.  */
2082       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2083 				HOST_BITS_PER_DOUBLE_INT,
2084 				SIGNED_FIXED_POINT_MODE_P (mode));
2085     }
2086   else
2087     {
2088       temp = double_int_zero;
2089       temp_trunc = double_int_zero;
2090     }
2091 
2092   /* If FIXED_CST is negative, we need to round the value toward 0
2093      by adding 1 to TEMP when the discarded fractional bits are nonzero.  */
2094   if (SIGNED_FIXED_POINT_MODE_P (mode)
2095       && temp_trunc.is_negative ()
2096       && TREE_FIXED_CST (arg1).data != temp_trunc)
2097     temp += double_int_one;
2098 
2099   /* Given a fixed-point constant, make new constant with new type,
2100      appropriately sign-extended or truncated.  */
2101   t = force_fit_type (type, temp, -1,
2102 		      (temp.is_negative ()
2103 		       && (TYPE_UNSIGNED (type)
2104 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2105 		      | TREE_OVERFLOW (arg1));
2106 
2107   return t;
2108 }
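
/* Worked example (added commentary): for a signed fixed-point value
   of -1.5, the arithmetic right shift above yields -2 (rounding
   toward negative infinity); the discarded fractional bits are
   nonzero, so 1 is added back, giving -1, i.e. truncation toward
   zero.  */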
2109 
2110 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2111    to another floating point type.  */
2112 
2113 static tree
2114 fold_convert_const_real_from_real (tree type, const_tree arg1)
2115 {
2116   REAL_VALUE_TYPE value;
2117   tree t;
2118 
2119   /* Don't perform the operation if flag_signaling_nans is on
2120      and the operand is a signaling NaN.  */
2121   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2122       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2123     return NULL_TREE;
2124 
2125   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2126   t = build_real (type, value);
2127 
2128   /* If converting an infinity or NAN to a representation that doesn't
2129      have one, set the overflow bit so that we can produce some kind of
2130      error message at the appropriate point if necessary.  It's not the
2131      most user-friendly message, but it's better than nothing.  */
2132   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2133       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2134     TREE_OVERFLOW (t) = 1;
2135   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2136 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2137     TREE_OVERFLOW (t) = 1;
2138   /* Regular overflow, conversion produced an infinity in a mode that
2139      can't represent them.  */
2140   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2141 	   && REAL_VALUE_ISINF (value)
2142 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2143     TREE_OVERFLOW (t) = 1;
2144   else
2145     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2146   return t;
2147 }
2148 
2149 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2150    to a floating point type.  */
2151 
2152 static tree
2153 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2154 {
2155   REAL_VALUE_TYPE value;
2156   tree t;
2157 
2158   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2159 			   &TREE_FIXED_CST (arg1));
2160   t = build_real (type, value);
2161 
2162   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2163   return t;
2164 }
2165 
2166 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2167    to another fixed-point type.  */
2168 
2169 static tree
2170 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2171 {
2172   FIXED_VALUE_TYPE value;
2173   tree t;
2174   bool overflow_p;
2175 
2176   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2177 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2178   t = build_fixed (type, value);
2179 
2180   /* Propagate overflow flags.  */
2181   if (overflow_p | TREE_OVERFLOW (arg1))
2182     TREE_OVERFLOW (t) = 1;
2183   return t;
2184 }
2185 
2186 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2187    to a fixed-point type.  */
2188 
2189 static tree
2190 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2191 {
2192   FIXED_VALUE_TYPE value;
2193   tree t;
2194   bool overflow_p;
2195   double_int di;
2196 
2197   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2198 
2199   di.low = TREE_INT_CST_ELT (arg1, 0);
2200   if (TREE_INT_CST_NUNITS (arg1) == 1)
2201     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2202   else
2203     di.high = TREE_INT_CST_ELT (arg1, 1);
2204 
2205   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2206 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2207 				       TYPE_SATURATING (type));
2208   t = build_fixed (type, value);
2209 
2210   /* Propagate overflow flags.  */
2211   if (overflow_p | TREE_OVERFLOW (arg1))
2212     TREE_OVERFLOW (t) = 1;
2213   return t;
2214 }
2215 
2216 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2217    to a fixed-point type.  */
2218 
2219 static tree
2220 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2221 {
2222   FIXED_VALUE_TYPE value;
2223   tree t;
2224   bool overflow_p;
2225 
2226   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2227 					&TREE_REAL_CST (arg1),
2228 					TYPE_SATURATING (type));
2229   t = build_fixed (type, value);
2230 
2231   /* Propagate overflow flags.  */
2232   if (overflow_p | TREE_OVERFLOW (arg1))
2233     TREE_OVERFLOW (t) = 1;
2234   return t;
2235 }
2236 
2237 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2238    type TYPE.  If no simplification can be done return NULL_TREE.  */
2239 
2240 static tree
2241 fold_convert_const (enum tree_code code, tree type, tree arg1)
2242 {
2243   tree arg_type = TREE_TYPE (arg1);
2244   if (arg_type == type)
2245     return arg1;
2246 
2247   /* We can't widen types, since the runtime value could overflow the
2248      original type before being extended to the new type.  */
2249   if (POLY_INT_CST_P (arg1)
2250       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2251       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2252     return build_poly_int_cst (type,
2253 			       poly_wide_int::from (poly_int_cst_value (arg1),
2254 						    TYPE_PRECISION (type),
2255 						    TYPE_SIGN (arg_type)));
2256 
2257   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2258       || TREE_CODE (type) == OFFSET_TYPE)
2259     {
2260       if (TREE_CODE (arg1) == INTEGER_CST)
2261 	return fold_convert_const_int_from_int (type, arg1);
2262       else if (TREE_CODE (arg1) == REAL_CST)
2263 	return fold_convert_const_int_from_real (code, type, arg1);
2264       else if (TREE_CODE (arg1) == FIXED_CST)
2265 	return fold_convert_const_int_from_fixed (type, arg1);
2266     }
2267   else if (TREE_CODE (type) == REAL_TYPE)
2268     {
2269       if (TREE_CODE (arg1) == INTEGER_CST)
2270 	return build_real_from_int_cst (type, arg1);
2271       else if (TREE_CODE (arg1) == REAL_CST)
2272 	return fold_convert_const_real_from_real (type, arg1);
2273       else if (TREE_CODE (arg1) == FIXED_CST)
2274 	return fold_convert_const_real_from_fixed (type, arg1);
2275     }
2276   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2277     {
2278       if (TREE_CODE (arg1) == FIXED_CST)
2279 	return fold_convert_const_fixed_from_fixed (type, arg1);
2280       else if (TREE_CODE (arg1) == INTEGER_CST)
2281 	return fold_convert_const_fixed_from_int (type, arg1);
2282       else if (TREE_CODE (arg1) == REAL_CST)
2283 	return fold_convert_const_fixed_from_real (type, arg1);
2284     }
2285   else if (TREE_CODE (type) == VECTOR_TYPE)
2286     {
2287       if (TREE_CODE (arg1) == VECTOR_CST
2288 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2289 	{
2290 	  tree elttype = TREE_TYPE (type);
2291 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2292 	  /* We can't handle steps directly when extending, since the
2293 	     values need to wrap at the original precision first.  */
2294 	  bool step_ok_p
2295 	    = (INTEGRAL_TYPE_P (elttype)
2296 	       && INTEGRAL_TYPE_P (arg1_elttype)
2297 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2298 	  tree_vector_builder v;
2299 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2300 	    return NULL_TREE;
2301 	  unsigned int len = v.encoded_nelts ();
2302 	  for (unsigned int i = 0; i < len; ++i)
2303 	    {
2304 	      tree elt = VECTOR_CST_ELT (arg1, i);
2305 	      tree cvt = fold_convert_const (code, elttype, elt);
2306 	      if (cvt == NULL_TREE)
2307 		return NULL_TREE;
2308 	      v.quick_push (cvt);
2309 	    }
2310 	  return v.build ();
2311 	}
2312     }
2313   return NULL_TREE;
2314 }
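
/* Usage sketch (added commentary): narrowing an int constant to a
   16-bit type, e.g.

     fold_convert_const (NOP_EXPR, short_integer_type_node,
			 build_int_cst (integer_type_node, 70000))

   dispatches to fold_convert_const_int_from_int and yields the
   truncated INTEGER_CST 4464 (70000 mod 2**16), the lost bits being
   recorded via TREE_OVERFLOW by force_fit_type.  */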
2315 
2316 /* Construct a vector of zero elements of vector type TYPE.  */
2317 
2318 static tree
2319 build_zero_vector (tree type)
2320 {
2321   tree t;
2322 
2323   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2324   return build_vector_from_val (type, t);
2325 }
2326 
2327 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2328 
2329 bool
2330 fold_convertible_p (const_tree type, const_tree arg)
2331 {
2332   tree orig = TREE_TYPE (arg);
2333 
2334   if (type == orig)
2335     return true;
2336 
2337   if (TREE_CODE (arg) == ERROR_MARK
2338       || TREE_CODE (type) == ERROR_MARK
2339       || TREE_CODE (orig) == ERROR_MARK)
2340     return false;
2341 
2342   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2343     return true;
2344 
2345   switch (TREE_CODE (type))
2346     {
2347     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2348     case POINTER_TYPE: case REFERENCE_TYPE:
2349     case OFFSET_TYPE:
2350       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2351 	      || TREE_CODE (orig) == OFFSET_TYPE);
2352 
2353     case REAL_TYPE:
2354     case FIXED_POINT_TYPE:
2355     case VECTOR_TYPE:
2356     case VOID_TYPE:
2357       return TREE_CODE (type) == TREE_CODE (orig);
2358 
2359     default:
2360       return false;
2361     }
2362 }
2363 
2364 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2365    simple conversions in preference to calling the front-end's convert.  */
2366 
2367 tree
2368 fold_convert_loc (location_t loc, tree type, tree arg)
2369 {
2370   tree orig = TREE_TYPE (arg);
2371   tree tem;
2372 
2373   if (type == orig)
2374     return arg;
2375 
2376   if (TREE_CODE (arg) == ERROR_MARK
2377       || TREE_CODE (type) == ERROR_MARK
2378       || TREE_CODE (orig) == ERROR_MARK)
2379     return error_mark_node;
2380 
2381   switch (TREE_CODE (type))
2382     {
2383     case POINTER_TYPE:
2384     case REFERENCE_TYPE:
2385       /* Handle conversions between pointers to different address spaces.  */
2386       if (POINTER_TYPE_P (orig)
2387 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2388 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2389 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2390       /* fall through */
2391 
2392     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2393     case OFFSET_TYPE:
2394       if (TREE_CODE (arg) == INTEGER_CST)
2395 	{
2396 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2397 	  if (tem != NULL_TREE)
2398 	    return tem;
2399 	}
2400       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2401 	  || TREE_CODE (orig) == OFFSET_TYPE)
2402 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2403       if (TREE_CODE (orig) == COMPLEX_TYPE)
2404 	return fold_convert_loc (loc, type,
2405 				 fold_build1_loc (loc, REALPART_EXPR,
2406 						  TREE_TYPE (orig), arg));
2407       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2408 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2409       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2410 
2411     case REAL_TYPE:
2412       if (TREE_CODE (arg) == INTEGER_CST)
2413 	{
2414 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2415 	  if (tem != NULL_TREE)
2416 	    return tem;
2417 	}
2418       else if (TREE_CODE (arg) == REAL_CST)
2419 	{
2420 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2421 	  if (tem != NULL_TREE)
2422 	    return tem;
2423 	}
2424       else if (TREE_CODE (arg) == FIXED_CST)
2425 	{
2426 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2427 	  if (tem != NULL_TREE)
2428 	    return tem;
2429 	}
2430 
2431       switch (TREE_CODE (orig))
2432 	{
2433 	case INTEGER_TYPE:
2434 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2435 	case POINTER_TYPE: case REFERENCE_TYPE:
2436 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2437 
2438 	case REAL_TYPE:
2439 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2440 
2441 	case FIXED_POINT_TYPE:
2442 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2443 
2444 	case COMPLEX_TYPE:
2445 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2446 	  return fold_convert_loc (loc, type, tem);
2447 
2448 	default:
2449 	  gcc_unreachable ();
2450 	}
2451 
2452     case FIXED_POINT_TYPE:
2453       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2454 	  || TREE_CODE (arg) == REAL_CST)
2455 	{
2456 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2457 	  if (tem != NULL_TREE)
2458 	    goto fold_convert_exit;
2459 	}
2460 
2461       switch (TREE_CODE (orig))
2462 	{
2463 	case FIXED_POINT_TYPE:
2464 	case INTEGER_TYPE:
2465 	case ENUMERAL_TYPE:
2466 	case BOOLEAN_TYPE:
2467 	case REAL_TYPE:
2468 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2469 
2470 	case COMPLEX_TYPE:
2471 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2472 	  return fold_convert_loc (loc, type, tem);
2473 
2474 	default:
2475 	  gcc_unreachable ();
2476 	}
2477 
2478     case COMPLEX_TYPE:
2479       switch (TREE_CODE (orig))
2480 	{
2481 	case INTEGER_TYPE:
2482 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2483 	case POINTER_TYPE: case REFERENCE_TYPE:
2484 	case REAL_TYPE:
2485 	case FIXED_POINT_TYPE:
2486 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2487 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2488 			      fold_convert_loc (loc, TREE_TYPE (type),
2489 					    integer_zero_node));
2490 	case COMPLEX_TYPE:
2491 	  {
2492 	    tree rpart, ipart;
2493 
2494 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2495 	      {
2496 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2497 				      TREE_OPERAND (arg, 0));
2498 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2499 				      TREE_OPERAND (arg, 1));
2500 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2501 	      }
2502 
2503 	    arg = save_expr (arg);
2504 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2505 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2506 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2507 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2508 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2509 	  }
2510 
2511 	default:
2512 	  gcc_unreachable ();
2513 	}
2514 
2515     case VECTOR_TYPE:
2516       if (integer_zerop (arg))
2517 	return build_zero_vector (type);
2518       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2519       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 		  || TREE_CODE (orig) == VECTOR_TYPE);
2521       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2522 
2523     case VOID_TYPE:
2524       tem = fold_ignored_result (arg);
2525       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2526 
2527     default:
2528       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2529 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2530       gcc_unreachable ();
2531     }
2532  fold_convert_exit:
2533   protected_set_expr_location_unshare (tem, loc);
2534   return tem;
2535 }
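
/* Usage sketch (added commentary): converting an integer expression I
   to a complex type, e.g.

     fold_convert_loc (loc, complex_double_type_node, i)

   takes the COMPLEX_TYPE/INTEGER_TYPE path above and builds
   COMPLEX_EXPR <(double) i, 0.0>, i.e. the converted value with a
   zero imaginary part.  */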
2536 
2537 /* Return false if expr can be assumed not to be an lvalue, true
2538    otherwise.  */
2539 
2540 static bool
2541 maybe_lvalue_p (const_tree x)
2542 {
2543   /* We only need to wrap lvalue tree codes.  */
2544   switch (TREE_CODE (x))
2545   {
2546   case VAR_DECL:
2547   case PARM_DECL:
2548   case RESULT_DECL:
2549   case LABEL_DECL:
2550   case FUNCTION_DECL:
2551   case SSA_NAME:
2552 
2553   case COMPONENT_REF:
2554   case MEM_REF:
2555   case INDIRECT_REF:
2556   case ARRAY_REF:
2557   case ARRAY_RANGE_REF:
2558   case BIT_FIELD_REF:
2559   case OBJ_TYPE_REF:
2560 
2561   case REALPART_EXPR:
2562   case IMAGPART_EXPR:
2563   case PREINCREMENT_EXPR:
2564   case PREDECREMENT_EXPR:
2565   case SAVE_EXPR:
2566   case TRY_CATCH_EXPR:
2567   case WITH_CLEANUP_EXPR:
2568   case COMPOUND_EXPR:
2569   case MODIFY_EXPR:
2570   case TARGET_EXPR:
2571   case COND_EXPR:
2572   case BIND_EXPR:
2573     break;
2574 
2575   default:
2576     /* Assume the worst for front-end tree codes.  */
2577     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2578       break;
2579     return false;
2580   }
2581 
2582   return true;
2583 }
2584 
2585 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2586 
2587 tree
2588 non_lvalue_loc (location_t loc, tree x)
2589 {
2590   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2591      us.  */
2592   if (in_gimple_form)
2593     return x;
2594 
2595   if (! maybe_lvalue_p (x))
2596     return x;
2597   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2598 }
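
/* For example (added commentary): in GENERIC, non_lvalue_loc wraps a
   VAR_DECL X as NON_LVALUE_EXPR <X>, but returns an expression such
   as X + 1 unchanged, since maybe_lvalue_p already rejects
   PLUS_EXPR.  */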
2599 
2600 /* When pedantic, return an expr equal to X but certainly not valid as a
2601    pedantic lvalue.  Otherwise, return X.  */
2602 
2603 static tree
2604 pedantic_non_lvalue_loc (location_t loc, tree x)
2605 {
2606   return protected_set_expr_location_unshare (x, loc);
2607 }
2608 
2609 /* Given a tree comparison code, return the code that is the logical inverse.
2610    It is generally not safe to do this for floating-point comparisons, except
2611    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2612    ERROR_MARK in this case.  */
2613 
2614 enum tree_code
2615 invert_tree_comparison (enum tree_code code, bool honor_nans)
2616 {
2617   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2618       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2619     return ERROR_MARK;
2620 
2621   switch (code)
2622     {
2623     case EQ_EXPR:
2624       return NE_EXPR;
2625     case NE_EXPR:
2626       return EQ_EXPR;
2627     case GT_EXPR:
2628       return honor_nans ? UNLE_EXPR : LE_EXPR;
2629     case GE_EXPR:
2630       return honor_nans ? UNLT_EXPR : LT_EXPR;
2631     case LT_EXPR:
2632       return honor_nans ? UNGE_EXPR : GE_EXPR;
2633     case LE_EXPR:
2634       return honor_nans ? UNGT_EXPR : GT_EXPR;
2635     case LTGT_EXPR:
2636       return UNEQ_EXPR;
2637     case UNEQ_EXPR:
2638       return LTGT_EXPR;
2639     case UNGT_EXPR:
2640       return LE_EXPR;
2641     case UNGE_EXPR:
2642       return LT_EXPR;
2643     case UNLT_EXPR:
2644       return GE_EXPR;
2645     case UNLE_EXPR:
2646       return GT_EXPR;
2647     case ORDERED_EXPR:
2648       return UNORDERED_EXPR;
2649     case UNORDERED_EXPR:
2650       return ORDERED_EXPR;
2651     default:
2652       gcc_unreachable ();
2653     }
2654 }
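
/* For example (added commentary):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   the latter only when !flag_trapping_math; with trapping math the
   honor-NaNs case returns ERROR_MARK instead, because a < b and its
   unordered inverse raise exceptions under different conditions.  */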
2655 
2656 /* Similar, but return the comparison that results if the operands are
2657    swapped.  This is safe for floating-point.  */
2658 
2659 enum tree_code
2660 swap_tree_comparison (enum tree_code code)
2661 {
2662   switch (code)
2663     {
2664     case EQ_EXPR:
2665     case NE_EXPR:
2666     case ORDERED_EXPR:
2667     case UNORDERED_EXPR:
2668     case LTGT_EXPR:
2669     case UNEQ_EXPR:
2670       return code;
2671     case GT_EXPR:
2672       return LT_EXPR;
2673     case GE_EXPR:
2674       return LE_EXPR;
2675     case LT_EXPR:
2676       return GT_EXPR;
2677     case LE_EXPR:
2678       return GE_EXPR;
2679     case UNGT_EXPR:
2680       return UNLT_EXPR;
2681     case UNGE_EXPR:
2682       return UNLE_EXPR;
2683     case UNLT_EXPR:
2684       return UNGT_EXPR;
2685     case UNLE_EXPR:
2686       return UNGE_EXPR;
2687     default:
2688       gcc_unreachable ();
2689     }
2690 }
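
/* For example (added commentary): swap_tree_comparison (LE_EXPR)
   returns GE_EXPR, reflecting that a <= b and b >= a are the same
   test even for NaN operands, which is why no honor_nans argument is
   needed here.  */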
2691 
2692 
2693 /* Convert a comparison tree code from an enum tree_code representation
2694    into a compcode bit-based encoding.  This function is the inverse of
2695    compcode_to_comparison.  */
2696 
2697 static enum comparison_code
2698 comparison_to_compcode (enum tree_code code)
2699 {
2700   switch (code)
2701     {
2702     case LT_EXPR:
2703       return COMPCODE_LT;
2704     case EQ_EXPR:
2705       return COMPCODE_EQ;
2706     case LE_EXPR:
2707       return COMPCODE_LE;
2708     case GT_EXPR:
2709       return COMPCODE_GT;
2710     case NE_EXPR:
2711       return COMPCODE_NE;
2712     case GE_EXPR:
2713       return COMPCODE_GE;
2714     case ORDERED_EXPR:
2715       return COMPCODE_ORD;
2716     case UNORDERED_EXPR:
2717       return COMPCODE_UNORD;
2718     case UNLT_EXPR:
2719       return COMPCODE_UNLT;
2720     case UNEQ_EXPR:
2721       return COMPCODE_UNEQ;
2722     case UNLE_EXPR:
2723       return COMPCODE_UNLE;
2724     case UNGT_EXPR:
2725       return COMPCODE_UNGT;
2726     case LTGT_EXPR:
2727       return COMPCODE_LTGT;
2728     case UNGE_EXPR:
2729       return COMPCODE_UNGE;
2730     default:
2731       gcc_unreachable ();
2732     }
2733 }
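
/* The encoding makes bitwise AND/OR mirror logical combination of
   predicates (added commentary): COMPCODE_LT (1), COMPCODE_EQ (2) and
   COMPCODE_GT (4) occupy separate bits, so e.g.

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ   (3 == 1 | 2)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT   (5 == 1 | 4)

   which combine_comparisons below exploits directly.  */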
2734 
2735 /* Convert a compcode bit-based encoding of a comparison operator back
2736    to GCC's enum tree_code representation.  This function is the
2737    inverse of comparison_to_compcode.  */
2738 
2739 static enum tree_code
2740 compcode_to_comparison (enum comparison_code code)
2741 {
2742   switch (code)
2743     {
2744     case COMPCODE_LT:
2745       return LT_EXPR;
2746     case COMPCODE_EQ:
2747       return EQ_EXPR;
2748     case COMPCODE_LE:
2749       return LE_EXPR;
2750     case COMPCODE_GT:
2751       return GT_EXPR;
2752     case COMPCODE_NE:
2753       return NE_EXPR;
2754     case COMPCODE_GE:
2755       return GE_EXPR;
2756     case COMPCODE_ORD:
2757       return ORDERED_EXPR;
2758     case COMPCODE_UNORD:
2759       return UNORDERED_EXPR;
2760     case COMPCODE_UNLT:
2761       return UNLT_EXPR;
2762     case COMPCODE_UNEQ:
2763       return UNEQ_EXPR;
2764     case COMPCODE_UNLE:
2765       return UNLE_EXPR;
2766     case COMPCODE_UNGT:
2767       return UNGT_EXPR;
2768     case COMPCODE_LTGT:
2769       return LTGT_EXPR;
2770     case COMPCODE_UNGE:
2771       return UNGE_EXPR;
2772     default:
2773       gcc_unreachable ();
2774     }
2775 }
2776 
2777 /* Return a tree for the comparison which is the combination of
2778    doing the AND or OR (depending on CODE) of the two operations LCODE
2779    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2780    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2781    if this makes the transformation invalid.  */
2782 
2783 tree
2784 combine_comparisons (location_t loc,
2785 		     enum tree_code code, enum tree_code lcode,
2786 		     enum tree_code rcode, tree truth_type,
2787 		     tree ll_arg, tree lr_arg)
2788 {
2789   bool honor_nans = HONOR_NANS (ll_arg);
2790   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2791   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2792   int compcode;
2793 
2794   switch (code)
2795     {
2796     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2797       compcode = lcompcode & rcompcode;
2798       break;
2799 
2800     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2801       compcode = lcompcode | rcompcode;
2802       break;
2803 
2804     default:
2805       return NULL_TREE;
2806     }
2807 
2808   if (!honor_nans)
2809     {
2810       /* Eliminate unordered comparisons, as well as LTGT and ORD
2811 	 which are not used unless the mode has NaNs.  */
2812       compcode &= ~COMPCODE_UNORD;
2813       if (compcode == COMPCODE_LTGT)
2814 	compcode = COMPCODE_NE;
2815       else if (compcode == COMPCODE_ORD)
2816 	compcode = COMPCODE_TRUE;
2817     }
2818    else if (flag_trapping_math)
2819      {
2820 	/* Check that the original operation and the optimized ones will trap
2821 	   under the same condition.  */
2822 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2823 		     && (lcompcode != COMPCODE_EQ)
2824 		     && (lcompcode != COMPCODE_ORD);
2825 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2826 		     && (rcompcode != COMPCODE_EQ)
2827 		     && (rcompcode != COMPCODE_ORD);
2828 	bool trap = (compcode & COMPCODE_UNORD) == 0
2829 		    && (compcode != COMPCODE_EQ)
2830 		    && (compcode != COMPCODE_ORD);
2831 
2832         /* In a short-circuited boolean expression the LHS might be
2833 	   such that the RHS, if evaluated, will never trap.  For
2834 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2835 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2836 	   example, the expression above will never trap, hence
2837 	   optimizing it to x < y would be invalid).  */
2838         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2839             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2840           rtrap = false;
2841 
2842         /* If the comparison was short-circuited, and only the RHS
2843 	   trapped, we may now generate a spurious trap.  */
2844 	if (rtrap && !ltrap
2845 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2846 	  return NULL_TREE;
2847 
2848 	/* If we changed the conditions that cause a trap, we lose.  */
2849 	if ((ltrap || rtrap) != trap)
2850 	  return NULL_TREE;
2851       }
2852 
2853   if (compcode == COMPCODE_TRUE)
2854     return constant_boolean_node (true, truth_type);
2855   else if (compcode == COMPCODE_FALSE)
2856     return constant_boolean_node (false, truth_type);
2857   else
2858     {
2859       enum tree_code tcode;
2860 
2861       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2862       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2863     }
2864 }
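
/* Worked example (added commentary): for integer operands, where
   honor_nans is false,

     (x < y) && (x > y)   ->  COMPCODE_LT & COMPCODE_GT
			  ==  COMPCODE_FALSE  ->  constant false
     (x < y) || (x == y)  ->  COMPCODE_LT | COMPCODE_EQ
			  ==  COMPCODE_LE     ->  x <= y  */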
2865 
2866 /* Return nonzero if two operands (typically of the same tree node)
2867    are necessarily equal. FLAGS modifies behavior as follows:
2868 
2869    If OEP_ONLY_CONST is set, only return nonzero for constants.
2870    This function tests whether the operands are indistinguishable;
2871    it does not test whether they are equal using C's == operation.
2872    The distinction is important for IEEE floating point, because
2873    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2874    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2875 
2876    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2877    even though it may hold multiple values during a function.
2878    This is because a GCC tree node guarantees that nothing else is
2879    executed between the evaluation of its "operands" (which may often
2880    be evaluated in arbitrary order).  Hence if the operands themselves
2881    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2882    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2883    unset means assuming isochronic (or instantaneous) tree equivalence.
2884    Unless comparing arbitrary expression trees, such as from different
2885    statements, this flag can usually be left unset.
2886 
2887    If OEP_PURE_SAME is set, then pure functions with identical arguments
2888    are considered the same.  It is used when the caller has other ways
2889    to ensure that global memory is unchanged in between.
2890 
2891    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2892    not values of expressions.
2893 
2894    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2895    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2896 
2897    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2898    any operand with side effects.  This is unnecessarily conservative in the
2899    case we know that arg0 and arg1 are in disjoint code paths (such as in
2900    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2901    addresses with TREE_CONSTANT flag set so we know that &var == &var
2902    even if var is volatile.  */
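
/* For example (added commentary): with FLAGS of zero, a + b and b + a
   compare equal through the commutative tcc_binary handling below,
   while two textually identical calls to a pure function compare
   equal only if OEP_PURE_SAME is set.  */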
2903 
2904 int
2905 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2906 {
2907   /* When checking, verify at the outermost operand_equal_p call that
2908      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2909      hash value.  */
2910   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2911     {
2912       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2913 	{
2914 	  if (arg0 != arg1)
2915 	    {
2916 	      inchash::hash hstate0 (0), hstate1 (0);
2917 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2918 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2919 	      hashval_t h0 = hstate0.end ();
2920 	      hashval_t h1 = hstate1.end ();
2921 	      gcc_assert (h0 == h1);
2922 	    }
2923 	  return 1;
2924 	}
2925       else
2926 	return 0;
2927     }
2928 
2929   /* If either is ERROR_MARK, they aren't equal.  */
2930   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2931       || TREE_TYPE (arg0) == error_mark_node
2932       || TREE_TYPE (arg1) == error_mark_node)
2933     return 0;
2934 
2935   /* Similarly, if either does not have a type (like a released SSA name),
2936      they aren't equal.  */
2937   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2938     return 0;
2939 
2940   /* We cannot consider pointers to different address space equal.  */
2941   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2942       && POINTER_TYPE_P (TREE_TYPE (arg1))
2943       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2944 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2945     return 0;
2946 
2947   /* Check equality of integer constants before bailing out due to
2948      precision differences.  */
2949   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2950     {
2951       /* Address of INTEGER_CST is not defined; check that we did not forget
2952 	 to drop the OEP_ADDRESS_OF flags.  */
2953       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2954       return tree_int_cst_equal (arg0, arg1);
2955     }
2956 
2957   if (!(flags & OEP_ADDRESS_OF))
2958     {
2959       /* If both types don't have the same signedness, then we can't consider
2960 	 them equal.  We must check this before the STRIP_NOPS calls
2961 	 because they may change the signedness of the arguments.  As pointers
2962 	 strictly don't have a signedness, require either two pointers or
2963 	 two non-pointers as well.  */
2964       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2965 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2966 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2967 	return 0;
2968 
2969       /* If both types don't have the same precision, then it is not safe
2970 	 to strip NOPs.  */
2971       if (element_precision (TREE_TYPE (arg0))
2972 	  != element_precision (TREE_TYPE (arg1)))
2973 	return 0;
2974 
2975       STRIP_NOPS (arg0);
2976       STRIP_NOPS (arg1);
2977     }
2978 #if 0
2979   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2980      sanity check once the issue is solved.  */
2981   else
2982     /* Addresses of conversions and SSA_NAMEs (and many other things)
2983        are not defined.  Check that we did not forget to drop the
2984        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2985     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2986 			 && TREE_CODE (arg0) != SSA_NAME);
2987 #endif
2988 
2989   /* In case both args are comparisons but with different comparison
2990      code, try to swap the comparison operands of one arg to produce
2991      a match and compare that variant.  */
2992   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2993       && COMPARISON_CLASS_P (arg0)
2994       && COMPARISON_CLASS_P (arg1))
2995     {
2996       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2997 
2998       if (TREE_CODE (arg0) == swap_code)
2999 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3000 			        TREE_OPERAND (arg1, 1), flags)
3001 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3002 				   TREE_OPERAND (arg1, 0), flags);
3003     }
3004 
3005   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3006     {
3007       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3008       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3009 	;
3010       else if (flags & OEP_ADDRESS_OF)
3011 	{
3012 	  /* If we are interested in comparing addresses, ignore
3013 	     MEM_REF wrappings of the base that can appear just for
3014 	     TBAA reasons.  */
3015 	  if (TREE_CODE (arg0) == MEM_REF
3016 	      && DECL_P (arg1)
3017 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3018 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3019 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3020 	    return 1;
3021 	  else if (TREE_CODE (arg1) == MEM_REF
3022 		   && DECL_P (arg0)
3023 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3024 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3025 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3026 	    return 1;
3027 	  return 0;
3028 	}
3029       else
3030 	return 0;
3031     }
3032 
3033   /* When not checking addresses, this is needed for conversions and for
3034      COMPONENT_REF.  Might as well play it safe and always test this.  */
3035   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3036       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3037       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3038 	  && !(flags & OEP_ADDRESS_OF)))
3039     return 0;
3040 
3041   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3042      We don't care about side effects in that case because the SAVE_EXPR
3043      takes care of that for us. In all other cases, two expressions are
3044      equal if they have no side effects.  If we have two identical
3045      expressions with side effects that should be treated the same due
3046      to the only side effects being identical SAVE_EXPR's, that will
3047      be detected in the recursive calls below.
3048      If we are taking an invariant address of two identical objects
3049      they are necessarily equal as well.  */
3050   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3051       && (TREE_CODE (arg0) == SAVE_EXPR
3052 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3053 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3054     return 1;
3055 
3056   /* Next handle constant cases, those for which we can return 1 even
3057      if ONLY_CONST is set.  */
3058   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3059     switch (TREE_CODE (arg0))
3060       {
3061       case INTEGER_CST:
3062 	return tree_int_cst_equal (arg0, arg1);
3063 
3064       case FIXED_CST:
3065 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3066 				       TREE_FIXED_CST (arg1));
3067 
3068       case REAL_CST:
3069 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3070 	  return 1;
3071 
3072 
3073 	if (!HONOR_SIGNED_ZEROS (arg0))
3074 	  {
3075 	    /* If we do not distinguish between signed and unsigned zero,
3076 	       consider them equal.  */
3077 	    if (real_zerop (arg0) && real_zerop (arg1))
3078 	      return 1;
3079 	  }
3080 	return 0;
3081 
3082       case VECTOR_CST:
3083 	{
3084 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3085 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3086 	    return 0;
3087 
3088 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3089 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3090 	    return 0;
3091 
3092 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3093 	  for (unsigned int i = 0; i < count; ++i)
3094 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3095 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3096 	      return 0;
3097 	  return 1;
3098 	}
3099 
3100       case COMPLEX_CST:
3101 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3102 				 flags)
3103 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3104 				    flags));
3105 
3106       case STRING_CST:
3107 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3108 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3109 			      TREE_STRING_POINTER (arg1),
3110 			      TREE_STRING_LENGTH (arg0)));
3111 
3112       case ADDR_EXPR:
3113 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3114 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3115 				flags | OEP_ADDRESS_OF
3116 				| OEP_MATCH_SIDE_EFFECTS);
3117       case CONSTRUCTOR:
3118 	/* In GIMPLE empty constructors are allowed in initializers of
3119 	   aggregates.  */
3120 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3121       default:
3122 	break;
3123       }
3124 
3125   if (flags & OEP_ONLY_CONST)
3126     return 0;
3127 
3128 /* Define macros to test an operand from arg0 and arg1 for equality and a
3129    variant that allows null and views null as being different from any
3130    non-null value.  In the latter case, if either is null, then both
3131    must be; otherwise, do the normal comparison.  */
3132 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3133 				    TREE_OPERAND (arg1, N), flags)
3134 
3135 #define OP_SAME_WITH_NULL(N)				\
3136   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3137    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3138 
3139   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3140     {
3141     case tcc_unary:
3142       /* Two conversions are equal only if signedness and modes match.  */
3143       switch (TREE_CODE (arg0))
3144         {
3145 	CASE_CONVERT:
3146         case FIX_TRUNC_EXPR:
3147 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3148 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3149 	    return 0;
3150 	  break;
3151 	default:
3152 	  break;
3153 	}
3154 
3155       return OP_SAME (0);
3156 
3157 
3158     case tcc_comparison:
3159     case tcc_binary:
3160       if (OP_SAME (0) && OP_SAME (1))
3161 	return 1;
3162 
3163       /* For commutative ops, allow the other order.  */
3164       return (commutative_tree_code (TREE_CODE (arg0))
3165 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3166 				  TREE_OPERAND (arg1, 1), flags)
3167 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3168 				  TREE_OPERAND (arg1, 0), flags));
3169 
3170     case tcc_reference:
3171       /* If either of the pointer (or reference) expressions we are
3172 	 dereferencing contain a side effect, these cannot be equal,
3173 	 but their addresses can be.  */
3174       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3175 	  && (TREE_SIDE_EFFECTS (arg0)
3176 	      || TREE_SIDE_EFFECTS (arg1)))
3177 	return 0;
3178 
3179       switch (TREE_CODE (arg0))
3180 	{
3181 	case INDIRECT_REF:
3182 	  if (!(flags & OEP_ADDRESS_OF)
3183 	      && (TYPE_ALIGN (TREE_TYPE (arg0))
3184 		  != TYPE_ALIGN (TREE_TYPE (arg1))))
3185 	    return 0;
3186 	  flags &= ~OEP_ADDRESS_OF;
3187 	  return OP_SAME (0);
3188 
3189 	case IMAGPART_EXPR:
3190 	  /* Require the same offset.  */
3191 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3192 				TYPE_SIZE (TREE_TYPE (arg1)),
3193 				flags & ~OEP_ADDRESS_OF))
3194 	    return 0;
3195 
3196 	/* Fallthru.  */
3197 	case REALPART_EXPR:
3198 	case VIEW_CONVERT_EXPR:
3199 	  return OP_SAME (0);
3200 
3201 	case TARGET_MEM_REF:
3202 	case MEM_REF:
3203 	  if (!(flags & OEP_ADDRESS_OF))
3204 	    {
3205 	      /* Require equal access sizes */
3206 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3207 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3208 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3209 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3210 					   TYPE_SIZE (TREE_TYPE (arg1)),
3211 					   flags)))
3212 		return 0;
3213 	      /* Verify that access happens in similar types.  */
3214 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3215 		return 0;
3216 	      /* Verify that accesses are TBAA compatible.  */
3217 	      if (!alias_ptr_types_compatible_p
3218 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3219 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3220 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3221 		      != MR_DEPENDENCE_CLIQUE (arg1))
3222 		  || (MR_DEPENDENCE_BASE (arg0)
3223 		      != MR_DEPENDENCE_BASE (arg1)))
3224 		return 0;
3225 	     /* Verify that alignment is compatible.  */
3226 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 		return 0;
3229 	    }
3230 	  flags &= ~OEP_ADDRESS_OF;
3231 	  return (OP_SAME (0) && OP_SAME (1)
3232 		  /* TARGET_MEM_REFs require equal extra operands.  */
3233 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3234 		      || (OP_SAME_WITH_NULL (2)
3235 			  && OP_SAME_WITH_NULL (3)
3236 			  && OP_SAME_WITH_NULL (4))));
3237 
3238 	case ARRAY_REF:
3239 	case ARRAY_RANGE_REF:
3240 	  if (!OP_SAME (0))
3241 	    return 0;
3242 	  flags &= ~OEP_ADDRESS_OF;
3243 	  /* Compare the array index by value if it is constant first as we
3244 	     may have different types but same value here.  */
3245 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3246 				       TREE_OPERAND (arg1, 1))
3247 		   || OP_SAME (1))
3248 		  && OP_SAME_WITH_NULL (2)
3249 		  && OP_SAME_WITH_NULL (3)
3250 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3251 		     we have to account for the offset of the ref.  */
3252 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3253 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3254 		      || (operand_equal_p (array_ref_low_bound
3255 					     (CONST_CAST_TREE (arg0)),
3256 					   array_ref_low_bound
3257 					     (CONST_CAST_TREE (arg1)), flags)
3258 			  && operand_equal_p (array_ref_element_size
3259 					        (CONST_CAST_TREE (arg0)),
3260 					      array_ref_element_size
3261 					        (CONST_CAST_TREE (arg1)),
3262 					      flags))));
3263 
3264 	case COMPONENT_REF:
3265 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3266 	     may be NULL when we're called to compare MEM_EXPRs.  */
3267 	  if (!OP_SAME_WITH_NULL (0)
3268 	      || !OP_SAME (1))
3269 	    return 0;
3270 	  flags &= ~OEP_ADDRESS_OF;
3271 	  return OP_SAME_WITH_NULL (2);
3272 
3273 	case BIT_FIELD_REF:
3274 	  if (!OP_SAME (0))
3275 	    return 0;
3276 	  flags &= ~OEP_ADDRESS_OF;
3277 	  return OP_SAME (1) && OP_SAME (2);
3278 
3279 	default:
3280 	  return 0;
3281 	}
3282 
3283     case tcc_expression:
3284       switch (TREE_CODE (arg0))
3285 	{
3286 	case ADDR_EXPR:
3287 	  /* Be sure we pass right ADDRESS_OF flag.  */
3288 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3289 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3290 				  TREE_OPERAND (arg1, 0),
3291 				  flags | OEP_ADDRESS_OF);
3292 
3293 	case TRUTH_NOT_EXPR:
3294 	  return OP_SAME (0);
3295 
3296 	case TRUTH_ANDIF_EXPR:
3297 	case TRUTH_ORIF_EXPR:
3298 	  return OP_SAME (0) && OP_SAME (1);
3299 
3300 	case FMA_EXPR:
3301 	case WIDEN_MULT_PLUS_EXPR:
3302 	case WIDEN_MULT_MINUS_EXPR:
3303 	  if (!OP_SAME (2))
3304 	    return 0;
3305 	  /* The multiplication operands are commutative.  */
3306 	  /* FALLTHRU */
3307 
3308 	case TRUTH_AND_EXPR:
3309 	case TRUTH_OR_EXPR:
3310 	case TRUTH_XOR_EXPR:
3311 	  if (OP_SAME (0) && OP_SAME (1))
3312 	    return 1;
3313 
3314 	  /* Otherwise take into account this is a commutative operation.  */
3315 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3316 				   TREE_OPERAND (arg1, 1), flags)
3317 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3318 				      TREE_OPERAND (arg1, 0), flags));
3319 
3320 	case COND_EXPR:
3321 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3322 	    return 0;
3323 	  flags &= ~OEP_ADDRESS_OF;
3324 	  return OP_SAME (0);
3325 
3326 	case BIT_INSERT_EXPR:
3327 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3328 	     of op1.  Need to check to make sure they are the same.  */
3329 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3330 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3331 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3332 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3333 	    return false;
3334 	  /* FALLTHRU */
3335 
3336 	case VEC_COND_EXPR:
3337 	case DOT_PROD_EXPR:
3338 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3339 
3340 	case MODIFY_EXPR:
3341 	case INIT_EXPR:
3342 	case COMPOUND_EXPR:
3343 	case PREDECREMENT_EXPR:
3344 	case PREINCREMENT_EXPR:
3345 	case POSTDECREMENT_EXPR:
3346 	case POSTINCREMENT_EXPR:
3347 	  if (flags & OEP_LEXICOGRAPHIC)
3348 	    return OP_SAME (0) && OP_SAME (1);
3349 	  return 0;
3350 
3351 	case CLEANUP_POINT_EXPR:
3352 	case EXPR_STMT:
3353 	  if (flags & OEP_LEXICOGRAPHIC)
3354 	    return OP_SAME (0);
3355 	  return 0;
3356 
3357 	default:
3358 	  return 0;
3359 	}
3360 
3361     case tcc_vl_exp:
3362       switch (TREE_CODE (arg0))
3363 	{
3364 	case CALL_EXPR:
3365 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3366 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3367 	    /* If the two CALL_EXPRs are not both internal or both normal
3368 	       function calls, then they are not equal.  */
3369 	    return 0;
3370 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3371 	    {
3372 	      /* If the CALL_EXPRs call different internal functions, then they
3373 		 are not equal.  */
3374 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3375 		return 0;
3376 	    }
3377 	  else
3378 	    {
3379 	      /* If the CALL_EXPRs call different functions, then they are not
3380 		 equal.  */
3381 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3382 				     flags))
3383 		return 0;
3384 	    }
3385 
3386 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3387 	  {
3388 	    unsigned int cef = call_expr_flags (arg0);
3389 	    if (flags & OEP_PURE_SAME)
3390 	      cef &= ECF_CONST | ECF_PURE;
3391 	    else
3392 	      cef &= ECF_CONST;
3393 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3394 	      return 0;
3395 	  }
3396 
3397 	  /* Now see if all the arguments are the same.  */
3398 	  {
3399 	    const_call_expr_arg_iterator iter0, iter1;
3400 	    const_tree a0, a1;
3401 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3402 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3403 		 a0 && a1;
3404 		 a0 = next_const_call_expr_arg (&iter0),
3405 		   a1 = next_const_call_expr_arg (&iter1))
3406 	      if (! operand_equal_p (a0, a1, flags))
3407 		return 0;
3408 
3409 	    /* If we get here and both argument lists are exhausted
3410 	       then the CALL_EXPRs are equal.  */
3411 	    return ! (a0 || a1);
3412 	  }
3413 	default:
3414 	  return 0;
3415 	}
3416 
3417     case tcc_declaration:
3418       /* Consider __builtin_sqrt equal to sqrt.  */
3419       return (TREE_CODE (arg0) == FUNCTION_DECL
3420 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3421 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3422 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3423 
3424     case tcc_exceptional:
3425       if (TREE_CODE (arg0) == CONSTRUCTOR)
3426 	{
3427 	  /* In GIMPLE constructors are used only to build vectors from
3428 	     elements.  Individual elements in the constructor must be
3429 	     indexed in increasing order and form an initial sequence.
3430 
3431 	     We make no effort to compare constructors in GENERIC
3432 	     (see sem_variable::equals in ipa-icf, which can do so for
3433 	      constants).  */
3434 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3435 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3436 	    return 0;
3437 
3438 	  /* Be sure that vectors constructed have the same representation.
3439 	     So far we have only tested that element precisions and modes
3440 	     match.  Vectors may be BLKmode, so also check that the numbers
3441 	     of parts match.  */
3442 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3443 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3444 	    return 0;
3445 
3446 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3447 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3448 	  unsigned int len = vec_safe_length (v0);
3449 
3450 	  if (len != vec_safe_length (v1))
3451 	    return 0;
3452 
3453 	  for (unsigned int i = 0; i < len; i++)
3454 	    {
3455 	      constructor_elt *c0 = &(*v0)[i];
3456 	      constructor_elt *c1 = &(*v1)[i];
3457 
3458 	      if (!operand_equal_p (c0->value, c1->value, flags)
3459 		  /* In GIMPLE the indexes can be either NULL or matching i.
3460 		     Double check this so we won't get false
3461 		     positives for GENERIC.  */
3462 		  || (c0->index
3463 		      && (TREE_CODE (c0->index) != INTEGER_CST
3464 			  || !compare_tree_int (c0->index, i)))
3465 		  || (c1->index
3466 		      && (TREE_CODE (c1->index) != INTEGER_CST
3467 			  || !compare_tree_int (c1->index, i))))
3468 		return 0;
3469 	    }
3470 	  return 1;
3471 	}
3472       else if (TREE_CODE (arg0) == STATEMENT_LIST
3473 	       && (flags & OEP_LEXICOGRAPHIC))
3474 	{
3475 	  /* Compare the STATEMENT_LISTs.  */
3476 	  tree_stmt_iterator tsi1, tsi2;
3477 	  tree body1 = CONST_CAST_TREE (arg0);
3478 	  tree body2 = CONST_CAST_TREE (arg1);
3479 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3480 	       tsi_next (&tsi1), tsi_next (&tsi2))
3481 	    {
3482 	      /* The lists don't have the same number of statements.  */
3483 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3484 		return 0;
3485 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3486 		return 1;
3487 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3488 				    flags & (OEP_LEXICOGRAPHIC
3489 					     | OEP_NO_HASH_CHECK)))
3490 		return 0;
3491 	    }
3492 	}
3493       return 0;
3494 
3495     case tcc_statement:
3496       switch (TREE_CODE (arg0))
3497 	{
3498 	case RETURN_EXPR:
3499 	  if (flags & OEP_LEXICOGRAPHIC)
3500 	    return OP_SAME_WITH_NULL (0);
3501 	  return 0;
3502 	case DEBUG_BEGIN_STMT:
3503 	  if (flags & OEP_LEXICOGRAPHIC)
3504 	    return 1;
3505 	  return 0;
3506 	default:
3507 	  return 0;
3508 	}
3509 
3510     default:
3511       return 0;
3512     }
3513 
3514 #undef OP_SAME
3515 #undef OP_SAME_WITH_NULL
3516 }
3517 
3518 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3519    with a different signedness or a narrower precision.  */
3520 
3521 static bool
3522 operand_equal_for_comparison_p (tree arg0, tree arg1)
3523 {
3524   if (operand_equal_p (arg0, arg1, 0))
3525     return true;
3526 
3527   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3528       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3529     return false;
3530 
3531   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3532      and see if the inner values are the same.  This removes any
3533      signedness difference, which doesn't matter here.  */
3534   tree op0 = arg0;
3535   tree op1 = arg1;
3536   STRIP_NOPS (op0);
3537   STRIP_NOPS (op1);
3538   if (operand_equal_p (op0, op1, 0))
3539     return true;
3540 
3541   /* Discard a single widening conversion from ARG1 and see if the inner
3542      value is the same as ARG0.  */
3543   if (CONVERT_EXPR_P (arg1)
3544       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3545       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3546          < TYPE_PRECISION (TREE_TYPE (arg1))
3547       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3548     return true;
3549 
3550   return false;
3551 }
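
/* Editorial sketch, not part of the original file: a source-level pair
   the predicate above is designed to match, assuming 32-bit int and
   64-bit long.  */

static int
operand_equal_for_comparison_example (int x)
{
  /* ARG0 is "x" and ARG1 is "(long) x": discarding the single widening
     conversion from ARG1 leaves the same operand, so the comparison
     can be folded as if it were "x == x".  */
  return (long) x == x;
}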
3552 
3553 /* See if ARG is an expression that is either a comparison or is performing
3554    arithmetic on comparisons.  The comparisons must only be comparing
3555    two different values, which will be stored in *CVAL1 and *CVAL2; if
3556    they are nonzero it means that some operands have already been found.
3557    No variables may be used anywhere else in the expression except in the
3558    comparisons.
3559 
3560    If this is true, return 1.  Otherwise, return zero.  */
3561 
3562 static int
3563 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3564 {
3565   enum tree_code code = TREE_CODE (arg);
3566   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3567 
3568   /* We can handle some of the tcc_expression cases here.  */
3569   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3570     tclass = tcc_unary;
3571   else if (tclass == tcc_expression
3572 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3573 	       || code == COMPOUND_EXPR))
3574     tclass = tcc_binary;
3575 
3576   switch (tclass)
3577     {
3578     case tcc_unary:
3579       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3580 
3581     case tcc_binary:
3582       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3583 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3584 
3585     case tcc_constant:
3586       return 1;
3587 
3588     case tcc_expression:
3589       if (code == COND_EXPR)
3590 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3591 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3592 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3593       return 0;
3594 
3595     case tcc_comparison:
3596       /* First see if we can handle the first operand, then the second.  For
3597 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3598 	 one side of the comparison is each of the values; test for the
3599 	 case where this isn't true by failing if the two operands
3600 	 are the same.  */
3601 
3602       if (operand_equal_p (TREE_OPERAND (arg, 0),
3603 			   TREE_OPERAND (arg, 1), 0))
3604 	return 0;
3605 
3606       if (*cval1 == 0)
3607 	*cval1 = TREE_OPERAND (arg, 0);
3608       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3609 	;
3610       else if (*cval2 == 0)
3611 	*cval2 = TREE_OPERAND (arg, 0);
3612       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3613 	;
3614       else
3615 	return 0;
3616 
3617       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3618 	;
3619       else if (*cval2 == 0)
3620 	*cval2 = TREE_OPERAND (arg, 1);
3621       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3622 	;
3623       else
3624 	return 0;
3625 
3626       return 1;
3627 
3628     default:
3629       return 0;
3630     }
3631 }
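
/* Editorial sketch, not part of the original file: an expression that
   twoval_comparison_p accepts.  */

static int
twoval_comparison_example (int a, int b)
{
  /* Every comparison below involves only the two values A and B, so
     twoval_comparison_p returns 1 with *CVAL1 = a and *CVAL2 = b.
     Something like "a < b || c == d" would fail, since a third and
     fourth value appear.  */
  return a < b || a == b;
}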
3632 
3633 /* ARG is a tree that is known to contain just arithmetic operations and
3634    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3635    any occurrence of OLD0 as an operand of a comparison and likewise for
3636    NEW1 and OLD1.  */
3637 
3638 static tree
3639 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3640 	    tree old1, tree new1)
3641 {
3642   tree type = TREE_TYPE (arg);
3643   enum tree_code code = TREE_CODE (arg);
3644   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3645 
3646   /* We can handle some of the tcc_expression cases here.  */
3647   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3648     tclass = tcc_unary;
3649   else if (tclass == tcc_expression
3650 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3651     tclass = tcc_binary;
3652 
3653   switch (tclass)
3654     {
3655     case tcc_unary:
3656       return fold_build1_loc (loc, code, type,
3657 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3658 				      old0, new0, old1, new1));
3659 
3660     case tcc_binary:
3661       return fold_build2_loc (loc, code, type,
3662 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3663 				      old0, new0, old1, new1),
3664 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3665 				      old0, new0, old1, new1));
3666 
3667     case tcc_expression:
3668       switch (code)
3669 	{
3670 	case SAVE_EXPR:
3671 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3672 			     old1, new1);
3673 
3674 	case COMPOUND_EXPR:
3675 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3676 			     old1, new1);
3677 
3678 	case COND_EXPR:
3679 	  return fold_build3_loc (loc, code, type,
3680 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3681 					  old0, new0, old1, new1),
3682 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3683 					  old0, new0, old1, new1),
3684 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3685 					  old0, new0, old1, new1));
3686 	default:
3687 	  break;
3688 	}
3689       /* Fall through - ???  */
3690 
3691     case tcc_comparison:
3692       {
3693 	tree arg0 = TREE_OPERAND (arg, 0);
3694 	tree arg1 = TREE_OPERAND (arg, 1);
3695 
3696 	/* We need to check both for exact equality and tree equality.  The
3697 	   former will be true if the operand has a side-effect.  In that
3698 	   case, we know the operand occurred exactly once.  */
3699 
3700 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3701 	  arg0 = new0;
3702 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3703 	  arg0 = new1;
3704 
3705 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3706 	  arg1 = new0;
3707 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3708 	  arg1 = new1;
3709 
3710 	return fold_build2_loc (loc, code, type, arg0, arg1);
3711       }
3712 
3713     default:
3714       return arg;
3715     }
3716 }
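
/* Editorial note, not part of the original file: at the source level,
   eval_subst on "a < b && a == b" with OLD0 = a, NEW0 = 0, OLD1 = b,
   NEW1 = 1 rebuilds "0 < 1 && 0 == 1", which the fold_build* calls
   collapse to constant false.  Callers typically use this together
   with twoval_comparison_p to test how a two-value expression behaves
   under each possible ordering of its operands.  */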
3717 
3718 /* Return a tree for the case when the result of an expression is RESULT
3719    converted to TYPE and OMITTED was previously an operand of the expression
3720    but is now not needed (e.g., we folded OMITTED * 0).
3721 
3722    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3723    the conversion of RESULT to TYPE.  */
3724 
3725 tree
3726 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3727 {
3728   tree t = fold_convert_loc (loc, type, result);
3729 
3730   /* If the resulting operand is an empty statement, just return the omitted
3731      statement cast to void.  */
3732   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3733     return build1_loc (loc, NOP_EXPR, void_type_node,
3734 		       fold_ignored_result (omitted));
3735 
3736   if (TREE_SIDE_EFFECTS (omitted))
3737     return build2_loc (loc, COMPOUND_EXPR, type,
3738 		       fold_ignored_result (omitted), t);
3739 
3740   return non_lvalue_loc (loc, t);
3741 }
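
/* Editorial sketch, not part of the original file: the effect of
   omit_one_operand_loc at the source level when the omitted operand
   has side effects.  F is a hypothetical function used only for
   illustration.  */

extern int f (void);

static int
omit_one_operand_example (void)
{
  /* Folding "f () * 0" cannot simply return 0, because f must still
     be called; the fold builds the equivalent of a comma expression.  */
  return (f (), 0);
}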
3742 
3743 /* Return a tree for the case when the result of an expression is RESULT
3744    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3745    of the expression but are now not needed.
3746 
3747    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3748    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3749    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3750    just do the conversion of RESULT to TYPE.  */
3751 
3752 tree
3753 omit_two_operands_loc (location_t loc, tree type, tree result,
3754 		       tree omitted1, tree omitted2)
3755 {
3756   tree t = fold_convert_loc (loc, type, result);
3757 
3758   if (TREE_SIDE_EFFECTS (omitted2))
3759     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3760   if (TREE_SIDE_EFFECTS (omitted1))
3761     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3762 
3763   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3764 }
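
/* Editorial note, not part of the original file: with two omitted
   operands, OMITTED1 is sequenced before OMITTED2, so a fold that
   drops both "g ()" and "h ()" in favor of RESULT builds the
   equivalent of "(g (), (h (), result))".  */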
3765 
3766 
3767 /* Return a simplified tree node for the truth-negation of ARG.  This
3768    never alters ARG itself.  We assume that ARG is an operation that
3769    returns a truth value (0 or 1).
3770 
3771    FIXME: one would think we would fold the result, but it causes
3772    problems with the dominator optimizer.  */
3773 
3774 static tree
3775 fold_truth_not_expr (location_t loc, tree arg)
3776 {
3777   tree type = TREE_TYPE (arg);
3778   enum tree_code code = TREE_CODE (arg);
3779   location_t loc1, loc2;
3780 
3781   /* If this is a comparison, we can simply invert it, except for
3782      floating-point non-equality comparisons, in which case we just
3783      enclose a TRUTH_NOT_EXPR around what we have.  */
3784 
3785   if (TREE_CODE_CLASS (code) == tcc_comparison)
3786     {
3787       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3788       if (FLOAT_TYPE_P (op_type)
3789 	  && flag_trapping_math
3790 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3791 	  && code != NE_EXPR && code != EQ_EXPR)
3792 	return NULL_TREE;
3793 
3794       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3795       if (code == ERROR_MARK)
3796 	return NULL_TREE;
3797 
3798       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3799 			     TREE_OPERAND (arg, 1));
3800       if (TREE_NO_WARNING (arg))
3801 	TREE_NO_WARNING (ret) = 1;
3802       return ret;
3803     }
3804 
3805   switch (code)
3806     {
3807     case INTEGER_CST:
3808       return constant_boolean_node (integer_zerop (arg), type);
3809 
3810     case TRUTH_AND_EXPR:
3811       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3812       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3813       return build2_loc (loc, TRUTH_OR_EXPR, type,
3814 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3815 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3816 
3817     case TRUTH_OR_EXPR:
3818       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3819       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3820       return build2_loc (loc, TRUTH_AND_EXPR, type,
3821 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3822 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3823 
3824     case TRUTH_XOR_EXPR:
3825       /* Here we can invert either operand.  We invert the first operand
3826 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3827 	 result is the XOR of the first operand with the inside of the
3828 	 negation of the second operand.  */
3829 
3830       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3831 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3832 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3833       else
3834 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3835 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3836 			   TREE_OPERAND (arg, 1));
3837 
3838     case TRUTH_ANDIF_EXPR:
3839       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3840       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3841       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3842 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3843 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3844 
3845     case TRUTH_ORIF_EXPR:
3846       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3847       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3848       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3849 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3850 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3851 
3852     case TRUTH_NOT_EXPR:
3853       return TREE_OPERAND (arg, 0);
3854 
3855     case COND_EXPR:
3856       {
3857 	tree arg1 = TREE_OPERAND (arg, 1);
3858 	tree arg2 = TREE_OPERAND (arg, 2);
3859 
3860 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3861 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3862 
3863 	/* A COND_EXPR may have a throw as one operand, which
3864 	   then has void type.  Just leave void operands
3865 	   as they are.  */
3866 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3867 			   VOID_TYPE_P (TREE_TYPE (arg1))
3868 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3869 			   VOID_TYPE_P (TREE_TYPE (arg2))
3870 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3871       }
3872 
3873     case COMPOUND_EXPR:
3874       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3875       return build2_loc (loc, COMPOUND_EXPR, type,
3876 			 TREE_OPERAND (arg, 0),
3877 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3878 
3879     case NON_LVALUE_EXPR:
3880       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3881       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3882 
3883     CASE_CONVERT:
3884       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3885 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3886 
3887       /* fall through */
3888 
3889     case FLOAT_EXPR:
3890       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3891       return build1_loc (loc, TREE_CODE (arg), type,
3892 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3893 
3894     case BIT_AND_EXPR:
3895       if (!integer_onep (TREE_OPERAND (arg, 1)))
3896 	return NULL_TREE;
3897       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3898 
3899     case SAVE_EXPR:
3900       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3901 
3902     case CLEANUP_POINT_EXPR:
3903       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3904       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3905 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3906 
3907     default:
3908       return NULL_TREE;
3909     }
3910 }
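
/* Editorial summary, not part of the original file: the classic cases
   the switch above implements, written at the source level:

     !(a && b)  ->  !a || !b   (TRUTH_AND_EXPR -> TRUTH_OR_EXPR)
     !(a || b)  ->  !a && !b   (TRUTH_OR_EXPR -> TRUTH_AND_EXPR)
     !(a < b)   ->  a >= b     (comparison inverted; under
                                -ftrapping-math, floating-point
                                comparisons other than ==, !=,
                                ordered and unordered are left alone)
     !!a        ->  a          (TRUTH_NOT_EXPR)  */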
3911 
3912 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3913    assume that ARG is an operation that returns a truth value (0 or 1
3914    for scalars, 0 or -1 for vectors).  Return the folded expression if
3915    folding is successful.  Otherwise, return NULL_TREE.  */
3916 
3917 static tree
3918 fold_invert_truthvalue (location_t loc, tree arg)
3919 {
3920   tree type = TREE_TYPE (arg);
3921   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3922 			      ? BIT_NOT_EXPR
3923 			      : TRUTH_NOT_EXPR,
3924 			 type, arg);
3925 }
3926 
3927 /* Return a simplified tree node for the truth-negation of ARG.  This
3928    never alters ARG itself.  We assume that ARG is an operation that
3929    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3930 
3931 tree
3932 invert_truthvalue_loc (location_t loc, tree arg)
3933 {
3934   if (TREE_CODE (arg) == ERROR_MARK)
3935     return arg;
3936 
3937   tree type = TREE_TYPE (arg);
3938   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3939 			       ? BIT_NOT_EXPR
3940 			       : TRUTH_NOT_EXPR,
3941 			  type, arg);
3942 }
3943 
3944 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3945    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3946    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3947    is the original memory reference used to preserve the alias set of
3948    the access.  */
3949 
3950 static tree
3951 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3952 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
3953 		    int unsignedp, int reversep)
3954 {
3955   tree result, bftype;
3956 
3957   /* Attempt not to lose the access path if possible.  */
3958   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3959     {
3960       tree ninner = TREE_OPERAND (orig_inner, 0);
3961       machine_mode nmode;
3962       poly_int64 nbitsize, nbitpos;
3963       tree noffset;
3964       int nunsignedp, nreversep, nvolatilep = 0;
3965       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3966 				       &noffset, &nmode, &nunsignedp,
3967 				       &nreversep, &nvolatilep);
3968       if (base == inner
3969 	  && noffset == NULL_TREE
3970 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
3971 	  && !reversep
3972 	  && !nreversep
3973 	  && !nvolatilep)
3974 	{
3975 	  inner = ninner;
3976 	  bitpos -= nbitpos;
3977 	}
3978     }
3979 
3980   alias_set_type iset = get_alias_set (orig_inner);
3981   if (iset == 0 && get_alias_set (inner) != iset)
3982     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3983 			 build_fold_addr_expr (inner),
3984 			 build_int_cst (ptr_type_node, 0));
3985 
3986   if (known_eq (bitpos, 0) && !reversep)
3987     {
3988       tree size = TYPE_SIZE (TREE_TYPE (inner));
3989       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3990 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3991 	  && tree_fits_shwi_p (size)
3992 	  && tree_to_shwi (size) == bitsize)
3993 	return fold_convert_loc (loc, type, inner);
3994     }
3995 
3996   bftype = type;
3997   if (TYPE_PRECISION (bftype) != bitsize
3998       || TYPE_UNSIGNED (bftype) == !unsignedp)
3999     bftype = build_nonstandard_integer_type (bitsize, 0);
4000 
4001   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4002 		       bitsize_int (bitsize), bitsize_int (bitpos));
4003   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4004 
4005   if (bftype != type)
4006     result = fold_convert_loc (loc, type, result);
4007 
4008   return result;
4009 }
4010 
4011 /* Optimize a bit-field compare.
4012 
4013    There are two cases:  First is a compare against a constant and the
4014    second is a comparison of two items where the fields are at the same
4015    bit position relative to the start of a chunk (byte, halfword, word)
4016    large enough to contain it.  In these cases we can avoid the shift
4017    implicit in bitfield extractions.
4018 
4019    For constants, we emit a compare of the shifted constant with the
4020    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4021    compared.  For two fields at the same position, we AND each with a
4022    similar mask and compare the results of the ANDs.
4023 
4024    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4025    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4026    are the left and right operands of the comparison, respectively.
4027 
4028    If the optimization described above can be done, we return the resulting
4029    tree.  Otherwise we return zero.  */
4030 
4031 static tree
4032 optimize_bit_field_compare (location_t loc, enum tree_code code,
4033 			    tree compare_type, tree lhs, tree rhs)
4034 {
4035   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4036   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4037   tree type = TREE_TYPE (lhs);
4038   tree unsigned_type;
4039   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4040   machine_mode lmode, rmode;
4041   scalar_int_mode nmode;
4042   int lunsignedp, runsignedp;
4043   int lreversep, rreversep;
4044   int lvolatilep = 0, rvolatilep = 0;
4045   tree linner, rinner = NULL_TREE;
4046   tree mask;
4047   tree offset;
4048 
4049   /* Get all the information about the extractions being done.  If the bit size
4050      is the same as the size of the underlying object, we aren't doing an
4051      extraction at all and so can do nothing.  We also don't want to
4052      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4053      then will no longer be able to replace it.  */
4054   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4055 				&lunsignedp, &lreversep, &lvolatilep);
4056   if (linner == lhs
4057       || !known_size_p (plbitsize)
4058       || !plbitsize.is_constant (&lbitsize)
4059       || !plbitpos.is_constant (&lbitpos)
4060       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4061       || offset != 0
4062       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4063       || lvolatilep)
4064     return 0;
4065 
4066   if (const_p)
4067     rreversep = lreversep;
4068   else
4069    {
4070      /* If this is not a constant, we can only do something if bit positions,
4071 	sizes, signedness and storage order are the same.  */
4072      rinner
4073        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4074 			      &runsignedp, &rreversep, &rvolatilep);
4075 
4076      if (rinner == rhs
4077 	 || maybe_ne (lbitpos, rbitpos)
4078 	 || maybe_ne (lbitsize, rbitsize)
4079 	 || lunsignedp != runsignedp
4080 	 || lreversep != rreversep
4081 	 || offset != 0
4082 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4083 	 || rvolatilep)
4084        return 0;
4085    }
4086 
4087   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4088   poly_uint64 bitstart = 0;
4089   poly_uint64 bitend = 0;
4090   if (TREE_CODE (lhs) == COMPONENT_REF)
4091     {
4092       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4093       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4094 	return 0;
4095     }
4096 
4097   /* See if we can find a mode to refer to this field.  We should be able to,
4098      but fail if we can't.  */
4099   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4100 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4101 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4102 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4103 		      BITS_PER_WORD, false, &nmode))
4104     return 0;
4105 
4106   /* Set signed and unsigned types of the precision of this mode for the
4107      shifts below.  */
4108   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4109 
4110   /* Compute the bit position and size for the new reference and our offset
4111      within it. If the new reference is the same size as the original, we
4112      won't optimize anything, so return zero.  */
4113   nbitsize = GET_MODE_BITSIZE (nmode);
4114   nbitpos = lbitpos & ~ (nbitsize - 1);
4115   lbitpos -= nbitpos;
4116   if (nbitsize == lbitsize)
4117     return 0;
4118 
4119   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4120     lbitpos = nbitsize - lbitsize - lbitpos;
4121 
4122   /* Make the mask to be used against the extracted field.  */
4123   mask = build_int_cst_type (unsigned_type, -1);
4124   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4125   mask = const_binop (RSHIFT_EXPR, mask,
4126 		      size_int (nbitsize - lbitsize - lbitpos));
4127 
4128   if (! const_p)
4129     {
4130       if (nbitpos < 0)
4131 	return 0;
4132 
4133       /* If not comparing with constant, just rework the comparison
4134 	 and return.  */
4135       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4136 				    nbitsize, nbitpos, 1, lreversep);
4137       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4138       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4139 				    nbitsize, nbitpos, 1, rreversep);
4140       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4141       return fold_build2_loc (loc, code, compare_type, t1, t2);
4142     }
4143 
4144   /* Otherwise, we are handling the constant case.  See if the constant is too
4145      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4146      this not only for its own sake, but to avoid having to test for this
4147      error case below.  If we didn't, we might generate wrong code.
4148 
4149      For unsigned fields, the constant shifted right by the field length should
4150      be all zero.  For signed fields, the high-order bits should agree with
4151      the sign bit.  */
4152 
4153   if (lunsignedp)
4154     {
4155       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4156 	{
4157 	  warning (0, "comparison is always %d due to width of bit-field",
4158 		   code == NE_EXPR);
4159 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4160 	}
4161     }
4162   else
4163     {
4164       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4165       if (tem != 0 && tem != -1)
4166 	{
4167 	  warning (0, "comparison is always %d due to width of bit-field",
4168 		   code == NE_EXPR);
4169 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4170 	}
4171     }
4172 
4173   if (nbitpos < 0)
4174     return 0;
4175 
4176   /* Single-bit compares should always be against zero.  */
4177   if (lbitsize == 1 && ! integer_zerop (rhs))
4178     {
4179       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4180       rhs = build_int_cst (type, 0);
4181     }
4182 
4183   /* Make a new bitfield reference, shift the constant over the
4184      appropriate number of bits and mask it with the computed mask
4185      (in case this was a signed field).  If we changed it, make a new one.  */
4186   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4187 			    nbitsize, nbitpos, 1, lreversep);
4188 
4189   rhs = const_binop (BIT_AND_EXPR,
4190 		     const_binop (LSHIFT_EXPR,
4191 				  fold_convert_loc (loc, unsigned_type, rhs),
4192 				  size_int (lbitpos)),
4193 		     mask);
4194 
4195   lhs = build2_loc (loc, code, compare_type,
4196 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4197   return lhs;
4198 }
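
/* Editorial sketch, not part of the original file: the constant case of
   the optimization above, on a hypothetical little-endian layout that
   places A in bits 0-2 and B in bits 3-7 of a single byte:

     struct s { unsigned a : 3; unsigned b : 5; } x;

   Under that assumption, "x.b == 7" needs no shift to extract B: load
   the containing byte, mask off B's bits, and compare with the constant
   shifted into place, i.e. the equivalent of
   "(byte & 0xf8) == (7 << 3)".  */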
4199 
4200 /* Subroutine for fold_truth_andor_1: decode a field reference.
4201 
4202    If EXP is a comparison reference, we return the innermost reference.
4203 
4204    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4205    set to the starting bit number.
4206 
4207    If the innermost field can be completely contained in a mode-sized
4208    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4209 
4210    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4211    otherwise it is not changed.
4212 
4213    *PUNSIGNEDP is set to the signedness of the field.
4214 
4215    *PREVERSEP is set to the storage order of the field.
4216 
4217    *PMASK is set to the mask used.  This is either contained in a
4218    BIT_AND_EXPR or derived from the width of the field.
4219 
4220    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4221 
4222    Return 0 if this is not a component reference or is one that we can't
4223    do anything with.  */
4224 
4225 static tree
4226 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4227 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4228 			int *punsignedp, int *preversep, int *pvolatilep,
4229 			tree *pmask, tree *pand_mask)
4230 {
4231   tree exp = *exp_;
4232   tree outer_type = 0;
4233   tree and_mask = 0;
4234   tree mask, inner, offset;
4235   tree unsigned_type;
4236   unsigned int precision;
4237 
4238   /* All the optimizations using this function assume integer fields.
4239      There are problems with FP fields since the type_for_size call
4240      below can fail for, e.g., XFmode.  */
4241   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4242     return 0;
4243 
4244   /* We are interested in the bare arrangement of bits, so strip everything
4245      that doesn't affect the machine mode.  However, record the type of the
4246      outermost expression if it may matter below.  */
4247   if (CONVERT_EXPR_P (exp)
4248       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4249     outer_type = TREE_TYPE (exp);
4250   STRIP_NOPS (exp);
4251 
4252   if (TREE_CODE (exp) == BIT_AND_EXPR)
4253     {
4254       and_mask = TREE_OPERAND (exp, 1);
4255       exp = TREE_OPERAND (exp, 0);
4256       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4257       if (TREE_CODE (and_mask) != INTEGER_CST)
4258 	return 0;
4259     }
4260 
4261   poly_int64 poly_bitsize, poly_bitpos;
4262   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4263 			       pmode, punsignedp, preversep, pvolatilep);
4264   if ((inner == exp && and_mask == 0)
4265       || !poly_bitsize.is_constant (pbitsize)
4266       || !poly_bitpos.is_constant (pbitpos)
4267       || *pbitsize < 0
4268       || offset != 0
4269       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4270       /* Reject out-of-bound accesses (PR79731).  */
4271       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4272 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4273 			       *pbitpos + *pbitsize) < 0))
4274     return 0;
4275 
4276   *exp_ = exp;
4277 
4278   /* If the number of bits in the reference is the same as the bitsize of
4279      the outer type, then the outer type gives the signedness. Otherwise
4280      (in case of a small bitfield) the signedness is unchanged.  */
4281   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4282     *punsignedp = TYPE_UNSIGNED (outer_type);
4283 
4284   /* Compute the mask to access the bitfield.  */
4285   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4286   precision = TYPE_PRECISION (unsigned_type);
4287 
4288   mask = build_int_cst_type (unsigned_type, -1);
4289 
4290   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4291   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4292 
4293   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4294   if (and_mask != 0)
4295     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4296 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4297 
4298   *pmask = mask;
4299   *pand_mask = and_mask;
4300   return inner;
4301 }
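
/* Editorial note, not part of the original file: for a source
   expression such as "x.b & 3", with the hypothetical layout from the
   sketch after optimize_bit_field_compare, decode_field_reference
   strips the BIT_AND_EXPR (recording *PAND_MASK = 3), locates the
   field (*PBITPOS = 3, *PBITSIZE = 5 under that layout), returns the
   innermost reference, and sets *PMASK to the AND of 3 with the
   five-bit field mask.  */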
4302 
4303 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4304    bit positions and the type of MASK is signed.  */
4305 
4306 static int
4307 all_ones_mask_p (const_tree mask, unsigned int size)
4308 {
4309   tree type = TREE_TYPE (mask);
4310   unsigned int precision = TYPE_PRECISION (type);
4311 
4312   /* If this function returns true when the type of the mask is
4313      UNSIGNED, then there will be errors.  In particular see
4314      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4315      any documentation paper trail as to why this is so.  But the
4316      pre-wide-int code worked with that restriction, and it has been
4317      preserved here.  */
4318   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4319     return false;
4320 
4321   return wi::mask (size, false, precision) == wi::to_wide (mask);
4322 }
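
/* Editorial sketch, not part of the original file: the same test on a
   plain 64-bit value instead of a wide_int, assuming SIZE < 64 so the
   shift is well defined.  The signed-type restriction checked above is
   omitted here.  */

static int
all_ones_mask_example (unsigned long long mask, unsigned int size)
{
  /* A mask of SIZE ones in the low-order bit positions,
     e.g. size 5 -> 0x1f.  */
  return size < 64 && mask == (1ULL << size) - 1;
}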
4323 
4324 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4325    represents the sign bit of EXP's type.  If EXP represents a sign
4326    or zero extension, also test VAL against the unextended type.
4327    The return value is the (sub)expression whose sign bit is VAL,
4328    or NULL_TREE otherwise.  */
4329 
4330 tree
4331 sign_bit_p (tree exp, const_tree val)
4332 {
4333   int width;
4334   tree t;
4335 
4336   /* Tree EXP must have an integral type.  */
4337   t = TREE_TYPE (exp);
4338   if (! INTEGRAL_TYPE_P (t))
4339     return NULL_TREE;
4340 
4341   /* Tree VAL must be an integer constant.  */
4342   if (TREE_CODE (val) != INTEGER_CST
4343       || TREE_OVERFLOW (val))
4344     return NULL_TREE;
4345 
4346   width = TYPE_PRECISION (t);
4347   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4348     return exp;
4349 
4350   /* Handle extension from a narrower type.  */
4351   if (TREE_CODE (exp) == NOP_EXPR
4352       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4353     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4354 
4355   return NULL_TREE;
4356 }
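
/* Editorial note, not part of the original file: for a 32-bit int EXP,
   sign_bit_p matches only VAL == 0x80000000 (the sign bit); for
   "(int) c" with c a signed char, the recursion on the NOP_EXPR
   operand also accepts 0x80, the sign bit of the unextended 8-bit
   type.  */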
4357 
4358 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4359    to be evaluated unconditionally.  */
4360 
4361 static int
4362 simple_operand_p (const_tree exp)
4363 {
4364   /* Strip any conversions that don't change the machine mode.  */
4365   STRIP_NOPS (exp);
4366 
4367   return (CONSTANT_CLASS_P (exp)
4368 	  || TREE_CODE (exp) == SSA_NAME
4369 	  || (DECL_P (exp)
4370 	      && ! TREE_ADDRESSABLE (exp)
4371 	      && ! TREE_THIS_VOLATILE (exp)
4372 	      && ! DECL_NONLOCAL (exp)
4373 	      /* Don't regard global variables as simple.  They may be
4374 		 allocated in ways unknown to the compiler (shared memory,
4375 		 #pragma weak, etc).  */
4376 	      && ! TREE_PUBLIC (exp)
4377 	      && ! DECL_EXTERNAL (exp)
4378 	      /* Weakrefs are not safe to read, since they can be NULL.
4379  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4380 		 have DECL_WEAK flag set.  */
4381 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4382 	      /* Loading a static variable is unduly expensive, but global
4383 		 registers aren't expensive.  */
4384 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4385 }
4386 
4387 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4388    to be evaluated unconditionally.
4389    In addition to simple_operand_p, we assume that comparisons, conversions,
4390    and logic-not operations are simple, if their operands are simple, too.  */
4391 
4392 static bool
4393 simple_operand_p_2 (tree exp)
4394 {
4395   enum tree_code code;
4396 
4397   if (TREE_SIDE_EFFECTS (exp)
4398       || tree_could_trap_p (exp))
4399     return false;
4400 
4401   while (CONVERT_EXPR_P (exp))
4402     exp = TREE_OPERAND (exp, 0);
4403 
4404   code = TREE_CODE (exp);
4405 
4406   if (TREE_CODE_CLASS (code) == tcc_comparison)
4407     return (simple_operand_p (TREE_OPERAND (exp, 0))
4408 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4409 
4410   if (code == TRUTH_NOT_EXPR)
4411       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4412 
4413   return simple_operand_p (exp);
4414 }
4415 
4416 
4417 /* The following functions are subroutines to fold_range_test and allow it to
4418    try to change a logical combination of comparisons into a range test.
4419 
4420    For example, both
4421 	X == 2 || X == 3 || X == 4 || X == 5
4422    and
4423 	X >= 2 && X <= 5
4424    are converted to
4425 	(unsigned) (X - 2) <= 3
4426 
4427    We describe each set of comparisons as being either inside or outside
4428    a range, using a variable named like IN_P, and then describe the
4429    range with a lower and upper bound.  If one of the bounds is omitted,
4430    it represents either the highest or lowest value of the type.
4431 
4432    In the comments below, we represent a range by two numbers in brackets
4433    preceded by a "+" to designate being inside that range, or a "-" to
4434    designate being outside that range, so the condition can be inverted by
4435    flipping the prefix.  An omitted bound is represented by a "-".  For
4436    example, "- [-, 10]" means being outside the range starting at the lowest
4437    possible value and ending at 10, in other words, being greater than 10.
4438    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4439    always false.
4440 
4441    We set up things so that the missing bounds are handled in a consistent
4442    manner so neither a missing bound nor "true" and "false" need to be
4443    handled using a special case.  */
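
/* Editorial sketch, not part of the original file: the canonical range
   test described above, as plain C.  */

static int
in_range_example (int x)
{
  /* "+ [2, 5]", i.e. x == 2 || x == 3 || x == 4 || x == 5.  Subtracting
     the low bound and comparing unsigned checks both bounds at once,
     because values below 2 wrap around to very large unsigned numbers.
     (The subtraction is done in unsigned arithmetic so the sketch has
     no signed-overflow corner case.)  */
  return (unsigned) x - 2u <= 3u;
}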
4444 
4445 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4446    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4447    and UPPER1_P are nonzero if the respective argument is an upper bound
4448    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4449    must be specified for a comparison.  ARG1 will be converted to ARG0's
4450    type if both are specified.  */
4451 
4452 static tree
4453 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4454 	     tree arg1, int upper1_p)
4455 {
4456   tree tem;
4457   int result;
4458   int sgn0, sgn1;
4459 
4460   /* If neither arg represents infinity, do the normal operation.
4461      Else, if not a comparison, return infinity.  Else handle the special
4462      comparison rules. Note that most of the cases below won't occur, but
4463      are handled for consistency.  */
4464 
4465   if (arg0 != 0 && arg1 != 0)
4466     {
4467       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4468 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4469       STRIP_NOPS (tem);
4470       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4471     }
4472 
4473   if (TREE_CODE_CLASS (code) != tcc_comparison)
4474     return 0;
4475 
4476   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4477      for neither.  In real maths, we cannot assume open ended ranges are
4478      the same. But, this is computer arithmetic, where numbers are finite.
4479      We can therefore represent any unbounded bound by a value Z,
4480      Z being greater than any representable number.  This permits
4481      us to treat unbounded ranges as equal.  */
4482   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4483   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4484   switch (code)
4485     {
4486     case EQ_EXPR:
4487       result = sgn0 == sgn1;
4488       break;
4489     case NE_EXPR:
4490       result = sgn0 != sgn1;
4491       break;
4492     case LT_EXPR:
4493       result = sgn0 < sgn1;
4494       break;
4495     case LE_EXPR:
4496       result = sgn0 <= sgn1;
4497       break;
4498     case GT_EXPR:
4499       result = sgn0 > sgn1;
4500       break;
4501     case GE_EXPR:
4502       result = sgn0 >= sgn1;
4503       break;
4504     default:
4505       gcc_unreachable ();
4506     }
4507 
4508   return constant_boolean_node (result, type);
4509 }
4510 
4511 /* Helper routine for make_range.  Perform one step for it, return
4512    new expression if the loop should continue or NULL_TREE if it should
4513    stop.  */
4514 
4515 tree
4516 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4517 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4518 		 bool *strict_overflow_p)
4519 {
4520   tree arg0_type = TREE_TYPE (arg0);
4521   tree n_low, n_high, low = *p_low, high = *p_high;
4522   int in_p = *p_in_p, n_in_p;
4523 
4524   switch (code)
4525     {
4526     case TRUTH_NOT_EXPR:
4527       /* We can only do something if the range is testing for zero.  */
4528       if (low == NULL_TREE || high == NULL_TREE
4529 	  || ! integer_zerop (low) || ! integer_zerop (high))
4530 	return NULL_TREE;
4531       *p_in_p = ! in_p;
4532       return arg0;
4533 
4534     case EQ_EXPR: case NE_EXPR:
4535     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4536       /* We can only do something if the range is testing for zero
4537 	 and if the second operand is an integer constant.  Note that
4538 	 saying something is "in" the range we make is done by
4539 	 complementing IN_P, since IN_P initially represents being
4540 	 not equal to zero; "out" leaves it alone.  */
4541       if (low == NULL_TREE || high == NULL_TREE
4542 	  || ! integer_zerop (low) || ! integer_zerop (high)
4543 	  || TREE_CODE (arg1) != INTEGER_CST)
4544 	return NULL_TREE;
4545 
4546       switch (code)
4547 	{
4548 	case NE_EXPR:  /* - [c, c]  */
4549 	  low = high = arg1;
4550 	  break;
4551 	case EQ_EXPR:  /* + [c, c]  */
4552 	  in_p = ! in_p, low = high = arg1;
4553 	  break;
4554 	case GT_EXPR:  /* - [-, c] */
4555 	  low = 0, high = arg1;
4556 	  break;
4557 	case GE_EXPR:  /* + [c, -] */
4558 	  in_p = ! in_p, low = arg1, high = 0;
4559 	  break;
4560 	case LT_EXPR:  /* - [c, -] */
4561 	  low = arg1, high = 0;
4562 	  break;
4563 	case LE_EXPR:  /* + [-, c] */
4564 	  in_p = ! in_p, low = 0, high = arg1;
4565 	  break;
4566 	default:
4567 	  gcc_unreachable ();
4568 	}
4569 
4570       /* If this is an unsigned comparison, we also know that EXP is
4571 	 greater than or equal to zero.  We base the range tests we make
4572 	 on that fact, so we record it here so we can parse existing
4573 	 range tests.  We test arg0_type since often the return type
4574 	 of, e.g. EQ_EXPR, is boolean.  */
4575       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4576 	{
4577 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4578 			      in_p, low, high, 1,
4579 			      build_int_cst (arg0_type, 0),
4580 			      NULL_TREE))
4581 	    return NULL_TREE;
4582 
4583 	  in_p = n_in_p, low = n_low, high = n_high;
4584 
4585 	  /* If the high bound is missing, but we have a nonzero low
4586 	     bound, reverse the range so it goes from zero to the low bound
4587 	     minus 1.  */
4588 	  if (high == 0 && low && ! integer_zerop (low))
4589 	    {
4590 	      in_p = ! in_p;
4591 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4592 				  build_int_cst (TREE_TYPE (low), 1), 0);
4593 	      low = build_int_cst (arg0_type, 0);
4594 	    }
4595 	}
4596 
4597       *p_low = low;
4598       *p_high = high;
4599       *p_in_p = in_p;
4600       return arg0;
4601 
4602     case NEGATE_EXPR:
4603       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4604 	 low and high are non-NULL, then normalize will DTRT.  */
4605       if (!TYPE_UNSIGNED (arg0_type)
4606 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4607 	{
4608 	  if (low == NULL_TREE)
4609 	    low = TYPE_MIN_VALUE (arg0_type);
4610 	  if (high == NULL_TREE)
4611 	    high = TYPE_MAX_VALUE (arg0_type);
4612 	}
4613 
4614       /* (-x) IN [a,b] -> x in [-b, -a]  */
4615       n_low = range_binop (MINUS_EXPR, exp_type,
4616 			   build_int_cst (exp_type, 0),
4617 			   0, high, 1);
4618       n_high = range_binop (MINUS_EXPR, exp_type,
4619 			    build_int_cst (exp_type, 0),
4620 			    0, low, 0);
4621       if (n_high != 0 && TREE_OVERFLOW (n_high))
4622 	return NULL_TREE;
4623       goto normalize;
4624 
4625     case BIT_NOT_EXPR:
4626       /* ~ X -> -X - 1  */
4627       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4628 			 build_int_cst (exp_type, 1));
4629 
4630     case PLUS_EXPR:
4631     case MINUS_EXPR:
4632       if (TREE_CODE (arg1) != INTEGER_CST)
4633 	return NULL_TREE;
4634 
4635       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4636 	 move a constant to the other side.  */
4637       if (!TYPE_UNSIGNED (arg0_type)
4638 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4639 	return NULL_TREE;
4640 
4641       /* If EXP is signed, any overflow in the computation is undefined,
4642 	 so we don't worry about it so long as our computations on
4643 	 the bounds don't overflow.  For unsigned, overflow is defined
4644 	 and this is exactly the right thing.  */
4645       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4646 			   arg0_type, low, 0, arg1, 0);
4647       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4648 			    arg0_type, high, 1, arg1, 0);
4649       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4650 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4651 	return NULL_TREE;
4652 
4653       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4654 	*strict_overflow_p = true;
4655 
4656       normalize:
4657 	/* Check for an unsigned range which has wrapped around the maximum
4658 	   value thus making n_high < n_low, and normalize it.  */
4659 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4660 	  {
4661 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4662 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4663 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4664 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4665 
4666 	    /* If the range is of the form +/- [ x+1, x ], we won't
4667 	       be able to normalize it.  But then, it represents the
4668 	       whole range or the empty set, so make it
4669 	       +/- [ -, - ].  */
4670 	    if (tree_int_cst_equal (n_low, low)
4671 		&& tree_int_cst_equal (n_high, high))
4672 	      low = high = 0;
4673 	    else
4674 	      in_p = ! in_p;
4675 	  }
4676 	else
4677 	  low = n_low, high = n_high;
4678 
4679 	*p_low = low;
4680 	*p_high = high;
4681 	*p_in_p = in_p;
4682 	return arg0;
4683 
4684     CASE_CONVERT:
4685     case NON_LVALUE_EXPR:
4686       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4687 	return NULL_TREE;
4688 
4689       if (! INTEGRAL_TYPE_P (arg0_type)
4690 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4691 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4692 	return NULL_TREE;
4693 
4694       n_low = low, n_high = high;
4695 
4696       if (n_low != 0)
4697 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4698 
4699       if (n_high != 0)
4700 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4701 
4702       /* If we're converting arg0 from an unsigned type to exp's
4703 	 signed type, we will be doing the comparison as unsigned.
4704 	 The tests above have already verified that LOW and HIGH
4705 	 are both positive.
4706 
4707 	 So we have to ensure that we will handle large unsigned
4708 	 values the same way that the current signed bounds treat
4709 	 negative values.  */
4710 
4711       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4712 	{
4713 	  tree high_positive;
4714 	  tree equiv_type;
4715 	  /* For fixed-point modes, we need to pass the saturating flag
4716 	     as the 2nd parameter.  */
4717 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4718 	    equiv_type
4719 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4720 						TYPE_SATURATING (arg0_type));
4721 	  else
4722 	    equiv_type
4723 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4724 
4725 	  /* A range without an upper bound is, naturally, unbounded.
4726 	     Since convert would have cropped a very large value, use
4727 	     the max value for the destination type.  */
4728 	  high_positive
4729 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4730 	      : TYPE_MAX_VALUE (arg0_type);
4731 
4732 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4733 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4734 					     fold_convert_loc (loc, arg0_type,
4735 							       high_positive),
4736 					     build_int_cst (arg0_type, 1));
4737 
4738 	  /* If the low bound is specified, "and" the range with the
4739 	     range for which the original unsigned value will be
4740 	     positive.  */
4741 	  if (low != 0)
4742 	    {
4743 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4744 				  1, fold_convert_loc (loc, arg0_type,
4745 						       integer_zero_node),
4746 				  high_positive))
4747 		return NULL_TREE;
4748 
4749 	      in_p = (n_in_p == in_p);
4750 	    }
4751 	  else
4752 	    {
4753 	      /* Otherwise, "or" the range with the range of the input
4754 		 that will be interpreted as negative.  */
4755 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4756 				  1, fold_convert_loc (loc, arg0_type,
4757 						       integer_zero_node),
4758 				  high_positive))
4759 		return NULL_TREE;
4760 
4761 	      in_p = (in_p != n_in_p);
4762 	    }
4763 	}
4764 
4765       *p_low = n_low;
4766       *p_high = n_high;
4767       *p_in_p = in_p;
4768       return arg0;
4769 
4770     default:
4771       return NULL_TREE;
4772     }
4773 }
4774 
4775 /* Given EXP, a logical expression, set the range it is testing into
4776    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4777    actually being tested.  *PLOW and *PHIGH will be made of the same
4778    type as the returned expression.  If EXP is not a comparison, we
4779    will most likely not be returning a useful value and range.  Set
4780    *STRICT_OVERFLOW_P to true if the return value is only valid
4781    because signed overflow is undefined; otherwise, do not change
4782    *STRICT_OVERFLOW_P.  */
4783 
4784 tree
4785 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4786 	    bool *strict_overflow_p)
4787 {
4788   enum tree_code code;
4789   tree arg0, arg1 = NULL_TREE;
4790   tree exp_type, nexp;
4791   int in_p;
4792   tree low, high;
4793   location_t loc = EXPR_LOCATION (exp);
4794 
4795   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4796      and see if we can refine the range.  Some of the cases below may not
4797      happen, but it doesn't seem worth worrying about this.  We keep
4798      iterating while make_range_step can refine the range, and stop
4799      when it returns NULL_TREE.  */
4800 
4801   in_p = 0;
4802   low = high = build_int_cst (TREE_TYPE (exp), 0);
4803 
4804   while (1)
4805     {
4806       code = TREE_CODE (exp);
4807       exp_type = TREE_TYPE (exp);
4808       arg0 = NULL_TREE;
4809 
4810       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4811 	{
4812 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4813 	    arg0 = TREE_OPERAND (exp, 0);
4814 	  if (TREE_CODE_CLASS (code) == tcc_binary
4815 	      || TREE_CODE_CLASS (code) == tcc_comparison
4816 	      || (TREE_CODE_CLASS (code) == tcc_expression
4817 		  && TREE_OPERAND_LENGTH (exp) > 1))
4818 	    arg1 = TREE_OPERAND (exp, 1);
4819 	}
4820       if (arg0 == NULL_TREE)
4821 	break;
4822 
4823       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4824 			      &high, &in_p, strict_overflow_p);
4825       if (nexp == NULL_TREE)
4826 	break;
4827       exp = nexp;
4828     }
4829 
4830   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4831   if (TREE_CODE (exp) == INTEGER_CST)
4832     {
4833       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4834 						 exp, 0, low, 0))
4835 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4836 						    exp, 1, high, 1)));
4837       low = high = 0;
4838       exp = 0;
4839     }
4840 
4841   *pin_p = in_p, *plow = low, *phigh = high;
4842   return exp;
4843 }
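
/* Editorial walk-through, not part of the original file: for signed
   int x, EXP = "x + 1 > 5" first becomes "- [-, 5]" on "x + 1" (the
   GT_EXPR step), and the PLUS_EXPR step then moves the constant,
   giving "- [-, 4]" on "x".  That move is valid only because signed
   overflow is undefined, so *STRICT_OVERFLOW_P is set.  The function
   returns "x" with *PIN_P = 0, *PLOW = NULL_TREE and *PHIGH = 4.  */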
4844 
4845 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4846    a bitwise check i.e. when
4847      LOW  == 0xXX...X00...0
4848      HIGH == 0xXX...X11...1
4849    Return corresponding mask in MASK and stem in VALUE.  */
4850 
4851 static bool
4852 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4853 		  tree *value)
4854 {
4855   if (TREE_CODE (low) != INTEGER_CST
4856       || TREE_CODE (high) != INTEGER_CST)
4857     return false;
4858 
4859   unsigned prec = TYPE_PRECISION (type);
4860   wide_int lo = wi::to_wide (low, prec);
4861   wide_int hi = wi::to_wide (high, prec);
4862 
4863   wide_int end_mask = lo ^ hi;
4864   if ((end_mask & (end_mask + 1)) != 0
4865       || (lo & end_mask) != 0)
4866     return false;
4867 
4868   wide_int stem_mask = ~end_mask;
4869   wide_int stem = lo & stem_mask;
4870   if (stem != (hi & stem_mask))
4871     return false;
4872 
4873   *mask = wide_int_to_tree (type, stem_mask);
4874   *value = wide_int_to_tree (type, stem);
4875 
4876   return true;
4877 }
4878 
4879 /* Helper routine for build_range_check and match.pd.  Return the type to
4880    perform the check or NULL if it shouldn't be optimized.  */
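
/* For instance (assuming the usual 32-bit layout), range checks over a
   signed 32-bit int are done in unsigned int, where (unsigned) INT_MAX + 1
   == 0x80000000 == (unsigned) INT_MIN, so crossing the signed maximum
   wraps around as required.  */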
4881 
4882 tree
4883 range_check_type (tree etype)
4884 {
4885   /* First make sure that arithmetic in this type is valid, then make sure
4886      that it wraps around.  */
4887   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4888     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4889 					    TYPE_UNSIGNED (etype));
4890 
4891   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4892     {
4893       tree utype, minv, maxv;
4894 
4895       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4896 	 for the type in question, as we rely on this here.  */
4897       utype = unsigned_type_for (etype);
4898       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4899       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4900 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4901       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4902 
4903       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4904 				      minv, 1, maxv, 1)))
4905 	etype = utype;
4906       else
4907 	return NULL_TREE;
4908     }
4909   return etype;
4910 }
4911 
4912 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4913    type, TYPE, return an expression to test if EXP is in (or out of, depending
4914    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4915 
4916 tree
4917 build_range_check (location_t loc, tree type, tree exp, int in_p,
4918 		   tree low, tree high)
4919 {
4920   tree etype = TREE_TYPE (exp), mask, value;
4921 
4922   /* Disable this optimization for function pointer expressions
4923      on targets that require function pointer canonicalization.  */
4924   if (targetm.have_canonicalize_funcptr_for_compare ()
4925       && POINTER_TYPE_P (etype)
4926       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4927     return NULL_TREE;
4928 
4929   if (! in_p)
4930     {
4931       value = build_range_check (loc, type, exp, 1, low, high);
4932       if (value != 0)
4933         return invert_truthvalue_loc (loc, value);
4934 
4935       return 0;
4936     }
4937 
4938   if (low == 0 && high == 0)
4939     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4940 
4941   if (low == 0)
4942     return fold_build2_loc (loc, LE_EXPR, type, exp,
4943 			    fold_convert_loc (loc, etype, high));
4944 
4945   if (high == 0)
4946     return fold_build2_loc (loc, GE_EXPR, type, exp,
4947 			    fold_convert_loc (loc, etype, low));
4948 
4949   if (operand_equal_p (low, high, 0))
4950     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4951 			    fold_convert_loc (loc, etype, low));
4952 
4953   if (TREE_CODE (exp) == BIT_AND_EXPR
4954       && maskable_range_p (low, high, etype, &mask, &value))
4955     return fold_build2_loc (loc, EQ_EXPR, type,
4956 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
4957 					     exp, mask),
4958 			    value);
4959 
4960   if (integer_zerop (low))
4961     {
4962       if (! TYPE_UNSIGNED (etype))
4963 	{
4964 	  etype = unsigned_type_for (etype);
4965 	  high = fold_convert_loc (loc, etype, high);
4966 	  exp = fold_convert_loc (loc, etype, exp);
4967 	}
4968       return build_range_check (loc, type, exp, 1, 0, high);
4969     }
4970 
4971   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4972   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4973     {
4974       int prec = TYPE_PRECISION (etype);
4975 
4976       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4977 	{
4978 	  if (TYPE_UNSIGNED (etype))
4979 	    {
4980 	      tree signed_etype = signed_type_for (etype);
4981 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4982 		etype
4983 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4984 	      else
4985 		etype = signed_etype;
4986 	      exp = fold_convert_loc (loc, etype, exp);
4987 	    }
4988 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4989 				  build_int_cst (etype, 0));
4990 	}
4991     }
4992 
4993   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4994      This requires wrap-around arithmetic for the type of the expression.  */
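  /* For example (assuming 32-bit int, purely for illustration):
     -5 <= c && c <= 5 becomes (unsigned) c + 5 <= 10 once ETYPE is the
     unsigned variant; out-of-range values wrap to large unsigned
     numbers and fail the test.  */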
4995   etype = range_check_type (etype);
4996   if (etype == NULL_TREE)
4997     return NULL_TREE;
4998 
4999   if (POINTER_TYPE_P (etype))
5000     etype = unsigned_type_for (etype);
5001 
5002   high = fold_convert_loc (loc, etype, high);
5003   low = fold_convert_loc (loc, etype, low);
5004   exp = fold_convert_loc (loc, etype, exp);
5005 
5006   value = const_binop (MINUS_EXPR, high, low);
5007 
5008   if (value != 0 && !TREE_OVERFLOW (value))
5009     return build_range_check (loc, type,
5010 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5011 			      1, build_int_cst (etype, 0), value);
5012 
5013   return 0;
5014 }
5015 
5016 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5017 
5018 static tree
5019 range_predecessor (tree val)
5020 {
5021   tree type = TREE_TYPE (val);
5022 
5023   if (INTEGRAL_TYPE_P (type)
5024       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5025     return 0;
5026   else
5027     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5028 			build_int_cst (TREE_TYPE (val), 1), 0);
5029 }
5030 
5031 /* Return the successor of VAL in its type, handling the infinite case.  */
5032 
5033 static tree
5034 range_successor (tree val)
5035 {
5036   tree type = TREE_TYPE (val);
5037 
5038   if (INTEGRAL_TYPE_P (type)
5039       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5040     return 0;
5041   else
5042     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5043 			build_int_cst (TREE_TYPE (val), 1), 0);
5044 }
5045 
5046 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5047    can, 0 if we can't.  Set the output range into the specified parameters.  */
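
/* For example (values illustrative): merging + [0, 9] and + [5, 20]
   yields the intersection + [5, 9], while merging - [0, 9] and
   - [5, 20] yields - [0, 20], the two excluded ranges overlapping.  */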
5048 
5049 bool
5050 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5051 	      tree high0, int in1_p, tree low1, tree high1)
5052 {
5053   int no_overlap;
5054   int subset;
5055   int temp;
5056   tree tem;
5057   int in_p;
5058   tree low, high;
5059   int lowequal = ((low0 == 0 && low1 == 0)
5060 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5061 						low0, 0, low1, 0)));
5062   int highequal = ((high0 == 0 && high1 == 0)
5063 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5064 						 high0, 1, high1, 1)));
5065 
5066   /* Make range 0 be the range that starts first, or ends last if they
5067      start at the same value.  Swap them if that is not already the case.  */
5068   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5069 				 low0, 0, low1, 0))
5070       || (lowequal
5071 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5072 					high1, 1, high0, 1))))
5073     {
5074       temp = in0_p, in0_p = in1_p, in1_p = temp;
5075       tem = low0, low0 = low1, low1 = tem;
5076       tem = high0, high0 = high1, high1 = tem;
5077     }
5078 
5079   /* Now flag two cases, whether the ranges are disjoint or whether the
5080      second range is totally subsumed in the first.  Note that the tests
5081      below are simplified by the ones above.  */
5082   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5083 					  high0, 1, low1, 0));
5084   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5085 				      high1, 1, high0, 1));
5086 
5087   /* We now have four cases, depending on whether we are including or
5088      excluding the two ranges.  */
5089   if (in0_p && in1_p)
5090     {
5091       /* If they don't overlap, the result is false.  If the second range
5092 	 is a subset it is the result.  Otherwise, the range is from the start
5093 	 of the second to the end of the first.  */
5094       if (no_overlap)
5095 	in_p = 0, low = high = 0;
5096       else if (subset)
5097 	in_p = 1, low = low1, high = high1;
5098       else
5099 	in_p = 1, low = low1, high = high0;
5100     }
5101 
5102   else if (in0_p && ! in1_p)
5103     {
5104       /* If they don't overlap, the result is the first range.  If they are
5105 	 equal, the result is false.  If the second range is a subset of the
5106 	 first, and the ranges begin at the same place, we go from just after
5107 	 the end of the second range to the end of the first.  If the second
5108 	 range is not a subset of the first, or if it is a subset and both
5109 	 ranges end at the same place, the range starts at the start of the
5110 	 first range and ends just before the second range.
5111 	 Otherwise, we can't describe this as a single range.  */
5112       if (no_overlap)
5113 	in_p = 1, low = low0, high = high0;
5114       else if (lowequal && highequal)
5115 	in_p = 0, low = high = 0;
5116       else if (subset && lowequal)
5117 	{
5118 	  low = range_successor (high1);
5119 	  high = high0;
5120 	  in_p = 1;
5121 	  if (low == 0)
5122 	    {
5123 	      /* We are in the weird situation where high0 > high1 but
5124 		 high1 has no successor.  Punt.  */
5125 	      return 0;
5126 	    }
5127 	}
5128       else if (! subset || highequal)
5129 	{
5130 	  low = low0;
5131 	  high = range_predecessor (low1);
5132 	  in_p = 1;
5133 	  if (high == 0)
5134 	    {
5135 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5136 	      return 0;
5137 	    }
5138 	}
5139       else
5140 	return 0;
5141     }
5142 
5143   else if (! in0_p && in1_p)
5144     {
5145       /* If they don't overlap, the result is the second range.  If the second
5146 	 is a subset of the first, the result is false.  Otherwise,
5147 	 the range starts just after the first range and ends at the
5148 	 end of the second.  */
5149       if (no_overlap)
5150 	in_p = 1, low = low1, high = high1;
5151       else if (subset || highequal)
5152 	in_p = 0, low = high = 0;
5153       else
5154 	{
5155 	  low = range_successor (high0);
5156 	  high = high1;
5157 	  in_p = 1;
5158 	  if (low == 0)
5159 	    {
5160 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5161 	      return 0;
5162 	    }
5163 	}
5164     }
5165 
5166   else
5167     {
5168       /* The case where we are excluding both ranges.  Here the complex case
5169 	 is if they don't overlap.  In that case, the only time we have a
5170 	 range is if they are adjacent.  If the second is a subset of the
5171 	 first, the result is the first.  Otherwise, the range to exclude
5172 	 starts at the beginning of the first range and ends at the end of the
5173 	 second.  */
5174       if (no_overlap)
5175 	{
5176 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5177 					 range_successor (high0),
5178 					 1, low1, 0)))
5179 	    in_p = 0, low = low0, high = high1;
5180 	  else
5181 	    {
5182 	      /* Canonicalize - [min, x] into - [-, x].  */
5183 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5184 		switch (TREE_CODE (TREE_TYPE (low0)))
5185 		  {
5186 		  case ENUMERAL_TYPE:
5187 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5188 				  GET_MODE_BITSIZE
5189 				    (TYPE_MODE (TREE_TYPE (low0)))))
5190 		      break;
5191 		    /* FALLTHROUGH */
5192 		  case INTEGER_TYPE:
5193 		    if (tree_int_cst_equal (low0,
5194 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5195 		      low0 = 0;
5196 		    break;
5197 		  case POINTER_TYPE:
5198 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5199 			&& integer_zerop (low0))
5200 		      low0 = 0;
5201 		    break;
5202 		  default:
5203 		    break;
5204 		  }
5205 
5206 	      /* Canonicalize - [x, max] into - [x, -].  */
5207 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5208 		switch (TREE_CODE (TREE_TYPE (high1)))
5209 		  {
5210 		  case ENUMERAL_TYPE:
5211 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5212 				  GET_MODE_BITSIZE
5213 				    (TYPE_MODE (TREE_TYPE (high1)))))
5214 		      break;
5215 		    /* FALLTHROUGH */
5216 		  case INTEGER_TYPE:
5217 		    if (tree_int_cst_equal (high1,
5218 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5219 		      high1 = 0;
5220 		    break;
5221 		  case POINTER_TYPE:
5222 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5223 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5224 						       high1, 1,
5225 						       build_int_cst (TREE_TYPE (high1), 1),
5226 						       1)))
5227 		      high1 = 0;
5228 		    break;
5229 		  default:
5230 		    break;
5231 		  }
5232 
5233 	      /* The ranges might also be adjacent across the maximum and
5234 	         minimum values of the given type.  For
5235 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y,
5236 	         return + [x + 1, y - 1].  */
5237 	      if (low0 == 0 && high1 == 0)
5238 	        {
5239 		  low = range_successor (high0);
5240 		  high = range_predecessor (low1);
5241 		  if (low == 0 || high == 0)
5242 		    return 0;
5243 
5244 		  in_p = 1;
5245 		}
5246 	      else
5247 		return 0;
5248 	    }
5249 	}
5250       else if (subset)
5251 	in_p = 0, low = low0, high = high0;
5252       else
5253 	in_p = 0, low = low0, high = high1;
5254     }
5255 
5256   *pin_p = in_p, *plow = low, *phigh = high;
5257   return 1;
5258 }
5259 
5260 
5261 /* Subroutine of fold, looking inside expressions of the form
5262    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5263    of the COND_EXPR.  This function is being used also to optimize
5264    A op B ? C : A, by reversing the comparison first.
5265 
5266    Return a folded expression whose code is not a COND_EXPR
5267    anymore, or NULL_TREE if no folding opportunity is found.  */
5268 
5269 static tree
5270 fold_cond_expr_with_comparison (location_t loc, tree type,
5271 				tree arg0, tree arg1, tree arg2)
5272 {
5273   enum tree_code comp_code = TREE_CODE (arg0);
5274   tree arg00 = TREE_OPERAND (arg0, 0);
5275   tree arg01 = TREE_OPERAND (arg0, 1);
5276   tree arg1_type = TREE_TYPE (arg1);
5277   tree tem;
5278 
5279   STRIP_NOPS (arg1);
5280   STRIP_NOPS (arg2);
5281 
5282   /* If we have A op 0 ? A : -A, consider applying the following
5283      transformations:
5284 
5285      A == 0? A : -A    same as -A
5286      A != 0? A : -A    same as A
5287      A >= 0? A : -A    same as abs (A)
5288      A > 0?  A : -A    same as abs (A)
5289      A <= 0? A : -A    same as -abs (A)
5290      A < 0?  A : -A    same as -abs (A)
5291 
5292      None of these transformations work for modes with signed
5293      zeros.  If A is +/-0, the first two transformations will
5294      change the sign of the result (from +0 to -0, or vice
5295      versa).  The last four will fix the sign of the result,
5296      even though the original expressions could be positive or
5297      negative, depending on the sign of A.
5298 
5299      Note that all these transformations are correct if A is
5300      NaN, since the two alternatives (A and -A) are also NaNs.  */
5301   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5302       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5303 	  ? real_zerop (arg01)
5304 	  : integer_zerop (arg01))
5305       && ((TREE_CODE (arg2) == NEGATE_EXPR
5306 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5307 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5308 	        have already been folded to Y-X, check for that. */
5309 	  || (TREE_CODE (arg1) == MINUS_EXPR
5310 	      && TREE_CODE (arg2) == MINUS_EXPR
5311 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5312 				  TREE_OPERAND (arg2, 1), 0)
5313 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5314 				  TREE_OPERAND (arg2, 0), 0))))
5315     switch (comp_code)
5316       {
5317       case EQ_EXPR:
5318       case UNEQ_EXPR:
5319 	tem = fold_convert_loc (loc, arg1_type, arg1);
5320 	return fold_convert_loc (loc, type, negate_expr (tem));
5321       case NE_EXPR:
5322       case LTGT_EXPR:
5323 	return fold_convert_loc (loc, type, arg1);
5324       case UNGE_EXPR:
5325       case UNGT_EXPR:
5326 	if (flag_trapping_math)
5327 	  break;
5328 	/* Fall through.  */
5329       case GE_EXPR:
5330       case GT_EXPR:
5331 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5332 	  break;
5333 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5334 	return fold_convert_loc (loc, type, tem);
5335       case UNLE_EXPR:
5336       case UNLT_EXPR:
5337 	if (flag_trapping_math)
5338 	  break;
5339 	/* FALLTHROUGH */
5340       case LE_EXPR:
5341       case LT_EXPR:
5342 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5343 	  break;
5344 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5345 	return negate_expr (fold_convert_loc (loc, type, tem));
5346       default:
5347 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5348 	break;
5349       }
5350 
5351   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5352      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5353      both transformations are correct when A is NaN: A != 0
5354      is then true, and A == 0 is false.  */
5355 
5356   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5357       && integer_zerop (arg01) && integer_zerop (arg2))
5358     {
5359       if (comp_code == NE_EXPR)
5360 	return fold_convert_loc (loc, type, arg1);
5361       else if (comp_code == EQ_EXPR)
5362 	return build_zero_cst (type);
5363     }
5364 
5365   /* Try some transformations of A op B ? A : B.
5366 
5367      A == B? A : B    same as B
5368      A != B? A : B    same as A
5369      A >= B? A : B    same as max (A, B)
5370      A > B?  A : B    same as max (B, A)
5371      A <= B? A : B    same as min (A, B)
5372      A < B?  A : B    same as min (B, A)
5373 
5374      As above, these transformations don't work in the presence
5375      of signed zeros.  For example, if A and B are zeros of
5376      opposite sign, the first two transformations will change
5377      the sign of the result.  In the last four, the original
5378      expressions give different results for (A=+0, B=-0) and
5379      (A=-0, B=+0), but the transformed expressions do not.
5380 
5381      The first two transformations are correct if either A or B
5382      is a NaN.  In the first transformation, the condition will
5383      be false, and B will indeed be chosen.  In the case of the
5384      second transformation, the condition A != B will be true,
5385      and A will be chosen.
5386 
5387      The conversions to max() and min() are not correct if B is
5388      a number and A is not.  The conditions in the original
5389      expressions will be false, so all four give B.  The min()
5390      and max() versions would give a NaN instead.  */
5391   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5392       && operand_equal_for_comparison_p (arg01, arg2)
5393       /* Avoid these transformations if the COND_EXPR may be used
5394 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5395       && (in_gimple_form
5396 	  || VECTOR_TYPE_P (type)
5397 	  || (! lang_GNU_CXX ()
5398 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5399 	  || ! maybe_lvalue_p (arg1)
5400 	  || ! maybe_lvalue_p (arg2)))
5401     {
5402       tree comp_op0 = arg00;
5403       tree comp_op1 = arg01;
5404       tree comp_type = TREE_TYPE (comp_op0);
5405 
5406       switch (comp_code)
5407 	{
5408 	case EQ_EXPR:
5409 	  return fold_convert_loc (loc, type, arg2);
5410 	case NE_EXPR:
5411 	  return fold_convert_loc (loc, type, arg1);
5412 	case LE_EXPR:
5413 	case LT_EXPR:
5414 	case UNLE_EXPR:
5415 	case UNLT_EXPR:
5416 	  /* In C++ a ?: expression can be an lvalue, so put the
5417 	     operand which will be used if they are equal first
5418 	     so that we can convert this back to the
5419 	     corresponding COND_EXPR.  */
5420 	  if (!HONOR_NANS (arg1))
5421 	    {
5422 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5423 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5424 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5425 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5426 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5427 				   comp_op1, comp_op0);
5428 	      return fold_convert_loc (loc, type, tem);
5429 	    }
5430 	  break;
5431 	case GE_EXPR:
5432 	case GT_EXPR:
5433 	case UNGE_EXPR:
5434 	case UNGT_EXPR:
5435 	  if (!HONOR_NANS (arg1))
5436 	    {
5437 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5438 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5439 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5440 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5441 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5442 				   comp_op1, comp_op0);
5443 	      return fold_convert_loc (loc, type, tem);
5444 	    }
5445 	  break;
5446 	case UNEQ_EXPR:
5447 	  if (!HONOR_NANS (arg1))
5448 	    return fold_convert_loc (loc, type, arg2);
5449 	  break;
5450 	case LTGT_EXPR:
5451 	  if (!HONOR_NANS (arg1))
5452 	    return fold_convert_loc (loc, type, arg1);
5453 	  break;
5454 	default:
5455 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5456 	  break;
5457 	}
5458     }
5459 
5460   return NULL_TREE;
5461 }
5462 
5463 
5464 
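/* Whether it pays to replace a short-circuit TRUTH_ANDIF/TRUTH_ORIF
   with a non-short-circuit TRUTH_AND/TRUTH_OR.  Unless the target
   overrides this, assume it does when branches are expensive, i.e.
   when the branch cost is at least 2.  */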
5465 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5466 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5467   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5468 		false) >= 2)
5469 #endif
5470 
5471 /* EXP is some logical combination of boolean tests.  See if we can
5472    merge it into some range test.  Return the new tree if so.  */
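
/* For instance, ch >= '0' && ch <= '9' can typically be merged into
   the single test (unsigned) (ch - '0') <= 9 via merge_ranges and
   build_range_check; the exact shape of the result depends on the
   types involved.  */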
5473 
5474 static tree
5475 fold_range_test (location_t loc, enum tree_code code, tree type,
5476 		 tree op0, tree op1)
5477 {
5478   int or_op = (code == TRUTH_ORIF_EXPR
5479 	       || code == TRUTH_OR_EXPR);
5480   int in0_p, in1_p, in_p;
5481   tree low0, low1, low, high0, high1, high;
5482   bool strict_overflow_p = false;
5483   tree tem, lhs, rhs;
5484   const char * const warnmsg = G_("assuming signed overflow does not occur "
5485 				  "when simplifying range test");
5486 
5487   if (!INTEGRAL_TYPE_P (type))
5488     return 0;
5489 
5490   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5491   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5492 
5493   /* If this is an OR operation, invert both sides; we will invert
5494      again at the end.  */
5495   if (or_op)
5496     in0_p = ! in0_p, in1_p = ! in1_p;
5497 
5498   /* If both expressions are the same, if we can merge the ranges, and we
5499      can build the range test, return it or its inversion.  If one of the
5500      ranges is always true or always false, consider it to be the same
5501      expression as the other.  */
5502   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5503       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5504 		       in1_p, low1, high1)
5505       && (tem = (build_range_check (loc, type,
5506 				    lhs != 0 ? lhs
5507 				    : rhs != 0 ? rhs : integer_zero_node,
5508 				    in_p, low, high))) != 0)
5509     {
5510       if (strict_overflow_p)
5511 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5512       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5513     }
5514 
5515   /* On machines where the branch cost is expensive, if this is a
5516      short-circuited branch and the underlying object on both sides
5517      is the same, make a non-short-circuit operation.  */
5518   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5519 	   && !flag_sanitize_coverage
5520 	   && lhs != 0 && rhs != 0
5521 	   && (code == TRUTH_ANDIF_EXPR
5522 	       || code == TRUTH_ORIF_EXPR)
5523 	   && operand_equal_p (lhs, rhs, 0))
5524     {
5525       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5526 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5527 	 which cases we can't do this.  */
5528       if (simple_operand_p (lhs))
5529 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5530 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5531 			   type, op0, op1);
5532 
5533       else if (!lang_hooks.decls.global_bindings_p ()
5534 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5535 	{
5536 	  tree common = save_expr (lhs);
5537 
5538 	  if ((lhs = build_range_check (loc, type, common,
5539 					or_op ? ! in0_p : in0_p,
5540 					low0, high0)) != 0
5541 	      && (rhs = build_range_check (loc, type, common,
5542 					   or_op ? ! in1_p : in1_p,
5543 					   low1, high1)) != 0)
5544 	    {
5545 	      if (strict_overflow_p)
5546 		fold_overflow_warning (warnmsg,
5547 				       WARN_STRICT_OVERFLOW_COMPARISON);
5548 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5549 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5550 				 type, lhs, rhs);
5551 	    }
5552 	}
5553     }
5554 
5555   return 0;
5556 }
5557 
5558 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5559    P-bit value.  Arrange things so the extra bits will be set to zero if and
5560    only if C is sign-extended to its full width.  If MASK is nonzero,
5561    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
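
/* Illustrative example, with an 8-bit type and P == 4 (and MASK null):
   the sign-extended constant 0xfa XORs with TEMP == 0xf0 to give 0x0a,
   extra bits clear, while the zero-extended 0x0a gives 0xfa, extra
   bits set.  */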
5562 
5563 static tree
5564 unextend (tree c, int p, int unsignedp, tree mask)
5565 {
5566   tree type = TREE_TYPE (c);
5567   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5568   tree temp;
5569 
5570   if (p == modesize || unsignedp)
5571     return c;
5572 
5573   /* We work by getting just the sign bit into the low-order bit, then
5574      into the high-order bit, then sign-extend.  We then XOR that value
5575      with C.  */
5576   temp = build_int_cst (TREE_TYPE (c),
5577 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5578 
5579   /* We must use a signed type in order to get an arithmetic right shift.
5580      However, we must also avoid introducing accidental overflows, so that
5581      a subsequent call to integer_zerop will work.  Hence we must
5582      do the type conversion here.  At this point, the constant is either
5583      zero or one, and the conversion to a signed type can never overflow.
5584      We could get an overflow if this conversion is done anywhere else.  */
5585   if (TYPE_UNSIGNED (type))
5586     temp = fold_convert (signed_type_for (type), temp);
5587 
5588   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5589   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5590   if (mask != 0)
5591     temp = const_binop (BIT_AND_EXPR, temp,
5592 			fold_convert (TREE_TYPE (c), mask));
5593   /* If necessary, convert the type back to match the type of C.  */
5594   if (TYPE_UNSIGNED (type))
5595     temp = fold_convert (type, temp);
5596 
5597   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5598 }
5599 
5600 /* For an expression that has the form
5601      (A && B) || ~B
5602    or
5603      (A || B) && ~B,
5604    we can drop one of the inner expressions and simplify to
5605      A || ~B
5606    or
5607      A && ~B
5608    LOC is the location of the resulting expression.  OP is the inner
5609    logical operation, i.e. the left-hand side in the examples above, while
5610    CMPOP is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5611    removing a condition that guards another, as in
5612      (A != NULL && A->...) || A == NULL
5613    which we must not transform.  If RHS_ONLY is true, only eliminate the
5614    right-most operand of the inner logical operation.  */
5615 
5616 static tree
5617 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5618 				 bool rhs_only)
5619 {
5620   tree type = TREE_TYPE (cmpop);
5621   enum tree_code code = TREE_CODE (cmpop);
5622   enum tree_code truthop_code = TREE_CODE (op);
5623   tree lhs = TREE_OPERAND (op, 0);
5624   tree rhs = TREE_OPERAND (op, 1);
5625   tree orig_lhs = lhs, orig_rhs = rhs;
5626   enum tree_code rhs_code = TREE_CODE (rhs);
5627   enum tree_code lhs_code = TREE_CODE (lhs);
5628   enum tree_code inv_code;
5629 
5630   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5631     return NULL_TREE;
5632 
5633   if (TREE_CODE_CLASS (code) != tcc_comparison)
5634     return NULL_TREE;
5635 
5636   if (rhs_code == truthop_code)
5637     {
5638       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5639       if (newrhs != NULL_TREE)
5640 	{
5641 	  rhs = newrhs;
5642 	  rhs_code = TREE_CODE (rhs);
5643 	}
5644     }
5645   if (lhs_code == truthop_code && !rhs_only)
5646     {
5647       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5648       if (newlhs != NULL_TREE)
5649 	{
5650 	  lhs = newlhs;
5651 	  lhs_code = TREE_CODE (lhs);
5652 	}
5653     }
5654 
5655   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5656   if (inv_code == rhs_code
5657       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5658       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5659     return lhs;
5660   if (!rhs_only && inv_code == lhs_code
5661       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5662       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5663     return rhs;
5664   if (rhs != orig_rhs || lhs != orig_lhs)
5665     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5666 			    lhs, rhs);
5667   return NULL_TREE;
5668 }
5669 
5670 /* Find ways of folding logical expressions of LHS and RHS:
5671    Try to merge two comparisons to the same innermost item.
5672    Look for range tests like "ch >= '0' && ch <= '9'".
5673    Look for combinations of simple terms on machines with expensive branches
5674    and evaluate the RHS unconditionally.
5675 
5676    For example, if we have p->a == 2 && p->b == 4 and we can make an
5677    object large enough to span both A and B, we can do this with a comparison
5678    against the object ANDed with the a mask.
5679 
5680    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5681    operations to do this with one comparison.
5682 
5683    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5684    function and the one above.
5685 
5686    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5687    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5688 
5689    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5690    two operands.
5691 
5692    We return the simplified tree or 0 if no optimization is possible.  */
5693 
5694 static tree
5695 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5696 		    tree lhs, tree rhs)
5697 {
5698   /* If this is the "or" of two comparisons, we can do something if
5699      the comparisons are NE_EXPR.  If this is the "and", we can do something
5700      if the comparisons are EQ_EXPR.  I.e.,
5701 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5702 
5703      WANTED_CODE is that comparison code (EQ_EXPR or NE_EXPR).  For single
5704      bit fields, we can convert EQ_EXPR to NE_EXPR so we need not reject
5705      the "wrong" comparison for one-bit fields.  */
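
  /* A one-bit example (illustrative): for one-bit fields X and Y,
     X == 1 && Y != 0 still qualifies for the "and" case, since Y != 0
     is converted to the equivalent test Y == 1 below.  */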
5706 
5707   enum tree_code wanted_code;
5708   enum tree_code lcode, rcode;
5709   tree ll_arg, lr_arg, rl_arg, rr_arg;
5710   tree ll_inner, lr_inner, rl_inner, rr_inner;
5711   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5712   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5713   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5714   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5715   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5716   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5717   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5718   scalar_int_mode lnmode, rnmode;
5719   tree ll_mask, lr_mask, rl_mask, rr_mask;
5720   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5721   tree l_const, r_const;
5722   tree lntype, rntype, result;
5723   HOST_WIDE_INT first_bit, end_bit;
5724   int volatilep;
5725 
5726   /* Start by getting the comparison codes.  Fail if anything is volatile.
5727      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5728      it were surrounded with a NE_EXPR.  */
5729 
5730   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5731     return 0;
5732 
5733   lcode = TREE_CODE (lhs);
5734   rcode = TREE_CODE (rhs);
5735 
5736   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5737     {
5738       lhs = build2 (NE_EXPR, truth_type, lhs,
5739 		    build_int_cst (TREE_TYPE (lhs), 0));
5740       lcode = NE_EXPR;
5741     }
5742 
5743   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5744     {
5745       rhs = build2 (NE_EXPR, truth_type, rhs,
5746 		    build_int_cst (TREE_TYPE (rhs), 0));
5747       rcode = NE_EXPR;
5748     }
5749 
5750   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5751       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5752     return 0;
5753 
5754   ll_arg = TREE_OPERAND (lhs, 0);
5755   lr_arg = TREE_OPERAND (lhs, 1);
5756   rl_arg = TREE_OPERAND (rhs, 0);
5757   rr_arg = TREE_OPERAND (rhs, 1);
5758 
5759   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5760   if (simple_operand_p (ll_arg)
5761       && simple_operand_p (lr_arg))
5762     {
5763       if (operand_equal_p (ll_arg, rl_arg, 0)
5764           && operand_equal_p (lr_arg, rr_arg, 0))
5765 	{
5766           result = combine_comparisons (loc, code, lcode, rcode,
5767 					truth_type, ll_arg, lr_arg);
5768 	  if (result)
5769 	    return result;
5770 	}
5771       else if (operand_equal_p (ll_arg, rr_arg, 0)
5772                && operand_equal_p (lr_arg, rl_arg, 0))
5773 	{
5774           result = combine_comparisons (loc, code, lcode,
5775 					swap_tree_comparison (rcode),
5776 					truth_type, ll_arg, lr_arg);
5777 	  if (result)
5778 	    return result;
5779 	}
5780     }
5781 
5782   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5783 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5784 
5785   /* If the RHS can be evaluated unconditionally and its operands are
5786      simple, it wins to evaluate the RHS unconditionally on machines
5787      with expensive branches.  In this case, this isn't a comparison
5788      that can be merged.  */
5789 
5790   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5791 		   false) >= 2
5792       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5793       && simple_operand_p (rl_arg)
5794       && simple_operand_p (rr_arg))
5795     {
5796       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5797       if (code == TRUTH_OR_EXPR
5798 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5799 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5800 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5801 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5802 	return build2_loc (loc, NE_EXPR, truth_type,
5803 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5804 				   ll_arg, rl_arg),
5805 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5806 
5807       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5808       if (code == TRUTH_AND_EXPR
5809 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5810 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5811 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5812 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5813 	return build2_loc (loc, EQ_EXPR, truth_type,
5814 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5815 				   ll_arg, rl_arg),
5816 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5817     }
5818 
5819   /* See if the comparisons can be merged.  Then get all the parameters for
5820      each side.  */
5821 
5822   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5823       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5824     return 0;
5825 
5826   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5827   volatilep = 0;
5828   ll_inner = decode_field_reference (loc, &ll_arg,
5829 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5830 				     &ll_unsignedp, &ll_reversep, &volatilep,
5831 				     &ll_mask, &ll_and_mask);
5832   lr_inner = decode_field_reference (loc, &lr_arg,
5833 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5834 				     &lr_unsignedp, &lr_reversep, &volatilep,
5835 				     &lr_mask, &lr_and_mask);
5836   rl_inner = decode_field_reference (loc, &rl_arg,
5837 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5838 				     &rl_unsignedp, &rl_reversep, &volatilep,
5839 				     &rl_mask, &rl_and_mask);
5840   rr_inner = decode_field_reference (loc, &rr_arg,
5841 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5842 				     &rr_unsignedp, &rr_reversep, &volatilep,
5843 				     &rr_mask, &rr_and_mask);
5844 
5845   /* The inner operation on the lhs of each comparison must be the
5846      same if we are to be able to do anything.
5847      Then see if we have constants.  If not, the same must be true for
5848      the rhs's.  */
5849   if (volatilep
5850       || ll_reversep != rl_reversep
5851       || ll_inner == 0 || rl_inner == 0
5852       || ! operand_equal_p (ll_inner, rl_inner, 0))
5853     return 0;
5854 
5855   if (TREE_CODE (lr_arg) == INTEGER_CST
5856       && TREE_CODE (rr_arg) == INTEGER_CST)
5857     {
5858       l_const = lr_arg, r_const = rr_arg;
5859       lr_reversep = ll_reversep;
5860     }
5861   else if (lr_reversep != rr_reversep
5862 	   || lr_inner == 0 || rr_inner == 0
5863 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5864     return 0;
5865   else
5866     l_const = r_const = 0;
5867 
5868   /* If either comparison code is not correct for our logical operation,
5869      fail.  However, we can convert a one-bit comparison against zero into
5870      the opposite comparison against that bit being set in the field.  */
5871 
5872   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5873   if (lcode != wanted_code)
5874     {
5875       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5876 	{
5877 	  /* Make the left operand unsigned, since we are only interested
5878 	     in the value of one bit.  Otherwise we are doing the wrong
5879 	     thing below.  */
5880 	  ll_unsignedp = 1;
5881 	  l_const = ll_mask;
5882 	}
5883       else
5884 	return 0;
5885     }
5886 
5887   /* This is analogous to the code for l_const above.  */
5888   if (rcode != wanted_code)
5889     {
5890       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5891 	{
5892 	  rl_unsignedp = 1;
5893 	  r_const = rl_mask;
5894 	}
5895       else
5896 	return 0;
5897     }
5898 
5899   /* See if we can find a mode that contains both fields being compared on
5900      the left.  If we can't, fail.  Otherwise, update all constants and masks
5901      to be relative to a field of that size.  */
5902   first_bit = MIN (ll_bitpos, rl_bitpos);
5903   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5904   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5905 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5906 		      volatilep, &lnmode))
5907     return 0;
5908 
5909   lnbitsize = GET_MODE_BITSIZE (lnmode);
5910   lnbitpos = first_bit & ~ (lnbitsize - 1);
5911   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5912   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5913 
5914   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5915     {
5916       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5917       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5918     }
5919 
5920   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5921 			 size_int (xll_bitpos));
5922   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5923 			 size_int (xrl_bitpos));
5924 
5925   if (l_const)
5926     {
5927       l_const = fold_convert_loc (loc, lntype, l_const);
5928       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5929       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5930       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5931 					fold_build1_loc (loc, BIT_NOT_EXPR,
5932 						     lntype, ll_mask))))
5933 	{
5934 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5935 
5936 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5937 	}
5938     }
5939   if (r_const)
5940     {
5941       r_const = fold_convert_loc (loc, lntype, r_const);
5942       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5943       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5944       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5945 					fold_build1_loc (loc, BIT_NOT_EXPR,
5946 						     lntype, rl_mask))))
5947 	{
5948 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5949 
5950 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5951 	}
5952     }
5953 
5954   /* If the right sides are not constant, do the same for them.  Also,
5955      disallow this optimization if a size, signedness or storage order
5956      mismatch occurs between the left and right sides.  */
5957   if (l_const == 0)
5958     {
5959       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5960 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5961 	  || ll_reversep != lr_reversep
5962 	  /* Make sure the two fields on the right
5963 	     correspond to the left without being swapped.  */
5964 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5965 	return 0;
5966 
5967       first_bit = MIN (lr_bitpos, rr_bitpos);
5968       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5969       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5970 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5971 			  volatilep, &rnmode))
5972 	return 0;
5973 
5974       rnbitsize = GET_MODE_BITSIZE (rnmode);
5975       rnbitpos = first_bit & ~ (rnbitsize - 1);
5976       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5977       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5978 
5979       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5980 	{
5981 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5982 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5983 	}
5984 
5985       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5986 							    rntype, lr_mask),
5987 			     size_int (xlr_bitpos));
5988       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5989 							    rntype, rr_mask),
5990 			     size_int (xrr_bitpos));
5991 
5992       /* Make a mask that corresponds to both fields being compared.
5993 	 Do this for both items being compared.  If the operands are the
5994 	 same size and the bits being compared are in the same position
5995 	 then we can do this by masking both and comparing the masked
5996 	 results.  */
5997       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5998       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5999       if (lnbitsize == rnbitsize
6000 	  && xll_bitpos == xlr_bitpos
6001 	  && lnbitpos >= 0
6002 	  && rnbitpos >= 0)
6003 	{
6004 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6005 				    lntype, lnbitsize, lnbitpos,
6006 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6007 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6008 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6009 
6010 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6011 				    rntype, rnbitsize, rnbitpos,
6012 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6013 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6014 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6015 
6016 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6017 	}
6018 
6019       /* There is still another way we can do something:  If both pairs of
6020 	 fields being compared are adjacent, we may be able to make a wider
6021 	 field containing them both.
6022 
6023 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6024 	 the mask must be shifted to account for the shift done by
6025 	 make_bit_field_ref.  */
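      /* For instance (field layout assumed purely for illustration):
	 with adjacent bit-fields a:8 and b:8 in both structures, the
	 pair p->a == q->a && p->b == q->b can become a single 16-bit
	 field comparison.  */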
6026       if (((ll_bitsize + ll_bitpos == rl_bitpos
6027 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6028 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6029 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6030 	  && ll_bitpos >= 0
6031 	  && rl_bitpos >= 0
6032 	  && lr_bitpos >= 0
6033 	  && rr_bitpos >= 0)
6034 	{
6035 	  tree type;
6036 
6037 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6038 				    ll_bitsize + rl_bitsize,
6039 				    MIN (ll_bitpos, rl_bitpos),
6040 				    ll_unsignedp, ll_reversep);
6041 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6042 				    lr_bitsize + rr_bitsize,
6043 				    MIN (lr_bitpos, rr_bitpos),
6044 				    lr_unsignedp, lr_reversep);
6045 
6046 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6047 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6048 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6049 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6050 
6051 	  /* Convert to the smaller type before masking out unwanted bits.  */
6052 	  type = lntype;
6053 	  if (lntype != rntype)
6054 	    {
6055 	      if (lnbitsize > rnbitsize)
6056 		{
6057 		  lhs = fold_convert_loc (loc, rntype, lhs);
6058 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6059 		  type = rntype;
6060 		}
6061 	      else if (lnbitsize < rnbitsize)
6062 		{
6063 		  rhs = fold_convert_loc (loc, lntype, rhs);
6064 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6065 		  type = lntype;
6066 		}
6067 	    }
6068 
6069 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6070 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6071 
6072 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6073 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6074 
6075 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6076 	}
6077 
6078       return 0;
6079     }
6080 
6081   /* Handle the case of comparisons with constants.  If there is something in
6082      common between the masks, those bits of the constants must be the same.
6083      If not, the condition is always false.  Test for this to avoid generating
6084      incorrect code below.  */
6085   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6086   if (! integer_zerop (result)
6087       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6088 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6089     {
6090       if (wanted_code == NE_EXPR)
6091 	{
6092 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6093 	  return constant_boolean_node (true, truth_type);
6094 	}
6095       else
6096 	{
6097 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6098 	  return constant_boolean_node (false, truth_type);
6099 	}
6100     }
6101 
6102   if (lnbitpos < 0)
6103     return 0;
6104 
6105   /* Construct the expression we will return.  First get the component
6106      reference we will make.  Unless the mask is all ones the width of
6107      that field, perform the mask operation.  Then compare with the
6108      merged constant.  */
6109   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6110 			       lntype, lnbitsize, lnbitpos,
6111 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6112 
6113   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6114   if (! all_ones_mask_p (ll_mask, lnbitsize))
6115     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6116 
6117   return build2_loc (loc, wanted_code, truth_type, result,
6118 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6119 }
6120 
6121 /* T is an integer expression that is being multiplied or divided by, or
6122    taken modulo, a constant C (CODE says which operation and what kind of
6123    division or modulus).  See if we can eliminate that operation by folding
6124    it with other operations already in T.  WIDE_TYPE, if non-null, is a type that
6125    should be used for the computation if wider than our type.
6126 
6127    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6128    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6129    expression would not overflow or that overflow is undefined for the type
6130    in the language in question.
6131 
6132    If we return a non-null expression, it is an equivalent form of the
6133    original computation, but need not be in the original type.
6134 
6135    We set *STRICT_OVERFLOW_P to true if the return value depends on
6136    signed overflow being undefined.  Otherwise we do not change
6137    *STRICT_OVERFLOW_P.  */
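
/* In the example above, rewriting (X * 8) + (Y * 16) divided by 4 as
   (X * 2) + (Y * 4) relies on X * 8 and Y * 16 not wrapping; when that
   guarantee comes only from signed overflow being undefined, setting
   *STRICT_OVERFLOW_P lets the caller warn via fold_overflow_warning.  */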
6138 
6139 static tree
6140 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6141 		bool *strict_overflow_p)
6142 {
6143   /* To avoid exponential search depth, refuse to allow recursion past
6144      three levels.  Beyond that (1) it's highly unlikely that we'll find
6145      something interesting and (2) we've probably processed it before
6146      when we built the inner expression.  */
6147 
6148   static int depth;
6149   tree ret;
6150 
6151   if (depth > 3)
6152     return NULL;
6153 
6154   depth++;
6155   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6156   depth--;
6157 
6158   return ret;
6159 }
6160 
6161 static tree
6162 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6163 		  bool *strict_overflow_p)
6164 {
6165   tree type = TREE_TYPE (t);
6166   enum tree_code tcode = TREE_CODE (t);
6167   tree ctype = (wide_type != 0
6168 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6169 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6170 		? wide_type : type);
6171   tree t1, t2;
6172   int same_p = tcode == code;
6173   tree op0 = NULL_TREE, op1 = NULL_TREE;
6174   bool sub_strict_overflow_p;
6175 
6176   /* Don't deal with constants of zero here; they confuse the code below.  */
6177   if (integer_zerop (c))
6178     return NULL_TREE;
6179 
6180   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6181     op0 = TREE_OPERAND (t, 0);
6182 
6183   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6184     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6185 
6186   /* Note that we need not handle conditional operations here since fold
6187      already handles those cases.  So just do arithmetic here.  */
6188   switch (tcode)
6189     {
6190     case INTEGER_CST:
6191       /* For a constant, we can always simplify if we are a multiply
6192 	 or (for divide and modulus) if it is a multiple of our constant.  */
6193       if (code == MULT_EXPR
6194 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6195 				TYPE_SIGN (type)))
6196 	{
6197 	  tree tem = const_binop (code, fold_convert (ctype, t),
6198 				  fold_convert (ctype, c));
6199 	  /* If the multiplication overflowed, we lost information on it.
6200 	     See PR68142 and PR69845.  */
6201 	  if (TREE_OVERFLOW (tem))
6202 	    return NULL_TREE;
6203 	  return tem;
6204 	}
6205       break;
6206 
6207     CASE_CONVERT: case NON_LVALUE_EXPR:
6208       /* If op0 is an expression ...  */
6209       if ((COMPARISON_CLASS_P (op0)
6210 	   || UNARY_CLASS_P (op0)
6211 	   || BINARY_CLASS_P (op0)
6212 	   || VL_EXP_CLASS_P (op0)
6213 	   || EXPRESSION_CLASS_P (op0))
6214 	  /* ... and has wrapping overflow, and its type is smaller
6215 	     than ctype, then we cannot pass through as widening.  */
6216 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6217 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6218 	       && (TYPE_PRECISION (ctype)
6219 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6220 	      /* ... or this is a truncation (t is narrower than op0),
6221 		 then we cannot pass through this narrowing.  */
6222 	      || (TYPE_PRECISION (type)
6223 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6224 	      /* ... or signedness changes for division or modulus,
6225 		 then we cannot pass through this conversion.  */
6226 	      || (code != MULT_EXPR
6227 		  && (TYPE_UNSIGNED (ctype)
6228 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6229 	      /* ... or has undefined overflow while the converted to
6230 		 type has not, we cannot do the operation in the inner type
6231 		 as that would introduce undefined overflow.  */
6232 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6233 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6234 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6235 	break;
6236 
6237       /* Pass the constant down and see if we can make a simplification.  If
6238 	 we can, replace this expression with the inner simplification for
6239 	 possible later conversion to our or some other type.  */
6240       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6241 	  && TREE_CODE (t2) == INTEGER_CST
6242 	  && !TREE_OVERFLOW (t2)
6243 	  && (t1 = extract_muldiv (op0, t2, code,
6244 				   code == MULT_EXPR ? ctype : NULL_TREE,
6245 				   strict_overflow_p)) != 0)
6246 	return t1;
6247       break;
6248 
6249     case ABS_EXPR:
6250       /* If widening the type changes it from signed to unsigned, then we
6251          must avoid building ABS_EXPR itself as unsigned.  */
6252       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6253         {
6254           tree cstype = (*signed_type_for) (ctype);
6255           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6256 	      != 0)
6257             {
6258               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6259               return fold_convert (ctype, t1);
6260             }
6261           break;
6262         }
6263       /* If the constant is negative, we cannot simplify this.  */
6264       if (tree_int_cst_sgn (c) == -1)
6265         break;
6266       /* FALLTHROUGH */
6267     case NEGATE_EXPR:
6268       /* For division and modulus, type can't be unsigned, as e.g.
6269 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6270 	 For signed types, even with wrapping overflow, this is fine.  */
6271       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6272 	break;
6273       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6274 	  != 0)
6275 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6276       break;
6277 
6278     case MIN_EXPR:  case MAX_EXPR:
6279       /* If widening the type changes the signedness, then we can't perform
6280 	 this optimization as that changes the result.  */
6281       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6282 	break;
6283 
6284       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6285       sub_strict_overflow_p = false;
6286       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6287 				&sub_strict_overflow_p)) != 0
6288 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6289 				   &sub_strict_overflow_p)) != 0)
6290 	{
6291 	  if (tree_int_cst_sgn (c) < 0)
6292 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6293 	  if (sub_strict_overflow_p)
6294 	    *strict_overflow_p = true;
6295 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6296 			      fold_convert (ctype, t2));
6297 	}
6298       break;
6299 
6300     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6301       /* If the second operand is constant, this is a multiplication
6302 	 or floor division, by a power of two, so we can treat it that
6303 	 way unless the multiplier or divisor overflows.  Signed
6304 	 left-shift overflow is implementation-defined rather than
6305 	 undefined in C90, so do not convert signed left shift into
6306 	 multiplication.  */
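      /* Concretely, X << 3 (with unsigned X) is treated as X * 8, and
	 X >> 3 as the floor division X / 8, before recursing.  */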
6307       if (TREE_CODE (op1) == INTEGER_CST
6308 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6309 	  /* const_binop may not detect overflow correctly,
6310 	     so check for it explicitly here.  */
6311 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6312 			wi::to_wide (op1))
6313 	  && (t1 = fold_convert (ctype,
6314 				 const_binop (LSHIFT_EXPR, size_one_node,
6315 					      op1))) != 0
6316 	  && !TREE_OVERFLOW (t1))
6317 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6318 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6319 				       ctype,
6320 				       fold_convert (ctype, op0),
6321 				       t1),
6322 			       c, code, wide_type, strict_overflow_p);
6323       break;
6324 
6325     case PLUS_EXPR:  case MINUS_EXPR:
6326       /* See if we can eliminate the operation on both sides.  If we can, we
6327 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6328 	 cases where we can do anything are if the second operand is a
6329 	 constant.  */
6330       sub_strict_overflow_p = false;
6331       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6332       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6333       if (t1 != 0 && t2 != 0
6334 	  && TYPE_OVERFLOW_WRAPS (ctype)
6335 	  && (code == MULT_EXPR
6336 	      /* If not multiplication, we can only do this if both operands
6337 		 are divisible by c.  */
6338 	      || (multiple_of_p (ctype, op0, c)
6339 	          && multiple_of_p (ctype, op1, c))))
6340 	{
6341 	  if (sub_strict_overflow_p)
6342 	    *strict_overflow_p = true;
6343 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6344 			      fold_convert (ctype, t2));
6345 	}
6346 
6347       /* If this was a subtraction, negate OP1 and set it to be an addition.
6348 	 This simplifies the logic below.  */
6349       if (tcode == MINUS_EXPR)
6350 	{
6351 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6352 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6353 	  if (TREE_CODE (op0) == INTEGER_CST)
6354 	    {
6355 	      std::swap (op0, op1);
6356 	      std::swap (t1, t2);
6357 	    }
6358 	}
6359 
6360       if (TREE_CODE (op1) != INTEGER_CST)
6361 	break;
6362 
6363       /* If either OP1 or C is negative, this optimization is not safe for
6364 	 some of the division and remainder types while for others we need
6365 	 to change the code.  */
6366       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6367 	{
6368 	  if (code == CEIL_DIV_EXPR)
6369 	    code = FLOOR_DIV_EXPR;
6370 	  else if (code == FLOOR_DIV_EXPR)
6371 	    code = CEIL_DIV_EXPR;
6372 	  else if (code != MULT_EXPR
6373 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6374 	    break;
6375 	}
6376 
6377       /* If it's a multiply or a division/modulus operation of a multiple
6378          of our constant, do the operation and verify it doesn't overflow.  */
6379       if (code == MULT_EXPR
6380 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6381 				TYPE_SIGN (type)))
6382 	{
6383 	  op1 = const_binop (code, fold_convert (ctype, op1),
6384 			     fold_convert (ctype, c));
6385 	  /* We allow the constant to overflow with wrapping semantics.  */
6386 	  if (op1 == 0
6387 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6388 	    break;
6389 	}
6390       else
6391 	break;
6392 
6393       /* If we have an unsigned type, we cannot widen the operation since it
6394 	 will change the result if the original computation overflowed.  */
6395       if (TYPE_UNSIGNED (ctype) && ctype != type)
6396 	break;
6397 
6398       /* The last case is if we are a multiply.  In that case, we can
6399 	 apply the distributive law to commute the multiply and addition
6400 	 if the multiplication of the constants doesn't overflow
6401 	 and overflow is defined.  With undefined overflow
6402 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6403       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6404 	return fold_build2 (tcode, ctype,
6405 			    fold_build2 (code, ctype,
6406 					 fold_convert (ctype, op0),
6407 					 fold_convert (ctype, c)),
6408 			    op1);
6409 
6410       break;
6411 
6412     case MULT_EXPR:
6413       /* We have a special case here if we are doing something like
6414 	 (C * 8) % 4 since we know that's zero.  */
6415       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6416 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6417 	  /* If the multiplication can overflow we cannot optimize this.  */
6418 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6419 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6420 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6421 				TYPE_SIGN (type)))
6422 	{
6423 	  *strict_overflow_p = true;
6424 	  return omit_one_operand (type, integer_zero_node, op0);
6425 	}
6426 
6427       /* ... fall through ...  */
6428 
6429     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6430     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6431       /* If we can extract our operation from the LHS, do so and return a
6432 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6433 	 do something only if the second operand is a constant.  */
6434       if (same_p
6435 	  && TYPE_OVERFLOW_WRAPS (ctype)
6436 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6437 				   strict_overflow_p)) != 0)
6438 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6439 			    fold_convert (ctype, op1));
6440       else if (tcode == MULT_EXPR && code == MULT_EXPR
6441 	       && TYPE_OVERFLOW_WRAPS (ctype)
6442 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6443 					strict_overflow_p)) != 0)
6444 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6445 			    fold_convert (ctype, t1));
6446       else if (TREE_CODE (op1) != INTEGER_CST)
6447 	return 0;
6448 
6449       /* If these are the same operation types, we can associate them
6450 	 assuming no overflow.  */
6451       if (tcode == code)
6452 	{
6453 	  bool overflow_p = false;
6454 	  bool overflow_mul_p;
6455 	  signop sign = TYPE_SIGN (ctype);
6456 	  unsigned prec = TYPE_PRECISION (ctype);
6457 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6458 				  wi::to_wide (c, prec),
6459 				  sign, &overflow_mul_p);
6460 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6461 	  if (overflow_mul_p
6462 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6463 	    overflow_p = true;
6464 	  if (!overflow_p)
6465 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6466 				wide_int_to_tree (ctype, mul));
6467 	}
6468 
6469       /* If these operations "cancel" each other, we have the main
6470 	 optimizations of this pass, which occur when either constant is a
6471 	 multiple of the other, in which case we replace this with either an
6472 	 operation of CODE or TCODE.
6473 
6474 	 If we have an unsigned type, we cannot do this since it will change
6475 	 the result if the original computation overflowed.  */
6476       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6477 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6478 	      || (tcode == MULT_EXPR
6479 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6480 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6481 		  && code != MULT_EXPR)))
6482 	{
6483 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6484 				 TYPE_SIGN (type)))
6485 	    {
6486 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6487 		*strict_overflow_p = true;
6488 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6489 				  fold_convert (ctype,
6490 						const_binop (TRUNC_DIV_EXPR,
6491 							     op1, c)));
6492 	    }
6493 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6494 				      TYPE_SIGN (type)))
6495 	    {
6496 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6497 		*strict_overflow_p = true;
6498 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6499 				  fold_convert (ctype,
6500 						const_binop (TRUNC_DIV_EXPR,
6501 							     c, op1)));
6502 	    }
6503 	}
6504       break;
6505 
6506     default:
6507       break;
6508     }
6509 
6510   return 0;
6511 }
6512 
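/* Illustrative example for the extract_muldiv cases above (a sketch):
   for signed X, where overflow is undefined, the call
   extract_muldiv (X * 6, 2, TRUNC_DIV_EXPR, ...) reaches the "cancel"
   case: tcode == MULT_EXPR and 6 is a multiple of c == 2, so the
   result is X * (6 / 2), i.e. X * 3, and *strict_overflow_p is set to
   record that the fold relied on undefined overflow.  */
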
6513 /* Return a node which has the indicated constant VALUE (either 0 or
6514    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6515    and is of the indicated TYPE.  */
6516 
6517 tree
6518 constant_boolean_node (bool value, tree type)
6519 {
6520   if (type == integer_type_node)
6521     return value ? integer_one_node : integer_zero_node;
6522   else if (type == boolean_type_node)
6523     return value ? boolean_true_node : boolean_false_node;
6524   else if (TREE_CODE (type) == VECTOR_TYPE)
6525     return build_vector_from_val (type,
6526 				  build_int_cst (TREE_TYPE (type),
6527 						 value ? -1 : 0));
6528   else
6529     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6530 }
6531 
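/* For instance, constant_boolean_node (true, boolean_type_node)
   returns boolean_true_node, while for a vector comparison type it
   builds the all-ones mask { -1, -1, ... } that VEC_COND_EXPR
   expects.  */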
6532 
6533 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6534    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6535    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6536    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6537    COND is the first argument to CODE; otherwise (as in the example
6538    given here), it is the second argument.  TYPE is the type of the
6539    original expression.  Return NULL_TREE if no simplification is
6540    possible.  */
6541 
6542 static tree
6543 fold_binary_op_with_conditional_arg (location_t loc,
6544 				     enum tree_code code,
6545 				     tree type, tree op0, tree op1,
6546 				     tree cond, tree arg, int cond_first_p)
6547 {
6548   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6549   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6550   tree test, true_value, false_value;
6551   tree lhs = NULL_TREE;
6552   tree rhs = NULL_TREE;
6553   enum tree_code cond_code = COND_EXPR;
6554 
6555   if (TREE_CODE (cond) == COND_EXPR
6556       || TREE_CODE (cond) == VEC_COND_EXPR)
6557     {
6558       test = TREE_OPERAND (cond, 0);
6559       true_value = TREE_OPERAND (cond, 1);
6560       false_value = TREE_OPERAND (cond, 2);
6561       /* If this operand throws an exception, then it does not make
6562 	 sense to try to perform a logical or arithmetic operation
6563 	 involving it.  */
6564       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6565 	lhs = true_value;
6566       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6567 	rhs = false_value;
6568     }
6569   else if (!(TREE_CODE (type) != VECTOR_TYPE
6570 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6571     {
6572       tree testtype = TREE_TYPE (cond);
6573       test = cond;
6574       true_value = constant_boolean_node (true, testtype);
6575       false_value = constant_boolean_node (false, testtype);
6576     }
6577   else
6578     /* Detect the case of mixing vector and scalar types - bail out.  */
6579     return NULL_TREE;
6580 
6581   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6582     cond_code = VEC_COND_EXPR;
6583 
6584   /* This transformation is only worthwhile if we don't have to wrap ARG
6585      in a SAVE_EXPR and the operation can be simplified without recursing
6586      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6587   if (!TREE_CONSTANT (arg)
6588       && (TREE_SIDE_EFFECTS (arg)
6589 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6590 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6591     return NULL_TREE;
6592 
6593   arg = fold_convert_loc (loc, arg_type, arg);
6594   if (lhs == 0)
6595     {
6596       true_value = fold_convert_loc (loc, cond_type, true_value);
6597       if (cond_first_p)
6598 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6599       else
6600 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6601     }
6602   if (rhs == 0)
6603     {
6604       false_value = fold_convert_loc (loc, cond_type, false_value);
6605       if (cond_first_p)
6606 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6607       else
6608 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6609     }
6610 
6611   /* Check that we have simplified at least one of the branches.  */
6612   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6613     return NULL_TREE;
6614 
6615   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6616 }
6617 
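/* A sketch of the transformation performed above: with ARG constant,
   folding `2 + (b ? x : 3)' distributes the addition into both arms,
   giving `b ? 2 + x : 5'; the guards bail out when ARG would have to
   be duplicated (or wrapped in a SAVE_EXPR) without either branch
   simplifying in return.  */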
6618 
6619 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6620 
6621    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6622    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6623    ADDEND is the same as X.
6624 
6625    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6626    and finite.  The problematic cases are when X is zero, and its mode
6627    has signed zeros.  In the case of rounding towards -infinity,
6628    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6629    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6630 
6631 bool
6632 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6633 {
6634   if (!real_zerop (addend))
6635     return false;
6636 
6637   /* Don't allow the fold with -fsignaling-nans.  */
6638   if (HONOR_SNANS (element_mode (type)))
6639     return false;
6640 
6641   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6642   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6643     return true;
6644 
6645   /* In a vector or complex, we would need to check the sign of all zeros.  */
6646   if (TREE_CODE (addend) != REAL_CST)
6647     return false;
6648 
6649   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6650   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6651     negate = !negate;
6652 
6653   /* The mode has signed zeros, and we have to honor their sign.
6654      In this situation, there is only one case we can return true for.
6655      X - 0 is the same as X unless rounding towards -infinity is
6656      supported.  */
6657   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6658 }
6659 
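/* Concrete instance of the pitfall described above: with signed zeros
   honored, `x + 0.0' is not `x' for x == -0.0, because -0.0 + 0.0 is
   +0.0 under round-to-nearest; `x - 0.0' is safe except when rounding
   towards -infinity, which is exactly what the final return above
   tests.  */
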
6660 /* Subroutine of match.pd that optimizes comparisons of a division by
6661    a nonzero integer constant against an integer constant, i.e.
6662    X/C1 op C2.
6663 
6664    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6665    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
6666 
6667 enum tree_code
6668 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6669 		  tree *hi, bool *neg_overflow)
6670 {
6671   tree prod, tmp, type = TREE_TYPE (c1);
6672   signop sign = TYPE_SIGN (type);
6673   bool overflow;
6674 
6675   /* We have to do this the hard way to detect unsigned overflow.
6676      prod = int_const_binop (MULT_EXPR, c1, c2);  */
6677   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6678   prod = force_fit_type (type, val, -1, overflow);
6679   *neg_overflow = false;
6680 
6681   if (sign == UNSIGNED)
6682     {
6683       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6684       *lo = prod;
6685 
6686       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6687       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6688       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6689     }
6690   else if (tree_int_cst_sgn (c1) >= 0)
6691     {
6692       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6693       switch (tree_int_cst_sgn (c2))
6694 	{
6695 	case -1:
6696 	  *neg_overflow = true;
6697 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6698 	  *hi = prod;
6699 	  break;
6700 
6701 	case 0:
6702 	  *lo = fold_negate_const (tmp, type);
6703 	  *hi = tmp;
6704 	  break;
6705 
6706 	case 1:
6707 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6708 	  *lo = prod;
6709 	  break;
6710 
6711 	default:
6712 	  gcc_unreachable ();
6713 	}
6714     }
6715   else
6716     {
6717       /* A negative divisor reverses the relational operators.  */
6718       code = swap_tree_comparison (code);
6719 
6720       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6721       switch (tree_int_cst_sgn (c2))
6722 	{
6723 	case -1:
6724 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6725 	  *lo = prod;
6726 	  break;
6727 
6728 	case 0:
6729 	  *hi = fold_negate_const (tmp, type);
6730 	  *lo = tmp;
6731 	  break;
6732 
6733 	case 1:
6734 	  *neg_overflow = true;
6735 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6736 	  *hi = prod;
6737 	  break;
6738 
6739 	default:
6740 	  gcc_unreachable ();
6741 	}
6742     }
6743 
6744   if (code != EQ_EXPR && code != NE_EXPR)
6745     return code;
6746 
6747   if (TREE_OVERFLOW (*lo)
6748       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6749     *lo = NULL_TREE;
6750   if (TREE_OVERFLOW (*hi)
6751       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6752     *hi = NULL_TREE;
6753 
6754   return code;
6755 }
6756 
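/* Worked example (signed type, both constants positive): for
   X/3 == 2 we get prod = 6 and tmp = 2, hence *lo = 6 and *hi = 8,
   so the caller can rewrite the comparison as 6 <= X && X <= 8.
   A negative divisor, e.g. X/-3 op C2, first swaps the comparison
   code as noted above.  */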
6757 
6758 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6759    equality/inequality test, then return a simplified form of the test
6760    using a sign test.  Otherwise return NULL.  TYPE is the desired
6761    result type.  */
6762 
6763 static tree
6764 fold_single_bit_test_into_sign_test (location_t loc,
6765 				     enum tree_code code, tree arg0, tree arg1,
6766 				     tree result_type)
6767 {
6768   /* If this is testing a single bit, we can optimize the test.  */
6769   if ((code == NE_EXPR || code == EQ_EXPR)
6770       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6771       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6772     {
6773       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6774 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6775       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6776 
6777       if (arg00 != NULL_TREE
6778 	  /* This is only a win if casting to a signed type is cheap,
6779 	     i.e. when arg00's type is not a partial mode.  */
6780 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
6781 	{
6782 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6783 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6784 			      result_type,
6785 			      fold_convert_loc (loc, stype, arg00),
6786 			      build_int_cst (stype, 0));
6787 	}
6788     }
6789 
6790   return NULL_TREE;
6791 }
6792 
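/* For example, for a 32-bit signed A the mask in
   (A & 0x80000000) != 0 is exactly the sign bit, so the test is
   rewritten as A < 0, and (A & 0x80000000) == 0 as A >= 0.  */
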
6793 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6794    equality/inequality test, then return a simplified form of
6795    the test using shifts and logical operations.  Otherwise return
6796    NULL.  TYPE is the desired result type.  */
6797 
6798 tree
6799 fold_single_bit_test (location_t loc, enum tree_code code,
6800 		      tree arg0, tree arg1, tree result_type)
6801 {
6802   /* If this is testing a single bit, we can optimize the test.  */
6803   if ((code == NE_EXPR || code == EQ_EXPR)
6804       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6805       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6806     {
6807       tree inner = TREE_OPERAND (arg0, 0);
6808       tree type = TREE_TYPE (arg0);
6809       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6810       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6811       int ops_unsigned;
6812       tree signed_type, unsigned_type, intermediate_type;
6813       tree tem, one;
6814 
6815       /* First, see if we can fold the single bit test into a sign-bit
6816 	 test.  */
6817       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6818 						 result_type);
6819       if (tem)
6820 	return tem;
6821 
6822       /* Otherwise we have (A & C) != 0 where C is a single bit,
6823 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6824 	 Similarly for (A & C) == 0.  */
6825 
6826       /* If INNER is a right shift by a constant whose shift count plus
6827 	 BITNUM does not exceed the precision, adjust BITNUM and INNER.  */
6828       if (TREE_CODE (inner) == RSHIFT_EXPR
6829 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6830 	  && bitnum < TYPE_PRECISION (type)
6831 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6832 			TYPE_PRECISION (type) - bitnum))
6833 	{
6834 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6835 	  inner = TREE_OPERAND (inner, 0);
6836 	}
6837 
6838       /* If we are going to be able to omit the AND below, we must do our
6839 	 operations as unsigned.  If we must use the AND, we have a choice.
6840 	 Normally unsigned is faster, but for some machines signed is.  */
6841       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6842 		      && !flag_syntax_only) ? 0 : 1;
6843 
6844       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6845       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6846       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6847       inner = fold_convert_loc (loc, intermediate_type, inner);
6848 
6849       if (bitnum != 0)
6850 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6851 			inner, size_int (bitnum));
6852 
6853       one = build_int_cst (intermediate_type, 1);
6854 
6855       if (code == EQ_EXPR)
6856 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6857 
6858       /* Put the AND last so it can combine with more things.  */
6859       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6860 
6861       /* Make sure to return the proper type.  */
6862       inner = fold_convert_loc (loc, result_type, inner);
6863 
6864       return inner;
6865     }
6866   return NULL_TREE;
6867 }
6868 
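/* For example, when the sign-bit rewrite above does not apply,
   testing bit 3 of A becomes

     (A & 8) != 0   ==>   (A >> 3) & 1
     (A & 8) == 0   ==>   ((A >> 3) ^ 1) & 1

   with the shift and AND performed in INTERMEDIATE_TYPE as chosen
   above.  */
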
6869 /* Test whether it is preferable to swap two operands, ARG0 and
6870    ARG1, for example because ARG0 is an integer constant and ARG1
6871    isn't.  */
6872 
6873 bool
6874 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6875 {
6876   if (CONSTANT_CLASS_P (arg1))
6877     return 0;
6878   if (CONSTANT_CLASS_P (arg0))
6879     return 1;
6880 
6881   STRIP_NOPS (arg0);
6882   STRIP_NOPS (arg1);
6883 
6884   if (TREE_CONSTANT (arg1))
6885     return 0;
6886   if (TREE_CONSTANT (arg0))
6887     return 1;
6888 
6889   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6890      for commutative and comparison operators.  Ensuring a canonical
6891      form allows the optimizers to find additional redundancies without
6892      having to explicitly check for both orderings.  */
6893   if (TREE_CODE (arg0) == SSA_NAME
6894       && TREE_CODE (arg1) == SSA_NAME
6895       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6896     return 1;
6897 
6898   /* Put SSA_NAMEs last.  */
6899   if (TREE_CODE (arg1) == SSA_NAME)
6900     return 0;
6901   if (TREE_CODE (arg0) == SSA_NAME)
6902     return 1;
6903 
6904   /* Put variables last.  */
6905   if (DECL_P (arg1))
6906     return 0;
6907   if (DECL_P (arg0))
6908     return 1;
6909 
6910   return 0;
6911 }
6912 
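/* For instance, tree_swap_operands_p (5, x) is true, which is how
   fold_binary canonicalizes the commutative 5 + x into x + 5 so that
   later passes need only match one operand order.  */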
6913 
6914 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6915    means A >= Y && A != MAX, but in this case we know that
6916    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6917 
6918 static tree
6919 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6920 {
6921   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6922 
6923   if (TREE_CODE (bound) == LT_EXPR)
6924     a = TREE_OPERAND (bound, 0);
6925   else if (TREE_CODE (bound) == GT_EXPR)
6926     a = TREE_OPERAND (bound, 1);
6927   else
6928     return NULL_TREE;
6929 
6930   typea = TREE_TYPE (a);
6931   if (!INTEGRAL_TYPE_P (typea)
6932       && !POINTER_TYPE_P (typea))
6933     return NULL_TREE;
6934 
6935   if (TREE_CODE (ineq) == LT_EXPR)
6936     {
6937       a1 = TREE_OPERAND (ineq, 1);
6938       y = TREE_OPERAND (ineq, 0);
6939     }
6940   else if (TREE_CODE (ineq) == GT_EXPR)
6941     {
6942       a1 = TREE_OPERAND (ineq, 0);
6943       y = TREE_OPERAND (ineq, 1);
6944     }
6945   else
6946     return NULL_TREE;
6947 
6948   if (TREE_TYPE (a1) != typea)
6949     return NULL_TREE;
6950 
6951   if (POINTER_TYPE_P (typea))
6952     {
6953       /* Convert the pointer types into integers before taking the difference.  */
6954       tree ta = fold_convert_loc (loc, ssizetype, a);
6955       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6956       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6957     }
6958   else
6959     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6960 
6961   if (!diff || !integer_onep (diff))
6962 	    return NULL_TREE;
6963 
6964   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6965 }
6966 
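/* Sanity check of the reasoning above with numbers: from a < x we
   know a <= x - 1 <= MAX - 1, so a + 1 cannot wrap, and a + 1 > y is
   then equivalent to a >= y; e.g. for a == 7, y == 8, both 7 + 1 > 8
   and 7 >= 8 are false alike.  */
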
6967 /* Fold a sum or difference of at least one multiplication.
6968    Returns the folded tree or NULL if no simplification could be made.  */
6969 
6970 static tree
6971 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6972 			  tree arg0, tree arg1)
6973 {
6974   tree arg00, arg01, arg10, arg11;
6975   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6976 
6977   /* (A * C) +- (B * C) -> (A+-B) * C.
6978      (A * C) +- A -> A * (C+-1).
6979      We are most concerned about the case where C is a constant,
6980      but other combinations show up during loop reduction.  Since
6981      it is not difficult, try all four possibilities.  */
6982 
6983   if (TREE_CODE (arg0) == MULT_EXPR)
6984     {
6985       arg00 = TREE_OPERAND (arg0, 0);
6986       arg01 = TREE_OPERAND (arg0, 1);
6987     }
6988   else if (TREE_CODE (arg0) == INTEGER_CST)
6989     {
6990       arg00 = build_one_cst (type);
6991       arg01 = arg0;
6992     }
6993   else
6994     {
6995       /* We cannot generate constant 1 for fract.  */
6996       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6997 	return NULL_TREE;
6998       arg00 = arg0;
6999       arg01 = build_one_cst (type);
7000     }
7001   if (TREE_CODE (arg1) == MULT_EXPR)
7002     {
7003       arg10 = TREE_OPERAND (arg1, 0);
7004       arg11 = TREE_OPERAND (arg1, 1);
7005     }
7006   else if (TREE_CODE (arg1) == INTEGER_CST)
7007     {
7008       arg10 = build_one_cst (type);
7009       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7010 	 the purpose of this canonicalization.  */
7011       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7012 	  && negate_expr_p (arg1)
7013 	  && code == PLUS_EXPR)
7014 	{
7015 	  arg11 = negate_expr (arg1);
7016 	  code = MINUS_EXPR;
7017 	}
7018       else
7019 	arg11 = arg1;
7020     }
7021   else
7022     {
7023       /* We cannot generate constant 1 for fract.  */
7024       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7025 	return NULL_TREE;
7026       arg10 = arg1;
7027       arg11 = build_one_cst (type);
7028     }
7029   same = NULL_TREE;
7030 
7031   /* Prefer factoring a common non-constant.  */
7032   if (operand_equal_p (arg00, arg10, 0))
7033     same = arg00, alt0 = arg01, alt1 = arg11;
7034   else if (operand_equal_p (arg01, arg11, 0))
7035     same = arg01, alt0 = arg00, alt1 = arg10;
7036   else if (operand_equal_p (arg00, arg11, 0))
7037     same = arg00, alt0 = arg01, alt1 = arg10;
7038   else if (operand_equal_p (arg01, arg10, 0))
7039     same = arg01, alt0 = arg00, alt1 = arg11;
7040 
7041   /* No identical multiplicands; see if we can find a common
7042      power-of-two factor in non-power-of-two multiplies.  This
7043      can help in multi-dimensional array access.  */
7044   else if (tree_fits_shwi_p (arg01)
7045 	   && tree_fits_shwi_p (arg11))
7046     {
7047       HOST_WIDE_INT int01, int11, tmp;
7048       bool swap = false;
7049       tree maybe_same;
7050       int01 = tree_to_shwi (arg01);
7051       int11 = tree_to_shwi (arg11);
7052 
7053       /* Move min of absolute values to int11.  */
7054       if (absu_hwi (int01) < absu_hwi (int11))
7055         {
7056 	  tmp = int01, int01 = int11, int11 = tmp;
7057 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7058 	  maybe_same = arg01;
7059 	  swap = true;
7060 	}
7061       else
7062 	maybe_same = arg11;
7063 
7064       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7065 	  /* The remainder should not be a constant; otherwise we would
7066 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which increases
7067 	     the number of multiplications necessary.  */
7068 	  && TREE_CODE (arg10) != INTEGER_CST)
7069         {
7070 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7071 			      build_int_cst (TREE_TYPE (arg00),
7072 					     int01 / int11));
7073 	  alt1 = arg10;
7074 	  same = maybe_same;
7075 	  if (swap)
7076 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7077 	}
7078     }
7079 
7080   if (!same)
7081     return NULL_TREE;
7082 
7083   if (! INTEGRAL_TYPE_P (type)
7084       || TYPE_OVERFLOW_WRAPS (type)
7085       /* We are neither factoring zero nor minus one.  */
7086       || TREE_CODE (same) == INTEGER_CST)
7087     return fold_build2_loc (loc, MULT_EXPR, type,
7088 			fold_build2_loc (loc, code, type,
7089 				     fold_convert_loc (loc, type, alt0),
7090 				     fold_convert_loc (loc, type, alt1)),
7091 			fold_convert_loc (loc, type, same));
7092 
7093   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7094      same may be minus one and thus the multiplication may overflow.  Perform
7095      the sum operation in an unsigned type.  */
7096   tree utype = unsigned_type_for (type);
7097   tree tem = fold_build2_loc (loc, code, utype,
7098 			      fold_convert_loc (loc, utype, alt0),
7099 			      fold_convert_loc (loc, utype, alt1));
7100   /* If the sum evaluated to a constant that is not -INF, the multiplication
7101      cannot overflow.  */
7102   if (TREE_CODE (tem) == INTEGER_CST
7103       && (wi::to_wide (tem)
7104 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7105     return fold_build2_loc (loc, MULT_EXPR, type,
7106 			    fold_convert (type, tem), same);
7107 
7108   /* Do not resort to unsigned multiplication because
7109      we lose the no-overflow property of the expression.  */
7110   return NULL_TREE;
7111 }
7112 
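/* Two representative folds from the routine above (sketches): with a
   shared multiplicand, a*c + b*c becomes (a + b) * c, and in the
   power-of-two case that arises from multi-dimensional array
   indexing, i*12 + j*4 becomes (i*3 + j) * 4, since 4 is a power of
   two dividing 12 and j is not a constant.  */
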
7113 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7114    specified by EXPR into the buffer PTR of length LEN bytes.
7115    Return the number of bytes placed in the buffer, or zero
7116    upon failure.  */
7117 
7118 static int
7119 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7120 {
7121   tree type = TREE_TYPE (expr);
7122   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7123   int byte, offset, word, words;
7124   unsigned char value;
7125 
7126   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7127     return 0;
7128   if (off == -1)
7129     off = 0;
7130 
7131   if (ptr == NULL)
7132     /* Dry run.  */
7133     return MIN (len, total_bytes - off);
7134 
7135   words = total_bytes / UNITS_PER_WORD;
7136 
7137   for (byte = 0; byte < total_bytes; byte++)
7138     {
7139       int bitpos = byte * BITS_PER_UNIT;
7140       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7141 	 number of bytes.  */
7142       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7143 
7144       if (total_bytes > UNITS_PER_WORD)
7145 	{
7146 	  word = byte / UNITS_PER_WORD;
7147 	  if (WORDS_BIG_ENDIAN)
7148 	    word = (words - 1) - word;
7149 	  offset = word * UNITS_PER_WORD;
7150 	  if (BYTES_BIG_ENDIAN)
7151 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7152 	  else
7153 	    offset += byte % UNITS_PER_WORD;
7154 	}
7155       else
7156 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7157       if (offset >= off && offset - off < len)
7158 	ptr[offset - off] = value;
7159     }
7160   return MIN (len, total_bytes - off);
7161 }
7162 
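/* Byte-order example, assuming 8-bit bytes: encoding the 32-bit
   INTEGER_CST 0x01020304 stores 04 03 02 01 on a little-endian target
   and 01 02 03 04 on a big-endian one; the word shuffle above only
   matters when the value is wider than UNITS_PER_WORD.  */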
7163 
7164 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7165    specified by EXPR into the buffer PTR of length LEN bytes.
7166    Return the number of bytes placed in the buffer, or zero
7167    upon failure.  */
7168 
7169 static int
7170 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7171 {
7172   tree type = TREE_TYPE (expr);
7173   scalar_mode mode = SCALAR_TYPE_MODE (type);
7174   int total_bytes = GET_MODE_SIZE (mode);
7175   FIXED_VALUE_TYPE value;
7176   tree i_value, i_type;
7177 
7178   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7179     return 0;
7180 
7181   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7182 
7183   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7184     return 0;
7185 
7186   value = TREE_FIXED_CST (expr);
7187   i_value = double_int_to_tree (i_type, value.data);
7188 
7189   return native_encode_int (i_value, ptr, len, off);
7190 }
7191 
7192 
7193 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7194    specified by EXPR into the buffer PTR of length LEN bytes.
7195    Return the number of bytes placed in the buffer, or zero
7196    upon failure.  */
7197 
7198 static int
7199 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7200 {
7201   tree type = TREE_TYPE (expr);
7202   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7203   int byte, offset, word, words, bitpos;
7204   unsigned char value;
7205 
7206   /* There are always 32 bits in each long, no matter the size of
7207      the host's long.  We handle floating point representations with
7208      up to 192 bits.  */
7209   long tmp[6];
7210 
7211   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7212     return 0;
7213   if (off == -1)
7214     off = 0;
7215 
7216   if (ptr == NULL)
7217     /* Dry run.  */
7218     return MIN (len, total_bytes - off);
7219 
7220   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7221 
7222   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7223 
7224   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7225        bitpos += BITS_PER_UNIT)
7226     {
7227       byte = (bitpos / BITS_PER_UNIT) & 3;
7228       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7229 
7230       if (UNITS_PER_WORD < 4)
7231 	{
7232 	  word = byte / UNITS_PER_WORD;
7233 	  if (WORDS_BIG_ENDIAN)
7234 	    word = (words - 1) - word;
7235 	  offset = word * UNITS_PER_WORD;
7236 	  if (BYTES_BIG_ENDIAN)
7237 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7238 	  else
7239 	    offset += byte % UNITS_PER_WORD;
7240 	}
7241       else
7242 	{
7243 	  offset = byte;
7244 	  if (BYTES_BIG_ENDIAN)
7245 	    {
7246 	      /* Reverse bytes within each long, or within the entire float
7247 		 if it's smaller than a long (for HFmode).  */
7248 	      offset = MIN (3, total_bytes - 1) - offset;
7249 	      gcc_assert (offset >= 0);
7250 	    }
7251 	}
7252       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7253       if (offset >= off
7254 	  && offset - off < len)
7255 	ptr[offset - off] = value;
7256     }
7257   return MIN (len, total_bytes - off);
7258 }
7259 
7260 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7261    specified by EXPR into the buffer PTR of length LEN bytes.
7262    Return the number of bytes placed in the buffer, or zero
7263    upon failure.  */
7264 
7265 static int
7266 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7267 {
7268   int rsize, isize;
7269   tree part;
7270 
7271   part = TREE_REALPART (expr);
7272   rsize = native_encode_expr (part, ptr, len, off);
7273   if (off == -1 && rsize == 0)
7274     return 0;
7275   part = TREE_IMAGPART (expr);
7276   if (off != -1)
7277     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7278   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7279 			      len - rsize, off);
7280   if (off == -1 && isize != rsize)
7281     return 0;
7282   return rsize + isize;
7283 }
7284 
7285 
7286 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7287    specified by EXPR into the buffer PTR of length LEN bytes.
7288    Return the number of bytes placed in the buffer, or zero
7289    upon failure.  */
7290 
7291 static int
7292 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7293 {
7294   unsigned HOST_WIDE_INT i, count;
7295   int size, offset;
7296   tree itype, elem;
7297 
7298   offset = 0;
7299   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7300     return 0;
7301   itype = TREE_TYPE (TREE_TYPE (expr));
7302   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7303   for (i = 0; i < count; i++)
7304     {
7305       if (off >= size)
7306 	{
7307 	  off -= size;
7308 	  continue;
7309 	}
7310       elem = VECTOR_CST_ELT (expr, i);
7311       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7312 				    len - offset, off);
7313       if ((off == -1 && res != size) || res == 0)
7314 	return 0;
7315       offset += res;
7316       if (offset >= len)
7317 	return (off == -1 && i < count - 1) ? 0 : offset;
7318       if (off != -1)
7319 	off = 0;
7320     }
7321   return offset;
7322 }
7323 
7324 
7325 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7326    specified by EXPR into the buffer PTR of length LEN bytes.
7327    Return the number of bytes placed in the buffer, or zero
7328    upon failure.  */
7329 
7330 static int
7331 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7332 {
7333   tree type = TREE_TYPE (expr);
7334 
7335   /* Wide-char strings are encoded in target byte order, so encoding
7336      them natively is trivial.  */
7337   if (BITS_PER_UNIT != CHAR_BIT
7338       || TREE_CODE (type) != ARRAY_TYPE
7339       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7340       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7341     return 0;
7342 
7343   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7344   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7345     return 0;
7346   if (off == -1)
7347     off = 0;
7348   if (ptr == NULL)
7349     /* Dry run.  */;
7350   else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7351     {
7352       int written = 0;
7353       if (off < TREE_STRING_LENGTH (expr))
7354 	{
7355 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7356 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7357 	}
7358       memset (ptr + written, 0,
7359 	      MIN (total_bytes - written, len - written));
7360     }
7361   else
7362     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7363   return MIN (total_bytes - off, len);
7364 }
7365 
7366 
7367 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7368    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7369    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7370    anything, just do a dry run.  If OFF is not -1 then start
7371    the encoding at byte offset OFF and encode at most LEN bytes.
7372    Return the number of bytes placed in the buffer, or zero upon failure.  */
7373 
7374 int
7375 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7376 {
7377   /* We don't support starting at a negative offset, and -1 is special.  */
7378   if (off < -1)
7379     return 0;
7380 
7381   switch (TREE_CODE (expr))
7382     {
7383     case INTEGER_CST:
7384       return native_encode_int (expr, ptr, len, off);
7385 
7386     case REAL_CST:
7387       return native_encode_real (expr, ptr, len, off);
7388 
7389     case FIXED_CST:
7390       return native_encode_fixed (expr, ptr, len, off);
7391 
7392     case COMPLEX_CST:
7393       return native_encode_complex (expr, ptr, len, off);
7394 
7395     case VECTOR_CST:
7396       return native_encode_vector (expr, ptr, len, off);
7397 
7398     case STRING_CST:
7399       return native_encode_string (expr, ptr, len, off);
7400 
7401     default:
7402       return 0;
7403     }
7404 }
7405 
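/* Usage sketch: passing a NULL PTR performs a dry run that merely
   reports how many bytes would be written, so a caller can size its
   buffer first:

     unsigned char buf[64];
     int len = native_encode_expr (expr, NULL, sizeof buf, -1);
     if (len != 0 && len <= (int) sizeof buf)
       len = native_encode_expr (expr, buf, len, -1);
*/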
7406 
7407 /* Subroutine of native_interpret_expr.  Interpret the contents of
7408    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7409    If the buffer cannot be interpreted, return NULL_TREE.  */
7410 
7411 static tree
7412 native_interpret_int (tree type, const unsigned char *ptr, int len)
7413 {
7414   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7415 
7416   if (total_bytes > len
7417       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7418     return NULL_TREE;
7419 
7420   wide_int result = wi::from_buffer (ptr, total_bytes);
7421 
7422   return wide_int_to_tree (type, result);
7423 }
7424 
7425 
7426 /* Subroutine of native_interpret_expr.  Interpret the contents of
7427    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7428    If the buffer cannot be interpreted, return NULL_TREE.  */
7429 
7430 static tree
7431 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7432 {
7433   scalar_mode mode = SCALAR_TYPE_MODE (type);
7434   int total_bytes = GET_MODE_SIZE (mode);
7435   double_int result;
7436   FIXED_VALUE_TYPE fixed_value;
7437 
7438   if (total_bytes > len
7439       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7440     return NULL_TREE;
7441 
7442   result = double_int::from_buffer (ptr, total_bytes);
7443   fixed_value = fixed_from_double_int (result, mode);
7444 
7445   return build_fixed (type, fixed_value);
7446 }
7447 
7448 
7449 /* Subroutine of native_interpret_expr.  Interpret the contents of
7450    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451    If the buffer cannot be interpreted, return NULL_TREE.  */
7452 
7453 static tree
7454 native_interpret_real (tree type, const unsigned char *ptr, int len)
7455 {
7456   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7457   int total_bytes = GET_MODE_SIZE (mode);
7458   unsigned char value;
7459   /* There are always 32 bits in each long, no matter the size of
7460      the host's long.  We handle floating point representations with
7461      up to 192 bits.  */
7462   REAL_VALUE_TYPE r;
7463   long tmp[6];
7464 
7465   if (total_bytes > len || total_bytes > 24)
7466     return NULL_TREE;
7467   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7468 
7469   memset (tmp, 0, sizeof (tmp));
7470   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7471        bitpos += BITS_PER_UNIT)
7472     {
7473       /* Both OFFSET and BYTE index within a long;
7474 	 bitpos indexes the whole float.  */
7475       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7476       if (UNITS_PER_WORD < 4)
7477 	{
7478 	  int word = byte / UNITS_PER_WORD;
7479 	  if (WORDS_BIG_ENDIAN)
7480 	    word = (words - 1) - word;
7481 	  offset = word * UNITS_PER_WORD;
7482 	  if (BYTES_BIG_ENDIAN)
7483 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7484 	  else
7485 	    offset += byte % UNITS_PER_WORD;
7486 	}
7487       else
7488 	{
7489 	  offset = byte;
7490 	  if (BYTES_BIG_ENDIAN)
7491 	    {
7492 	      /* Reverse bytes within each long, or within the entire float
7493 		 if it's smaller than a long (for HFmode).  */
7494 	      offset = MIN (3, total_bytes - 1) - offset;
7495 	      gcc_assert (offset >= 0);
7496 	    }
7497 	}
7498       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7499 
7500       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7501     }
7502 
7503   real_from_target (&r, tmp, mode);
7504   return build_real (type, r);
7505 }
7506 
7507 
7508 /* Subroutine of native_interpret_expr.  Interpret the contents of
7509    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7510    If the buffer cannot be interpreted, return NULL_TREE.  */
7511 
7512 static tree
7513 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7514 {
7515   tree etype, rpart, ipart;
7516   int size;
7517 
7518   etype = TREE_TYPE (type);
7519   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7520   if (size * 2 > len)
7521     return NULL_TREE;
7522   rpart = native_interpret_expr (etype, ptr, size);
7523   if (!rpart)
7524     return NULL_TREE;
7525   ipart = native_interpret_expr (etype, ptr+size, size);
7526   if (!ipart)
7527     return NULL_TREE;
7528   return build_complex (type, rpart, ipart);
7529 }
7530 
7531 
7532 /* Subroutine of native_interpret_expr.  Interpret the contents of
7533    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7534    If the buffer cannot be interpreted, return NULL_TREE.  */
7535 
7536 static tree
7537 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7538 {
7539   tree etype, elem;
7540   unsigned int i, size;
7541   unsigned HOST_WIDE_INT count;
7542 
7543   etype = TREE_TYPE (type);
7544   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7545   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7546       || size * count > len)
7547     return NULL_TREE;
7548 
7549   tree_vector_builder elements (type, count, 1);
7550   for (i = 0; i < count; ++i)
7551     {
7552       elem = native_interpret_expr (etype, ptr+(i*size), size);
7553       if (!elem)
7554 	return NULL_TREE;
7555       elements.quick_push (elem);
7556     }
7557   return elements.build ();
7558 }
7559 
7560 
7561 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7562    the buffer PTR of length LEN as a constant of type TYPE.  For
7563    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7564    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7565    return NULL_TREE.  */
7566 
7567 tree
7568 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7569 {
7570   switch (TREE_CODE (type))
7571     {
7572     case INTEGER_TYPE:
7573     case ENUMERAL_TYPE:
7574     case BOOLEAN_TYPE:
7575     case POINTER_TYPE:
7576     case REFERENCE_TYPE:
7577       return native_interpret_int (type, ptr, len);
7578 
7579     case REAL_TYPE:
7580       return native_interpret_real (type, ptr, len);
7581 
7582     case FIXED_POINT_TYPE:
7583       return native_interpret_fixed (type, ptr, len);
7584 
7585     case COMPLEX_TYPE:
7586       return native_interpret_complex (type, ptr, len);
7587 
7588     case VECTOR_TYPE:
7589       return native_interpret_vector (type, ptr, len);
7590 
7591     default:
7592       return NULL_TREE;
7593     }
7594 }
7595 
7596 /* Returns true if we can interpret the contents of a native encoding
7597    as TYPE.  */
7598 
7599 static bool
7600 can_native_interpret_type_p (tree type)
7601 {
7602   switch (TREE_CODE (type))
7603     {
7604     case INTEGER_TYPE:
7605     case ENUMERAL_TYPE:
7606     case BOOLEAN_TYPE:
7607     case POINTER_TYPE:
7608     case REFERENCE_TYPE:
7609     case FIXED_POINT_TYPE:
7610     case REAL_TYPE:
7611     case COMPLEX_TYPE:
7612     case VECTOR_TYPE:
7613       return true;
7614     default:
7615       return false;
7616     }
7617 }
7618 
7619 
7620 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7621    TYPE at compile-time.  If we're unable to perform the conversion
7622    return NULL_TREE.  */
7623 
7624 static tree
7625 fold_view_convert_expr (tree type, tree expr)
7626 {
7627   /* We support up to 512-bit values (for V8DFmode).  */
7628   unsigned char buffer[64];
7629   int len;
7630 
7631   /* Check that the host and target are sane.  */
7632   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7633     return NULL_TREE;
7634 
7635   len = native_encode_expr (expr, buffer, sizeof (buffer));
7636   if (len == 0)
7637     return NULL_TREE;
7638 
7639   return native_interpret_expr (type, buffer, len);
7640 }
7641 
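/* End-to-end example (IEEE single precision assumed): folding
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as the four bytes
   of 0x3f800000 and re-interprets them with native_interpret_int,
   yielding the INTEGER_CST 1065353216.  */
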
7642 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7643    to avoid confusing the gimplify process.  */
7644 
7645 tree
7646 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7647 {
7648   /* The size of the object is not relevant when talking about its address.  */
7649   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7650     t = TREE_OPERAND (t, 0);
7651 
7652   if (TREE_CODE (t) == INDIRECT_REF)
7653     {
7654       t = TREE_OPERAND (t, 0);
7655 
7656       if (TREE_TYPE (t) != ptrtype)
7657 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7658     }
7659   else if (TREE_CODE (t) == MEM_REF
7660 	   && integer_zerop (TREE_OPERAND (t, 1)))
7661     return TREE_OPERAND (t, 0);
7662   else if (TREE_CODE (t) == MEM_REF
7663 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7664     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7665 			TREE_OPERAND (t, 0),
7666 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7667   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7668     {
7669       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7670 
7671       if (TREE_TYPE (t) != ptrtype)
7672 	t = fold_convert_loc (loc, ptrtype, t);
7673     }
7674   else
7675     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7676 
7677   return t;
7678 }
7679 
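/* For example, this helper folds &*p back to p (adding a pointer cast
   if the types differ) and &MEM_REF[p, 0] likewise to p, so the
   gimplifier never sees an ADDR_EXPR wrapped around a plain
   dereference.  */
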
7680 /* Build an expression for the address of T.  */
7681 
7682 tree
7683 build_fold_addr_expr_loc (location_t loc, tree t)
7684 {
7685   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7686 
7687   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7688 }
7689 
7690 /* Fold a unary expression of code CODE and type TYPE with operand
7691    OP0.  Return the folded expression if folding is successful.
7692    Otherwise, return NULL_TREE.  */
7693 
7694 tree
7695 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7696 {
7697   tree tem;
7698   tree arg0;
7699   enum tree_code_class kind = TREE_CODE_CLASS (code);
7700 
7701   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7702 	      && TREE_CODE_LENGTH (code) == 1);
7703 
7704   arg0 = op0;
7705   if (arg0)
7706     {
7707       if (CONVERT_EXPR_CODE_P (code)
7708 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7709 	{
7710 	  /* Don't use STRIP_NOPS, because signedness of argument type
7711 	     matters.  */
7712 	  STRIP_SIGN_NOPS (arg0);
7713 	}
7714       else
7715 	{
7716 	  /* Strip any conversions that don't change the mode.  This
7717 	     is safe for every expression, except for a comparison
7718 	     expression because its signedness is derived from its
7719 	     operands.
7720 
7721 	     Note that this is done as an internal manipulation within
7722 	     the constant folder, in order to find the simplest
7723 	     representation of the arguments so that their form can be
7724 	     studied.  In any case, the appropriate type conversions
7725 	     should be put back in the tree that will get out of the
7726 	     constant folder.  */
7727 	  STRIP_NOPS (arg0);
7728 	}
7729 
7730       if (CONSTANT_CLASS_P (arg0))
7731 	{
7732 	  tree tem = const_unop (code, type, arg0);
7733 	  if (tem)
7734 	    {
7735 	      if (TREE_TYPE (tem) != type)
7736 		tem = fold_convert_loc (loc, type, tem);
7737 	      return tem;
7738 	    }
7739 	}
7740     }
7741 
7742   tem = generic_simplify (loc, code, type, op0);
7743   if (tem)
7744     return tem;
7745 
7746   if (TREE_CODE_CLASS (code) == tcc_unary)
7747     {
7748       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7749 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7750 		       fold_build1_loc (loc, code, type,
7751 				    fold_convert_loc (loc, TREE_TYPE (op0),
7752 						      TREE_OPERAND (arg0, 1))));
7753       else if (TREE_CODE (arg0) == COND_EXPR)
7754 	{
7755 	  tree arg01 = TREE_OPERAND (arg0, 1);
7756 	  tree arg02 = TREE_OPERAND (arg0, 2);
7757 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7758 	    arg01 = fold_build1_loc (loc, code, type,
7759 				 fold_convert_loc (loc,
7760 						   TREE_TYPE (op0), arg01));
7761 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7762 	    arg02 = fold_build1_loc (loc, code, type,
7763 				 fold_convert_loc (loc,
7764 						   TREE_TYPE (op0), arg02));
7765 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7766 			     arg01, arg02);
7767 
7768 	  /* If this was a conversion, and all we did was move it
7769 	     inside the COND_EXPR, bring it back out.  But leave it if
7770 	     it is a conversion from integer to integer and the
7771 	     result precision is no wider than a word since such a
7772 	     conversion is cheap and may be optimized away by combine,
7773 	     while it couldn't if it were outside the COND_EXPR.  Then return
7774 	     so we don't get into an infinite recursion loop taking the
7775 	     conversion out and then back in.  */
7776 
7777 	  if ((CONVERT_EXPR_CODE_P (code)
7778 	       || code == NON_LVALUE_EXPR)
7779 	      && TREE_CODE (tem) == COND_EXPR
7780 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7781 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7782 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7783 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7784 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7785 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7786 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7787 		     && (INTEGRAL_TYPE_P
7788 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7789 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7790 		  || flag_syntax_only))
7791 	    tem = build1_loc (loc, code, type,
7792 			      build3 (COND_EXPR,
7793 				      TREE_TYPE (TREE_OPERAND
7794 						 (TREE_OPERAND (tem, 1), 0)),
7795 				      TREE_OPERAND (tem, 0),
7796 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7797 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7798 						    0)));
7799 	  return tem;
7800 	}
7801     }
7802 
7803   switch (code)
7804     {
7805     case NON_LVALUE_EXPR:
7806       if (!maybe_lvalue_p (op0))
7807 	return fold_convert_loc (loc, type, op0);
7808       return NULL_TREE;
7809 
7810     CASE_CONVERT:
7811     case FLOAT_EXPR:
7812     case FIX_TRUNC_EXPR:
7813       if (COMPARISON_CLASS_P (op0))
7814 	{
7815 	  /* If we have (type) (a CMP b) and type is an integral type, return
7816 	     a new expression involving the new type.  Canonicalize
7817 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7818 	     non-integral type.
7819 	     Do not fold the result, as that would not simplify further;
7820 	     folding again results in recursion.  */
7821 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7822 	    return build2_loc (loc, TREE_CODE (op0), type,
7823 			       TREE_OPERAND (op0, 0),
7824 			       TREE_OPERAND (op0, 1));
7825 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7826 		   && TREE_CODE (type) != VECTOR_TYPE)
7827 	    return build3_loc (loc, COND_EXPR, type, op0,
7828 			       constant_boolean_node (true, type),
7829 			       constant_boolean_node (false, type));
7830 	}
7831 
7832       /* Handle (T *)&A.B.C for A being of type T and B and C
7833 	 living at offset zero.  This occurs frequently in
7834 	 C++ upcasting and then accessing the base.  */
7835       if (TREE_CODE (op0) == ADDR_EXPR
7836 	  && POINTER_TYPE_P (type)
7837 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7838         {
7839 	  poly_int64 bitsize, bitpos;
7840 	  tree offset;
7841 	  machine_mode mode;
7842 	  int unsignedp, reversep, volatilep;
7843 	  tree base
7844 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7845 				   &offset, &mode, &unsignedp, &reversep,
7846 				   &volatilep);
7847 	  /* If the reference was to a (constant) zero offset, we can use
7848 	     the address of the base if it has the same base type
7849 	     as the result type and the pointer type is unqualified.  */
7850 	  if (!offset
7851 	      && known_eq (bitpos, 0)
7852 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7853 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7854 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7855 	    return fold_convert_loc (loc, type,
7856 				     build_fold_addr_expr_loc (loc, base));
7857         }
7858 
7859       if (TREE_CODE (op0) == MODIFY_EXPR
7860 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7861 	  /* Detect assigning a bitfield.  */
7862 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7863 	       && DECL_BIT_FIELD
7864 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7865 	{
7866 	  /* Don't leave an assignment inside a conversion
7867 	     unless assigning a bitfield.  */
7868 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7869 	  /* First do the assignment, then return converted constant.  */
7870 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7871 	  TREE_NO_WARNING (tem) = 1;
7872 	  TREE_USED (tem) = 1;
7873 	  return tem;
7874 	}
7875 
7876       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7877 	 constant (if x has signed type, the sign bit cannot be set
7878 	 in c).  This folds extension into the BIT_AND_EXPR.
7879 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7880 	 very likely don't have maximal range for their precision and this
7881 	 transformation effectively doesn't preserve non-maximal ranges.  */
7882       if (TREE_CODE (type) == INTEGER_TYPE
7883 	  && TREE_CODE (op0) == BIT_AND_EXPR
7884 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7885 	{
7886 	  tree and_expr = op0;
7887 	  tree and0 = TREE_OPERAND (and_expr, 0);
7888 	  tree and1 = TREE_OPERAND (and_expr, 1);
7889 	  int change = 0;
7890 
7891 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7892 	      || (TYPE_PRECISION (type)
7893 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7894 	    change = 1;
7895 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7896 		   <= HOST_BITS_PER_WIDE_INT
7897 		   && tree_fits_uhwi_p (and1))
7898 	    {
7899 	      unsigned HOST_WIDE_INT cst;
7900 
7901 	      cst = tree_to_uhwi (and1);
7902 	      cst &= HOST_WIDE_INT_M1U
7903 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7904 	      change = (cst == 0);
7905 	      if (change
7906 		  && !flag_syntax_only
7907 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7908 		      == ZERO_EXTEND))
7909 		{
7910 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7911 		  and0 = fold_convert_loc (loc, uns, and0);
7912 		  and1 = fold_convert_loc (loc, uns, and1);
7913 		}
7914 	    }
7915 	  if (change)
7916 	    {
7917 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7918 				    TREE_OVERFLOW (and1));
7919 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7920 				      fold_convert_loc (loc, type, and0), tem);
7921 	    }
7922 	}
7923 
7924       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7925 	 cast (T1)X will fold away.  We assume that this happens when X itself
7926 	 is a cast.  */
7927       if (POINTER_TYPE_P (type)
7928 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7929 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7930 	{
7931 	  tree arg00 = TREE_OPERAND (arg0, 0);
7932 	  tree arg01 = TREE_OPERAND (arg0, 1);
7933 
7934 	  return fold_build_pointer_plus_loc
7935 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7936 	}
7937 
7938       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7939 	 of the same precision, and X is of an integer type not narrower
7940 	 than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
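      /* Illustrative example (added for exposition, not from the GCC
	 sources): assuming 32-bit int and x of type int,

	   (int) ~(unsigned int) x

	 folds to plain ~x, since both types have precision 32 and the
	 inner cast is not an extension.  */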
7941       if (INTEGRAL_TYPE_P (type)
7942 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7943 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7944 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7945 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7946 	{
7947 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7948 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7949 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7950 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7951 				fold_convert_loc (loc, type, tem));
7952 	}
7953 
7954       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7955 	 type of X and Y (integer types only).  */
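      /* Illustrative example (added for exposition, not from the GCC
	 sources): assuming 32-bit int, 64-bit long and no -fwrapv, for

	   long x, y;
	   ... (int) (x * y) ...

	 the product is computed in unsigned int, whose overflow wraps,
	 so no new undefined overflow is introduced, and the result is
	 converted back:  (int) ((unsigned int) x * (unsigned int) y).  */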
7956       if (INTEGRAL_TYPE_P (type)
7957 	  && TREE_CODE (op0) == MULT_EXPR
7958 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7959 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7960 	{
7961 	  /* Be careful not to introduce new overflows.  */
7962 	  tree mult_type;
7963           if (TYPE_OVERFLOW_WRAPS (type))
7964 	    mult_type = type;
7965 	  else
7966 	    mult_type = unsigned_type_for (type);
7967 
7968 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7969 	    {
7970 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7971 				 fold_convert_loc (loc, mult_type,
7972 						   TREE_OPERAND (op0, 0)),
7973 				 fold_convert_loc (loc, mult_type,
7974 						   TREE_OPERAND (op0, 1)));
7975 	      return fold_convert_loc (loc, type, tem);
7976 	    }
7977 	}
7978 
7979       return NULL_TREE;
7980 
7981     case VIEW_CONVERT_EXPR:
7982       if (TREE_CODE (op0) == MEM_REF)
7983         {
7984 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7985 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7986 	  tem = fold_build2_loc (loc, MEM_REF, type,
7987 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7988 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7989 	  return tem;
7990 	}
7991 
7992       return NULL_TREE;
7993 
7994     case NEGATE_EXPR:
7995       tem = fold_negate_expr (loc, arg0);
7996       if (tem)
7997 	return fold_convert_loc (loc, type, tem);
7998       return NULL_TREE;
7999 
8000     case ABS_EXPR:
8001       /* Convert fabs((double)float) into (double)fabsf(float).  */
8002       if (TREE_CODE (arg0) == NOP_EXPR
8003 	  && TREE_CODE (type) == REAL_TYPE)
8004 	{
8005 	  tree targ0 = strip_float_extensions (arg0);
8006 	  if (targ0 != arg0)
8007 	    return fold_convert_loc (loc, type,
8008 				     fold_build1_loc (loc, ABS_EXPR,
8009 						  TREE_TYPE (targ0),
8010 						  targ0));
8011 	}
8012       return NULL_TREE;
8013 
8014     case BIT_NOT_EXPR:
8015       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8016       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8017 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8018 				    fold_convert_loc (loc, type,
8019 						      TREE_OPERAND (arg0, 0)))))
8020 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8021 				fold_convert_loc (loc, type,
8022 						  TREE_OPERAND (arg0, 1)));
8023       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8024 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8025 			       	     fold_convert_loc (loc, type,
8026 						       TREE_OPERAND (arg0, 1)))))
8027 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8028 			    fold_convert_loc (loc, type,
8029 					      TREE_OPERAND (arg0, 0)), tem);
8030 
8031       return NULL_TREE;
8032 
8033     case TRUTH_NOT_EXPR:
8034       /* Note that the operand of this must be an int
8035 	 and its values must be 0 or 1.
8036 	 ("true" is a fixed value perhaps depending on the language,
8037 	 but we don't handle values other than 1 correctly yet.)  */
8038       tem = fold_truth_not_expr (loc, arg0);
8039       if (!tem)
8040 	return NULL_TREE;
8041       return fold_convert_loc (loc, type, tem);
8042 
8043     case INDIRECT_REF:
8044       /* Fold *&X to X if X is an lvalue.  */
8045       if (TREE_CODE (op0) == ADDR_EXPR)
8046 	{
8047 	  tree op00 = TREE_OPERAND (op0, 0);
8048 	  if ((VAR_P (op00)
8049 	       || TREE_CODE (op00) == PARM_DECL
8050 	       || TREE_CODE (op00) == RESULT_DECL)
8051 	      && !TREE_READONLY (op00))
8052 	    return op00;
8053 	}
8054       return NULL_TREE;
8055 
8056     default:
8057       return NULL_TREE;
8058     } /* switch (code) */
8059 }
8060 
8061 
8062 /* If the operation was a conversion do _not_ mark a resulting constant
8063    with TREE_OVERFLOW if the original constant was not.  These conversions
8064    have implementation defined behavior and retaining the TREE_OVERFLOW
8065    flag here would confuse later passes such as VRP.  */
8066 tree
8067 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8068 				tree type, tree op0)
8069 {
8070   tree res = fold_unary_loc (loc, code, type, op0);
8071   if (res
8072       && TREE_CODE (res) == INTEGER_CST
8073       && TREE_CODE (op0) == INTEGER_CST
8074       && CONVERT_EXPR_CODE_P (code))
8075     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8076 
8077   return res;
8078 }
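
/* A minimal usage sketch (hypothetical caller, added for exposition and
   not part of this file):

     tree cst = build_int_cst (integer_type_node, 300);
     tree res = fold_unary_ignore_overflow_loc (UNKNOWN_LOCATION, NOP_EXPR,
						char_type_node, cst);

   RES is the truncated INTEGER_CST; its TREE_OVERFLOW flag mirrors that
   of CST instead of being set by the narrowing conversion itself.  */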
8079 
8080 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8081    operands OP0 and OP1.  LOC is the location of the resulting expression.
8082    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8083    Return the folded expression if folding is successful.  Otherwise,
8084    return NULL_TREE.  */
8085 static tree
8086 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8087 		  tree arg0, tree arg1, tree op0, tree op1)
8088 {
8089   tree tem;
8090 
8091   /* We only do these simplifications if we are optimizing.  */
8092   if (!optimize)
8093     return NULL_TREE;
8094 
8095   /* Check for things like (A || B) && (A || C).  We can convert this
8096      to A || (B && C).  Note that either operator can be any of the four
8097      truth and/or operations and the transformation will still be
8098      valid.   Also note that we only care about order for the
8099      ANDIF and ORIF operators.  If B contains side effects, this
8100      might change the truth-value of A.  */
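  /* Illustrative example (added for exposition, not from the GCC
     sources): with side-effect-free B and C,

       (a > 0 || b) && (a > 0 || c)

     folds to a > 0 || (b && c), evaluating the common operand once.  */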
8101   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8102       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8103 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8104 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8105 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8106       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8107     {
8108       tree a00 = TREE_OPERAND (arg0, 0);
8109       tree a01 = TREE_OPERAND (arg0, 1);
8110       tree a10 = TREE_OPERAND (arg1, 0);
8111       tree a11 = TREE_OPERAND (arg1, 1);
8112       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8113 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8114 			 && (code == TRUTH_AND_EXPR
8115 			     || code == TRUTH_OR_EXPR));
8116 
8117       if (operand_equal_p (a00, a10, 0))
8118 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8119 			    fold_build2_loc (loc, code, type, a01, a11));
8120       else if (commutative && operand_equal_p (a00, a11, 0))
8121 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8122 			    fold_build2_loc (loc, code, type, a01, a10));
8123       else if (commutative && operand_equal_p (a01, a10, 0))
8124 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8125 			    fold_build2_loc (loc, code, type, a00, a11));
8126 
8127       /* This case is tricky because we must either have commutative
8128 	 operators or else A10 must not have side-effects.  */
8129 
8130       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8131 	       && operand_equal_p (a01, a11, 0))
8132 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8133 			    fold_build2_loc (loc, code, type, a00, a10),
8134 			    a01);
8135     }
8136 
8137   /* See if we can build a range comparison.  */
8138   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8139     return tem;
8140 
8141   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8142       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8143     {
8144       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8145       if (tem)
8146 	return fold_build2_loc (loc, code, type, tem, arg1);
8147     }
8148 
8149   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8150       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8151     {
8152       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8153       if (tem)
8154 	return fold_build2_loc (loc, code, type, arg0, tem);
8155     }
8156 
8157   /* Check for the possibility of merging component references.  If our
8158      lhs is another similar operation, try to merge its rhs with our
8159      rhs.  Then try to merge our lhs and rhs.  */
8160   if (TREE_CODE (arg0) == code
8161       && (tem = fold_truth_andor_1 (loc, code, type,
8162 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
8163     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8164 
8165   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8166     return tem;
8167 
8168   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8169       && !flag_sanitize_coverage
8170       && (code == TRUTH_AND_EXPR
8171           || code == TRUTH_ANDIF_EXPR
8172           || code == TRUTH_OR_EXPR
8173           || code == TRUTH_ORIF_EXPR))
8174     {
8175       enum tree_code ncode, icode;
8176 
8177       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8178 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8179       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8180 
8181       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8182 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8183 	 We don't want to pack more than two leaves into a non-IF AND/OR
8184 	 expression.
8185 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8186 	 and isn't equal to IF-CODE, then we don't want to add the
8187 	 right-hand operand.  If the inner right-hand side of the
8188 	 left-hand operand has side effects, or isn't simple, then we
8189 	 can't add to it, as otherwise we might destroy the if-sequence.  */
8190       if (TREE_CODE (arg0) == icode
8191 	  && simple_operand_p_2 (arg1)
8192 	  /* Needed for sequence points to handle trapping and
8193 	     side effects.  */
8194 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8195 	{
8196 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8197 				 arg1);
8198 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8199 				  tem);
8200 	}
8201 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8202 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8203       else if (TREE_CODE (arg1) == icode
8204 	  && simple_operand_p_2 (arg0)
8205 	  /* Needed for sequence points to handle trapping and
8206 	     side effects.  */
8207 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8208 	{
8209 	  tem = fold_build2_loc (loc, ncode, type,
8210 				 arg0, TREE_OPERAND (arg1, 0));
8211 	  return fold_build2_loc (loc, icode, type, tem,
8212 				  TREE_OPERAND (arg1, 1));
8213 	}
8214       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8215 	 into (A OR B).
8216 	 For sequence point consistency, we need to check for trapping
8217 	 and side effects.  */
8218       else if (code == icode && simple_operand_p_2 (arg0)
8219                && simple_operand_p_2 (arg1))
8220 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8221     }
8222 
8223   return NULL_TREE;
8224 }
8225 
8226 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8227    by changing CODE to reduce the magnitude of constants involved in
8228    ARG0 of the comparison.
8229    Returns a canonicalized comparison tree if a simplification was
8230    possible, otherwise returns NULL_TREE.
8231    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8232    valid if signed overflow is undefined.  */
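/* Illustrative example (added for exposition, not from the GCC sources):
   with signed overflow undefined,

     a - 5 < b    is canonicalized to    a - 4 <= b

   reducing the magnitude of the constant from 5 to 4.  */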
8233 
8234 static tree
8235 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8236 				 tree arg0, tree arg1,
8237 				 bool *strict_overflow_p)
8238 {
8239   enum tree_code code0 = TREE_CODE (arg0);
8240   tree t, cst0 = NULL_TREE;
8241   int sgn0;
8242 
8243   /* Match A +- CST code arg1.  We can change this only if overflow
8244      is undefined.  */
8245   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8246 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8247 	/* In principle pointers also have undefined overflow behavior,
8248 	   but that causes problems elsewhere.  */
8249 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8250 	&& (code0 == MINUS_EXPR
8251 	    || code0 == PLUS_EXPR)
8252 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8253     return NULL_TREE;
8254 
8255   /* Identify the constant in arg0 and its sign.  */
8256   cst0 = TREE_OPERAND (arg0, 1);
8257   sgn0 = tree_int_cst_sgn (cst0);
8258 
8259   /* Overflowed constants and zero will cause problems.  */
8260   if (integer_zerop (cst0)
8261       || TREE_OVERFLOW (cst0))
8262     return NULL_TREE;
8263 
8264   /* See if we can reduce the magnitude of the constant in
8265      arg0 by changing the comparison code.  */
8266   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8267   if (code == LT_EXPR
8268       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8269     code = LE_EXPR;
8270   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8271   else if (code == GT_EXPR
8272 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8273     code = GE_EXPR;
8274   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8275   else if (code == LE_EXPR
8276 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8277     code = LT_EXPR;
8278   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8279   else if (code == GE_EXPR
8280 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8281     code = GT_EXPR;
8282   else
8283     return NULL_TREE;
8284   *strict_overflow_p = true;
8285 
8286   /* Now build the constant reduced in magnitude.  But not if that
8287      would produce one outside of its type's range.  */
8288   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8289       && ((sgn0 == 1
8290 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8291 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8292 	  || (sgn0 == -1
8293 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8294 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8295     return NULL_TREE;
8296 
8297   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8298 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8299   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8300   t = fold_convert (TREE_TYPE (arg1), t);
8301 
8302   return fold_build2_loc (loc, code, type, t, arg1);
8303 }
8304 
8305 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8306    overflow further.  Try to decrease the magnitude of constants involved
8307    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8308    and put sole constants at the second argument position.
8309    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8310 
8311 static tree
8312 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8313 			       tree arg0, tree arg1)
8314 {
8315   tree t;
8316   bool strict_overflow_p;
8317   const char * const warnmsg = G_("assuming signed overflow does not occur "
8318 				  "when reducing constant in comparison");
8319 
8320   /* Try canonicalization by simplifying arg0.  */
8321   strict_overflow_p = false;
8322   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8323 				       &strict_overflow_p);
8324   if (t)
8325     {
8326       if (strict_overflow_p)
8327 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8328       return t;
8329     }
8330 
8331   /* Try canonicalization by simplifying arg1 using the swapped
8332      comparison.  */
8333   code = swap_tree_comparison (code);
8334   strict_overflow_p = false;
8335   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8336 				       &strict_overflow_p);
8337   if (t && strict_overflow_p)
8338     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8339   return t;
8340 }
8341 
8342 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8343    space.  This is used to avoid issuing overflow warnings for
8344    expressions like &p->x which cannot wrap.  */
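/* Illustrative example (added for exposition, not from the GCC sources):
   for

     struct S { int a; int b; } *p;

   the address &p->b has BASE p, no variable OFFSET and a BITPOS of 32
   bits on a typical target; the byte offset 4 lies within
   sizeof (struct S) == 8, so false is returned: &p->b cannot wrap.  */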
8345 
8346 static bool
8347 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8348 {
8349   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8350     return true;
8351 
8352   if (maybe_lt (bitpos, 0))
8353     return true;
8354 
8355   poly_wide_int wi_offset;
8356   int precision = TYPE_PRECISION (TREE_TYPE (base));
8357   if (offset == NULL_TREE)
8358     wi_offset = wi::zero (precision);
8359   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8360     return true;
8361   else
8362     wi_offset = wi::to_poly_wide (offset);
8363 
8364   bool overflow;
8365   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8366 				  precision);
8367   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8368   if (overflow)
8369     return true;
8370 
8371   poly_uint64 total_hwi, size;
8372   if (!total.to_uhwi (&total_hwi)
8373       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8374 			   &size)
8375       || known_eq (size, 0U))
8376     return true;
8377 
8378   if (known_le (total_hwi, size))
8379     return false;
8380 
8381   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8382      array.  */
8383   if (TREE_CODE (base) == ADDR_EXPR
8384       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8385 			  &size)
8386       && maybe_ne (size, 0U)
8387       && known_le (total_hwi, size))
8388     return false;
8389 
8390   return true;
8391 }
8392 
8393 /* Return a positive integer when the symbol DECL is known to have
8394    a nonzero address, zero when it's known not to (e.g., it's a weak
8395    symbol), and a negative integer when the symbol is not yet in the
8396    symbol table and so whether or not its address is zero is unknown.
8397    For function-local objects, always return a positive integer.  */
8398 static int
8399 maybe_nonzero_address (tree decl)
8400 {
8401   if (DECL_P (decl) && decl_in_symtab_p (decl))
8402     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8403       return symbol->nonzero_address ();
8404 
8405   /* Function local objects are never NULL.  */
8406   if (DECL_P (decl)
8407       && (DECL_CONTEXT (decl)
8408       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8409       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8410     return 1;
8411 
8412   return -1;
8413 }
8414 
8415 /* Subroutine of fold_binary.  This routine performs all of the
8416    transformations that are common to the equality/inequality
8417    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8418    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8419    fold_binary should call fold_binary.  Fold a comparison with
8420    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8421    the folded comparison or NULL_TREE.  */
8422 
8423 static tree
8424 fold_comparison (location_t loc, enum tree_code code, tree type,
8425 		 tree op0, tree op1)
8426 {
8427   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8428   tree arg0, arg1, tem;
8429 
8430   arg0 = op0;
8431   arg1 = op1;
8432 
8433   STRIP_SIGN_NOPS (arg0);
8434   STRIP_SIGN_NOPS (arg1);
8435 
8436   /* For comparisons of pointers we can decompose them to a compile-time
8437      comparison of the base objects and the offsets into the object.
8438      This requires at least one operand being an ADDR_EXPR or a
8439      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8440   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8441       && (TREE_CODE (arg0) == ADDR_EXPR
8442 	  || TREE_CODE (arg1) == ADDR_EXPR
8443 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8444 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8445     {
8446       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8447       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8448       machine_mode mode;
8449       int volatilep, reversep, unsignedp;
8450       bool indirect_base0 = false, indirect_base1 = false;
8451 
8452       /* Get base and offset for the access.  Strip ADDR_EXPR for
8453 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8454 	 off the base object if possible.  indirect_baseN will be true
8455 	 if baseN is not an address but refers to the object itself.  */
8456       base0 = arg0;
8457       if (TREE_CODE (arg0) == ADDR_EXPR)
8458 	{
8459 	  base0
8460 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8461 				   &bitsize, &bitpos0, &offset0, &mode,
8462 				   &unsignedp, &reversep, &volatilep);
8463 	  if (TREE_CODE (base0) == INDIRECT_REF)
8464 	    base0 = TREE_OPERAND (base0, 0);
8465 	  else
8466 	    indirect_base0 = true;
8467 	}
8468       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8469 	{
8470 	  base0 = TREE_OPERAND (arg0, 0);
8471 	  STRIP_SIGN_NOPS (base0);
8472 	  if (TREE_CODE (base0) == ADDR_EXPR)
8473 	    {
8474 	      base0
8475 		= get_inner_reference (TREE_OPERAND (base0, 0),
8476 				       &bitsize, &bitpos0, &offset0, &mode,
8477 				       &unsignedp, &reversep, &volatilep);
8478 	      if (TREE_CODE (base0) == INDIRECT_REF)
8479 		base0 = TREE_OPERAND (base0, 0);
8480 	      else
8481 		indirect_base0 = true;
8482 	    }
8483 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8484 	    offset0 = TREE_OPERAND (arg0, 1);
8485 	  else
8486 	    offset0 = size_binop (PLUS_EXPR, offset0,
8487 				  TREE_OPERAND (arg0, 1));
8488 	  if (poly_int_tree_p (offset0))
8489 	    {
8490 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8491 					      TYPE_PRECISION (sizetype));
8492 	      tem <<= LOG2_BITS_PER_UNIT;
8493 	      tem += bitpos0;
8494 	      if (tem.to_shwi (&bitpos0))
8495 		offset0 = NULL_TREE;
8496 	    }
8497 	}
8498 
8499       base1 = arg1;
8500       if (TREE_CODE (arg1) == ADDR_EXPR)
8501 	{
8502 	  base1
8503 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8504 				   &bitsize, &bitpos1, &offset1, &mode,
8505 				   &unsignedp, &reversep, &volatilep);
8506 	  if (TREE_CODE (base1) == INDIRECT_REF)
8507 	    base1 = TREE_OPERAND (base1, 0);
8508 	  else
8509 	    indirect_base1 = true;
8510 	}
8511       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8512 	{
8513 	  base1 = TREE_OPERAND (arg1, 0);
8514 	  STRIP_SIGN_NOPS (base1);
8515 	  if (TREE_CODE (base1) == ADDR_EXPR)
8516 	    {
8517 	      base1
8518 		= get_inner_reference (TREE_OPERAND (base1, 0),
8519 				       &bitsize, &bitpos1, &offset1, &mode,
8520 				       &unsignedp, &reversep, &volatilep);
8521 	      if (TREE_CODE (base1) == INDIRECT_REF)
8522 		base1 = TREE_OPERAND (base1, 0);
8523 	      else
8524 		indirect_base1 = true;
8525 	    }
8526 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8527 	    offset1 = TREE_OPERAND (arg1, 1);
8528 	  else
8529 	    offset1 = size_binop (PLUS_EXPR, offset1,
8530 				  TREE_OPERAND (arg1, 1));
8531 	  if (poly_int_tree_p (offset1))
8532 	    {
8533 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8534 					      TYPE_PRECISION (sizetype));
8535 	      tem <<= LOG2_BITS_PER_UNIT;
8536 	      tem += bitpos1;
8537 	      if (tem.to_shwi (&bitpos1))
8538 		offset1 = NULL_TREE;
8539 	    }
8540 	}
8541 
8542       /* If we have equivalent bases we might be able to simplify.  */
8543       if (indirect_base0 == indirect_base1
8544 	  && operand_equal_p (base0, base1,
8545 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8546 	{
8547 	  /* We can fold this expression to a constant if the non-constant
8548 	     offset parts are equal.  */
8549 	  if ((offset0 == offset1
8550 	       || (offset0 && offset1
8551 		   && operand_equal_p (offset0, offset1, 0)))
8552 	      && (equality_code
8553 		  || (indirect_base0
8554 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8555 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8556 	    {
8557 	      if (!equality_code
8558 		  && maybe_ne (bitpos0, bitpos1)
8559 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8560 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8561 		fold_overflow_warning (("assuming pointer wraparound does not "
8562 					"occur when comparing P +- C1 with "
8563 					"P +- C2"),
8564 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8565 
8566 	      switch (code)
8567 		{
8568 		case EQ_EXPR:
8569 		  if (known_eq (bitpos0, bitpos1))
8570 		    return constant_boolean_node (true, type);
8571 		  if (known_ne (bitpos0, bitpos1))
8572 		    return constant_boolean_node (false, type);
8573 		  break;
8574 		case NE_EXPR:
8575 		  if (known_ne (bitpos0, bitpos1))
8576 		    return constant_boolean_node (true, type);
8577 		  if (known_eq (bitpos0, bitpos1))
8578 		    return constant_boolean_node (false, type);
8579 		  break;
8580 		case LT_EXPR:
8581 		  if (known_lt (bitpos0, bitpos1))
8582 		    return constant_boolean_node (true, type);
8583 		  if (known_ge (bitpos0, bitpos1))
8584 		    return constant_boolean_node (false, type);
8585 		  break;
8586 		case LE_EXPR:
8587 		  if (known_le (bitpos0, bitpos1))
8588 		    return constant_boolean_node (true, type);
8589 		  if (known_gt (bitpos0, bitpos1))
8590 		    return constant_boolean_node (false, type);
8591 		  break;
8592 		case GE_EXPR:
8593 		  if (known_ge (bitpos0, bitpos1))
8594 		    return constant_boolean_node (true, type);
8595 		  if (known_lt (bitpos0, bitpos1))
8596 		    return constant_boolean_node (false, type);
8597 		  break;
8598 		case GT_EXPR:
8599 		  if (known_gt (bitpos0, bitpos1))
8600 		    return constant_boolean_node (true, type);
8601 		  if (known_le (bitpos0, bitpos1))
8602 		    return constant_boolean_node (false, type);
8603 		  break;
8604 		default:;
8605 		}
8606 	    }
8607 	  /* We can simplify the comparison to a comparison of the variable
8608 	     offset parts if the constant offset parts are equal.
8609 	     Be careful to use signed sizetype here because otherwise we
8610 	     mess with array offsets in the wrong way.  This is possible
8611 	     because pointer arithmetic is restricted to remain within an
8612 	     object and overflow on pointer differences is undefined as of
8613 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8614 	  else if (known_eq (bitpos0, bitpos1)
8615 		   && (equality_code
8616 		       || (indirect_base0
8617 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8618 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8619 	    {
8620 	      /* By converting to signed sizetype we cover middle-end pointer
8621 	         arithmetic, which operates on unsigned offset types of
8622 	         sizetype size, and ARRAY_REF offsets, which are properly
8623 	         sign- or zero-extended from their type in case it is
8624 	         narrower than sizetype.  */
8625 	      if (offset0 == NULL_TREE)
8626 		offset0 = build_int_cst (ssizetype, 0);
8627 	      else
8628 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8629 	      if (offset1 == NULL_TREE)
8630 		offset1 = build_int_cst (ssizetype, 0);
8631 	      else
8632 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8633 
8634 	      if (!equality_code
8635 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8636 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8637 		fold_overflow_warning (("assuming pointer wraparound does not "
8638 					"occur when comparing P +- C1 with "
8639 					"P +- C2"),
8640 				       WARN_STRICT_OVERFLOW_COMPARISON);
8641 
8642 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8643 	    }
8644 	}
8645       /* For equal offsets we can simplify to a comparison of the
8646 	 base addresses.  */
8647       else if (known_eq (bitpos0, bitpos1)
8648 	       && (indirect_base0
8649 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8650 	       && (indirect_base1
8651 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8652 	       && ((offset0 == offset1)
8653 		   || (offset0 && offset1
8654 		       && operand_equal_p (offset0, offset1, 0))))
8655 	{
8656 	  if (indirect_base0)
8657 	    base0 = build_fold_addr_expr_loc (loc, base0);
8658 	  if (indirect_base1)
8659 	    base1 = build_fold_addr_expr_loc (loc, base1);
8660 	  return fold_build2_loc (loc, code, type, base0, base1);
8661 	}
8662       /* Comparison between an ordinary (non-weak) symbol and a null
8663 	 pointer can be eliminated since such symbols must have a
8664 	 non-null address.  In C, relational expressions between pointers
8665 	 to objects and null pointers are undefined.  The results
8666 	 below follow the C++ rules with the additional property that
8667 	 every object pointer compares greater than a null pointer.
8668       */
8669       else if (((DECL_P (base0)
8670 		 && maybe_nonzero_address (base0) > 0
8671 		 /* Avoid folding references to struct members at offset 0 to
8672 		    prevent tests like '&ptr->firstmember == 0' from getting
8673 		    eliminated.  When ptr is null, although the -> expression
8674 		    is strictly speaking invalid, GCC retains it as a matter
8675 		    of QoI.  See PR c/44555. */
8676 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8677 		|| CONSTANT_CLASS_P (base0))
8678 	       && indirect_base0
8679 	       /* The caller guarantees that when one of the arguments is
8680 		  constant (i.e., null in this case) it is second.  */
8681 	       && integer_zerop (arg1))
8682 	{
8683 	  switch (code)
8684 	    {
8685 	    case EQ_EXPR:
8686 	    case LE_EXPR:
8687 	    case LT_EXPR:
8688 	      return constant_boolean_node (false, type);
8689 	    case GE_EXPR:
8690 	    case GT_EXPR:
8691 	    case NE_EXPR:
8692 	      return constant_boolean_node (true, type);
8693 	    default:
8694 	      gcc_unreachable ();
8695 	    }
8696 	}
8697     }
8698 
8699   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8700      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8701      the resulting offset is smaller in absolute value than the
8702      original one and has the same sign.  */
8703   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8704       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8705       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8706       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8707 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8708       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8709       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8710 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8711     {
8712       tree const1 = TREE_OPERAND (arg0, 1);
8713       tree const2 = TREE_OPERAND (arg1, 1);
8714       tree variable1 = TREE_OPERAND (arg0, 0);
8715       tree variable2 = TREE_OPERAND (arg1, 0);
8716       tree cst;
8717       const char * const warnmsg = G_("assuming signed overflow does not "
8718 				      "occur when combining constants around "
8719 				      "a comparison");
8720 
8721       /* Put the constant on the side where it doesn't overflow and is
8722 	 of lower absolute value and of the same sign as before.  */
8723       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8724 			     ? MINUS_EXPR : PLUS_EXPR,
8725 			     const2, const1);
8726       if (!TREE_OVERFLOW (cst)
8727 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8728 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8729 	{
8730 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8731 	  return fold_build2_loc (loc, code, type,
8732 				  variable1,
8733 				  fold_build2_loc (loc, TREE_CODE (arg1),
8734 						   TREE_TYPE (arg1),
8735 						   variable2, cst));
8736 	}
8737 
8738       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8739 			     ? MINUS_EXPR : PLUS_EXPR,
8740 			     const1, const2);
8741       if (!TREE_OVERFLOW (cst)
8742 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8743 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8744 	{
8745 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8746 	  return fold_build2_loc (loc, code, type,
8747 				  fold_build2_loc (loc, TREE_CODE (arg0),
8748 						   TREE_TYPE (arg0),
8749 						   variable1, cst),
8750 				  variable2);
8751 	}
8752     }
8753 
8754   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8755   if (tem)
8756     return tem;
8757 
8758   /* If we are comparing an expression that just has comparisons
8759      of two integer values, arithmetic expressions of those comparisons,
8760      and constants, we can simplify it.  There are only three cases
8761      to check: the two values can either be equal, the first can be
8762      greater, or the second can be greater.  Fold the expression for
8763      those three values.  Since each value must be 0 or 1, we have
8764      eight possibilities, each of which corresponds to the constant 0
8765      or 1 or one of the six possible comparisons.
8766 
8767      This handles common cases like (a > b) == 0 but also handles
8768      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8769      occur in macroized code.  */
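  /* Illustrative worked example (added for exposition, not from the GCC
     sources): for ((x > y) - (y > x)) > 0 the three substitutions give

       x greater:  (1 - 0) > 0  ->  1
       x equal:    (0 - 0) > 0  ->  0
       x less:     (0 - 1) > 0  ->  0

     i.e. mask 4 below, so the whole expression folds to x > y.  */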
8770 
8771   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8772     {
8773       tree cval1 = 0, cval2 = 0;
8774 
8775       if (twoval_comparison_p (arg0, &cval1, &cval2)
8776 	  /* Don't handle degenerate cases here; they should already
8777 	     have been handled anyway.  */
8778 	  && cval1 != 0 && cval2 != 0
8779 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8780 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8781 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8782 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8783 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8784 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8785 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8786 	{
8787 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8788 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8789 
8790 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8791 	     was the same as ARG1.  */
8792 
8793 	  tree high_result
8794 		= fold_build2_loc (loc, code, type,
8795 			       eval_subst (loc, arg0, cval1, maxval,
8796 					   cval2, minval),
8797 			       arg1);
8798 	  tree equal_result
8799 		= fold_build2_loc (loc, code, type,
8800 			       eval_subst (loc, arg0, cval1, maxval,
8801 					   cval2, maxval),
8802 			       arg1);
8803 	  tree low_result
8804 		= fold_build2_loc (loc, code, type,
8805 			       eval_subst (loc, arg0, cval1, minval,
8806 					   cval2, maxval),
8807 			       arg1);
8808 
8809 	  /* All three of these results should be 0 or 1.  Confirm they are.
8810 	     Then use those values to select the proper code to use.  */
8811 
8812 	  if (TREE_CODE (high_result) == INTEGER_CST
8813 	      && TREE_CODE (equal_result) == INTEGER_CST
8814 	      && TREE_CODE (low_result) == INTEGER_CST)
8815 	    {
8816 	      /* Make a 3-bit mask with the high-order bit being the
8817 		 value for `>', the next for '=', and the low for '<'.  */
8818 	      switch ((integer_onep (high_result) * 4)
8819 		      + (integer_onep (equal_result) * 2)
8820 		      + integer_onep (low_result))
8821 		{
8822 		case 0:
8823 		  /* Always false.  */
8824 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8825 		case 1:
8826 		  code = LT_EXPR;
8827 		  break;
8828 		case 2:
8829 		  code = EQ_EXPR;
8830 		  break;
8831 		case 3:
8832 		  code = LE_EXPR;
8833 		  break;
8834 		case 4:
8835 		  code = GT_EXPR;
8836 		  break;
8837 		case 5:
8838 		  code = NE_EXPR;
8839 		  break;
8840 		case 6:
8841 		  code = GE_EXPR;
8842 		  break;
8843 		case 7:
8844 		  /* Always true.  */
8845 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8846 		}
8847 
8848 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8849 	    }
8850 	}
8851     }
8852 
8853   return NULL_TREE;
8854 }
8855 
8856 
8857 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8858    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8859    argument EXPR represents the expression "z" of type TYPE.  */
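/* Illustrative example (added for exposition, not from the GCC sources):
   for z = 3 + 4i,

     z * conj(z) = (3 + 4i) * (3 - 4i) = 9 + 16 = 25 + 0i,

   i.e. the squared magnitude with a zero imaginary part.  */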
8860 
8861 static tree
8862 fold_mult_zconjz (location_t loc, tree type, tree expr)
8863 {
8864   tree itype = TREE_TYPE (type);
8865   tree rpart, ipart, tem;
8866 
8867   if (TREE_CODE (expr) == COMPLEX_EXPR)
8868     {
8869       rpart = TREE_OPERAND (expr, 0);
8870       ipart = TREE_OPERAND (expr, 1);
8871     }
8872   else if (TREE_CODE (expr) == COMPLEX_CST)
8873     {
8874       rpart = TREE_REALPART (expr);
8875       ipart = TREE_IMAGPART (expr);
8876     }
8877   else
8878     {
8879       expr = save_expr (expr);
8880       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8881       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8882     }
8883 
8884   rpart = save_expr (rpart);
8885   ipart = save_expr (ipart);
8886   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8887 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8888 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8889   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8890 			  build_zero_cst (itype));
8891 }
8892 
8893 
8894 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8895    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8896    true if successful.  */
8897 
8898 static bool
8899 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8900 {
8901   unsigned HOST_WIDE_INT i, nunits;
8902 
8903   if (TREE_CODE (arg) == VECTOR_CST
8904       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8905     {
8906       for (i = 0; i < nunits; ++i)
8907 	elts[i] = VECTOR_CST_ELT (arg, i);
8908     }
8909   else if (TREE_CODE (arg) == CONSTRUCTOR)
8910     {
8911       constructor_elt *elt;
8912 
8913       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8914 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8915 	  return false;
8916 	else
8917 	  elts[i] = elt->value;
8918     }
8919   else
8920     return false;
8921   for (; i < nelts; i++)
8922     elts[i]
8923       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8924   return true;
8925 }
8926 
8927 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8928    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8929    NULL_TREE otherwise.  */
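/* Illustrative example (added for exposition, not from the GCC sources):
   with ARG0 = {1, 2, 3, 4}, ARG1 = {5, 6, 7, 8} and SEL = {0, 4, 1, 5},
   the result is the interleave {1, 5, 2, 6}: selector values below NELTS
   pick from ARG0, the others from ARG1.  */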
8930 
8931 static tree
8932 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
8933 {
8934   unsigned int i;
8935   unsigned HOST_WIDE_INT nelts;
8936   bool need_ctor = false;
8937 
8938   if (!sel.length ().is_constant (&nelts))
8939     return NULL_TREE;
8940   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
8941 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
8942 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
8943   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8944       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8945     return NULL_TREE;
8946 
8947   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8948   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8949       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8950     return NULL_TREE;
8951 
8952   tree_vector_builder out_elts (type, nelts, 1);
8953   for (i = 0; i < nelts; i++)
8954     {
8955       HOST_WIDE_INT index;
8956       if (!sel[i].is_constant (&index))
8957 	return NULL_TREE;
8958       if (!CONSTANT_CLASS_P (in_elts[index]))
8959 	need_ctor = true;
8960       out_elts.quick_push (unshare_expr (in_elts[index]));
8961     }
8962 
8963   if (need_ctor)
8964     {
8965       vec<constructor_elt, va_gc> *v;
8966       vec_alloc (v, nelts);
8967       for (i = 0; i < nelts; i++)
8968 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8969       return build_constructor (type, v);
8970     }
8971   else
8972     return out_elts.build ();
8973 }
8974 
8975 /* Try to fold a pointer difference of type TYPE between two address
8976    expressions of array references AREF0 and AREF1 using location LOC.
8977    Return a simplified expression for the difference or NULL_TREE.  */
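/* Illustrative example (added for exposition, not from the GCC sources):
   for &a[i] - &a[j] the bases compare equal, so the difference folds to
   (i - j) * sizeof (a[0]).  */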
8978 
8979 static tree
8980 fold_addr_of_array_ref_difference (location_t loc, tree type,
8981 				   tree aref0, tree aref1,
8982 				   bool use_pointer_diff)
8983 {
8984   tree base0 = TREE_OPERAND (aref0, 0);
8985   tree base1 = TREE_OPERAND (aref1, 0);
8986   tree base_offset = build_int_cst (type, 0);
8987 
8988   /* If the bases are array references as well, recurse.  If the bases
8989      are pointer indirections compute the difference of the pointers.
8990      If the bases are equal, we are set.  */
8991   if ((TREE_CODE (base0) == ARRAY_REF
8992        && TREE_CODE (base1) == ARRAY_REF
8993        && (base_offset
8994 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8995 						use_pointer_diff)))
8996       || (INDIRECT_REF_P (base0)
8997 	  && INDIRECT_REF_P (base1)
8998 	  && (base_offset
8999 	        = use_pointer_diff
9000 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9001 				     TREE_OPERAND (base0, 0),
9002 				     TREE_OPERAND (base1, 0))
9003 		  : fold_binary_loc (loc, MINUS_EXPR, type,
9004 				     fold_convert (type,
9005 						   TREE_OPERAND (base0, 0)),
9006 				     fold_convert (type,
9007 						   TREE_OPERAND (base1, 0)))))
9008       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9009     {
9010       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9011       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9012       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9013       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9014       return fold_build2_loc (loc, PLUS_EXPR, type,
9015 			      base_offset,
9016 			      fold_build2_loc (loc, MULT_EXPR, type,
9017 					       diff, esz));
9018     }
9019   return NULL_TREE;
9020 }
9021 
9022 /* If the real or vector real constant CST of type TYPE has an exact
9023    inverse, return it, else return NULL.  */
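/* Illustrative example (added for exposition, not from the GCC sources):
   4.0 has the exact binary inverse 0.25, so callers may rewrite x / 4.0
   as x * 0.25; 3.0 has no exact inverse and NULL_TREE is returned.  */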
9024 
9025 tree
9026 exact_inverse (tree type, tree cst)
9027 {
9028   REAL_VALUE_TYPE r;
9029   tree unit_type;
9030   machine_mode mode;
9031 
9032   switch (TREE_CODE (cst))
9033     {
9034     case REAL_CST:
9035       r = TREE_REAL_CST (cst);
9036 
9037       if (exact_real_inverse (TYPE_MODE (type), &r))
9038 	return build_real (type, r);
9039 
9040       return NULL_TREE;
9041 
9042     case VECTOR_CST:
9043       {
9044 	unit_type = TREE_TYPE (type);
9045 	mode = TYPE_MODE (unit_type);
9046 
9047 	tree_vector_builder elts;
9048 	if (!elts.new_unary_operation (type, cst, false))
9049 	  return NULL_TREE;
9050 	unsigned int count = elts.encoded_nelts ();
9051 	for (unsigned int i = 0; i < count; ++i)
9052 	  {
9053 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9054 	    if (!exact_real_inverse (mode, &r))
9055 	      return NULL_TREE;
9056 	    elts.quick_push (build_real (unit_type, r));
9057 	  }
9058 
9059 	return elts.build ();
9060       }
9061 
9062     default:
9063       return NULL_TREE;
9064     }
9065 }
9066 
9067 /*  Mask out the tz least significant bits of X of type TYPE where
9068     tz is the number of trailing zeroes in Y.  */
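/* Illustrative example (added for exposition, not from the GCC sources):
   for Y = 24 (binary 11000, three trailing zeros) the three least
   significant bits of X are cleared, e.g. X = 23 (10111) yields
   16 (10000).  */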
9069 static wide_int
9070 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9071 {
9072   int tz = wi::ctz (y);
9073   if (tz > 0)
9074     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9075   return x;
9076 }
9077 
9078 /* Return true when T is an address and is known to be nonzero.
9079    For floating point we further ensure that T is not denormal.
9080    Similar logic is present in nonzero_address in rtlanal.h.
9081 
9082    If the return value is based on the assumption that signed overflow
9083    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9084    change *STRICT_OVERFLOW_P.  */
9085 
9086 static bool
9087 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9088 {
9089   tree type = TREE_TYPE (t);
9090   enum tree_code code;
9091 
9092   /* Doing something useful for floating point would need more work.  */
9093   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9094     return false;
9095 
9096   code = TREE_CODE (t);
9097   switch (TREE_CODE_CLASS (code))
9098     {
9099     case tcc_unary:
9100       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9101 					      strict_overflow_p);
9102     case tcc_binary:
9103     case tcc_comparison:
9104       return tree_binary_nonzero_warnv_p (code, type,
9105 					       TREE_OPERAND (t, 0),
9106 					       TREE_OPERAND (t, 1),
9107 					       strict_overflow_p);
9108     case tcc_constant:
9109     case tcc_declaration:
9110     case tcc_reference:
9111       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9112 
9113     default:
9114       break;
9115     }
9116 
9117   switch (code)
9118     {
9119     case TRUTH_NOT_EXPR:
9120       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9121 					      strict_overflow_p);
9122 
9123     case TRUTH_AND_EXPR:
9124     case TRUTH_OR_EXPR:
9125     case TRUTH_XOR_EXPR:
9126       return tree_binary_nonzero_warnv_p (code, type,
9127 					       TREE_OPERAND (t, 0),
9128 					       TREE_OPERAND (t, 1),
9129 					       strict_overflow_p);
9130 
9131     case COND_EXPR:
9132     case CONSTRUCTOR:
9133     case OBJ_TYPE_REF:
9134     case ASSERT_EXPR:
9135     case ADDR_EXPR:
9136     case WITH_SIZE_EXPR:
9137     case SSA_NAME:
9138       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9139 
9140     case COMPOUND_EXPR:
9141     case MODIFY_EXPR:
9142     case BIND_EXPR:
9143       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9144 					strict_overflow_p);
9145 
9146     case SAVE_EXPR:
9147       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9148 					strict_overflow_p);
9149 
9150     case CALL_EXPR:
9151       {
9152 	tree fndecl = get_callee_fndecl (t);
9153 	if (!fndecl) return false;
9154 	if (flag_delete_null_pointer_checks && !flag_check_new
9155 	    && DECL_IS_OPERATOR_NEW (fndecl)
9156 	    && !TREE_NOTHROW (fndecl))
9157 	  return true;
9158 	if (flag_delete_null_pointer_checks
9159 	    && lookup_attribute ("returns_nonnull",
9160 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9161 	  return true;
9162 	return alloca_call_p (t);
9163       }
9164 
9165     default:
9166       break;
9167     }
9168   return false;
9169 }
9170 
9171 /* Return true when T is an address and is known to be nonzero.
9172    Handle warnings about undefined signed overflow.  */
9173 
9174 bool
9175 tree_expr_nonzero_p (tree t)
9176 {
9177   bool ret, strict_overflow_p;
9178 
9179   strict_overflow_p = false;
9180   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9181   if (strict_overflow_p)
9182     fold_overflow_warning (("assuming signed overflow does not occur when "
9183 			    "determining that expression is always "
9184 			    "non-zero"),
9185 			   WARN_STRICT_OVERFLOW_MISC);
9186   return ret;
9187 }
9188 
9189 /* Return true if T is known not to be equal to an integer W.  */
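/* Illustrative example (added for exposition, not from the GCC sources):
   if value range propagation recorded the range [10, 20] for an SSA name
   T, then expr_not_equal_to (T, 5) returns true, while
   expr_not_equal_to (T, 15) returns false absent nonzero-bits
   information.  */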
9190 
9191 bool
9192 expr_not_equal_to (tree t, const wide_int &w)
9193 {
9194   wide_int min, max, nz;
9195   value_range_type rtype;
9196   switch (TREE_CODE (t))
9197     {
9198     case INTEGER_CST:
9199       return wi::to_wide (t) != w;
9200 
9201     case SSA_NAME:
9202       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9203 	return false;
9204       rtype = get_range_info (t, &min, &max);
9205       if (rtype == VR_RANGE)
9206 	{
9207 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9208 	    return true;
9209 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9210 	    return true;
9211 	}
9212       else if (rtype == VR_ANTI_RANGE
9213 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9214 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9215 	return true;
9216       /* If T has some known zero bits and W has any of those bits set,
9217 	 then T is known not to be equal to W.  */
9218       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9219 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9220 	return true;
9221       return false;
9222 
9223     default:
9224       return false;
9225     }
9226 }
9227 
9228 /* Fold a binary expression of code CODE and type TYPE with operands
9229    OP0 and OP1.  LOC is the location of the resulting expression.
9230    Return the folded expression if folding is successful.  Otherwise,
9231    return NULL_TREE.  */
9232 
9233 tree
9234 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9235 		 tree op0, tree op1)
9236 {
9237   enum tree_code_class kind = TREE_CODE_CLASS (code);
9238   tree arg0, arg1, tem;
9239   tree t1 = NULL_TREE;
9240   bool strict_overflow_p;
9241   unsigned int prec;
9242 
9243   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9244 	      && TREE_CODE_LENGTH (code) == 2
9245 	      && op0 != NULL_TREE
9246 	      && op1 != NULL_TREE);
9247 
9248   arg0 = op0;
9249   arg1 = op1;
9250 
9251   /* Strip any conversions that don't change the mode.  This is
9252      safe for every expression, except for a comparison expression
9253      because its signedness is derived from its operands.  So, in
9254      the latter case, only strip conversions that don't change the
9255      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9256      preserved.
9257 
9258      Note that this is done as an internal manipulation within the
9259      constant folder, in order to find the simplest representation
9260      of the arguments so that their form can be studied.  In any
9261      cases, the appropriate type conversions should be put back in
9262      the tree that will get out of the constant folder.  */
9263 
9264   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9265     {
9266       STRIP_SIGN_NOPS (arg0);
9267       STRIP_SIGN_NOPS (arg1);
9268     }
9269   else
9270     {
9271       STRIP_NOPS (arg0);
9272       STRIP_NOPS (arg1);
9273     }
9274 
9275   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9276      constant but we can't do arithmetic on them.  */
9277   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9278     {
9279       tem = const_binop (code, type, arg0, arg1);
9280       if (tem != NULL_TREE)
9281 	{
9282 	  if (TREE_TYPE (tem) != type)
9283 	    tem = fold_convert_loc (loc, type, tem);
9284 	  return tem;
9285 	}
9286     }
9287 
9288   /* If this is a commutative operation, and ARG0 is a constant, move it
9289      to ARG1 to reduce the number of tests below.  */
9290   if (commutative_tree_code (code)
9291       && tree_swap_operands_p (arg0, arg1))
9292     return fold_build2_loc (loc, code, type, op1, op0);
9293 
9294   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9295      to ARG1 to reduce the number of tests below.  */
9296   if (kind == tcc_comparison
9297       && tree_swap_operands_p (arg0, arg1))
9298     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9299 
9300   tem = generic_simplify (loc, code, type, op0, op1);
9301   if (tem)
9302     return tem;
9303 
9304   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9305 
9306      First check for cases where an arithmetic operation is applied to a
9307      compound, conditional, or comparison operation.  Push the arithmetic
9308      operation inside the compound or conditional to see if any folding
9309      can then be done.  Convert comparison to conditional for this purpose.
9310      The also optimizes non-constant cases that used to be done in
9311      expand_expr.
9312 
9313      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9314      where one of the operands is a comparison and the other is a comparison, a
9315      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9316      code below would make the expression more complex.  Change it to a
9317      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9318      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9319 
9320   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9321        || code == EQ_EXPR || code == NE_EXPR)
9322       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9323       && ((truth_value_p (TREE_CODE (arg0))
9324 	   && (truth_value_p (TREE_CODE (arg1))
9325 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9326 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9327 	  || (truth_value_p (TREE_CODE (arg1))
9328 	      && (truth_value_p (TREE_CODE (arg0))
9329 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9330 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9331     {
9332       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9333 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9334 			 : TRUTH_XOR_EXPR,
9335 			 boolean_type_node,
9336 			 fold_convert_loc (loc, boolean_type_node, arg0),
9337 			 fold_convert_loc (loc, boolean_type_node, arg1));
9338 
9339       if (code == EQ_EXPR)
9340 	tem = invert_truthvalue_loc (loc, tem);
9341 
9342       return fold_convert_loc (loc, type, tem);
9343     }
9344 
9345   if (TREE_CODE_CLASS (code) == tcc_binary
9346       || TREE_CODE_CLASS (code) == tcc_comparison)
9347     {
9348       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9349 	{
9350 	  tem = fold_build2_loc (loc, code, type,
9351 			     fold_convert_loc (loc, TREE_TYPE (op0),
9352 					       TREE_OPERAND (arg0, 1)), op1);
9353 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9354 			     tem);
9355 	}
9356       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9357 	{
9358 	  tem = fold_build2_loc (loc, code, type, op0,
9359 			     fold_convert_loc (loc, TREE_TYPE (op1),
9360 					       TREE_OPERAND (arg1, 1)));
9361 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9362 			     tem);
9363 	}
9364 
9365       if (TREE_CODE (arg0) == COND_EXPR
9366 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9367 	  || COMPARISON_CLASS_P (arg0))
9368 	{
9369 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9370 						     arg0, arg1,
9371 						     /*cond_first_p=*/1);
9372 	  if (tem != NULL_TREE)
9373 	    return tem;
9374 	}
9375 
9376       if (TREE_CODE (arg1) == COND_EXPR
9377 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9378 	  || COMPARISON_CLASS_P (arg1))
9379 	{
9380 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9381 						     arg1, arg0,
9382 					             /*cond_first_p=*/0);
9383 	  if (tem != NULL_TREE)
9384 	    return tem;
9385 	}
9386     }
9387 
9388   switch (code)
9389     {
9390     case MEM_REF:
9391       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9392       if (TREE_CODE (arg0) == ADDR_EXPR
9393 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9394 	{
9395 	  tree iref = TREE_OPERAND (arg0, 0);
9396 	  return fold_build2 (MEM_REF, type,
9397 			      TREE_OPERAND (iref, 0),
9398 			      int_const_binop (PLUS_EXPR, arg1,
9399 					       TREE_OPERAND (iref, 1)));
9400 	}
9401 
9402       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9403       if (TREE_CODE (arg0) == ADDR_EXPR
9404 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9405 	{
9406 	  tree base;
9407 	  poly_int64 coffset;
9408 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9409 						&coffset);
9410 	  if (!base)
9411 	    return NULL_TREE;
9412 	  return fold_build2 (MEM_REF, type,
9413 			      build_fold_addr_expr (base),
9414 			      int_const_binop (PLUS_EXPR, arg1,
9415 					       size_int (coffset)));
9416 	}
9417 
9418       return NULL_TREE;
9419 
9420     case POINTER_PLUS_EXPR:
9421       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9422       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9423 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9424         return fold_convert_loc (loc, type,
9425 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9426 					      fold_convert_loc (loc, sizetype,
9427 								arg1),
9428 					      fold_convert_loc (loc, sizetype,
9429 								arg0)));
9430 
9431       return NULL_TREE;
9432 
9433     case PLUS_EXPR:
9434       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9435 	{
9436 	  /* X + (X / CST) * -CST is X % CST.  */
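	  /* For example, x + (x / 16) * -16 folds to x % 16.  */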
9437 	  if (TREE_CODE (arg1) == MULT_EXPR
9438 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9439 	      && operand_equal_p (arg0,
9440 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9441 	    {
9442 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9443 	      tree cst1 = TREE_OPERAND (arg1, 1);
9444 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9445 				      cst1, cst0);
9446 	      if (sum && integer_zerop (sum))
9447 		return fold_convert_loc (loc, type,
9448 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9449 						      TREE_TYPE (arg0), arg0,
9450 						      cst0));
9451 	    }
9452 	}
9453 
9454       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9455 	 one.  Make sure the type is not saturating and has the signedness of
9456 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9457 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
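      /* For example, a*4 + a*2 can fold to a*6, and a*3 + b*3 can fold
	 to (a + b) * 3.  */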
9458       if ((TREE_CODE (arg0) == MULT_EXPR
9459 	   || TREE_CODE (arg1) == MULT_EXPR)
9460 	  && !TYPE_SATURATING (type)
9461 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9462 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9463 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9464         {
9465 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9466 	  if (tem)
9467 	    return tem;
9468 	}
9469 
9470       if (! FLOAT_TYPE_P (type))
9471 	{
9472 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9473 	     (plus (plus (mult) (mult)) (foo)) so that we can
9474 	     take advantage of the factoring cases below.  */
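	  /* For example, (a*b + c) + d*e becomes (a*b + d*e) + c.  */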
9475 	  if (ANY_INTEGRAL_TYPE_P (type)
9476 	      && TYPE_OVERFLOW_WRAPS (type)
9477 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9478 		    || TREE_CODE (arg0) == MINUS_EXPR)
9479 		   && TREE_CODE (arg1) == MULT_EXPR)
9480 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9481 		       || TREE_CODE (arg1) == MINUS_EXPR)
9482 		      && TREE_CODE (arg0) == MULT_EXPR)))
9483 	    {
9484 	      tree parg0, parg1, parg, marg;
9485 	      enum tree_code pcode;
9486 
9487 	      if (TREE_CODE (arg1) == MULT_EXPR)
9488 		parg = arg0, marg = arg1;
9489 	      else
9490 		parg = arg1, marg = arg0;
9491 	      pcode = TREE_CODE (parg);
9492 	      parg0 = TREE_OPERAND (parg, 0);
9493 	      parg1 = TREE_OPERAND (parg, 1);
9494 	      STRIP_NOPS (parg0);
9495 	      STRIP_NOPS (parg1);
9496 
9497 	      if (TREE_CODE (parg0) == MULT_EXPR
9498 		  && TREE_CODE (parg1) != MULT_EXPR)
9499 		return fold_build2_loc (loc, pcode, type,
9500 				    fold_build2_loc (loc, PLUS_EXPR, type,
9501 						 fold_convert_loc (loc, type,
9502 								   parg0),
9503 						 fold_convert_loc (loc, type,
9504 								   marg)),
9505 				    fold_convert_loc (loc, type, parg1));
9506 	      if (TREE_CODE (parg0) != MULT_EXPR
9507 		  && TREE_CODE (parg1) == MULT_EXPR)
9508 		return
9509 		  fold_build2_loc (loc, PLUS_EXPR, type,
9510 			       fold_convert_loc (loc, type, parg0),
9511 			       fold_build2_loc (loc, pcode, type,
9512 					    fold_convert_loc (loc, type, marg),
9513 					    fold_convert_loc (loc, type,
9514 							      parg1)));
9515 	    }
9516 	}
9517       else
9518 	{
9519 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9520 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9521 	     if signed zeros are involved.  */
9522 	  if (!HONOR_SNANS (element_mode (arg0))
9523               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9524 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9525 	    {
9526 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9527 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9528 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9529 	      bool arg0rz = false, arg0iz = false;
9530 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9531 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9532 		{
9533 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9534 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9535 		  if (arg0rz && arg1i && real_zerop (arg1i))
9536 		    {
9537 		      tree rp = arg1r ? arg1r
9538 				  : build1 (REALPART_EXPR, rtype, arg1);
9539 		      tree ip = arg0i ? arg0i
9540 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9541 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9542 		    }
9543 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9544 		    {
9545 		      tree rp = arg0r ? arg0r
9546 				  : build1 (REALPART_EXPR, rtype, arg0);
9547 		      tree ip = arg1i ? arg1i
9548 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9549 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9550 		    }
9551 		}
9552 	    }
9553 
9554           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9555              We associate floats only if the user has specified
9556              -fassociative-math.  */
9557           if (flag_associative_math
9558               && TREE_CODE (arg1) == PLUS_EXPR
9559               && TREE_CODE (arg0) != MULT_EXPR)
9560             {
9561               tree tree10 = TREE_OPERAND (arg1, 0);
9562               tree tree11 = TREE_OPERAND (arg1, 1);
9563               if (TREE_CODE (tree11) == MULT_EXPR
9564 		  && TREE_CODE (tree10) == MULT_EXPR)
9565                 {
9566                   tree tree0;
9567                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9568                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9569                 }
9570             }
9571           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9572              We associate floats only if the user has specified
9573              -fassociative-math.  */
9574           if (flag_associative_math
9575               && TREE_CODE (arg0) == PLUS_EXPR
9576               && TREE_CODE (arg1) != MULT_EXPR)
9577             {
9578               tree tree00 = TREE_OPERAND (arg0, 0);
9579               tree tree01 = TREE_OPERAND (arg0, 1);
9580               if (TREE_CODE (tree01) == MULT_EXPR
9581 		  && TREE_CODE (tree00) == MULT_EXPR)
9582                 {
9583                   tree tree0;
9584                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9585                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9586                 }
9587             }
9588 	}
9589 
9590      bit_rotate:
9591       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of
9592 	 A, is a rotate of A by C1 bits.  */
9593       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of
9594 	 A, is a rotate of A by B bits.
9595 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9596 	 though in this case CODE must be | and not + or ^; otherwise
9597 	 it doesn't return A when B is 0.  */
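      /* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29)
	 becomes a left rotate of A by 3, and (A << B) | (A >> (-B & 31))
	 becomes a left rotate of A by B.  */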
9598       {
9599 	enum tree_code code0, code1;
9600 	tree rtype;
9601 	code0 = TREE_CODE (arg0);
9602 	code1 = TREE_CODE (arg1);
9603 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9604 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9605 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9606 			        TREE_OPERAND (arg1, 0), 0)
9607 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9608 	        TYPE_UNSIGNED (rtype))
9609 	    /* Only create rotates in complete modes.  Other cases are not
9610 	       expanded properly.  */
9611 	    && (element_precision (rtype)
9612 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9613 	  {
9614 	    tree tree01, tree11;
9615 	    tree orig_tree01, orig_tree11;
9616 	    enum tree_code code01, code11;
9617 
9618 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9619 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9620 	    STRIP_NOPS (tree01);
9621 	    STRIP_NOPS (tree11);
9622 	    code01 = TREE_CODE (tree01);
9623 	    code11 = TREE_CODE (tree11);
9624 	    if (code11 != MINUS_EXPR
9625 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9626 	      {
9627 		std::swap (code0, code1);
9628 		std::swap (code01, code11);
9629 		std::swap (tree01, tree11);
9630 		std::swap (orig_tree01, orig_tree11);
9631 	      }
9632 	    if (code01 == INTEGER_CST
9633 		&& code11 == INTEGER_CST
9634 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9635 		    == element_precision (rtype)))
9636 	      {
9637 		tem = build2_loc (loc, LROTATE_EXPR,
9638 				  rtype, TREE_OPERAND (arg0, 0),
9639 				  code0 == LSHIFT_EXPR
9640 				  ? orig_tree01 : orig_tree11);
9641 		return fold_convert_loc (loc, type, tem);
9642 	      }
9643 	    else if (code11 == MINUS_EXPR)
9644 	      {
9645 		tree tree110, tree111;
9646 		tree110 = TREE_OPERAND (tree11, 0);
9647 		tree111 = TREE_OPERAND (tree11, 1);
9648 		STRIP_NOPS (tree110);
9649 		STRIP_NOPS (tree111);
9650 		if (TREE_CODE (tree110) == INTEGER_CST
9651 		    && compare_tree_int (tree110,
9652 					 element_precision (rtype)) == 0
9653 		    && operand_equal_p (tree01, tree111, 0))
9654 		  {
9655 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9656 					    ? LROTATE_EXPR : RROTATE_EXPR),
9657 				      rtype, TREE_OPERAND (arg0, 0),
9658 				      orig_tree01);
9659 		    return fold_convert_loc (loc, type, tem);
9660 		  }
9661 	      }
9662 	    else if (code == BIT_IOR_EXPR
9663 		     && code11 == BIT_AND_EXPR
9664 		     && pow2p_hwi (element_precision (rtype)))
9665 	      {
9666 		tree tree110, tree111;
9667 		tree110 = TREE_OPERAND (tree11, 0);
9668 		tree111 = TREE_OPERAND (tree11, 1);
9669 		STRIP_NOPS (tree110);
9670 		STRIP_NOPS (tree111);
9671 		if (TREE_CODE (tree110) == NEGATE_EXPR
9672 		    && TREE_CODE (tree111) == INTEGER_CST
9673 		    && compare_tree_int (tree111,
9674 					 element_precision (rtype) - 1) == 0
9675 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9676 		  {
9677 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9678 					    ? LROTATE_EXPR : RROTATE_EXPR),
9679 				      rtype, TREE_OPERAND (arg0, 0),
9680 				      orig_tree01);
9681 		    return fold_convert_loc (loc, type, tem);
9682 		  }
9683 	      }
9684 	  }
9685       }
9686 
9687     associate:
9688       /* In most languages, we can't associate operations on floats through
9689 	 parentheses.  Rather than remember where the parentheses were, we
9690 	 don't associate floats at all, unless the user has specified
9691 	 -fassociative-math.
9692 	 And, we need to make sure type is not saturating.  */
9693 
9694       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9695 	  && !TYPE_SATURATING (type))
9696 	{
9697 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9698 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9699 	  tree atype = type;
9700 	  bool ok = true;
9701 
9702 	  /* Split both trees into variables, constants, and literals.  Then
9703 	     associate each group together, the constants with literals,
9704 	     then the result with variables.  This increases the chances of
9705 	     literals being recombined later and of generating relocatable
9706 	     expressions for the sum of a constant and literal.  */
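	  /* For example, with unsigned x and y, (x + 1) - (y + 2) is split
	     into variables x, y and literals 1, 2, and recombines below
	     into (x - y) - 1.  */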
9707 	  var0 = split_tree (arg0, type, code,
9708 			     &minus_var0, &con0, &minus_con0,
9709 			     &lit0, &minus_lit0, 0);
9710 	  var1 = split_tree (arg1, type, code,
9711 			     &minus_var1, &con1, &minus_con1,
9712 			     &lit1, &minus_lit1, code == MINUS_EXPR);
9713 
9714 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9715 	  if (code == MINUS_EXPR)
9716 	    code = PLUS_EXPR;
9717 
9718 	  /* With undefined overflow prefer doing association in a type
9719 	     which wraps on overflow, if that is one of the operand types.  */
9720 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9721 	      && !TYPE_OVERFLOW_WRAPS (type))
9722 	    {
9723 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9724 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9725 		atype = TREE_TYPE (arg0);
9726 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9727 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9728 		atype = TREE_TYPE (arg1);
9729 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9730 	    }
9731 
9732 	  /* With undefined overflow we can only associate constants with one
9733 	     variable, and constants whose association doesn't overflow.  */
9734 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9735 	      && !TYPE_OVERFLOW_WRAPS (atype))
9736 	    {
9737 	      if ((var0 && var1) || (minus_var0 && minus_var1))
9738 		{
9739 		  /* ???  If split_tree would handle NEGATE_EXPR we could
9740 		     simply reject these cases and the allowed cases would
9741 		     be the var0/minus_var1 ones.  */
9742 		  tree tmp0 = var0 ? var0 : minus_var0;
9743 		  tree tmp1 = var1 ? var1 : minus_var1;
9744 		  bool one_neg = false;
9745 
9746 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9747 		    {
9748 		      tmp0 = TREE_OPERAND (tmp0, 0);
9749 		      one_neg = !one_neg;
9750 		    }
9751 		  if (CONVERT_EXPR_P (tmp0)
9752 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9753 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9754 			  <= TYPE_PRECISION (atype)))
9755 		    tmp0 = TREE_OPERAND (tmp0, 0);
9756 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9757 		    {
9758 		      tmp1 = TREE_OPERAND (tmp1, 0);
9759 		      one_neg = !one_neg;
9760 		    }
9761 		  if (CONVERT_EXPR_P (tmp1)
9762 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9763 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9764 			  <= TYPE_PRECISION (atype)))
9765 		    tmp1 = TREE_OPERAND (tmp1, 0);
9766 		  /* The only case we can still associate with two variables
9767 		     is if they cancel out.  */
9768 		  if (!one_neg
9769 		      || !operand_equal_p (tmp0, tmp1, 0))
9770 		    ok = false;
9771 		}
9772 	      else if ((var0 && minus_var1
9773 			&& ! operand_equal_p (var0, minus_var1, 0))
9774 		       || (minus_var0 && var1
9775 			   && ! operand_equal_p (minus_var0, var1, 0)))
9776 		ok = false;
9777 	    }
9778 
9779 	  /* Only do something if we found more than two objects.  Otherwise,
9780 	     nothing has changed and we risk infinite recursion.  */
9781 	  if (ok
9782 	      && ((var0 != 0) + (var1 != 0)
9783 		  + (minus_var0 != 0) + (minus_var1 != 0)
9784 		  + (con0 != 0) + (con1 != 0)
9785 		  + (minus_con0 != 0) + (minus_con1 != 0)
9786 		  + (lit0 != 0) + (lit1 != 0)
9787 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9788 	    {
9789 	      var0 = associate_trees (loc, var0, var1, code, atype);
9790 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9791 					    code, atype);
9792 	      con0 = associate_trees (loc, con0, con1, code, atype);
9793 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9794 					    code, atype);
9795 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9796 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9797 					    code, atype);
9798 
9799 	      if (minus_var0 && var0)
9800 		{
9801 		  var0 = associate_trees (loc, var0, minus_var0,
9802 					  MINUS_EXPR, atype);
9803 		  minus_var0 = 0;
9804 		}
9805 	      if (minus_con0 && con0)
9806 		{
9807 		  con0 = associate_trees (loc, con0, minus_con0,
9808 					  MINUS_EXPR, atype);
9809 		  minus_con0 = 0;
9810 		}
9811 
9812 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9813 		 greater than the positive part.  Otherwise, the multiplicative
9814 		 folding code (i.e extract_muldiv) may be fooled in case
9815 		 unsigned constants are subtracted, like in the following
9816 		 example: ((X*2 + 4) - 8U)/2.  */
9817 	      if (minus_lit0 && lit0)
9818 		{
9819 		  if (TREE_CODE (lit0) == INTEGER_CST
9820 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9821 		      && tree_int_cst_lt (lit0, minus_lit0)
9822 		      /* But avoid ending up with only negated parts.  */
9823 		      && (var0 || con0))
9824 		    {
9825 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9826 						    MINUS_EXPR, atype);
9827 		      lit0 = 0;
9828 		    }
9829 		  else
9830 		    {
9831 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9832 					      MINUS_EXPR, atype);
9833 		      minus_lit0 = 0;
9834 		    }
9835 		}
9836 
9837 	      /* Don't introduce overflows through reassociation.  */
9838 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9839 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9840 		return NULL_TREE;
9841 
9842 	      /* Fold lit0 into con0 and minus_lit0 into minus_con0.  */
9843 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9844 	      lit0 = 0;
9845 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9846 					    code, atype);
9847 	      minus_lit0 = 0;
9848 
9849 	      /* Eliminate minus_con0.  */
9850 	      if (minus_con0)
9851 		{
9852 		  if (con0)
9853 		    con0 = associate_trees (loc, con0, minus_con0,
9854 					    MINUS_EXPR, atype);
9855 		  else if (var0)
9856 		    var0 = associate_trees (loc, var0, minus_con0,
9857 					    MINUS_EXPR, atype);
9858 		  else
9859 		    gcc_unreachable ();
9860 		  minus_con0 = 0;
9861 		}
9862 
9863 	      /* Eliminate minus_var0.  */
9864 	      if (minus_var0)
9865 		{
9866 		  if (con0)
9867 		    con0 = associate_trees (loc, con0, minus_var0,
9868 					    MINUS_EXPR, atype);
9869 		  else
9870 		    gcc_unreachable ();
9871 		  minus_var0 = 0;
9872 		}
9873 
9874 	      return
9875 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9876 							      code, atype));
9877 	    }
9878 	}
9879 
9880       return NULL_TREE;
9881 
9882     case POINTER_DIFF_EXPR:
9883     case MINUS_EXPR:
9884       /* Fold &a[i] - &a[j] to i-j.  */
9885       if (TREE_CODE (arg0) == ADDR_EXPR
9886 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9887 	  && TREE_CODE (arg1) == ADDR_EXPR
9888 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9889         {
9890 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9891 							TREE_OPERAND (arg0, 0),
9892 							TREE_OPERAND (arg1, 0),
9893 							code
9894 							== POINTER_DIFF_EXPR);
9895 	  if (tem)
9896 	    return tem;
9897 	}
9898 
9899       /* Further transformations do not apply to pointers.  */
9900       if (code == POINTER_DIFF_EXPR)
9901 	return NULL_TREE;
9902 
9903       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
9904       if (TREE_CODE (arg0) == NEGATE_EXPR
9905 	  && negate_expr_p (op1)
9906 	  /* If arg0 is e.g. unsigned int and type is int, then this could
9907 	     introduce UB, because if A is INT_MIN at runtime, the original
9908 	     expression can be well defined while the latter is not.
9909 	     See PR83269.  */
9910 	  && !(ANY_INTEGRAL_TYPE_P (type)
9911 	       && TYPE_OVERFLOW_UNDEFINED (type)
9912 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9913 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9914 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9915 			        fold_convert_loc (loc, type,
9916 						  TREE_OPERAND (arg0, 0)));
9917 
9918       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9919 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
9920 	 signed zeros are involved.  */
9921       if (!HONOR_SNANS (element_mode (arg0))
9922 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9923 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9924         {
9925 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9926 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9927 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9928 	  bool arg0rz = false, arg0iz = false;
9929 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
9930 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
9931 	    {
9932 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9933 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9934 	      if (arg0rz && arg1i && real_zerop (arg1i))
9935 	        {
9936 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9937 					 arg1r ? arg1r
9938 					 : build1 (REALPART_EXPR, rtype, arg1));
9939 		  tree ip = arg0i ? arg0i
9940 		    : build1 (IMAGPART_EXPR, rtype, arg0);
9941 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9942 		}
9943 	      else if (arg0iz && arg1r && real_zerop (arg1r))
9944 	        {
9945 		  tree rp = arg0r ? arg0r
9946 		    : build1 (REALPART_EXPR, rtype, arg0);
9947 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9948 					 arg1i ? arg1i
9949 					 : build1 (IMAGPART_EXPR, rtype, arg1));
9950 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9951 		}
9952 	    }
9953 	}
9954 
9955       /* A - B -> A + (-B) if B is easily negatable.  */
9956       if (negate_expr_p (op1)
9957 	  && ! TYPE_OVERFLOW_SANITIZED (type)
9958 	  && ((FLOAT_TYPE_P (type)
9959                /* Avoid this transformation if B is a positive REAL_CST.  */
9960 	       && (TREE_CODE (op1) != REAL_CST
9961 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9962 	      || INTEGRAL_TYPE_P (type)))
9963 	return fold_build2_loc (loc, PLUS_EXPR, type,
9964 				fold_convert_loc (loc, type, arg0),
9965 				negate_expr (op1));
9966 
9967       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9968 	 one.  Make sure the type is not saturating and has the signedness of
9969 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9970 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9971       if ((TREE_CODE (arg0) == MULT_EXPR
9972 	   || TREE_CODE (arg1) == MULT_EXPR)
9973 	  && !TYPE_SATURATING (type)
9974 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9975 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9976 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9977         {
9978 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9979 	  if (tem)
9980 	    return tem;
9981 	}
9982 
9983       goto associate;
9984 
9985     case MULT_EXPR:
9986       if (! FLOAT_TYPE_P (type))
9987 	{
9988 	  /* Transform x * -C into -x * C if x is easily negatable.  */
9989 	  if (TREE_CODE (op1) == INTEGER_CST
9990 	      && tree_int_cst_sgn (op1) == -1
9991 	      && negate_expr_p (op0)
9992 	      && negate_expr_p (op1)
9993 	      && (tem = negate_expr (op1)) != op1
9994 	      && ! TREE_OVERFLOW (tem))
9995 	    return fold_build2_loc (loc, MULT_EXPR, type,
9996 				    fold_convert_loc (loc, type,
9997 						      negate_expr (op0)), tem);
9998 
9999 	  strict_overflow_p = false;
10000 	  if (TREE_CODE (arg1) == INTEGER_CST
10001 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10002 					&strict_overflow_p)) != 0)
10003 	    {
10004 	      if (strict_overflow_p)
10005 		fold_overflow_warning (("assuming signed overflow does not "
10006 					"occur when simplifying "
10007 					"multiplication"),
10008 				       WARN_STRICT_OVERFLOW_MISC);
10009 	      return fold_convert_loc (loc, type, tem);
10010 	    }
10011 
10012 	  /* Optimize z * conj(z) for integer complex numbers.  */
10013 	  if (TREE_CODE (arg0) == CONJ_EXPR
10014 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10015 	    return fold_mult_zconjz (loc, type, arg1);
10016 	  if (TREE_CODE (arg1) == CONJ_EXPR
10017 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10018 	    return fold_mult_zconjz (loc, type, arg0);
10019 	}
10020       else
10021 	{
10022 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10023 	     This is not the same for NaNs or if signed zeros are
10024 	     involved.  */
10025 	  if (!HONOR_NANS (arg0)
10026               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10027 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10028 	      && TREE_CODE (arg1) == COMPLEX_CST
10029 	      && real_zerop (TREE_REALPART (arg1)))
10030 	    {
10031 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10032 	      if (real_onep (TREE_IMAGPART (arg1)))
10033 		return
10034 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10035 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10036 							     rtype, arg0)),
10037 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10038 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10039 		return
10040 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10041 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10042 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10043 							     rtype, arg0)));
10044 	    }
10045 
10046 	  /* Optimize z * conj(z) for floating point complex numbers.
10047 	     Guarded by flag_unsafe_math_optimizations as non-finite
10048 	     imaginary components don't produce scalar results.  */
10049 	  if (flag_unsafe_math_optimizations
10050 	      && TREE_CODE (arg0) == CONJ_EXPR
10051 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10052 	    return fold_mult_zconjz (loc, type, arg1);
10053 	  if (flag_unsafe_math_optimizations
10054 	      && TREE_CODE (arg1) == CONJ_EXPR
10055 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10056 	    return fold_mult_zconjz (loc, type, arg0);
10057 	}
10058       goto associate;
10059 
10060     case BIT_IOR_EXPR:
10061       /* Canonicalize (X & C1) | C2.  */
10062       if (TREE_CODE (arg0) == BIT_AND_EXPR
10063 	  && TREE_CODE (arg1) == INTEGER_CST
10064 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10065 	{
10066 	  int width = TYPE_PRECISION (type), w;
10067 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10068 	  wide_int c2 = wi::to_wide (arg1);
10069 
10070 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
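	  /* For example, (x & 0x0f) | 0xff is just 0xff, with any side
	     effects of x preserved.  */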
10071 	  if ((c1 & c2) == c1)
10072 	    return omit_one_operand_loc (loc, type, arg1,
10073 					 TREE_OPERAND (arg0, 0));
10074 
10075 	  wide_int msk = wi::mask (width, false,
10076 				   TYPE_PRECISION (TREE_TYPE (arg1)));
10077 
10078 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
10079 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
10080 	    {
10081 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10082 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10083 	    }
10084 
10085 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10086 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10087 	     mode which allows further optimizations.  */
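	  /* For example, (x & 6) | 2 becomes (x & 4) | 2.  */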
10088 	  c1 &= msk;
10089 	  c2 &= msk;
10090 	  wide_int c3 = wi::bit_and_not (c1, c2);
10091 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10092 	    {
10093 	      wide_int mask = wi::mask (w, false,
10094 					TYPE_PRECISION (type));
10095 	      if (((c1 | c2) & mask) == mask
10096 		  && wi::bit_and_not (c1, mask) == 0)
10097 		{
10098 		  c3 = mask;
10099 		  break;
10100 		}
10101 	    }
10102 
10103 	  if (c3 != c1)
10104 	    {
10105 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10106 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10107 				     wide_int_to_tree (type, c3));
10108 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10109 	    }
10110 	}
10111 
10112       /* See if this can be simplified into a rotate first.  If that
10113 	 is unsuccessful continue in the association code.  */
10114       goto bit_rotate;
10115 
10116     case BIT_XOR_EXPR:
10117       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
10118       if (TREE_CODE (arg0) == BIT_AND_EXPR
10119 	  && INTEGRAL_TYPE_P (type)
10120 	  && integer_onep (TREE_OPERAND (arg0, 1))
10121 	  && integer_onep (arg1))
10122 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10123 				build_zero_cst (TREE_TYPE (arg0)));
10124 
10125       /* See if this can be simplified into a rotate first.  If that
10126 	 is unsuccessful continue in the association code.  */
10127       goto bit_rotate;
10128 
10129     case BIT_AND_EXPR:
10130       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
10131       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10132 	  && INTEGRAL_TYPE_P (type)
10133 	  && integer_onep (TREE_OPERAND (arg0, 1))
10134 	  && integer_onep (arg1))
10135 	{
10136 	  tree tem2;
10137 	  tem = TREE_OPERAND (arg0, 0);
10138 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10139 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10140 				  tem, tem2);
10141 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10142 				  build_zero_cst (TREE_TYPE (tem)));
10143 	}
10144       /* Fold ~X & 1 as (X & 1) == 0.  */
10145       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10146 	  && INTEGRAL_TYPE_P (type)
10147 	  && integer_onep (arg1))
10148 	{
10149 	  tree tem2;
10150 	  tem = TREE_OPERAND (arg0, 0);
10151 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10152 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10153 				  tem, tem2);
10154 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10155 				  build_zero_cst (TREE_TYPE (tem)));
10156 	}
10157       /* Fold !X & 1 as X == 0.  */
10158       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10159 	  && integer_onep (arg1))
10160 	{
10161 	  tem = TREE_OPERAND (arg0, 0);
10162 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10163 				  build_zero_cst (TREE_TYPE (tem)));
10164 	}
10165 
10166       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10167          multiple of 1 << CST.  */
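      /* For example, (x * 8) & -4 folds to x * 8, since x * 8 is always
	 a multiple of 4.  */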
10168       if (TREE_CODE (arg1) == INTEGER_CST)
10169 	{
10170 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10171 	  wide_int ncst1 = -cst1;
10172 	  if ((cst1 & ncst1) == ncst1
10173 	      && multiple_of_p (type, arg0,
10174 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10175 	    return fold_convert_loc (loc, type, arg0);
10176 	}
10177 
10178       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10179          bits from CST2.  */
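      /* For example, (x * 4) & 3 folds to 0, and (x * 4) & 7 becomes
	 (x * 4) & 4, because the low two bits of x * 4 are known to be
	 zero.  */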
10180       if (TREE_CODE (arg1) == INTEGER_CST
10181 	  && TREE_CODE (arg0) == MULT_EXPR
10182 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10183 	{
10184 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10185 	  wide_int masked
10186 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10187 
10188 	  if (masked == 0)
10189 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10190 	                                  arg0, arg1);
10191 	  else if (masked != warg1)
10192 	    {
10193 	      /* Avoid the transform if arg1 is a mask of some
10194 	         mode which allows further optimizations.  */
10195 	      int pop = wi::popcount (warg1);
10196 	      if (!(pop >= BITS_PER_UNIT
10197 		    && pow2p_hwi (pop)
10198 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10199 		return fold_build2_loc (loc, code, type, op0,
10200 					wide_int_to_tree (type, masked));
10201 	    }
10202 	}
10203 
10204       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10205 	 ((A & N) + B) & M -> (A + B) & M
10206 	 Similarly if (N & M) == 0,
10207 	 ((A | N) + B) & M -> (A + B) & M
10208 	 and for - instead of + (or unary - instead of +)
10209 	 and/or ^ instead of |.
10210 	 If B is constant and (B & M) == 0, fold into A & M.  */
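      /* For example, with M == 7, ((a | 8) + b) & 7 becomes (a + b) & 7,
	 as does ((a & 7) + b) & 7.  */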
10211       if (TREE_CODE (arg1) == INTEGER_CST)
10212 	{
10213 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10214 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10215 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10216 	      && (TREE_CODE (arg0) == PLUS_EXPR
10217 		  || TREE_CODE (arg0) == MINUS_EXPR
10218 		  || TREE_CODE (arg0) == NEGATE_EXPR)
10219 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10220 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10221 	    {
10222 	      tree pmop[2];
10223 	      int which = 0;
10224 	      wide_int cst0;
10225 
10226 	      /* Now we know that arg0 is (C + D) or (C - D) or
10227 		 -C and arg1 (M) is == (1LL << cst) - 1.
10228 		 Store C into PMOP[0] and D into PMOP[1].  */
10229 	      pmop[0] = TREE_OPERAND (arg0, 0);
10230 	      pmop[1] = NULL;
10231 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
10232 		{
10233 		  pmop[1] = TREE_OPERAND (arg0, 1);
10234 		  which = 1;
10235 		}
10236 
10237 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10238 		which = -1;
10239 
10240 	      for (; which >= 0; which--)
10241 		switch (TREE_CODE (pmop[which]))
10242 		  {
10243 		  case BIT_AND_EXPR:
10244 		  case BIT_IOR_EXPR:
10245 		  case BIT_XOR_EXPR:
10246 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10247 			!= INTEGER_CST)
10248 		      break;
10249 		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10250 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10251 		      {
10252 			if (cst0 != cst1)
10253 			  break;
10254 		      }
10255 		    else if (cst0 != 0)
10256 		      break;
10257 		    /* If C or D is of the form (A & N) where
10258 		       (N & M) == M, or of the form (A | N) or
10259 		       (A ^ N) where (N & M) == 0, replace it with A.  */
10260 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
10261 		    break;
10262 		  case INTEGER_CST:
10263 		    /* If C or D is a N where (N & M) == 0, it can be
10264 		       omitted (assumed 0).  */
10265 		    if ((TREE_CODE (arg0) == PLUS_EXPR
10266 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10267 			&& (cst1 & wi::to_wide (pmop[which])) == 0)
10268 		      pmop[which] = NULL;
10269 		    break;
10270 		  default:
10271 		    break;
10272 		  }
10273 
10274 	      /* Only build anything new if we optimized one or both arguments
10275 		 above.  */
10276 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
10277 		  || (TREE_CODE (arg0) != NEGATE_EXPR
10278 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
10279 		{
10280 		  tree utype = TREE_TYPE (arg0);
10281 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10282 		    {
10283 		      /* Perform the operations in a type that has defined
10284 			 overflow behavior.  */
10285 		      utype = unsigned_type_for (TREE_TYPE (arg0));
10286 		      if (pmop[0] != NULL)
10287 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10288 		      if (pmop[1] != NULL)
10289 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10290 		    }
10291 
10292 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
10293 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10294 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
10295 		    {
10296 		      if (pmop[0] != NULL && pmop[1] != NULL)
10297 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10298 					       pmop[0], pmop[1]);
10299 		      else if (pmop[0] != NULL)
10300 			tem = pmop[0];
10301 		      else if (pmop[1] != NULL)
10302 			tem = pmop[1];
10303 		      else
10304 			return build_int_cst (type, 0);
10305 		    }
10306 		  else if (pmop[0] == NULL)
10307 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10308 		  else
10309 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10310 					   pmop[0], pmop[1]);
10311 		  /* TEM is now the new binary +, - or unary - replacement.  */
10312 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10313 					 fold_convert_loc (loc, utype, arg1));
10314 		  return fold_convert_loc (loc, type, tem);
10315 		}
10316 	    }
10317 	}
10318 
10319       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10320       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10321 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10322 	{
10323 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10324 
10325 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10326 	  if (mask == -1)
10327 	    return
10328 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10329 	}
10330 
10331       goto associate;
10332 
10333     case RDIV_EXPR:
10334       /* Don't touch a floating-point divide by zero unless the mode
10335 	 of the constant can represent infinity.  */
10336       if (TREE_CODE (arg1) == REAL_CST
10337 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10338 	  && real_zerop (arg1))
10339 	return NULL_TREE;
10340 
10341       /* (-A) / (-B) -> A / B  */
10342       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10343 	return fold_build2_loc (loc, RDIV_EXPR, type,
10344 			    TREE_OPERAND (arg0, 0),
10345 			    negate_expr (arg1));
10346       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10347 	return fold_build2_loc (loc, RDIV_EXPR, type,
10348 			    negate_expr (arg0),
10349 			    TREE_OPERAND (arg1, 0));
10350       return NULL_TREE;
10351 
10352     case TRUNC_DIV_EXPR:
10353       /* Fall through */
10354 
10355     case FLOOR_DIV_EXPR:
10356       /* Simplify A / (B << N) where A and B are positive and B is
10357 	 a power of 2, to A >> (N + log2(B)).  */
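      /* For example, for unsigned a, a / (4 << n) becomes a >> (n + 2).  */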
10358       strict_overflow_p = false;
10359       if (TREE_CODE (arg1) == LSHIFT_EXPR
10360 	  && (TYPE_UNSIGNED (type)
10361 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10362 	{
10363 	  tree sval = TREE_OPERAND (arg1, 0);
10364 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10365 	    {
10366 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10367 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10368 					 wi::exact_log2 (wi::to_wide (sval)));
10369 
10370 	      if (strict_overflow_p)
10371 		fold_overflow_warning (("assuming signed overflow does not "
10372 					"occur when simplifying A / (B << N)"),
10373 				       WARN_STRICT_OVERFLOW_MISC);
10374 
10375 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10376 					sh_cnt, pow2);
10377 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10378 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10379 	    }
10380 	}
10381 
10382       /* Fall through */
10383 
10384     case ROUND_DIV_EXPR:
10385     case CEIL_DIV_EXPR:
10386     case EXACT_DIV_EXPR:
10387       if (integer_zerop (arg1))
10388 	return NULL_TREE;
10389 
10390       /* Convert -A / -B to A / B when the type is signed and overflow is
10391 	 undefined.  */
10392       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10393 	  && TREE_CODE (op0) == NEGATE_EXPR
10394 	  && negate_expr_p (op1))
10395 	{
10396 	  if (INTEGRAL_TYPE_P (type))
10397 	    fold_overflow_warning (("assuming signed overflow does not occur "
10398 				    "when distributing negation across "
10399 				    "division"),
10400 				   WARN_STRICT_OVERFLOW_MISC);
10401 	  return fold_build2_loc (loc, code, type,
10402 				  fold_convert_loc (loc, type,
10403 						    TREE_OPERAND (arg0, 0)),
10404 				  negate_expr (op1));
10405 	}
10406       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10407 	  && TREE_CODE (arg1) == NEGATE_EXPR
10408 	  && negate_expr_p (op0))
10409 	{
10410 	  if (INTEGRAL_TYPE_P (type))
10411 	    fold_overflow_warning (("assuming signed overflow does not occur "
10412 				    "when distributing negation across "
10413 				    "division"),
10414 				   WARN_STRICT_OVERFLOW_MISC);
10415 	  return fold_build2_loc (loc, code, type,
10416 				  negate_expr (op0),
10417 				  fold_convert_loc (loc, type,
10418 						    TREE_OPERAND (arg1, 0)));
10419 	}
10420 
10421       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10422 	 operation, EXACT_DIV_EXPR.
10423 
10424 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10425 	 At one time others generated faster code, but it's not clear if they
10426 	 do after the last round of changes to the DIV code in expmed.c.  */
10427       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10428 	  && multiple_of_p (type, arg0, arg1))
10429 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10430 				fold_convert (type, arg0),
10431 				fold_convert (type, arg1));
10432 
10433       strict_overflow_p = false;
10434       if (TREE_CODE (arg1) == INTEGER_CST
10435 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10436 				    &strict_overflow_p)) != 0)
10437 	{
10438 	  if (strict_overflow_p)
10439 	    fold_overflow_warning (("assuming signed overflow does not occur "
10440 				    "when simplifying division"),
10441 				   WARN_STRICT_OVERFLOW_MISC);
10442 	  return fold_convert_loc (loc, type, tem);
10443 	}
10444 
10445       return NULL_TREE;
10446 
10447     case CEIL_MOD_EXPR:
10448     case FLOOR_MOD_EXPR:
10449     case ROUND_MOD_EXPR:
10450     case TRUNC_MOD_EXPR:
10451       strict_overflow_p = false;
10452       if (TREE_CODE (arg1) == INTEGER_CST
10453 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10454 				    &strict_overflow_p)) != 0)
10455 	{
10456 	  if (strict_overflow_p)
10457 	    fold_overflow_warning (("assuming signed overflow does not occur "
10458 				    "when simplifying modulus"),
10459 				   WARN_STRICT_OVERFLOW_MISC);
10460 	  return fold_convert_loc (loc, type, tem);
10461 	}
10462 
10463       return NULL_TREE;
10464 
10465     case LROTATE_EXPR:
10466     case RROTATE_EXPR:
10467     case RSHIFT_EXPR:
10468     case LSHIFT_EXPR:
10469       /* Since a negative shift count is not well-defined,
10470 	 don't try to compute it in the compiler.  */
10471       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10472 	return NULL_TREE;
10473 
10474       prec = element_precision (type);
10475 
10476       /* If we have a rotate of a bit operation with the rotate count and
10477 	 the second operand of the bit operation both constant,
10478 	 permute the two operations.  */
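      /* For example, for a 32-bit x, rotating (x & 0xff00) right by 8
	 becomes (x rotated right by 8) & 0xff.  */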
10479       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10480 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10481 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10482 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10483 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10484 	{
10485 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10486 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10487 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10488 				  fold_build2_loc (loc, code, type,
10489 						   arg00, arg1),
10490 				  fold_build2_loc (loc, code, type,
10491 						   arg01, arg1));
10492 	}
10493 
10494       /* Two consecutive rotates adding up to some integer multiple
10495 	 of the precision of the type can be ignored.  */
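      /* For example, for a 32-bit x, (x rotated right by 7) rotated right
	 by 25 is just x.  */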
10496       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10497 	  && TREE_CODE (arg0) == RROTATE_EXPR
10498 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10499 	  && wi::umod_trunc (wi::to_wide (arg1)
10500 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
10501 			     prec) == 0)
10502 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10503 
10504       return NULL_TREE;
10505 
10506     case MIN_EXPR:
10507     case MAX_EXPR:
10508       goto associate;
10509 
10510     case TRUTH_ANDIF_EXPR:
10511       /* Note that the operands of this must be ints
10512 	 and their values must be 0 or 1.
10513 	 ("true" is a fixed value perhaps depending on the language.)  */
10514       /* If first arg is constant zero, return it.  */
10515       if (integer_zerop (arg0))
10516 	return fold_convert_loc (loc, type, arg0);
10517       /* FALLTHRU */
10518     case TRUTH_AND_EXPR:
10519       /* If either arg is constant true, drop it.  */
10520       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10521 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10522       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10523 	  /* Preserve sequence points.  */
10524 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10525 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10526       /* If second arg is constant zero, result is zero, but first arg
10527 	 must be evaluated.  */
10528       if (integer_zerop (arg1))
10529 	return omit_one_operand_loc (loc, type, arg1, arg0);
10530       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10531 	 case will be handled here.  */
10532       if (integer_zerop (arg0))
10533 	return omit_one_operand_loc (loc, type, arg0, arg1);
10534 
10535       /* !X && X is always false.  */
10536       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10537 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10538 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10539       /* X && !X is always false.  */
10540       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10541 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10542 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10543 
10544       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10545 	 means A >= Y && A != MAX, but in this case we know that
10546 	 A < X <= MAX.  */
10547 
10548       if (!TREE_SIDE_EFFECTS (arg0)
10549 	  && !TREE_SIDE_EFFECTS (arg1))
10550 	{
10551 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10552 	  if (tem && !operand_equal_p (tem, arg0, 0))
10553 	    return fold_build2_loc (loc, code, type, tem, arg1);
10554 
10555 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10556 	  if (tem && !operand_equal_p (tem, arg1, 0))
10557 	    return fold_build2_loc (loc, code, type, arg0, tem);
10558 	}
10559 
10560       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10561           != NULL_TREE)
10562         return tem;
10563 
10564       return NULL_TREE;
10565 
10566     case TRUTH_ORIF_EXPR:
10567       /* Note that the operands of this must be ints
10568 	 and their values must be 0 or true.
10569 	 ("true" is a fixed value perhaps depending on the language.)  */
10570       /* If first arg is constant true, return it.  */
10571       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10572 	return fold_convert_loc (loc, type, arg0);
10573       /* FALLTHRU */
10574     case TRUTH_OR_EXPR:
10575       /* If either arg is constant zero, drop it.  */
10576       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10577 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10578       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10579 	  /* Preserve sequence points.  */
10580 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10581 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10582       /* If second arg is constant true, result is true, but we must
10583 	 evaluate first arg.  */
10584       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10585 	return omit_one_operand_loc (loc, type, arg1, arg0);
10586       /* Likewise for first arg, but note this only occurs here for
10587 	 TRUTH_OR_EXPR.  */
10588       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10589 	return omit_one_operand_loc (loc, type, arg0, arg1);
10590 
10591       /* !X || X is always true.  */
10592       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10593 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10594 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10595       /* X || !X is always true.  */
10596       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10597 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10598 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10599 
10600       /* (X && !Y) || (!X && Y) is X ^ Y */
10601       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10602 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10603         {
10604 	  tree a0, a1, l0, l1, n0, n1;
10605 
10606 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10607 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10608 
10609 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10610 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10611 
10612 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10613 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10614 
10615 	  if ((operand_equal_p (n0, a0, 0)
10616 	       && operand_equal_p (n1, a1, 0))
10617 	      || (operand_equal_p (n0, a1, 0)
10618 		  && operand_equal_p (n1, a0, 0)))
10619 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10620 	}
10621 
10622       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10623           != NULL_TREE)
10624         return tem;
10625 
10626       return NULL_TREE;
10627 
10628     case TRUTH_XOR_EXPR:
10629       /* If the second arg is constant zero, drop it.  */
10630       if (integer_zerop (arg1))
10631 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10632       /* If the second arg is constant true, this is a logical inversion.  */
10633       if (integer_onep (arg1))
10634 	{
10635 	  tem = invert_truthvalue_loc (loc, arg0);
10636 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10637 	}
10638       /* Identical arguments cancel to zero.  */
10639       if (operand_equal_p (arg0, arg1, 0))
10640 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10641 
10642       /* !X ^ X is always true.  */
10643       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10644 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10645 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10646 
10647       /* X ^ !X is always true.  */
10648       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10649 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10650 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10651 
10652       return NULL_TREE;
10653 
10654     case EQ_EXPR:
10655     case NE_EXPR:
10656       STRIP_NOPS (arg0);
10657       STRIP_NOPS (arg1);
10658 
10659       tem = fold_comparison (loc, code, type, op0, op1);
10660       if (tem != NULL_TREE)
10661 	return tem;
10662 
10663       /* bool_var != 1 becomes !bool_var. */
10664       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10665           && code == NE_EXPR)
10666         return fold_convert_loc (loc, type,
10667 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10668 						  TREE_TYPE (arg0), arg0));
10669 
10670       /* bool_var == 0 becomes !bool_var. */
10671       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10672           && code == EQ_EXPR)
10673         return fold_convert_loc (loc, type,
10674 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10675 						  TREE_TYPE (arg0), arg0));
10676 
10677       /* !exp != 0 becomes !exp */
10678       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10679 	  && code == NE_EXPR)
10680         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10681 
10682       /* If this is an EQ or NE comparison with zero and ARG0 is
10683 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10684 	 two operations, but the latter can be done in one less insn
10685 	 on machines that have only two-operand insns or on which a
10686 	 constant cannot be the first operand.  */
10687       if (TREE_CODE (arg0) == BIT_AND_EXPR
10688 	  && integer_zerop (arg1))
10689 	{
10690 	  tree arg00 = TREE_OPERAND (arg0, 0);
10691 	  tree arg01 = TREE_OPERAND (arg0, 1);
10692 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10693 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10694 	    {
10695 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10696 				      arg01, TREE_OPERAND (arg00, 1));
10697 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10698 				 build_int_cst (TREE_TYPE (arg0), 1));
10699 	      return fold_build2_loc (loc, code, type,
10700 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10701 				  arg1);
10702 	    }
10703 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10704 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10705 	    {
10706 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10707 				      arg00, TREE_OPERAND (arg01, 1));
10708 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10709 				 build_int_cst (TREE_TYPE (arg0), 1));
10710 	      return fold_build2_loc (loc, code, type,
10711 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10712 				  arg1);
10713 	    }
10714 	}
10715 
10716       /* If this is an NE or EQ comparison of zero against the result of a
10717 	 signed MOD operation whose second operand is a power of 2, make
10718 	 the MOD operation unsigned since it is simpler and equivalent.  */
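      /* For example, for signed x, x % 4 == 0 becomes
	 (unsigned) x % 4U == 0.  */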
10719       if (integer_zerop (arg1)
10720 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10721 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10722 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10723 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10724 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10725 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10726 	{
10727 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10728 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10729 				     fold_convert_loc (loc, newtype,
10730 						       TREE_OPERAND (arg0, 0)),
10731 				     fold_convert_loc (loc, newtype,
10732 						       TREE_OPERAND (arg0, 1)));
10733 
10734 	  return fold_build2_loc (loc, code, type, newmod,
10735 			      fold_convert_loc (loc, newtype, arg1));
10736 	}
10737 
10738       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10739 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10740 	 a single bit.  */
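      /* For example, ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since
	 4 << 3 == 32 does not overflow.  */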
10741       if (TREE_CODE (arg0) == BIT_AND_EXPR
10742 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10743 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10744 	     == INTEGER_CST
10745 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10746 	  && integer_zerop (arg1))
10747 	{
10748 	  tree itype = TREE_TYPE (arg0);
10749 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10750 	  prec = TYPE_PRECISION (itype);
10751 
10752 	  /* Check for a valid shift count.  */
10753 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
10754 	    {
10755 	      tree arg01 = TREE_OPERAND (arg0, 1);
10756 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10757 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10758 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10759 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10760 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10761 		{
10762 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10763 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10764 		  return fold_build2_loc (loc, code, type, tem,
10765 					  fold_convert_loc (loc, itype, arg1));
10766 		}
10767 	      /* Otherwise, for signed (arithmetic) shifts,
10768 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10769 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10770 	      else if (!TYPE_UNSIGNED (itype))
10771 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10772 				    arg000, build_int_cst (itype, 0));
10773 	      /* Otherwise, for unsigned (logical) shifts,
10774 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10775 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10776 	      else
10777 		return omit_one_operand_loc (loc, type,
10778 					 code == EQ_EXPR ? integer_one_node
10779 							 : integer_zero_node,
10780 					 arg000);
10781 	    }
10782 	}
10783 
10784       /* If this is a comparison of a field, we may be able to simplify it.  */
10785       if ((TREE_CODE (arg0) == COMPONENT_REF
10786 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10787 	  /* Handle the constant case even without -O
10788 	     to make sure the warnings are given.  */
10789 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10790 	{
10791 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10792 	  if (t1)
10793 	    return t1;
10794 	}
10795 
10796       /* Optimize comparisons of strlen vs zero to a compare of the
10797 	 first character of the string vs zero.  To wit,
10798 		strlen(ptr) == 0   =>  *ptr == 0
10799 		strlen(ptr) != 0   =>  *ptr != 0
10800 	 Other cases should reduce to one of these two (or a constant)
10801 	 due to the return value of strlen being unsigned.  */
10802       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
10803 	{
10804 	  tree fndecl = get_callee_fndecl (arg0);
10805 
10806 	  if (fndecl
10807 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10808 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10809 	      && call_expr_nargs (arg0) == 1
10810 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
10811 		  == POINTER_TYPE))
10812 	    {
10813 	      tree ptrtype
10814 		= build_pointer_type (build_qualified_type (char_type_node,
10815 							    TYPE_QUAL_CONST));
10816 	      tree ptr = fold_convert_loc (loc, ptrtype,
10817 					   CALL_EXPR_ARG (arg0, 0));
10818 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
10819 	      return fold_build2_loc (loc, code, type, iref,
10820 				      build_int_cst (TREE_TYPE (iref), 0));
10821 	    }
10822 	}
10823 
10824       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10825 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10826       if (TREE_CODE (arg0) == RSHIFT_EXPR
10827 	  && integer_zerop (arg1)
10828 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10829 	{
10830 	  tree arg00 = TREE_OPERAND (arg0, 0);
10831 	  tree arg01 = TREE_OPERAND (arg0, 1);
10832 	  tree itype = TREE_TYPE (arg00);
10833 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
10834 	    {
10835 	      if (TYPE_UNSIGNED (itype))
10836 		{
10837 		  itype = signed_type_for (itype);
10838 		  arg00 = fold_convert_loc (loc, itype, arg00);
10839 		}
10840 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10841 				  type, arg00, build_zero_cst (itype));
10842 	    }
10843 	}
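      /* For instance (illustrative): with 32-bit int X,
	 "(X >> 31) != 0" becomes "X < 0" and "(X >> 31) == 0" becomes
	 "X >= 0"; an unsigned X is first converted to the corresponding
	 signed type.  */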
10844 
10845       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10846 	 (X & C) == 0 when C is a single bit.  */
10847       if (TREE_CODE (arg0) == BIT_AND_EXPR
10848 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10849 	  && integer_zerop (arg1)
10850 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10851 	{
10852 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10853 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10854 				 TREE_OPERAND (arg0, 1));
10855 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10856 				  type, tem,
10857 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10858 						    arg1));
10859 	}
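      /* E.g. (illustrative): "(~X & 8) == 0" holds exactly when bit 3
	 of X is set, so it becomes "(X & 8) != 0".  */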
10860 
10861       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10862 	 constant C is a power of two, i.e. a single bit.  */
10863       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10864 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10865 	  && integer_zerop (arg1)
10866 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10867 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10868 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10869 	{
10870 	  tree arg00 = TREE_OPERAND (arg0, 0);
10871 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10872 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10873 	}
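      /* E.g. (illustrative): "((X & 4) ^ 4) == 0" holds exactly when
	 "X & 4" equals 4, so it becomes "(X & 4) != 0".  */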
10874 
10875       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10876 	 when C is a power of two, i.e. a single bit.  */
10877       if (TREE_CODE (arg0) == BIT_AND_EXPR
10878 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10879 	  && integer_zerop (arg1)
10880 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10881 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10882 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10883 	{
10884 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10885 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10886 			     arg000, TREE_OPERAND (arg0, 1));
10887 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10888 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10889 	}
10890 
10891       if (integer_zerop (arg1)
10892 	  && tree_expr_nonzero_p (arg0))
10893         {
10894 	  tree res = constant_boolean_node (code==NE_EXPR, type);
10895 	  return omit_one_operand_loc (loc, type, res, arg0);
10896 	}
10897 
10898       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
10899       if (TREE_CODE (arg0) == BIT_AND_EXPR
10900 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10901 	{
10902 	  tree arg00 = TREE_OPERAND (arg0, 0);
10903 	  tree arg01 = TREE_OPERAND (arg0, 1);
10904 	  tree arg10 = TREE_OPERAND (arg1, 0);
10905 	  tree arg11 = TREE_OPERAND (arg1, 1);
10906 	  tree itype = TREE_TYPE (arg0);
10907 
10908 	  if (operand_equal_p (arg01, arg11, 0))
10909 	    {
10910 	      tem = fold_convert_loc (loc, itype, arg10);
10911 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10912 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10913 	      return fold_build2_loc (loc, code, type, tem,
10914 				      build_zero_cst (itype));
10915 	    }
10916 	  if (operand_equal_p (arg01, arg10, 0))
10917 	    {
10918 	      tem = fold_convert_loc (loc, itype, arg11);
10919 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10920 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10921 	      return fold_build2_loc (loc, code, type, tem,
10922 				      build_zero_cst (itype));
10923 	    }
10924 	  if (operand_equal_p (arg00, arg11, 0))
10925 	    {
10926 	      tem = fold_convert_loc (loc, itype, arg10);
10927 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10928 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10929 	      return fold_build2_loc (loc, code, type, tem,
10930 				      build_zero_cst (itype));
10931 	    }
10932 	  if (operand_equal_p (arg00, arg10, 0))
10933 	    {
10934 	      tem = fold_convert_loc (loc, itype, arg11);
10935 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10936 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10937 	      return fold_build2_loc (loc, code, type, tem,
10938 				      build_zero_cst (itype));
10939 	    }
10940 	}
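      /* E.g. (illustrative): "(X & 7) == (Y & 7)" compares only the low
	 three bits, so it becomes "((X ^ Y) & 7) == 0".  */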
10941 
10942       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10943 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10944 	{
10945 	  tree arg00 = TREE_OPERAND (arg0, 0);
10946 	  tree arg01 = TREE_OPERAND (arg0, 1);
10947 	  tree arg10 = TREE_OPERAND (arg1, 0);
10948 	  tree arg11 = TREE_OPERAND (arg1, 1);
10949 	  tree itype = TREE_TYPE (arg0);
10950 
10951 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10952 	     operand_equal_p guarantees no side-effects so we don't need
10953 	     to use omit_one_operand on Z.  */
10954 	  if (operand_equal_p (arg01, arg11, 0))
10955 	    return fold_build2_loc (loc, code, type, arg00,
10956 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10957 						      arg10));
10958 	  if (operand_equal_p (arg01, arg10, 0))
10959 	    return fold_build2_loc (loc, code, type, arg00,
10960 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10961 						      arg11));
10962 	  if (operand_equal_p (arg00, arg11, 0))
10963 	    return fold_build2_loc (loc, code, type, arg01,
10964 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10965 						      arg10));
10966 	  if (operand_equal_p (arg00, arg10, 0))
10967 	    return fold_build2_loc (loc, code, type, arg01,
10968 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10969 						      arg11));
10970 
10971 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10972 	  if (TREE_CODE (arg01) == INTEGER_CST
10973 	      && TREE_CODE (arg11) == INTEGER_CST)
10974 	    {
10975 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10976 				     fold_convert_loc (loc, itype, arg11));
10977 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10978 	      return fold_build2_loc (loc, code, type, tem,
10979 				      fold_convert_loc (loc, itype, arg10));
10980 	    }
10981 	}
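      /* E.g. (illustrative): "(X ^ Z) == (Y ^ Z)" becomes "X == Y",
	 and "(X ^ 5) == (Y ^ 3)" becomes "(X ^ (5 ^ 3)) == Y",
	 i.e. "(X ^ 6) == Y".  */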
10982 
10983       /* Attempt to simplify equality/inequality comparisons of complex
10984 	 values.  Only lower the comparison if the result is known or
10985 	 can be simplified to a single scalar comparison.  */
10986       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10987 	   || TREE_CODE (arg0) == COMPLEX_CST)
10988 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10989 	      || TREE_CODE (arg1) == COMPLEX_CST))
10990 	{
10991 	  tree real0, imag0, real1, imag1;
10992 	  tree rcond, icond;
10993 
10994 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
10995 	    {
10996 	      real0 = TREE_OPERAND (arg0, 0);
10997 	      imag0 = TREE_OPERAND (arg0, 1);
10998 	    }
10999 	  else
11000 	    {
11001 	      real0 = TREE_REALPART (arg0);
11002 	      imag0 = TREE_IMAGPART (arg0);
11003 	    }
11004 
11005 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
11006 	    {
11007 	      real1 = TREE_OPERAND (arg1, 0);
11008 	      imag1 = TREE_OPERAND (arg1, 1);
11009 	    }
11010 	  else
11011 	    {
11012 	      real1 = TREE_REALPART (arg1);
11013 	      imag1 = TREE_IMAGPART (arg1);
11014 	    }
11015 
11016 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11017 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11018 	    {
11019 	      if (integer_zerop (rcond))
11020 		{
11021 		  if (code == EQ_EXPR)
11022 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11023 					      imag0, imag1);
11024 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11025 		}
11026 	      else
11027 		{
11028 		  if (code == NE_EXPR)
11029 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11030 					      imag0, imag1);
11031 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11032 		}
11033 	    }
11034 
11035 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11036 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11037 	    {
11038 	      if (integer_zerop (icond))
11039 		{
11040 		  if (code == EQ_EXPR)
11041 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11042 					      real0, real1);
11043 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11044 		}
11045 	      else
11046 		{
11047 		  if (code == NE_EXPR)
11048 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11049 					      real0, real1);
11050 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11051 		}
11052 	    }
11053 	}
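      /* For example (illustrative): for integral complex operands,
	 COMPLEX_EXPR <a, b> == COMPLEX_EXPR <a, c> folds the real-part
	 comparison "a == a" to a nonzero constant, so the whole test
	 reduces to the single scalar comparison "b == c".  */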
11054 
11055       return NULL_TREE;
11056 
11057     case LT_EXPR:
11058     case GT_EXPR:
11059     case LE_EXPR:
11060     case GE_EXPR:
11061       tem = fold_comparison (loc, code, type, op0, op1);
11062       if (tem != NULL_TREE)
11063 	return tem;
11064 
11065       /* Transform comparisons of the form X +- C CMP X.  */
11066       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11067 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11068 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11069 	  && !HONOR_SNANS (arg0))
11070 	{
11071 	  tree arg01 = TREE_OPERAND (arg0, 1);
11072 	  enum tree_code code0 = TREE_CODE (arg0);
11073 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11074 
11075 	  /* (X - c) > X becomes false.  */
11076 	  if (code == GT_EXPR
11077 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11078 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11079 	    return constant_boolean_node (0, type);
11080 
11081 	  /* Likewise (X + c) < X becomes false.  */
11082 	  if (code == LT_EXPR
11083 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11084 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11085 	    return constant_boolean_node (0, type);
11086 
11087 	  /* Convert (X - c) <= X to true.  */
11088 	  if (!HONOR_NANS (arg1)
11089 	      && code == LE_EXPR
11090 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11091 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11092 	    return constant_boolean_node (1, type);
11093 
11094 	  /* Convert (X + c) >= X to true.  */
11095 	  if (!HONOR_NANS (arg1)
11096 	      && code == GE_EXPR
11097 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11098 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11099 	    return constant_boolean_node (1, type);
11100 	}
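      /* E.g. (illustrative): for double X, when signaling NaNs need not
	 be honored (the default), "(X - 1.0) > X" folds to false (a NaN
	 X makes the comparison unordered, which is also false), while
	 "(X + 1.0) >= X" folds to true only when quiet NaNs need not be
	 honored either.  */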
11101 
11102       /* If we are comparing an ABS_EXPR with a constant, we can
11103 	 convert all the cases into explicit comparisons, but they may
11104 	 well not be faster than doing the ABS and one comparison.
11105 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11106 	 and a comparison, and is probably faster.  */
11107       if (code == LE_EXPR
11108 	  && TREE_CODE (arg1) == INTEGER_CST
11109 	  && TREE_CODE (arg0) == ABS_EXPR
11110 	  && ! TREE_SIDE_EFFECTS (arg0)
11111 	  && (tem = negate_expr (arg1)) != 0
11112 	  && TREE_CODE (tem) == INTEGER_CST
11113 	  && !TREE_OVERFLOW (tem))
11114 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11115 			    build2 (GE_EXPR, type,
11116 				    TREE_OPERAND (arg0, 0), tem),
11117 			    build2 (LE_EXPR, type,
11118 				    TREE_OPERAND (arg0, 0), arg1));
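      /* E.g. (illustrative): "ABS (X) <= 5" becomes the range test
	 "X >= -5 && X <= 5".  */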
11119 
11120       /* Convert ABS_EXPR<x> >= 0 to true.  */
11121       strict_overflow_p = false;
11122       if (code == GE_EXPR
11123 	  && (integer_zerop (arg1)
11124 	      || (! HONOR_NANS (arg0)
11125 		  && real_zerop (arg1)))
11126 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11127 	{
11128 	  if (strict_overflow_p)
11129 	    fold_overflow_warning (("assuming signed overflow does not occur "
11130 				    "when simplifying comparison of "
11131 				    "absolute value and zero"),
11132 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11133 	  return omit_one_operand_loc (loc, type,
11134 				       constant_boolean_node (true, type),
11135 				       arg0);
11136 	}
11137 
11138       /* Convert ABS_EXPR<x> < 0 to false.  */
11139       strict_overflow_p = false;
11140       if (code == LT_EXPR
11141 	  && (integer_zerop (arg1) || real_zerop (arg1))
11142 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11143 	{
11144 	  if (strict_overflow_p)
11145 	    fold_overflow_warning (("assuming signed overflow does not occur "
11146 				    "when simplifying comparison of "
11147 				    "absolute value and zero"),
11148 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11149 	  return omit_one_operand_loc (loc, type,
11150 				       constant_boolean_node (false, type),
11151 				       arg0);
11152 	}
11153 
11154       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11155 	 and similarly for >= into !=.  */
11156       if ((code == LT_EXPR || code == GE_EXPR)
11157 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11158 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11159 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11160 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11161 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11162 				   TREE_OPERAND (arg1, 1)),
11163 			   build_zero_cst (TREE_TYPE (arg0)));
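      /* E.g. (illustrative): for unsigned X, "X < (1 << Y)" becomes
	 "(X >> Y) == 0" and "X >= (1 << Y)" becomes "(X >> Y) != 0".  */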
11164 
11165       /* Similarly for X < (cast) (1 << Y).  But the cast can't be
11166 	 narrowing, otherwise Y might be >= # of bits in X's type and thus
11167 	 e.g. (unsigned char) (1 << Y) for Y == 15 might be 0.
11168 	 If the cast is widening, then 1 << Y should have unsigned type,
11169 	 otherwise, if Y is the number of bits in the signed shift type
11170 	 minus 1, we can't optimize this.  E.g. (unsigned long long)
11171 	 (1 << Y) for Y == 31 might be 0xffffffff80000000.  */
11172       if ((code == LT_EXPR || code == GE_EXPR)
11173 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11174 	  && CONVERT_EXPR_P (arg1)
11175 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11176 	  && (element_precision (TREE_TYPE (arg1))
11177 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11178 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11179 	      || (element_precision (TREE_TYPE (arg1))
11180 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11181 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11182 	{
11183 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11184 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11185 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11186 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11187 			     build_zero_cst (TREE_TYPE (arg0)));
11188 	}
11189 
11190       return NULL_TREE;
11191 
11192     case UNORDERED_EXPR:
11193     case ORDERED_EXPR:
11194     case UNLT_EXPR:
11195     case UNLE_EXPR:
11196     case UNGT_EXPR:
11197     case UNGE_EXPR:
11198     case UNEQ_EXPR:
11199     case LTGT_EXPR:
11200       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11201       {
11202 	tree targ0 = strip_float_extensions (arg0);
11203 	tree targ1 = strip_float_extensions (arg1);
11204 	tree newtype = TREE_TYPE (targ0);
11205 
11206 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11207 	  newtype = TREE_TYPE (targ1);
11208 
11209 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11210 	  return fold_build2_loc (loc, code, type,
11211 			      fold_convert_loc (loc, newtype, targ0),
11212 			      fold_convert_loc (loc, newtype, targ1));
11213       }
11214 
11215       return NULL_TREE;
11216 
11217     case COMPOUND_EXPR:
11218       /* When pedantic, a compound expression can be neither an lvalue
11219 	 nor an integer constant expression.  */
11220       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11221 	return NULL_TREE;
11222       /* Don't let (0, 0) be a null pointer constant.  */
11223       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11224 				 : fold_convert_loc (loc, type, arg1);
11225       return pedantic_non_lvalue_loc (loc, tem);
11226 
11227     case ASSERT_EXPR:
11228       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11229       gcc_unreachable ();
11230 
11231     default:
11232       return NULL_TREE;
11233     } /* switch (code) */
11234 }
11235 
11236 /* Used by contains_label_p and contains_label_1.  */
11237 
11238 struct contains_label_data
11239 {
11240   hash_set<tree> *pset;
11241   bool inside_switch_p;
11242 };
11243 
11244 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11245    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11246    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11247 
11248 static tree
11249 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11250 {
11251   contains_label_data *d = (contains_label_data *) data;
11252   switch (TREE_CODE (*tp))
11253     {
11254     case LABEL_EXPR:
11255       return *tp;
11256 
11257     case CASE_LABEL_EXPR:
11258       if (!d->inside_switch_p)
11259 	return *tp;
11260       return NULL_TREE;
11261 
11262     case SWITCH_EXPR:
11263       if (!d->inside_switch_p)
11264 	{
11265 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11266 	    return *tp;
11267 	  d->inside_switch_p = true;
11268 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11269 	    return *tp;
11270 	  d->inside_switch_p = false;
11271 	  *walk_subtrees = 0;
11272 	}
11273       return NULL_TREE;
11274 
11275     case GOTO_EXPR:
11276       *walk_subtrees = 0;
11277       return NULL_TREE;
11278 
11279     default:
11280       return NULL_TREE;
11281     }
11282 }
11283 
11284 /* Return whether the sub-tree ST contains a label which is accessible from
11285    outside the sub-tree.  */
11286 
11287 static bool
11288 contains_label_p (tree st)
11289 {
11290   hash_set<tree> pset;
11291   contains_label_data data = { &pset, false };
11292   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11293 }
11294 
11295 /* Fold a ternary expression of code CODE and type TYPE with operands
11296    OP0, OP1, and OP2.  Return the folded expression if folding is
11297    successful.  Otherwise, return NULL_TREE.  */
11298 
11299 tree
11300 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11301 		  tree op0, tree op1, tree op2)
11302 {
11303   tree tem;
11304   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11305   enum tree_code_class kind = TREE_CODE_CLASS (code);
11306 
11307   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11308 	      && TREE_CODE_LENGTH (code) == 3);
11309 
11310   /* If this is a commutative operation, and OP0 is a constant, move it
11311      to OP1 to reduce the number of tests below.  */
11312   if (commutative_ternary_tree_code (code)
11313       && tree_swap_operands_p (op0, op1))
11314     return fold_build3_loc (loc, code, type, op1, op0, op2);
11315 
11316   tem = generic_simplify (loc, code, type, op0, op1, op2);
11317   if (tem)
11318     return tem;
11319 
11320   /* Strip any conversions that don't change the mode.  This is safe
11321      for every expression, except for a comparison expression because
11322      its signedness is derived from its operands.  So, in the latter
11323      case, only strip conversions that don't change the signedness.
11324 
11325      Note that this is done as an internal manipulation within the
11326      constant folder, in order to find the simplest representation of
11327      the arguments so that their form can be studied.  In any case,
11328      the appropriate type conversions should be put back in the tree
11329      that will get out of the constant folder.  */
11330   if (op0)
11331     {
11332       arg0 = op0;
11333       STRIP_NOPS (arg0);
11334     }
11335 
11336   if (op1)
11337     {
11338       arg1 = op1;
11339       STRIP_NOPS (arg1);
11340     }
11341 
11342   if (op2)
11343     {
11344       arg2 = op2;
11345       STRIP_NOPS (arg2);
11346     }
11347 
11348   switch (code)
11349     {
11350     case COMPONENT_REF:
11351       if (TREE_CODE (arg0) == CONSTRUCTOR
11352 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11353 	{
11354 	  unsigned HOST_WIDE_INT idx;
11355 	  tree field, value;
11356 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11357 	    if (field == arg1)
11358 	      return value;
11359 	}
11360       return NULL_TREE;
11361 
11362     case COND_EXPR:
11363     case VEC_COND_EXPR:
11364       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11365 	 so all simple results must be passed through pedantic_non_lvalue.  */
11366       if (TREE_CODE (arg0) == INTEGER_CST)
11367 	{
11368 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11369 	  tem = integer_zerop (arg0) ? op2 : op1;
11370 	  /* Only optimize constant conditions when the selected branch
11371 	     has the same type as the COND_EXPR.  This avoids optimizing
11372              away "c ? x : throw", where the throw has a void type.
11373              Avoid throwing away an operand that contains a label.  */
11374           if ((!TREE_SIDE_EFFECTS (unused_op)
11375                || !contains_label_p (unused_op))
11376               && (! VOID_TYPE_P (TREE_TYPE (tem))
11377                   || VOID_TYPE_P (type)))
11378 	    return pedantic_non_lvalue_loc (loc, tem);
11379 	  return NULL_TREE;
11380 	}
11381       else if (TREE_CODE (arg0) == VECTOR_CST)
11382 	{
11383 	  unsigned HOST_WIDE_INT nelts;
11384 	  if ((TREE_CODE (arg1) == VECTOR_CST
11385 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11386 	      && (TREE_CODE (arg2) == VECTOR_CST
11387 		  || TREE_CODE (arg2) == CONSTRUCTOR)
11388 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11389 	    {
11390 	      vec_perm_builder sel (nelts, nelts, 1);
11391 	      for (unsigned int i = 0; i < nelts; i++)
11392 		{
11393 		  tree val = VECTOR_CST_ELT (arg0, i);
11394 		  if (integer_all_onesp (val))
11395 		    sel.quick_push (i);
11396 		  else if (integer_zerop (val))
11397 		    sel.quick_push (nelts + i);
11398 		  else /* Currently unreachable.  */
11399 		    return NULL_TREE;
11400 		}
11401 	      vec_perm_indices indices (sel, 2, nelts);
11402 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
11403 	      if (t != NULL_TREE)
11404 		return t;
11405 	    }
11406 	}
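      /* E.g. (illustrative): a four-element VEC_COND_EXPR whose constant
	 mask is { -1, 0, -1, 0 } is folded as a permutation with selector
	 { 0, 5, 2, 7 }, taking lanes 0 and 2 from OP1 and lanes 1 and 3
	 from OP2.  */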
11407 
11408       /* If we have A op B ? A : C, we may be able to convert this to a
11409 	 simpler expression, depending on the operation and the values
11410 	 of B and C.  Signed zeros prevent all of these transformations,
11411 	 for reasons given above each one.
11412 
11413          Also try swapping the arguments and inverting the conditional.  */
11414       if (COMPARISON_CLASS_P (arg0)
11415 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11416 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11417 	{
11418 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11419 	  if (tem)
11420 	    return tem;
11421 	}
11422 
11423       if (COMPARISON_CLASS_P (arg0)
11424 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11425 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11426 	{
11427 	  location_t loc0 = expr_location_or (arg0, loc);
11428 	  tem = fold_invert_truthvalue (loc0, arg0);
11429 	  if (tem && COMPARISON_CLASS_P (tem))
11430 	    {
11431 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11432 	      if (tem)
11433 		return tem;
11434 	    }
11435 	}
11436 
11437       /* If the second operand is simpler than the third, swap them
11438 	 since that produces better jump optimization results.  */
11439       if (truth_value_p (TREE_CODE (arg0))
11440 	  && tree_swap_operands_p (op1, op2))
11441 	{
11442 	  location_t loc0 = expr_location_or (arg0, loc);
11443 	  /* See if this can be inverted.  If it can't, possibly because
11444 	     it was a floating-point inequality comparison, don't do
11445 	     anything.  */
11446 	  tem = fold_invert_truthvalue (loc0, arg0);
11447 	  if (tem)
11448 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11449 	}
11450 
11451       /* Convert A ? 1 : 0 to simply A.  */
11452       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11453 				 : (integer_onep (op1)
11454 				    && !VECTOR_TYPE_P (type)))
11455 	  && integer_zerop (op2)
11456 	  /* If we try to convert OP0 to our type, the
11457 	     call to fold will try to move the conversion inside
11458 	     a COND, which will recurse.  In that case, the COND_EXPR
11459 	     is probably the best choice, so leave it alone.  */
11460 	  && type == TREE_TYPE (arg0))
11461 	return pedantic_non_lvalue_loc (loc, arg0);
11462 
11463       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11464 	 over COND_EXPR in cases such as floating point comparisons.  */
11465       if (integer_zerop (op1)
11466 	  && code == COND_EXPR
11467 	  && integer_onep (op2)
11468 	  && !VECTOR_TYPE_P (type)
11469 	  && truth_value_p (TREE_CODE (arg0)))
11470 	return pedantic_non_lvalue_loc (loc,
11471 				    fold_convert_loc (loc, type,
11472 					      invert_truthvalue_loc (loc,
11473 								     arg0)));
11474 
11475       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11476       if (TREE_CODE (arg0) == LT_EXPR
11477 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11478 	  && integer_zerop (op2)
11479 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11480 	{
11481 	  /* sign_bit_p looks through both zero and sign extensions,
11482 	     but for this optimization only sign extensions are
11483 	     usable.  */
11484 	  tree tem2 = TREE_OPERAND (arg0, 0);
11485 	  while (tem != tem2)
11486 	    {
11487 	      if (TREE_CODE (tem2) != NOP_EXPR
11488 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11489 		{
11490 		  tem = NULL_TREE;
11491 		  break;
11492 		}
11493 	      tem2 = TREE_OPERAND (tem2, 0);
11494 	    }
11495 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11496 	     If <sign bit of A> has wider type than A, bits outside
11497 	     of A's precision in <sign bit of A> need to be checked.
11498 	     If they are all 0, this optimization needs to be done
11499 	     in unsigned A's type; if they are all 1, in signed A's type;
11500 	     otherwise this can't be done.  */
11501 	  if (tem
11502 	      && TYPE_PRECISION (TREE_TYPE (tem))
11503 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11504 	      && TYPE_PRECISION (TREE_TYPE (tem))
11505 		 < TYPE_PRECISION (type))
11506 	    {
11507 	      int inner_width, outer_width;
11508 	      tree tem_type;
11509 
11510 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11511 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11512 	      if (outer_width > TYPE_PRECISION (type))
11513 		outer_width = TYPE_PRECISION (type);
11514 
11515 	      wide_int mask = wi::shifted_mask
11516 		(inner_width, outer_width - inner_width, false,
11517 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11518 
11519 	      wide_int common = mask & wi::to_wide (arg1);
11520 	      if (common == mask)
11521 		{
11522 		  tem_type = signed_type_for (TREE_TYPE (tem));
11523 		  tem = fold_convert_loc (loc, tem_type, tem);
11524 		}
11525 	      else if (common == 0)
11526 		{
11527 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11528 		  tem = fold_convert_loc (loc, tem_type, tem);
11529 		}
11530 	      else
11531 		tem = NULL;
11532 	    }
11533 
11534 	  if (tem)
11535 	    return
11536 	      fold_convert_loc (loc, type,
11537 				fold_build2_loc (loc, BIT_AND_EXPR,
11538 					     TREE_TYPE (tem), tem,
11539 					     fold_convert_loc (loc,
11540 							       TREE_TYPE (tem),
11541 							       arg1)));
11542 	}
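      /* E.g. (illustrative): for 32-bit int A,
	 "A < 0 ? INT_MIN : 0" folds to "A & INT_MIN".  */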
11543 
11544       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11545 	 already handled above.  */
11546       if (TREE_CODE (arg0) == BIT_AND_EXPR
11547 	  && integer_onep (TREE_OPERAND (arg0, 1))
11548 	  && integer_zerop (op2)
11549 	  && integer_pow2p (arg1))
11550 	{
11551 	  tree tem = TREE_OPERAND (arg0, 0);
11552 	  STRIP_NOPS (tem);
11553 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11554 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11555               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11556 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11557 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11558 				    fold_convert_loc (loc, type,
11559 						      TREE_OPERAND (tem, 0)),
11560 				    op1);
11561 	}
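      /* E.g. (illustrative): "((A >> 4) & 1) ? 16 : 0" folds to
	 "A & 16".  */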
11562 
11563       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11564 	 is probably obsolete because the first operand should be a
11565 	 truth value (that's why we have the two cases above), but let's
11566 	 leave it in until we can confirm this for all front-ends.  */
11567       if (integer_zerop (op2)
11568 	  && TREE_CODE (arg0) == NE_EXPR
11569 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11570 	  && integer_pow2p (arg1)
11571 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11572 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11573 			      arg1, OEP_ONLY_CONST)
11574 	  /* operand_equal_p compares just the value, not the precision, so
11575 	     e.g. arg1 could be an 8-bit -128, which is a power of two, while
11576 	     the BIT_AND_EXPR's second operand is a 32-bit -128, which is not
11577 	     (or vice versa).  */
11578 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11579 	return pedantic_non_lvalue_loc (loc,
11580 					fold_convert_loc (loc, type,
11581 							  TREE_OPERAND (arg0,
11582 									0)));
11583 
11584       /* Disable the transformations below for vectors, since
11585 	 fold_binary_op_with_conditional_arg may undo them immediately,
11586 	 yielding an infinite loop.  */
11587       if (code == VEC_COND_EXPR)
11588 	return NULL_TREE;
11589 
11590       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11591       if (integer_zerop (op2)
11592 	  && truth_value_p (TREE_CODE (arg0))
11593 	  && truth_value_p (TREE_CODE (arg1))
11594 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11595 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11596 							   : TRUTH_ANDIF_EXPR,
11597 				type, fold_convert_loc (loc, type, arg0), op1);
11598 
11599       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11600       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11601 	  && truth_value_p (TREE_CODE (arg0))
11602 	  && truth_value_p (TREE_CODE (arg1))
11603 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11604 	{
11605 	  location_t loc0 = expr_location_or (arg0, loc);
11606 	  /* Only perform transformation if ARG0 is easily inverted.  */
11607 	  tem = fold_invert_truthvalue (loc0, arg0);
11608 	  if (tem)
11609 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11610 					 ? BIT_IOR_EXPR
11611 					 : TRUTH_ORIF_EXPR,
11612 				    type, fold_convert_loc (loc, type, tem),
11613 				    op1);
11614 	}
11615 
11616       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11617       if (integer_zerop (arg1)
11618 	  && truth_value_p (TREE_CODE (arg0))
11619 	  && truth_value_p (TREE_CODE (op2))
11620 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11621 	{
11622 	  location_t loc0 = expr_location_or (arg0, loc);
11623 	  /* Only perform transformation if ARG0 is easily inverted.  */
11624 	  tem = fold_invert_truthvalue (loc0, arg0);
11625 	  if (tem)
11626 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11627 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11628 				    type, fold_convert_loc (loc, type, tem),
11629 				    op2);
11630 	}
11631 
11632       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11633       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11634 	  && truth_value_p (TREE_CODE (arg0))
11635 	  && truth_value_p (TREE_CODE (op2))
11636 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11637 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11638 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11639 				type, fold_convert_loc (loc, type, arg0), op2);
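      /* E.g. (illustrative): for truth values A and B, the conversions
	 above yield "A && B" for "A ? B : 0", "!A || B" for "A ? B : 1",
	 "!A && B" for "A ? 0 : B", and "A || B" for "A ? 1 : B".  */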
11640 
11641       return NULL_TREE;
11642 
11643     case CALL_EXPR:
11644       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11645 	 of fold_ternary on them.  */
11646       gcc_unreachable ();
11647 
11648     case BIT_FIELD_REF:
11649       if (TREE_CODE (arg0) == VECTOR_CST
11650 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11651 	      || (VECTOR_TYPE_P (type)
11652 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11653 	  && tree_fits_uhwi_p (op1)
11654 	  && tree_fits_uhwi_p (op2))
11655 	{
11656 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11657 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11658 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11659 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11660 
11661 	  if (n != 0
11662 	      && (idx % width) == 0
11663 	      && (n % width) == 0
11664 	      && known_le ((idx + n) / width,
11665 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11666 	    {
11667 	      idx = idx / width;
11668 	      n = n / width;
11669 
11670 	      if (TREE_CODE (arg0) == VECTOR_CST)
11671 		{
11672 		  if (n == 1)
11673 		    {
11674 		      tem = VECTOR_CST_ELT (arg0, idx);
11675 		      if (VECTOR_TYPE_P (type))
11676 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11677 		      return tem;
11678 		    }
11679 
11680 		  tree_vector_builder vals (type, n, 1);
11681 		  for (unsigned i = 0; i < n; ++i)
11682 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11683 		  return vals.build ();
11684 		}
11685 	    }
11686 	}
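      /* E.g. (illustrative): a BIT_FIELD_REF of one 32-bit lane at bit
	 offset 64 of a constant V4SI vector simply returns
	 VECTOR_CST_ELT (arg0, 2).  */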
11687 
11688       /* On constants we can use native encode/interpret to constant
11689          fold (nearly) all BIT_FIELD_REFs.  */
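      /* E.g. (illustrative): a byte-aligned 32-bit BIT_FIELD_REF of a
	 constant is folded by serializing the constant into a byte
	 buffer with native_encode_expr and reading the four bytes back
	 with native_interpret_expr.  */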
11690       if (CONSTANT_CLASS_P (arg0)
11691 	  && can_native_interpret_type_p (type)
11692 	  && BITS_PER_UNIT == 8
11693 	  && tree_fits_uhwi_p (op1)
11694 	  && tree_fits_uhwi_p (op2))
11695 	{
11696 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11697 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11698 	  /* Limit us to a reasonable amount of work.  To relax the
11699 	     other limitations we need bit-shifting of the buffer
11700 	     and rounding up the size.  */
11701 	  if (bitpos % BITS_PER_UNIT == 0
11702 	      && bitsize % BITS_PER_UNIT == 0
11703 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11704 	    {
11705 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11706 	      unsigned HOST_WIDE_INT len
11707 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11708 				      bitpos / BITS_PER_UNIT);
11709 	      if (len > 0
11710 		  && len * BITS_PER_UNIT >= bitsize)
11711 		{
11712 		  tree v = native_interpret_expr (type, b,
11713 						  bitsize / BITS_PER_UNIT);
11714 		  if (v)
11715 		    return v;
11716 		}
11717 	    }
11718 	}
11719 
11720       return NULL_TREE;
11721 
11722     case FMA_EXPR:
11723       /* For integers we can decompose the FMA if possible.  */
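      /* E.g. (illustrative): FMA <2, 3, Z> becomes "6 + Z" below, and
	 FMA <X, Y, 0> becomes "X * Y".  */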
11724       if (TREE_CODE (arg0) == INTEGER_CST
11725 	  && TREE_CODE (arg1) == INTEGER_CST)
11726 	return fold_build2_loc (loc, PLUS_EXPR, type,
11727 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11728       if (integer_zerop (arg2))
11729 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11730 
11731       return fold_fma (loc, type, arg0, arg1, arg2);
11732 
11733     case VEC_PERM_EXPR:
11734       if (TREE_CODE (arg2) == VECTOR_CST)
11735 	{
11736 	  /* Build a vector of integers from the tree mask.  */
11737 	  vec_perm_builder builder;
11738 	  if (!tree_to_vec_perm_builder (&builder, arg2))
11739 	    return NULL_TREE;
11740 
11741 	  /* Create a vec_perm_indices for the integer vector.  */
11742 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11743 	  bool single_arg = (op0 == op1);
11744 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11745 
11746 	  /* Check for cases that fold to OP0 or OP1 in their original
11747 	     element order.  */
11748 	  if (sel.series_p (0, 1, 0, 1))
11749 	    return op0;
11750 	  if (sel.series_p (0, 1, nelts, 1))
11751 	    return op1;
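	  /* E.g. (illustrative): with four elements, the selector
	     { 0, 1, 2, 3 } yields OP0 and { 4, 5, 6, 7 } yields OP1.  */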
11752 
11753 	  if (!single_arg)
11754 	    {
11755 	      if (sel.all_from_input_p (0))
11756 		op1 = op0;
11757 	      else if (sel.all_from_input_p (1))
11758 		{
11759 		  op0 = op1;
11760 		  sel.rotate_inputs (1);
11761 		}
11762 	    }
11763 
11764 	  if ((TREE_CODE (op0) == VECTOR_CST
11765 	       || TREE_CODE (op0) == CONSTRUCTOR)
11766 	      && (TREE_CODE (op1) == VECTOR_CST
11767 		  || TREE_CODE (op1) == CONSTRUCTOR))
11768 	    {
11769 	      tree t = fold_vec_perm (type, op0, op1, sel);
11770 	      if (t != NULL_TREE)
11771 		return t;
11772 	    }
11773 
11774 	  bool changed = (op0 == op1 && !single_arg);
11775 
11776 	  /* Generate a canonical form of the selector.  */
11777 	  if (arg2 == op2 && sel.encoding () != builder)
11778 	    {
11779 	      /* Some targets are deficient and fail to expand a single
11780 		 argument permutation while still allowing an equivalent
11781 		 2-argument version.  */
11782 	      if (sel.ninputs () == 2
11783 		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11784 		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11785 	      else
11786 		{
11787 		  vec_perm_indices sel2 (builder, 2, nelts);
11788 		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11789 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11790 		  else
11791 		    /* Not directly supported with either encoding,
11792 		       so use the preferred form.  */
11793 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11794 		}
11795 	      changed = true;
11796 	    }
11797 
11798 	  if (changed)
11799 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11800 	}
11801       return NULL_TREE;
11802 
11803     case BIT_INSERT_EXPR:
11804       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
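      /* E.g. (illustrative): inserting the 8-bit constant 0xAB at bit
	 position 8 of the 32-bit constant 0x12345678 yields
	 0x1234AB78.  */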
11805       if (TREE_CODE (arg0) == INTEGER_CST
11806 	  && TREE_CODE (arg1) == INTEGER_CST)
11807 	{
11808 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11809 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11810 	  wide_int tem = (wi::to_wide (arg0)
11811 			  & wi::shifted_mask (bitpos, bitsize, true,
11812 					      TYPE_PRECISION (type)));
11813 	  wide_int tem2
11814 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11815 				    bitsize), bitpos);
11816 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11817 	}
11818       else if (TREE_CODE (arg0) == VECTOR_CST
11819 	       && CONSTANT_CLASS_P (arg1)
11820 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11821 				      TREE_TYPE (arg1)))
11822 	{
11823 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11824 	  unsigned HOST_WIDE_INT elsize
11825 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11826 	  if (bitpos % elsize == 0)
11827 	    {
11828 	      unsigned k = bitpos / elsize;
11829 	      unsigned HOST_WIDE_INT nelts;
11830 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11831 		return arg0;
11832 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11833 		{
11834 		  tree_vector_builder elts (type, nelts, 1);
11835 		  elts.quick_grow (nelts);
11836 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11837 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11838 		  return elts.build ();
11839 		}
11840 	    }
11841 	}
11842       return NULL_TREE;
11843 
11844     default:
11845       return NULL_TREE;
11846     } /* switch (code) */
11847 }
11848 
11849 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11850    of an array (or vector).  */
11851 
11852 tree
11853 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11854 {
11855   tree index_type = NULL_TREE;
11856   offset_int low_bound = 0;
11857 
11858   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11859     {
11860       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11861       if (domain_type && TYPE_MIN_VALUE (domain_type))
11862 	{
11863 	  /* Static constructors for variably sized objects makes no sense.  */
11864 	  /* Static constructors for variably sized objects make no sense.  */
11865 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11866 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11867 	}
11868     }
11869 
11870   if (index_type)
11871     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11872 			    TYPE_SIGN (index_type));
11873 
11874   offset_int index = low_bound - 1;
11875   if (index_type)
11876     index = wi::ext (index, TYPE_PRECISION (index_type),
11877 		     TYPE_SIGN (index_type));
11878 
11879   offset_int max_index;
11880   unsigned HOST_WIDE_INT cnt;
11881   tree cfield, cval;
11882 
11883   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11884     {
11885       /* Array constructor might explicitly set index, or specify a range,
11886       /* An array constructor might explicitly set the index, specify a
11887 	 range, or leave the index NULL, meaning it is the next index after
11888 	 the previous one.  */
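      /* E.g. (illustrative): in "int a[] = { [2] = 5, 7 }", the 7 has
	 a NULL index and occupies index 3.  */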
11889 	{
11890 	  if (TREE_CODE (cfield) == INTEGER_CST)
11891 	    max_index = index = wi::to_offset (cfield);
11892 	  else
11893 	    {
11894 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11895 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11896 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11897 	    }
11898 	}
11899       else
11900 	{
11901 	  index += 1;
11902 	  if (index_type)
11903 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11904 			     TYPE_SIGN (index_type));
11905 	  max_index = index;
11906 	}
11907 
11908     /* Do we have match?  */
11909     /* Do we have a match?  */
11910 	&& wi::cmpu (access_index, max_index) <= 0)
11911       return cval;
11912   }
11913   return NULL_TREE;
11914 }
11915 
11916 /* Perform constant folding and related simplification of EXPR.
11917    The related simplifications include x*1 => x, x*0 => 0, etc.,
11918    and application of the associative law.
11919    NOP_EXPR conversions may be removed freely (as long as we
11920    are careful not to change the type of the overall expression).
11921    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11922    but we can constant-fold them if they have constant operands.  */
11923 
11924 #ifdef ENABLE_FOLD_CHECKING
11925 # define fold(x) fold_1 (x)
11926 static tree fold_1 (tree);
11927 static
11928 #endif
11929 tree
11930 fold (tree expr)
11931 {
11932   const tree t = expr;
11933   enum tree_code code = TREE_CODE (t);
11934   enum tree_code_class kind = TREE_CODE_CLASS (code);
11935   tree tem;
11936   location_t loc = EXPR_LOCATION (expr);
11937 
11938   /* Return right away if a constant.  */
11939   if (kind == tcc_constant)
11940     return t;
11941 
11942   /* CALL_EXPR-like objects with variable numbers of operands are
11943      treated specially.  */
11944   if (kind == tcc_vl_exp)
11945     {
11946       if (code == CALL_EXPR)
11947 	{
11948 	  tem = fold_call_expr (loc, expr, false);
11949 	  return tem ? tem : expr;
11950 	}
11951       return expr;
11952     }
11953 
11954   if (IS_EXPR_CODE_CLASS (kind))
11955     {
11956       tree type = TREE_TYPE (t);
11957       tree op0, op1, op2;
11958 
11959       switch (TREE_CODE_LENGTH (code))
11960 	{
11961 	case 1:
11962 	  op0 = TREE_OPERAND (t, 0);
11963 	  tem = fold_unary_loc (loc, code, type, op0);
11964 	  return tem ? tem : expr;
11965 	case 2:
11966 	  op0 = TREE_OPERAND (t, 0);
11967 	  op1 = TREE_OPERAND (t, 1);
11968 	  tem = fold_binary_loc (loc, code, type, op0, op1);
11969 	  return tem ? tem : expr;
11970 	case 3:
11971 	  op0 = TREE_OPERAND (t, 0);
11972 	  op1 = TREE_OPERAND (t, 1);
11973 	  op2 = TREE_OPERAND (t, 2);
11974 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11975 	  return tem ? tem : expr;
11976 	default:
11977 	  break;
11978 	}
11979     }
11980 
11981   switch (code)
11982     {
11983     case ARRAY_REF:
11984       {
11985 	tree op0 = TREE_OPERAND (t, 0);
11986 	tree op1 = TREE_OPERAND (t, 1);
11987 
11988 	if (TREE_CODE (op1) == INTEGER_CST
11989 	    && TREE_CODE (op0) == CONSTRUCTOR
11990 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11991 	  {
11992 	    tree val = get_array_ctor_element_at_index (op0,
11993 							wi::to_offset (op1));
11994 	    if (val)
11995 	      return val;
11996 	  }
11997 
11998 	return t;
11999       }
12000 
12001       /* Return a VECTOR_CST if possible.  */
12002     case CONSTRUCTOR:
12003       {
12004 	tree type = TREE_TYPE (t);
12005 	if (TREE_CODE (type) != VECTOR_TYPE)
12006 	  return t;
12007 
12008 	unsigned i;
12009 	tree val;
12010 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12011 	  if (! CONSTANT_CLASS_P (val))
12012 	    return t;
12013 
12014 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12015       }
12016 
12017     case CONST_DECL:
12018       return fold (DECL_INITIAL (t));
12019 
12020     default:
12021       return t;
12022     } /* switch (code) */
12023 }
12024 
12025 #ifdef ENABLE_FOLD_CHECKING
12026 #undef fold
12027 
12028 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12029 				hash_table<nofree_ptr_hash<const tree_node> > *);
12030 static void fold_check_failed (const_tree, const_tree);
12031 void print_fold_checksum (const_tree);
12032 
12033 /* When --enable-checking=fold, compute a digest of expr before
12034 /* When --enable-checking=fold, compute a digest of EXPR before
12035    and after the actual fold call to verify that fold did not
12036    accidentally change the original EXPR.  */
12037 tree
12038 fold (tree expr)
12039 {
12040   tree ret;
12041   struct md5_ctx ctx;
12042   unsigned char checksum_before[16], checksum_after[16];
12043   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12044 
12045   md5_init_ctx (&ctx);
12046   fold_checksum_tree (expr, &ctx, &ht);
12047   md5_finish_ctx (&ctx, checksum_before);
12048   ht.empty ();
12049 
12050   ret = fold_1 (expr);
12051 
12052   md5_init_ctx (&ctx);
12053   fold_checksum_tree (expr, &ctx, &ht);
12054   md5_finish_ctx (&ctx, checksum_after);
12055 
12056   if (memcmp (checksum_before, checksum_after, 16))
12057     fold_check_failed (expr, ret);
12058 
12059   return ret;
12060 }
12061 
12062 void
12063 print_fold_checksum (const_tree expr)
12064 {
12065   struct md5_ctx ctx;
12066   unsigned char checksum[16], cnt;
12067   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12068 
12069   md5_init_ctx (&ctx);
12070   fold_checksum_tree (expr, &ctx, &ht);
12071   md5_finish_ctx (&ctx, checksum);
12072   for (cnt = 0; cnt < 16; ++cnt)
12073     fprintf (stderr, "%02x", checksum[cnt]);
12074   putc ('\n', stderr);
12075 }
12076 
12077 static void
12078 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12079 {
12080   internal_error ("fold check: original tree changed by fold");
12081 }
12082 
12083 static void
12084 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12085 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12086 {
12087   const tree_node **slot;
12088   enum tree_code code;
12089   union tree_node buf;
12090   int i, len;
12091 
12092  recursive_label:
12093   if (expr == NULL)
12094     return;
12095   slot = ht->find_slot (expr, INSERT);
12096   if (*slot != NULL)
12097     return;
12098   *slot = expr;
12099   code = TREE_CODE (expr);
12100   if (TREE_CODE_CLASS (code) == tcc_declaration
12101       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12102     {
12103       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12104       memcpy ((char *) &buf, expr, tree_size (expr));
12105       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12106       buf.decl_with_vis.symtab_node = NULL;
12107       expr = (tree) &buf;
12108     }
12109   else if (TREE_CODE_CLASS (code) == tcc_type
12110 	   && (TYPE_POINTER_TO (expr)
12111 	       || TYPE_REFERENCE_TO (expr)
12112 	       || TYPE_CACHED_VALUES_P (expr)
12113 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12114 	       || TYPE_NEXT_VARIANT (expr)
12115 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12116     {
12117       /* Allow these fields to be modified.  */
12118       tree tmp;
12119       memcpy ((char *) &buf, expr, tree_size (expr));
12120       expr = tmp = (tree) &buf;
12121       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12122       TYPE_POINTER_TO (tmp) = NULL;
12123       TYPE_REFERENCE_TO (tmp) = NULL;
12124       TYPE_NEXT_VARIANT (tmp) = NULL;
12125       TYPE_ALIAS_SET (tmp) = -1;
12126       if (TYPE_CACHED_VALUES_P (tmp))
12127 	{
12128 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12129 	  TYPE_CACHED_VALUES (tmp) = NULL;
12130 	}
12131     }
12132   md5_process_bytes (expr, tree_size (expr), ctx);
12133   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12134     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12135   if (TREE_CODE_CLASS (code) != tcc_type
12136       && TREE_CODE_CLASS (code) != tcc_declaration
12137       && code != TREE_LIST
12138       && code != SSA_NAME
12139       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12140     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12141   switch (TREE_CODE_CLASS (code))
12142     {
12143     case tcc_constant:
12144       switch (code)
12145 	{
12146 	case STRING_CST:
12147 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12148 			     TREE_STRING_LENGTH (expr), ctx);
12149 	  break;
12150 	case COMPLEX_CST:
12151 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12152 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12153 	  break;
12154 	case VECTOR_CST:
12155 	  len = vector_cst_encoded_nelts (expr);
12156 	  for (i = 0; i < len; ++i)
12157 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12158 	  break;
12159 	default:
12160 	  break;
12161 	}
12162       break;
12163     case tcc_exceptional:
12164       switch (code)
12165 	{
12166 	case TREE_LIST:
12167 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12168 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12169 	  expr = TREE_CHAIN (expr);
12170 	  goto recursive_label;
12171 	  break;
12172 	case TREE_VEC:
12173 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12174 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12175 	  break;
12176 	default:
12177 	  break;
12178 	}
12179       break;
12180     case tcc_expression:
12181     case tcc_reference:
12182     case tcc_comparison:
12183     case tcc_unary:
12184     case tcc_binary:
12185     case tcc_statement:
12186     case tcc_vl_exp:
12187       len = TREE_OPERAND_LENGTH (expr);
12188       for (i = 0; i < len; ++i)
12189 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12190       break;
12191     case tcc_declaration:
12192       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12193       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12194       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12195 	{
12196 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12197 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12198 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12199 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12200 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12201 	}
12202 
12203       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12204 	{
12205 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12206 	    {
12207 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12208 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12209 	    }
12210 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12211 	}
12212       break;
12213     case tcc_type:
12214       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12215         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12216       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12217       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12218       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12219       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12220       if (INTEGRAL_TYPE_P (expr)
12221           || SCALAR_FLOAT_TYPE_P (expr))
12222 	{
12223 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12224 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12225 	}
12226       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12227       if (TREE_CODE (expr) == RECORD_TYPE
12228 	  || TREE_CODE (expr) == UNION_TYPE
12229 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12230 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12231       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12232       break;
12233     default:
12234       break;
12235     }
12236 }
12237 
12238 /* Helper function for outputting the checksum of a tree T.  When
12239    debugging with gdb, you can "define mynext" to be "next" followed
12240    by "call debug_fold_checksum (op0)", then just trace down until the
12241    outputs differ.  */
12242 
12243 DEBUG_FUNCTION void
12244 debug_fold_checksum (const_tree t)
12245 {
12246   int i;
12247   unsigned char checksum[16];
12248   struct md5_ctx ctx;
12249   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12250 
12251   md5_init_ctx (&ctx);
12252   fold_checksum_tree (t, &ctx, &ht);
12253   md5_finish_ctx (&ctx, checksum);
12254   ht.empty ();
12255 
12256   for (i = 0; i < 16; i++)
12257     fprintf (stderr, "%d ", checksum[i]);
12258 
12259   fprintf (stderr, "\n");
12260 }
12261 
12262 #endif
12263 
12264 /* Fold a unary tree expression with code CODE of type TYPE with an
12265    operand OP0.  LOC is the location of the resulting expression.
12266    Return a folded expression if successful.  Otherwise, return a tree
12267    expression with code CODE of type TYPE with an operand OP0.  */
12268 
12269 tree
12270 fold_build1_loc (location_t loc,
12271 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12272 {
12273   tree tem;
12274 #ifdef ENABLE_FOLD_CHECKING
12275   unsigned char checksum_before[16], checksum_after[16];
12276   struct md5_ctx ctx;
12277   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12278 
12279   md5_init_ctx (&ctx);
12280   fold_checksum_tree (op0, &ctx, &ht);
12281   md5_finish_ctx (&ctx, checksum_before);
12282   ht.empty ();
12283 #endif
12284 
12285   tem = fold_unary_loc (loc, code, type, op0);
12286   if (!tem)
12287     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12288 
12289 #ifdef ENABLE_FOLD_CHECKING
12290   md5_init_ctx (&ctx);
12291   fold_checksum_tree (op0, &ctx, &ht);
12292   md5_finish_ctx (&ctx, checksum_after);
12293 
12294   if (memcmp (checksum_before, checksum_after, 16))
12295     fold_check_failed (op0, tem);
12296 #endif
12297   return tem;
12298 }
12299 
12300 /* Fold a binary tree expression with code CODE of type TYPE with
12301    operands OP0 and OP1.  LOC is the location of the resulting
12302    expression.  Return a folded expression if successful.  Otherwise,
12303    return a tree expression with code CODE of type TYPE with operands
12304    OP0 and OP1.  */
12305 
12306 tree
12307 fold_build2_loc (location_t loc,
12308 		      enum tree_code code, tree type, tree op0, tree op1
12309 		      MEM_STAT_DECL)
12310 {
12311   tree tem;
12312 #ifdef ENABLE_FOLD_CHECKING
12313   unsigned char checksum_before_op0[16],
12314                 checksum_before_op1[16],
12315 		checksum_after_op0[16],
12316 		checksum_after_op1[16];
12317   struct md5_ctx ctx;
12318   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12319 
12320   md5_init_ctx (&ctx);
12321   fold_checksum_tree (op0, &ctx, &ht);
12322   md5_finish_ctx (&ctx, checksum_before_op0);
12323   ht.empty ();
12324 
12325   md5_init_ctx (&ctx);
12326   fold_checksum_tree (op1, &ctx, &ht);
12327   md5_finish_ctx (&ctx, checksum_before_op1);
12328   ht.empty ();
12329 #endif
12330 
12331   tem = fold_binary_loc (loc, code, type, op0, op1);
12332   if (!tem)
12333     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12334 
12335 #ifdef ENABLE_FOLD_CHECKING
12336   md5_init_ctx (&ctx);
12337   fold_checksum_tree (op0, &ctx, &ht);
12338   md5_finish_ctx (&ctx, checksum_after_op0);
12339   ht.empty ();
12340 
12341   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12342     fold_check_failed (op0, tem);
12343 
12344   md5_init_ctx (&ctx);
12345   fold_checksum_tree (op1, &ctx, &ht);
12346   md5_finish_ctx (&ctx, checksum_after_op1);
12347 
12348   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12349     fold_check_failed (op1, tem);
12350 #endif
12351   return tem;
12352 }
12353 
12354 /* Fold a ternary tree expression with code CODE of type TYPE with
12355    operands OP0, OP1, and OP2.  Return a folded expression if
12356    successful.  Otherwise, return a tree expression with code CODE of
12357    type TYPE with operands OP0, OP1, and OP2.  */
12358 
12359 tree
12360 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12361 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12362 {
12363   tree tem;
12364 #ifdef ENABLE_FOLD_CHECKING
12365   unsigned char checksum_before_op0[16],
12366                 checksum_before_op1[16],
12367                 checksum_before_op2[16],
12368 		checksum_after_op0[16],
12369 		checksum_after_op1[16],
12370 		checksum_after_op2[16];
12371   struct md5_ctx ctx;
12372   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12373 
12374   md5_init_ctx (&ctx);
12375   fold_checksum_tree (op0, &ctx, &ht);
12376   md5_finish_ctx (&ctx, checksum_before_op0);
12377   ht.empty ();
12378 
12379   md5_init_ctx (&ctx);
12380   fold_checksum_tree (op1, &ctx, &ht);
12381   md5_finish_ctx (&ctx, checksum_before_op1);
12382   ht.empty ();
12383 
12384   md5_init_ctx (&ctx);
12385   fold_checksum_tree (op2, &ctx, &ht);
12386   md5_finish_ctx (&ctx, checksum_before_op2);
12387   ht.empty ();
12388 #endif
12389 
12390   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12391   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12392   if (!tem)
12393     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12394 
12395 #ifdef ENABLE_FOLD_CHECKING
12396   md5_init_ctx (&ctx);
12397   fold_checksum_tree (op0, &ctx, &ht);
12398   md5_finish_ctx (&ctx, checksum_after_op0);
12399   ht.empty ();
12400 
12401   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12402     fold_check_failed (op0, tem);
12403 
12404   md5_init_ctx (&ctx);
12405   fold_checksum_tree (op1, &ctx, &ht);
12406   md5_finish_ctx (&ctx, checksum_after_op1);
12407   ht.empty ();
12408 
12409   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12410     fold_check_failed (op1, tem);
12411 
12412   md5_init_ctx (&ctx);
12413   fold_checksum_tree (op2, &ctx, &ht);
12414   md5_finish_ctx (&ctx, checksum_after_op2);
12415 
12416   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12417     fold_check_failed (op2, tem);
12418 #endif
12419   return tem;
12420 }
12421 
12422 /* Fold a CALL_EXPR expression of type TYPE calling function FN with the
12423    NARGS arguments in ARGARRAY, and a null static chain.
12424    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12425    of type TYPE from the given operands as constructed by build_call_array.  */
12426 
12427 tree
12428 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12429 			   int nargs, tree *argarray)
12430 {
12431   tree tem;
12432 #ifdef ENABLE_FOLD_CHECKING
12433   unsigned char checksum_before_fn[16],
12434                 checksum_before_arglist[16],
12435 		checksum_after_fn[16],
12436 		checksum_after_arglist[16];
12437   struct md5_ctx ctx;
12438   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12439   int i;
12440 
12441   md5_init_ctx (&ctx);
12442   fold_checksum_tree (fn, &ctx, &ht);
12443   md5_finish_ctx (&ctx, checksum_before_fn);
12444   ht.empty ();
12445 
12446   md5_init_ctx (&ctx);
12447   for (i = 0; i < nargs; i++)
12448     fold_checksum_tree (argarray[i], &ctx, &ht);
12449   md5_finish_ctx (&ctx, checksum_before_arglist);
12450   ht.empty ();
12451 #endif
12452 
12453   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12454   if (!tem)
12455     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12456 
12457 #ifdef ENABLE_FOLD_CHECKING
12458   md5_init_ctx (&ctx);
12459   fold_checksum_tree (fn, &ctx, &ht);
12460   md5_finish_ctx (&ctx, checksum_after_fn);
12461   ht.empty ();
12462 
12463   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12464     fold_check_failed (fn, tem);
12465 
12466   md5_init_ctx (&ctx);
12467   for (i = 0; i < nargs; i++)
12468     fold_checksum_tree (argarray[i], &ctx, &ht);
12469   md5_finish_ctx (&ctx, checksum_after_arglist);
12470 
12471   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12472     fold_check_failed (NULL_TREE, tem);
12473 #endif
12474   return tem;
12475 }
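
/* A hypothetical sketch of folding fabs (-1.0) through this entry
   point (assuming the fabs builtin decl is available):

     tree fndecl = builtin_decl_explicit (BUILT_IN_FABS);
     tree fn = build_fold_addr_expr (fndecl);
     tree arg = build_real (double_type_node, dconstm1);
     tree res = fold_build_call_array_loc (UNKNOWN_LOCATION,
					   double_type_node, fn, 1, &arg);

   Here fold_builtin_call_array can reduce the call to the REAL_CST
   1.0; with a non-constant argument, RES would be the CALL_EXPR built
   by build_call_array_loc instead.  */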
12476 
12477 /* Perform constant folding and related simplification of initializer
12478    expression EXPR.  These behave identically to "fold_buildN" but ignore
12479    potential run-time traps and exceptions that fold must preserve.  */
12480 
12481 #define START_FOLD_INIT \
12482   int saved_signaling_nans = flag_signaling_nans;\
12483   int saved_trapping_math = flag_trapping_math;\
12484   int saved_rounding_math = flag_rounding_math;\
12485   int saved_trapv = flag_trapv;\
12486   int saved_folding_initializer = folding_initializer;\
12487   flag_signaling_nans = 0;\
12488   flag_trapping_math = 0;\
12489   flag_rounding_math = 0;\
12490   flag_trapv = 0;\
12491   folding_initializer = 1;
12492 
12493 #define END_FOLD_INIT \
12494   flag_signaling_nans = saved_signaling_nans;\
12495   flag_trapping_math = saved_trapping_math;\
12496   flag_rounding_math = saved_rounding_math;\
12497   flag_trapv = saved_trapv;\
12498   folding_initializer = saved_folding_initializer;
12499 
12500 tree
12501 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12502 			     tree type, tree op)
12503 {
12504   tree result;
12505   START_FOLD_INIT;
12506 
12507   result = fold_build1_loc (loc, code, type, op);
12508 
12509   END_FOLD_INIT;
12510   return result;
12511 }
12512 
12513 tree
12514 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12515 			     tree type, tree op0, tree op1)
12516 {
12517   tree result;
12518   START_FOLD_INIT;
12519 
12520   result = fold_build2_loc (loc, code, type, op0, op1);
12521 
12522   END_FOLD_INIT;
12523   return result;
12524 }
12525 
12526 tree
12527 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12528 				       int nargs, tree *argarray)
12529 {
12530   tree result;
12531   START_FOLD_INIT;
12532 
12533   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12534 
12535   END_FOLD_INIT;
12536   return result;
12537 }
12538 
12539 #undef START_FOLD_INIT
12540 #undef END_FOLD_INIT
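
/* A minimal usage sketch (assuming a front end folding a static
   initializer, with A and B trees of type integer_type_node):

     tree init = fold_build2_initializer_loc (input_location, PLUS_EXPR,
					      integer_type_node, a, b);

   This behaves like fold_build2_loc except that flag_signaling_nans,
   flag_trapping_math, flag_rounding_math and flag_trapv are cleared
   for the duration of the call, so expressions that could trap at run
   time may still be folded in the initializer context.  */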
12541 
12542 /* Determine if first argument is a multiple of second argument.  Return 0 if
12543    it is not, or we cannot easily determine it to be.
12544 
12545    An example of the sort of thing we care about (at this point; this routine
12546    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12547    fold cases do now) is discovering that
12548 
12549      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12550 
12551    is a multiple of
12552 
12553      SAVE_EXPR (J * 8)
12554 
12555    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12556 
12557    This code also handles discovering that
12558 
12559      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12560 
12561    is a multiple of 8 so we don't have to worry about dealing with a
12562    possible remainder.
12563 
12564    Note that we *look* inside a SAVE_EXPR only to determine how it was
12565    calculated; it is not safe for fold to do much of anything else with the
12566    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12567    at run time.  For example, the latter example above *cannot* be implemented
12568    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12569    evaluation time of the original SAVE_EXPR is not necessarily the same at
12570    the time the new expression is evaluated.  The only optimization of this
12571    sort that would be valid is changing
12572 
12573      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12574 
12575    divided by 8 to
12576 
12577      SAVE_EXPR (I) * SAVE_EXPR (J)
12578 
12579    (where the same SAVE_EXPR (J) is used in the original and the
12580    transformed version).  */
12581 
12582 int
12583 multiple_of_p (tree type, const_tree top, const_tree bottom)
12584 {
12585   gimple *stmt;
12586   tree t1, op1, op2;
12587 
12588   if (operand_equal_p (top, bottom, 0))
12589     return 1;
12590 
12591   if (TREE_CODE (type) != INTEGER_TYPE)
12592     return 0;
12593 
12594   switch (TREE_CODE (top))
12595     {
12596     case BIT_AND_EXPR:
12597       /* For a bitwise AND, if either operand is a multiple of BOTTOM
12598 	 (which must be a power of two), then TOP is a multiple of BOTTOM.  */
12599       if (!integer_pow2p (bottom))
12600 	return 0;
12601       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12602 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12603 
12604     case MULT_EXPR:
12605       if (TREE_CODE (bottom) == INTEGER_CST)
12606 	{
12607 	  op1 = TREE_OPERAND (top, 0);
12608 	  op2 = TREE_OPERAND (top, 1);
12609 	  if (TREE_CODE (op1) == INTEGER_CST)
12610 	    std::swap (op1, op2);
12611 	  if (TREE_CODE (op2) == INTEGER_CST)
12612 	    {
12613 	      if (multiple_of_p (type, op2, bottom))
12614 		return 1;
12615 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
12616 	      if (multiple_of_p (type, bottom, op2))
12617 		{
12618 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12619 						 wi::to_widest (op2));
12620 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12621 		    {
12622 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12623 		      return multiple_of_p (type, op1, op2);
12624 		    }
12625 		}
12626 	      return multiple_of_p (type, op1, bottom);
12627 	    }
12628 	}
12629       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12630 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12631 
12632     case MINUS_EXPR:
12633       /* We cannot precisely prove whether op0 - op1 is a multiple of
12634 	 bottom, so be conservative and check that both op0 and op1 are
12635 	 multiples of bottom.  Note we check the second operand first
12636 	 since it's usually simpler.  */
12637       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12638 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12639 
12640     case PLUS_EXPR:
12641       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12642 	 as op0 - 3 if the expression has unsigned type.  For example,
12643 	 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not.  */
12644 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12645       if (TYPE_UNSIGNED (type)
12646 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12647 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12648       return (multiple_of_p (type, op1, bottom)
12649 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12650 
12651     case LSHIFT_EXPR:
12652       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12653 	{
12654 	  op1 = TREE_OPERAND (top, 1);
12655 	  /* const_binop may not detect overflow correctly,
12656 	     so check for it explicitly here.  */
12657 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12658 			 wi::to_wide (op1))
12659 	      && (t1 = fold_convert (type,
12660 				     const_binop (LSHIFT_EXPR, size_one_node,
12661 						  op1))) != 0
12662 	      && !TREE_OVERFLOW (t1))
12663 	    return multiple_of_p (type, t1, bottom);
12664 	}
12665       return 0;
12666 
12667     case NOP_EXPR:
12668       /* Can't handle conversions from non-integral or wider integral type.  */
12669       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12670 	  || (TYPE_PRECISION (type)
12671 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12672 	return 0;
12673 
12674       /* fall through */
12675 
12676     case SAVE_EXPR:
12677       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12678 
12679     case COND_EXPR:
12680       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12681 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12682 
12683     case INTEGER_CST:
12684       if (TREE_CODE (bottom) != INTEGER_CST
12685 	  || integer_zerop (bottom)
12686 	  || (TYPE_UNSIGNED (type)
12687 	      && (tree_int_cst_sgn (top) < 0
12688 		  || tree_int_cst_sgn (bottom) < 0)))
12689 	return 0;
12690       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12691 				SIGNED);
12692 
12693     case SSA_NAME:
12694       if (TREE_CODE (bottom) == INTEGER_CST
12695 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12696 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12697 	{
12698 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12699 
12700 	  /* Check for special cases to see if top is defined as multiple
12701 	     of bottom:
12702 
12703 	       top = X & ~(bottom - 1);  bottom is a power of 2
12704 
12705 	     or
12706 
12707 	       Y = X % bottom
12708 	       top = X - Y.  */
12709 	  if (code == BIT_AND_EXPR
12710 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12711 	      && TREE_CODE (op2) == INTEGER_CST
12712 	      && integer_pow2p (bottom)
12713 	      && wi::multiple_of_p (wi::to_widest (op2),
12714 				    wi::to_widest (bottom), UNSIGNED))
12715 	    return 1;
12716 
12717 	  op1 = gimple_assign_rhs1 (stmt);
12718 	  if (code == MINUS_EXPR
12719 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12720 	      && TREE_CODE (op2) == SSA_NAME
12721 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12722 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12723 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12724 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12725 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12726 	    return 1;
12727 	}
12728 
12729       /* fall through */
12730 
12731     default:
12732       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12733 	return multiple_p (wi::to_poly_widest (top),
12734 			   wi::to_poly_widest (bottom));
12735 
12736       return 0;
12737     }
12738 }
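
/* For example (an illustrative sketch; J is any tree of type sizetype):

     tree top = fold_build2 (MULT_EXPR, sizetype, j, size_int (8));
     multiple_of_p (sizetype, top, size_int (4));

   returns 1 through the MULT_EXPR case above, because the constant
   operand 8 is itself a multiple of 4.  */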
12739 
12740 #define tree_expr_nonnegative_warnv_p(X, Y) \
12741   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12742 
12743 #define RECURSE(X) \
12744   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12745 
12746 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
12747 
12748 static bool
12749 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12750 {
12751   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12752       && truth_value_p (code))
12753     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12754        have a signed:1 type (where the values are -1 and 0).  */
12755     return true;
12756   return false;
12757 }
12758 
12759 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12760    value is based on the assumption that signed overflow is undefined,
12761    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12762    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12763 
12764 bool
12765 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12766 				bool *strict_overflow_p, int depth)
12767 {
12768   if (TYPE_UNSIGNED (type))
12769     return true;
12770 
12771   switch (code)
12772     {
12773     case ABS_EXPR:
12774       /* We can't return true if wrapping overflow is in effect, because
12775 	 ABS_EXPR<INT_MIN> == INT_MIN.  */
12776       if (!ANY_INTEGRAL_TYPE_P (type))
12777 	return true;
12778       if (TYPE_OVERFLOW_UNDEFINED (type))
12779 	{
12780 	  *strict_overflow_p = true;
12781 	  return true;
12782 	}
12783       break;
12784 
12785     case NON_LVALUE_EXPR:
12786     case FLOAT_EXPR:
12787     case FIX_TRUNC_EXPR:
12788       return RECURSE (op0);
12789 
12790     CASE_CONVERT:
12791       {
12792 	tree inner_type = TREE_TYPE (op0);
12793 	tree outer_type = type;
12794 
12795 	if (TREE_CODE (outer_type) == REAL_TYPE)
12796 	  {
12797 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12798 	      return RECURSE (op0);
12799 	    if (INTEGRAL_TYPE_P (inner_type))
12800 	      {
12801 		if (TYPE_UNSIGNED (inner_type))
12802 		  return true;
12803 		return RECURSE (op0);
12804 	      }
12805 	  }
12806 	else if (INTEGRAL_TYPE_P (outer_type))
12807 	  {
12808 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12809 	      return RECURSE (op0);
12810 	    if (INTEGRAL_TYPE_P (inner_type))
12811 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12812 		      && TYPE_UNSIGNED (inner_type);
12813 	  }
12814       }
12815       break;
12816 
12817     default:
12818       return tree_simple_nonnegative_warnv_p (code, type);
12819     }
12820 
12821   /* We don't know sign of `t', so be conservative and return false.  */
12822   return false;
12823 }
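
/* For instance (a hedged sketch; X is a tree of type integer_type_node):

     bool sop = false;
     bool nn = tree_unary_nonnegative_warnv_p (ABS_EXPR, integer_type_node,
					       x, &sop, 0);

   When signed overflow is undefined, NN is true and SOP is set,
   recording that the answer assumed ABS_EXPR<INT_MIN> cannot occur;
   under -fwrapv the ABS_EXPR case above falls through and NN is
   false.  */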
12824 
12825 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12826    value is based on the assumption that signed overflow is undefined,
12827    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12828    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12829 
12830 bool
12831 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12832 				 tree op1, bool *strict_overflow_p,
12833 				 int depth)
12834 {
12835   if (TYPE_UNSIGNED (type))
12836     return true;
12837 
12838   switch (code)
12839     {
12840     case POINTER_PLUS_EXPR:
12841     case PLUS_EXPR:
12842       if (FLOAT_TYPE_P (type))
12843 	return RECURSE (op0) && RECURSE (op1);
12844 
12845       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12846 	 both unsigned and at least 2 bits shorter than the result.  */
12847       if (TREE_CODE (type) == INTEGER_TYPE
12848 	  && TREE_CODE (op0) == NOP_EXPR
12849 	  && TREE_CODE (op1) == NOP_EXPR)
12850 	{
12851 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12852 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12853 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12854 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12855 	    {
12856 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12857 				       TYPE_PRECISION (inner2)) + 1;
12858 	      return prec < TYPE_PRECISION (type);
12859 	    }
12860 	}
12861       break;
12862 
12863     case MULT_EXPR:
12864       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12865 	{
12866 	  /* x * x is always non-negative for floating point x, as is a
12867 	     product of two non-negative values when overflow is undefined.  */
12868 	  if (operand_equal_p (op0, op1, 0)
12869 	      || (RECURSE (op0) && RECURSE (op1)))
12870 	    {
12871 	      if (ANY_INTEGRAL_TYPE_P (type)
12872 		  && TYPE_OVERFLOW_UNDEFINED (type))
12873 		*strict_overflow_p = true;
12874 	      return true;
12875 	    }
12876 	}
12877 
12878       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12879 	 both unsigned and their combined precision is less than the result's.  */
12880       if (TREE_CODE (type) == INTEGER_TYPE
12881 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12882 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12883 	{
12884 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12885 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12886 	    : TREE_TYPE (op0);
12887 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12888 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12889 	    : TREE_TYPE (op1);
12890 
12891 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12892 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12893 
12894 	  if (TREE_CODE (op0) == INTEGER_CST)
12895 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12896 
12897 	  if (TREE_CODE (op1) == INTEGER_CST)
12898 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12899 
12900 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12901 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12902 	    {
12903 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12904 		? tree_int_cst_min_precision (op0, UNSIGNED)
12905 		: TYPE_PRECISION (inner0);
12906 
12907 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12908 		? tree_int_cst_min_precision (op1, UNSIGNED)
12909 		: TYPE_PRECISION (inner1);
12910 
12911 	      return precision0 + precision1 < TYPE_PRECISION (type);
12912 	    }
12913 	}
12914       return false;
12915 
12916     case BIT_AND_EXPR:
12917     case MAX_EXPR:
12918       return RECURSE (op0) || RECURSE (op1);
12919 
12920     case BIT_IOR_EXPR:
12921     case BIT_XOR_EXPR:
12922     case MIN_EXPR:
12923     case RDIV_EXPR:
12924     case TRUNC_DIV_EXPR:
12925     case CEIL_DIV_EXPR:
12926     case FLOOR_DIV_EXPR:
12927     case ROUND_DIV_EXPR:
12928       return RECURSE (op0) && RECURSE (op1);
12929 
12930     case TRUNC_MOD_EXPR:
12931       return RECURSE (op0);
12932 
12933     case FLOOR_MOD_EXPR:
12934       return RECURSE (op1);
12935 
12936     case CEIL_MOD_EXPR:
12937     case ROUND_MOD_EXPR:
12938     default:
12939       return tree_simple_nonnegative_warnv_p (code, type);
12940     }
12941 
12942   /* We don't know sign of `t', so be conservative and return false.  */
12943   return false;
12944 }
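
/* A worked instance of the zero_extend rules above: for unsigned char
   values A and B widened to 32-bit int,

     (int) A + (int) B  <=  255 + 255  =  510  <  2^31,

   and indeed MAX (8, 8) + 1 == 9 < 32, so the PLUS_EXPR case answers
   true; likewise (int) A * (int) B <= 255 * 255 < 2^16, and
   8 + 8 == 16 < 32 satisfies the MULT_EXPR precision test.  */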
12945 
12946 /* Return true if T is known to be non-negative.  If the return
12947    value is based on the assumption that signed overflow is undefined,
12948    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12949    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12950 
12951 bool
12952 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12953 {
12954   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12955     return true;
12956 
12957   switch (TREE_CODE (t))
12958     {
12959     case INTEGER_CST:
12960       return tree_int_cst_sgn (t) >= 0;
12961 
12962     case REAL_CST:
12963       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12964 
12965     case FIXED_CST:
12966       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12967 
12968     case COND_EXPR:
12969       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12970 
12971     case SSA_NAME:
12972       /* Limit the depth of recursion to avoid quadratic behavior.
12973 	 This is expected to catch almost all occurrences in practice.
12974 	 If this code misses important cases that unbounded recursion
12975 	 would not, passes that need this information could be revised
12976 	 to provide it through dataflow propagation.  */
12977       return (!name_registered_for_update_p (t)
12978 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12979 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12980 						  strict_overflow_p, depth));
12981 
12982     default:
12983       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12984     }
12985 }
12986 
12987 /* Return true if a call to FN with arguments ARG0 and ARG1 of result type
12988    TYPE is known to be non-negative.  If the return
12989    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12990    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12991 
12992 bool
12993 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12994 			       bool *strict_overflow_p, int depth)
12995 {
12996   switch (fn)
12997     {
12998     CASE_CFN_ACOS:
12999     CASE_CFN_ACOSH:
13000     CASE_CFN_CABS:
13001     CASE_CFN_COSH:
13002     CASE_CFN_ERFC:
13003     CASE_CFN_EXP:
13004     CASE_CFN_EXP10:
13005     CASE_CFN_EXP2:
13006     CASE_CFN_FABS:
13007     CASE_CFN_FDIM:
13008     CASE_CFN_HYPOT:
13009     CASE_CFN_POW10:
13010     CASE_CFN_FFS:
13011     CASE_CFN_PARITY:
13012     CASE_CFN_POPCOUNT:
13013     CASE_CFN_CLZ:
13014     CASE_CFN_CLRSB:
13015     case CFN_BUILT_IN_BSWAP32:
13016     case CFN_BUILT_IN_BSWAP64:
13017       /* Always true.  */
13018       return true;
13019 
13020     CASE_CFN_SQRT:
13021     CASE_CFN_SQRT_FN:
13022       /* sqrt(-0.0) is -0.0.  */
13023       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13024 	return true;
13025       return RECURSE (arg0);
13026 
13027     CASE_CFN_ASINH:
13028     CASE_CFN_ATAN:
13029     CASE_CFN_ATANH:
13030     CASE_CFN_CBRT:
13031     CASE_CFN_CEIL:
13032     CASE_CFN_CEIL_FN:
13033     CASE_CFN_ERF:
13034     CASE_CFN_EXPM1:
13035     CASE_CFN_FLOOR:
13036     CASE_CFN_FLOOR_FN:
13037     CASE_CFN_FMOD:
13038     CASE_CFN_FREXP:
13039     CASE_CFN_ICEIL:
13040     CASE_CFN_IFLOOR:
13041     CASE_CFN_IRINT:
13042     CASE_CFN_IROUND:
13043     CASE_CFN_LCEIL:
13044     CASE_CFN_LDEXP:
13045     CASE_CFN_LFLOOR:
13046     CASE_CFN_LLCEIL:
13047     CASE_CFN_LLFLOOR:
13048     CASE_CFN_LLRINT:
13049     CASE_CFN_LLROUND:
13050     CASE_CFN_LRINT:
13051     CASE_CFN_LROUND:
13052     CASE_CFN_MODF:
13053     CASE_CFN_NEARBYINT:
13054     CASE_CFN_NEARBYINT_FN:
13055     CASE_CFN_RINT:
13056     CASE_CFN_RINT_FN:
13057     CASE_CFN_ROUND:
13058     CASE_CFN_ROUND_FN:
13059     CASE_CFN_SCALB:
13060     CASE_CFN_SCALBLN:
13061     CASE_CFN_SCALBN:
13062     CASE_CFN_SIGNBIT:
13063     CASE_CFN_SIGNIFICAND:
13064     CASE_CFN_SINH:
13065     CASE_CFN_TANH:
13066     CASE_CFN_TRUNC:
13067     CASE_CFN_TRUNC_FN:
13068       /* True if the 1st argument is nonnegative.  */
13069       return RECURSE (arg0);
13070 
13071     CASE_CFN_FMAX:
13072     CASE_CFN_FMAX_FN:
13073       /* True if the 1st OR 2nd arguments are nonnegative.  */
13074       return RECURSE (arg0) || RECURSE (arg1);
13075 
13076     CASE_CFN_FMIN:
13077     CASE_CFN_FMIN_FN:
13078       /* True if the 1st AND 2nd arguments are nonnegative.  */
13079       return RECURSE (arg0) && RECURSE (arg1);
13080 
13081     CASE_CFN_COPYSIGN:
13082     CASE_CFN_COPYSIGN_FN:
13083       /* True if the 2nd argument is nonnegative.  */
13084       return RECURSE (arg1);
13085 
13086     CASE_CFN_POWI:
13087       /* True if the 1st argument is nonnegative or the second
13088 	 argument is an even integer.  */
13089       if (TREE_CODE (arg1) == INTEGER_CST
13090 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13091 	return true;
13092       return RECURSE (arg0);
13093 
13094     CASE_CFN_POW:
13095       /* True if the 1st argument is nonnegative or the second
13096 	 argument is an even integer valued real.  */
13097       if (TREE_CODE (arg1) == REAL_CST)
13098 	{
13099 	  REAL_VALUE_TYPE c;
13100 	  HOST_WIDE_INT n;
13101 
13102 	  c = TREE_REAL_CST (arg1);
13103 	  n = real_to_integer (&c);
13104 	  if ((n & 1) == 0)
13105 	    {
13106 	      REAL_VALUE_TYPE cint;
13107 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13108 	      if (real_identical (&c, &cint))
13109 		return true;
13110 	    }
13111 	}
13112       return RECURSE (arg0);
13113 
13114     default:
13115       break;
13116     }
13117   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13118 }
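
/* For example, for the call pow (x, 2.0) the CASE_CFN_POW arm above
   answers true for any X, because 2.0 is a REAL_CST whose integer
   value 2 is even and round-trips through real_from_integer
   unchanged.  */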
13119 
13120 /* Return true if T is known to be non-negative.  If the return
13121    value is based on the assumption that signed overflow is undefined,
13122    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13123    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13124 
13125 static bool
13126 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13127 {
13128   enum tree_code code = TREE_CODE (t);
13129   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13130     return true;
13131 
13132   switch (code)
13133     {
13134     case TARGET_EXPR:
13135       {
13136 	tree temp = TARGET_EXPR_SLOT (t);
13137 	t = TARGET_EXPR_INITIAL (t);
13138 
13139 	/* If the initializer is non-void, then it's a normal expression
13140 	   that will be assigned to the slot.  */
13141 	if (!VOID_TYPE_P (t))
13142 	  return RECURSE (t);
13143 
13144 	/* Otherwise, the initializer sets the slot in some way.  One common
13145 	   way is an assignment statement at the end of the initializer.  */
13146 	while (1)
13147 	  {
13148 	    if (TREE_CODE (t) == BIND_EXPR)
13149 	      t = expr_last (BIND_EXPR_BODY (t));
13150 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13151 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13152 	      t = expr_last (TREE_OPERAND (t, 0));
13153 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13154 	      t = expr_last (t);
13155 	    else
13156 	      break;
13157 	  }
13158 	if (TREE_CODE (t) == MODIFY_EXPR
13159 	    && TREE_OPERAND (t, 0) == temp)
13160 	  return RECURSE (TREE_OPERAND (t, 1));
13161 
13162 	return false;
13163       }
13164 
13165     case CALL_EXPR:
13166       {
13167 	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13168 	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13169 
13170 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13171 					      get_call_combined_fn (t),
13172 					      arg0,
13173 					      arg1,
13174 					      strict_overflow_p, depth);
13175       }
13176     case COMPOUND_EXPR:
13177     case MODIFY_EXPR:
13178       return RECURSE (TREE_OPERAND (t, 1));
13179 
13180     case BIND_EXPR:
13181       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13182 
13183     case SAVE_EXPR:
13184       return RECURSE (TREE_OPERAND (t, 0));
13185 
13186     default:
13187       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13188     }
13189 }
13190 
13191 #undef RECURSE
13192 #undef tree_expr_nonnegative_warnv_p
13193 
13194 /* Return true if T is known to be non-negative.  If the return
13195    value is based on the assumption that signed overflow is undefined,
13196    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13197    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13198 
13199 bool
13200 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13201 {
13202   enum tree_code code;
13203   if (t == error_mark_node)
13204     return false;
13205 
13206   code = TREE_CODE (t);
13207   switch (TREE_CODE_CLASS (code))
13208     {
13209     case tcc_binary:
13210     case tcc_comparison:
13211       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13212 					      TREE_TYPE (t),
13213 					      TREE_OPERAND (t, 0),
13214 					      TREE_OPERAND (t, 1),
13215 					      strict_overflow_p, depth);
13216 
13217     case tcc_unary:
13218       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13219 					     TREE_TYPE (t),
13220 					     TREE_OPERAND (t, 0),
13221 					     strict_overflow_p, depth);
13222 
13223     case tcc_constant:
13224     case tcc_declaration:
13225     case tcc_reference:
13226       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13227 
13228     default:
13229       break;
13230     }
13231 
13232   switch (code)
13233     {
13234     case TRUTH_AND_EXPR:
13235     case TRUTH_OR_EXPR:
13236     case TRUTH_XOR_EXPR:
13237       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13238 					      TREE_TYPE (t),
13239 					      TREE_OPERAND (t, 0),
13240 					      TREE_OPERAND (t, 1),
13241 					      strict_overflow_p, depth);
13242     case TRUTH_NOT_EXPR:
13243       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13244 					     TREE_TYPE (t),
13245 					     TREE_OPERAND (t, 0),
13246 					     strict_overflow_p, depth);
13247 
13248     case COND_EXPR:
13249     case CONSTRUCTOR:
13250     case OBJ_TYPE_REF:
13251     case ASSERT_EXPR:
13252     case ADDR_EXPR:
13253     case WITH_SIZE_EXPR:
13254     case SSA_NAME:
13255       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13256 
13257     default:
13258       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13259     }
13260 }
13261 
13262 /* Return true if `t' is known to be non-negative.  Handle warnings
13263    about undefined signed overflow.  */
13264 
13265 bool
13266 tree_expr_nonnegative_p (tree t)
13267 {
13268   bool ret, strict_overflow_p;
13269 
13270   strict_overflow_p = false;
13271   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13272   if (strict_overflow_p)
13273     fold_overflow_warning (("assuming signed overflow does not occur when "
13274 			    "determining that expression is always "
13275 			    "non-negative"),
13276 			   WARN_STRICT_OVERFLOW_MISC);
13277   return ret;
13278 }
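
/* A typical use (sketch): a pass that wants to rewrite abs (e) to e
   can test

     if (tree_expr_nonnegative_p (e))
       ...

   and any -Wstrict-overflow diagnostic implied by the answer is
   emitted here, via fold_overflow_warning, rather than by the
   caller.  */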
13279 
13280 
13281 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13282    For floating point we further ensure that it is not denormal.
13283    Similar logic is present in nonzero_address in rtlanal.h.
13284 
13285    If the return value is based on the assumption that signed overflow
13286    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13287    change *STRICT_OVERFLOW_P.  */
13288 
13289 bool
13290 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13291 				 bool *strict_overflow_p)
13292 {
13293   switch (code)
13294     {
13295     case ABS_EXPR:
13296       return tree_expr_nonzero_warnv_p (op0,
13297 					strict_overflow_p);
13298 
13299     case NOP_EXPR:
13300       {
13301 	tree inner_type = TREE_TYPE (op0);
13302 	tree outer_type = type;
13303 
13304 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13305 		&& tree_expr_nonzero_warnv_p (op0,
13306 					      strict_overflow_p));
13307       }
13308       break;
13309 
13310     case NON_LVALUE_EXPR:
13311       return tree_expr_nonzero_warnv_p (op0,
13312 					strict_overflow_p);
13313 
13314     default:
13315       break;
13316   }
13317 
13318   return false;
13319 }
13320 
13321 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13322    For floating point we further ensure that T is not denormal.
13323    For floating point we further ensure that it is not denormal.
13324 
13325    If the return value is based on the assumption that signed overflow
13326    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13327    change *STRICT_OVERFLOW_P.  */
13328 
13329 bool
13330 tree_binary_nonzero_warnv_p (enum tree_code code,
13331 			     tree type,
13332 			     tree op0,
13333 			     tree op1, bool *strict_overflow_p)
13334 {
13335   bool sub_strict_overflow_p;
13336   switch (code)
13337     {
13338     case POINTER_PLUS_EXPR:
13339     case PLUS_EXPR:
13340       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13341 	{
13342 	  /* In the presence of negative values it is hard
13343 	     to say anything.  */
13344 	  sub_strict_overflow_p = false;
13345 	  if (!tree_expr_nonnegative_warnv_p (op0,
13346 					      &sub_strict_overflow_p)
13347 	      || !tree_expr_nonnegative_warnv_p (op1,
13348 						 &sub_strict_overflow_p))
13349 	    return false;
13350 	  /* One of the operands must be positive and the other non-negative.  */
13351 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13352 	     overflows, on a two's-complement machine the sum of two
13353 	     nonnegative numbers can never be zero.  */
13354 	  return (tree_expr_nonzero_warnv_p (op0,
13355 					     strict_overflow_p)
13356 		  || tree_expr_nonzero_warnv_p (op1,
13357 						strict_overflow_p));
13358 	}
13359       break;
13360 
13361     case MULT_EXPR:
13362       if (TYPE_OVERFLOW_UNDEFINED (type))
13363 	{
13364 	  if (tree_expr_nonzero_warnv_p (op0,
13365 					 strict_overflow_p)
13366 	      && tree_expr_nonzero_warnv_p (op1,
13367 					    strict_overflow_p))
13368 	    {
13369 	      *strict_overflow_p = true;
13370 	      return true;
13371 	    }
13372 	}
13373       break;
13374 
13375     case MIN_EXPR:
13376       sub_strict_overflow_p = false;
13377       if (tree_expr_nonzero_warnv_p (op0,
13378 				     &sub_strict_overflow_p)
13379 	  && tree_expr_nonzero_warnv_p (op1,
13380 					&sub_strict_overflow_p))
13381 	{
13382 	  if (sub_strict_overflow_p)
13383 	    *strict_overflow_p = true;
13384 	}
13385       break;
13386 
13387     case MAX_EXPR:
13388       sub_strict_overflow_p = false;
13389       if (tree_expr_nonzero_warnv_p (op0,
13390 				     &sub_strict_overflow_p))
13391 	{
13392 	  if (sub_strict_overflow_p)
13393 	    *strict_overflow_p = true;
13394 
13395 	  /* When both operands are nonzero, then MAX must be too.  */
13396 	  if (tree_expr_nonzero_warnv_p (op1,
13397 					 strict_overflow_p))
13398 	    return true;
13399 
13400 	  /* MAX where operand 0 is positive is positive.  */
13401 	  return tree_expr_nonnegative_warnv_p (op0,
13402 					       strict_overflow_p);
13403 	}
13404       /* MAX where operand 1 is positive is positive.  */
13405       else if (tree_expr_nonzero_warnv_p (op1,
13406 					  &sub_strict_overflow_p)
13407 	       && tree_expr_nonnegative_warnv_p (op1,
13408 						 &sub_strict_overflow_p))
13409 	{
13410 	  if (sub_strict_overflow_p)
13411 	    *strict_overflow_p = true;
13412 	  return true;
13413 	}
13414       break;
13415 
13416     case BIT_IOR_EXPR:
13417       return (tree_expr_nonzero_warnv_p (op1,
13418 					 strict_overflow_p)
13419 	      || tree_expr_nonzero_warnv_p (op0,
13420 					    strict_overflow_p));
13421 
13422     default:
13423       break;
13424   }
13425 
13426   return false;
13427 }
13428 
13429 /* Return true when T is an address and is known to be nonzero.
13430    For floating point we further ensure that T is not denormal.
13431    Similar logic is present in nonzero_address in rtlanal.h.
13432 
13433    If the return value is based on the assumption that signed overflow
13434    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13435    change *STRICT_OVERFLOW_P.  */
13436 
13437 bool
13438 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13439 {
13440   bool sub_strict_overflow_p;
13441   switch (TREE_CODE (t))
13442     {
13443     case INTEGER_CST:
13444       return !integer_zerop (t);
13445 
13446     case ADDR_EXPR:
13447       {
13448 	tree base = TREE_OPERAND (t, 0);
13449 
13450 	if (!DECL_P (base))
13451 	  base = get_base_address (base);
13452 
13453 	if (base && TREE_CODE (base) == TARGET_EXPR)
13454 	  base = TARGET_EXPR_SLOT (base);
13455 
13456 	if (!base)
13457 	  return false;
13458 
13459 	/* For objects in the symbol table, check whether we know they are nonzero.
13460 	   Don't do anything for variables and functions before the symtab is built;
13461 	   it is quite possible that they will be declared weak later.  */
13462 	int nonzero_addr = maybe_nonzero_address (base);
13463 	if (nonzero_addr >= 0)
13464 	  return nonzero_addr;
13465 
13466 	/* Constants are never weak.  */
13467 	if (CONSTANT_CLASS_P (base))
13468 	  return true;
13469 
13470 	return false;
13471       }
13472 
13473     case COND_EXPR:
13474       sub_strict_overflow_p = false;
13475       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13476 				     &sub_strict_overflow_p)
13477 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13478 					&sub_strict_overflow_p))
13479 	{
13480 	  if (sub_strict_overflow_p)
13481 	    *strict_overflow_p = true;
13482 	  return true;
13483 	}
13484       break;
13485 
13486     case SSA_NAME:
13487       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13488 	break;
13489       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13490 
13491     default:
13492       break;
13493     }
13494   return false;
13495 }
13496 
13497 #define integer_valued_real_p(X) \
13498   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13499 
13500 #define RECURSE(X) \
13501   ((integer_valued_real_p) (X, depth + 1))
13502 
13503 /* Return true if the floating point result of (CODE OP0) has an
13504    integer value.  We also allow +Inf, -Inf and NaN to be considered
13505    integer values. Return false for signaling NaN.
13506 
13507    DEPTH is the current nesting depth of the query.  */
13508 
13509 bool
13510 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13511 {
13512   switch (code)
13513     {
13514     case FLOAT_EXPR:
13515       return true;
13516 
13517     case ABS_EXPR:
13518       return RECURSE (op0);
13519 
13520     CASE_CONVERT:
13521       {
13522 	tree type = TREE_TYPE (op0);
13523 	if (TREE_CODE (type) == INTEGER_TYPE)
13524 	  return true;
13525 	if (TREE_CODE (type) == REAL_TYPE)
13526 	  return RECURSE (op0);
13527 	break;
13528       }
13529 
13530     default:
13531       break;
13532     }
13533   return false;
13534 }
13535 
13536 /* Return true if the floating point result of (CODE OP0 OP1) has an
13537    integer value.  We also allow +Inf, -Inf and NaN to be considered
13538    integer values. Return false for signaling NaN.
13539 
13540    DEPTH is the current nesting depth of the query.  */
13541 
13542 bool
13543 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13544 {
13545   switch (code)
13546     {
13547     case PLUS_EXPR:
13548     case MINUS_EXPR:
13549     case MULT_EXPR:
13550     case MIN_EXPR:
13551     case MAX_EXPR:
13552       return RECURSE (op0) && RECURSE (op1);
13553 
13554     default:
13555       break;
13556     }
13557   return false;
13558 }
13559 
13560 /* Return true if the floating point result of calling FN with arguments
13561    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13562    considered integer values.  Return false for signaling NaN.  If FN
13563    takes fewer than 2 arguments, the remaining ARGn are null.
13564 
13565    DEPTH is the current nesting depth of the query.  */
13566 
13567 bool
13568 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13569 {
13570   switch (fn)
13571     {
13572     CASE_CFN_CEIL:
13573     CASE_CFN_CEIL_FN:
13574     CASE_CFN_FLOOR:
13575     CASE_CFN_FLOOR_FN:
13576     CASE_CFN_NEARBYINT:
13577     CASE_CFN_NEARBYINT_FN:
13578     CASE_CFN_RINT:
13579     CASE_CFN_RINT_FN:
13580     CASE_CFN_ROUND:
13581     CASE_CFN_ROUND_FN:
13582     CASE_CFN_TRUNC:
13583     CASE_CFN_TRUNC_FN:
13584       return true;
13585 
13586     CASE_CFN_FMIN:
13587     CASE_CFN_FMIN_FN:
13588     CASE_CFN_FMAX:
13589     CASE_CFN_FMAX_FN:
13590       return RECURSE (arg0) && RECURSE (arg1);
13591 
13592     default:
13593       break;
13594     }
13595   return false;
13596 }
13597 
13598 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13599    has an integer value.  We also allow +Inf, -Inf and NaN to be
13600    considered integer values. Return false for signaling NaN.
13601 
13602    DEPTH is the current nesting depth of the query.  */
13603 
13604 bool
13605 integer_valued_real_single_p (tree t, int depth)
13606 {
13607   switch (TREE_CODE (t))
13608     {
13609     case REAL_CST:
13610       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13611 
13612     case COND_EXPR:
13613       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13614 
13615     case SSA_NAME:
13616       /* Limit the depth of recursion to avoid quadratic behavior.
13617 	 This is expected to catch almost all occurrences in practice.
13618 	 If this code misses important cases that unbounded recursion
13619 	 would not, passes that need this information could be revised
13620 	 to provide it through dataflow propagation.  */
13621       return (!name_registered_for_update_p (t)
13622 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13623 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13624 						    depth));
13625 
13626     default:
13627       break;
13628     }
13629   return false;
13630 }
13631 
13632 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13633    has an integer value.  We also allow +Inf, -Inf and NaN to be
13634    considered integer values. Return false for signaling NaN.
13635 
13636    DEPTH is the current nesting depth of the query.  */
13637 
13638 static bool
13639 integer_valued_real_invalid_p (tree t, int depth)
13640 {
13641   switch (TREE_CODE (t))
13642     {
13643     case COMPOUND_EXPR:
13644     case MODIFY_EXPR:
13645     case BIND_EXPR:
13646       return RECURSE (TREE_OPERAND (t, 1));
13647 
13648     case SAVE_EXPR:
13649       return RECURSE (TREE_OPERAND (t, 0));
13650 
13651     default:
13652       break;
13653     }
13654   return false;
13655 }
13656 
13657 #undef RECURSE
13658 #undef integer_valued_real_p
13659 
13660 /* Return true if the floating point expression T has an integer value.
13661    We also allow +Inf, -Inf and NaN to be considered integer values.
13662    Return false for signaling NaN.
13663 
13664    DEPTH is the current nesting depth of the query.  */
13665 
13666 bool
13667 integer_valued_real_p (tree t, int depth)
13668 {
13669   if (t == error_mark_node)
13670     return false;
13671 
13672   tree_code code = TREE_CODE (t);
13673   switch (TREE_CODE_CLASS (code))
13674     {
13675     case tcc_binary:
13676     case tcc_comparison:
13677       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13678 					   TREE_OPERAND (t, 1), depth);
13679 
13680     case tcc_unary:
13681       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13682 
13683     case tcc_constant:
13684     case tcc_declaration:
13685     case tcc_reference:
13686       return integer_valued_real_single_p (t, depth);
13687 
13688     default:
13689       break;
13690     }
13691 
13692   switch (code)
13693     {
13694     case COND_EXPR:
13695     case SSA_NAME:
13696       return integer_valued_real_single_p (t, depth);
13697 
13698     case CALL_EXPR:
13699       {
13700 	tree arg0 = (call_expr_nargs (t) > 0
13701 		     ? CALL_EXPR_ARG (t, 0)
13702 		     : NULL_TREE);
13703 	tree arg1 = (call_expr_nargs (t) > 1
13704 		     ? CALL_EXPR_ARG (t, 1)
13705 		     : NULL_TREE);
13706 	return integer_valued_real_call_p (get_call_combined_fn (t),
13707 					   arg0, arg1, depth);
13708       }
13709 
13710     default:
13711       return integer_valued_real_invalid_p (t, depth);
13712     }
13713 }
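
/* For example, floor (x) + 1.0 is integer valued for any double X:
   the CALL_EXPR arm accepts CASE_CFN_FLOOR, the REAL_CST 1.0
   satisfies real_isinteger, and the PLUS_EXPR arm of
   integer_valued_real_binary_p requires both.  */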
13714 
13715 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13716    attempt to fold the expression to a constant without modifying TYPE,
13717    OP0 or OP1.
13718 
13719    If the expression could be simplified to a constant, then return
13720    the constant.  If the expression would not be simplified to a
13721    constant, then return NULL_TREE.  */
13722 
13723 tree
13724 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13725 {
13726   tree tem = fold_binary (code, type, op0, op1);
13727   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13728 }
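
/* E.g. (a minimal sketch):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  two, three);

   yields the INTEGER_CST 5, whereas replacing THREE with a reference
   to a VAR_DECL makes the result NULL_TREE.  */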
13729 
13730 /* Given the components of a unary expression CODE, TYPE and OP0,
13731    attempt to fold the expression to a constant without modifying
13732    TYPE or OP0.
13733 
13734    If the expression could be simplified to a constant, then return
13735    the constant.  If the expression would not be simplified to a
13736    constant, then return NULL_TREE.  */
13737 
13738 tree
13739 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13740 {
13741   tree tem = fold_unary (code, type, op0);
13742   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13743 }
13744 
13745 /* If EXP represents referencing an element in a constant string
13746    (either via pointer arithmetic or array indexing), return the
13747    tree representing the value accessed, otherwise return NULL.  */
13748 
13749 tree
13750 fold_read_from_constant_string (tree exp)
13751 {
13752   if ((TREE_CODE (exp) == INDIRECT_REF
13753        || TREE_CODE (exp) == ARRAY_REF)
13754       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13755     {
13756       tree exp1 = TREE_OPERAND (exp, 0);
13757       tree index;
13758       tree string;
13759       location_t loc = EXPR_LOCATION (exp);
13760 
13761       if (TREE_CODE (exp) == INDIRECT_REF)
13762 	string = string_constant (exp1, &index);
13763       else
13764 	{
13765 	  tree low_bound = array_ref_low_bound (exp);
13766 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13767 
13768 	  /* Optimize the special case of a zero lower bound.
13769 
13770 	     We convert the low_bound to sizetype to avoid some problems
13771 	     with constant folding.  (E.g. suppose the lower bound is 1,
13772 	     and its mode is QI.  Without the conversion, (ARRAY
13773 	     + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
13774 	     + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
13775 	  if (! integer_zerop (low_bound))
13776 	    index = size_diffop_loc (loc, index,
13777 				 fold_convert_loc (loc, sizetype, low_bound));
13778 
13779 	  string = exp1;
13780 	}
13781 
13782       scalar_int_mode char_mode;
13783       if (string
13784 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13785 	  && TREE_CODE (string) == STRING_CST
13786 	  && TREE_CODE (index) == INTEGER_CST
13787 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13788 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13789 			  &char_mode)
13790 	  && GET_MODE_SIZE (char_mode) == 1)
13791 	return build_int_cst_type (TREE_TYPE (exp),
13792 				   (TREE_STRING_POINTER (string)
13793 				    [TREE_INT_CST_LOW (index)]));
13794     }
13795   return NULL;
13796 }
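
/* For instance, given the ARRAY_REF "hello"[1] (a STRING_CST indexed
   by the INTEGER_CST 1, with a zero lower bound), the function returns
   build_int_cst_type (TREE_TYPE (exp), 'e').  */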
13797 
13798 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13799    an integer constant, real, or fixed-point constant.
13800 
13801    TYPE is the type of the result.  */
13802 
13803 static tree
13804 fold_negate_const (tree arg0, tree type)
13805 {
13806   tree t = NULL_TREE;
13807 
13808   switch (TREE_CODE (arg0))
13809     {
13810     case REAL_CST:
13811       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13812       break;
13813 
13814     case FIXED_CST:
13815       {
13816         FIXED_VALUE_TYPE f;
13817         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13818 					    &(TREE_FIXED_CST (arg0)), NULL,
13819 					    TYPE_SATURATING (type));
13820 	t = build_fixed (type, f);
13821 	/* Propagate overflow flags.  */
13822 	if (overflow_p | TREE_OVERFLOW (arg0))
13823 	  TREE_OVERFLOW (t) = 1;
13824 	break;
13825       }
13826 
13827     default:
13828       if (poly_int_tree_p (arg0))
13829 	{
13830 	  bool overflow;
13831 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13832 	  t = force_fit_type (type, res, 1,
13833 			      (overflow && ! TYPE_UNSIGNED (type))
13834 			      || TREE_OVERFLOW (arg0));
13835 	  break;
13836 	}
13837 
13838       gcc_unreachable ();
13839     }
13840 
13841   return t;
13842 }
13843 
13844 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13845    an integer constant or real constant.
13846 
13847    TYPE is the type of the result.  */
13848 
13849 tree
13850 fold_abs_const (tree arg0, tree type)
13851 {
13852   tree t = NULL_TREE;
13853 
13854   switch (TREE_CODE (arg0))
13855     {
13856     case INTEGER_CST:
13857       {
13858         /* If the value is unsigned or non-negative, then the absolute value
13859 	   is the same as the ordinary value.  */
13860 	if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13861 	  t = arg0;
13862 
13863 	/* If the value is negative, then the absolute value is
13864 	   its negation.  */
13865 	else
13866 	  {
13867 	    bool overflow;
13868 	    wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13869 	    t = force_fit_type (type, val, -1,
13870 				overflow | TREE_OVERFLOW (arg0));
13871 	  }
13872       }
13873       break;
13874 
13875     case REAL_CST:
13876       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13877 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13878       else
13879 	t = arg0;
13880       break;
13881 
13882     default:
13883       gcc_unreachable ();
13884     }
13885 
13886   return t;
13887 }
13888 
13889 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13890    constant.  TYPE is the type of the result.  */
13891 
13892 static tree
13893 fold_not_const (const_tree arg0, tree type)
13894 {
13895   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13896 
13897   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13898 }
13899 
13900 /* Given CODE, a relational operator, the target type, TYPE and two
13901    constant operands OP0 and OP1, return the result of the
13902    relational operation.  If the result is not a compile time
13903    constant, then return NULL_TREE.  */
13904 
13905 static tree
13906 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13907 {
13908   int result, invert;
13909 
13910   /* From here on, the only cases we handle are when the result is
13911      known to be a constant.  */
13912 
13913   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13914     {
13915       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13916       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13917 
13918       /* Handle the cases where either operand is a NaN.  */
13919       if (real_isnan (c0) || real_isnan (c1))
13920 	{
13921 	  switch (code)
13922 	    {
13923 	    case EQ_EXPR:
13924 	    case ORDERED_EXPR:
13925 	      result = 0;
13926 	      break;
13927 
13928 	    case NE_EXPR:
13929 	    case UNORDERED_EXPR:
13930 	    case UNLT_EXPR:
13931 	    case UNLE_EXPR:
13932 	    case UNGT_EXPR:
13933 	    case UNGE_EXPR:
13934 	    case UNEQ_EXPR:
13935               result = 1;
13936 	      break;
13937 
13938 	    case LT_EXPR:
13939 	    case LE_EXPR:
13940 	    case GT_EXPR:
13941 	    case GE_EXPR:
13942 	    case LTGT_EXPR:
13943 	      if (flag_trapping_math)
13944 		return NULL_TREE;
13945 	      result = 0;
13946 	      break;
13947 
13948 	    default:
13949 	      gcc_unreachable ();
13950 	    }
13951 
13952 	  return constant_boolean_node (result, type);
13953 	}
13954 
13955       return constant_boolean_node (real_compare (code, c0, c1), type);
13956     }
13957 
13958   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13959     {
13960       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13961       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13962       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13963     }
13964 
13965   /* Handle equality/inequality of complex constants.  */
13966   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13967     {
13968       tree rcond = fold_relational_const (code, type,
13969 					  TREE_REALPART (op0),
13970 					  TREE_REALPART (op1));
13971       tree icond = fold_relational_const (code, type,
13972 					  TREE_IMAGPART (op0),
13973 					  TREE_IMAGPART (op1));
13974       if (code == EQ_EXPR)
13975 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13976       else if (code == NE_EXPR)
13977 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13978       else
13979 	return NULL_TREE;
13980     }
13981 
13982   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13983     {
13984       if (!VECTOR_TYPE_P (type))
13985 	{
13986 	  /* We have a vector comparison with a scalar boolean result.  */
13987 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13988 		      && known_eq (VECTOR_CST_NELTS (op0),
13989 				   VECTOR_CST_NELTS (op1)));
13990 	  unsigned HOST_WIDE_INT nunits;
13991 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
13992 	    return NULL_TREE;
13993 	  for (unsigned i = 0; i < nunits; i++)
13994 	    {
13995 	      tree elem0 = VECTOR_CST_ELT (op0, i);
13996 	      tree elem1 = VECTOR_CST_ELT (op1, i);
13997 	      tree tmp = fold_relational_const (code, type, elem0, elem1);
13998 	      if (tmp == NULL_TREE)
13999 		return NULL_TREE;
14000 	      if (integer_zerop (tmp))
14001 		return constant_boolean_node (false, type);
14002 	    }
14003 	  return constant_boolean_node (true, type);
14004 	}
14005       tree_vector_builder elts;
14006       if (!elts.new_binary_operation (type, op0, op1, false))
14007 	return NULL_TREE;
14008       unsigned int count = elts.encoded_nelts ();
14009       for (unsigned i = 0; i < count; i++)
14010 	{
14011 	  tree elem_type = TREE_TYPE (type);
14012 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14013 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14014 
14015 	  tree tem = fold_relational_const (code, elem_type,
14016 					    elem0, elem1);
14017 
14018 	  if (tem == NULL_TREE)
14019 	    return NULL_TREE;
14020 
14021 	  elts.quick_push (build_int_cst (elem_type,
14022 					  integer_zerop (tem) ? 0 : -1));
14023 	}
14024 
14025       return elts.build ();
14026     }
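
/* To illustrate the two vector paths above: for op0 = {1,2,3,4} and
   op1 = {1,2,3,4}, an EQ_EXPR with scalar boolean TYPE folds to true
   (every lane compares equal), while an EQ_EXPR with a vector TYPE
   folds to the all-ones mask {-1,-1,-1,-1}; a single mismatching
   lane gives false in the first case and a 0 in the corresponding
   lane in the second.  */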
14027 
14028   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14029 
14030      To compute GT, swap the arguments and do LT.
14031      To compute GE, do LT and invert the result.
14032      To compute LE, swap the arguments, do LT and invert the result.
14033      To compute NE, do EQ and invert the result.
14034 
14035      Therefore, the code below must handle only EQ and LT.  */
14036 
14037   if (code == LE_EXPR || code == GT_EXPR)
14038     {
14039       std::swap (op0, op1);
14040       code = swap_tree_comparison (code);
14041     }
14042 
14043   /* Note that it is safe to invert for real values here because we
14044      have already handled the one case where it matters.  */
14045 
14046   invert = 0;
14047   if (code == NE_EXPR || code == GE_EXPR)
14048     {
14049       invert = 1;
14050       code = invert_tree_comparison (code, false);
14051     }
14052 
14053   /* Compute a result for LT or EQ if args permit;
14054      otherwise return NULL_TREE.  */
14055   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14056     {
14057       if (code == EQ_EXPR)
14058 	result = tree_int_cst_equal (op0, op1);
14059       else
14060 	result = tree_int_cst_lt (op0, op1);
14061     }
14062   else
14063     return NULL_TREE;
14064 
14065   if (invert)
14066     result ^= 1;
14067   return constant_boolean_node (result, type);
14068 }
14069 
14070 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14071    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14072    itself.  */
14073 
14074 tree
14075 fold_build_cleanup_point_expr (tree type, tree expr)
14076 {
14077   /* If the expression does not have side effects then we don't have to wrap
14078      it with a cleanup point expression.  */
14079   if (!TREE_SIDE_EFFECTS (expr))
14080     return expr;
14081 
14082   /* If the expression is a return, check whether the expression inside the
14083      return, or the right-hand side of the modify expression inside the
14084      return, has no side effects.  If either has none, we don't need to
14085      wrap the expression in a cleanup point expression.  Note we don't check
14086      the left-hand side of the modify because it should always be a return decl.  */
14087   if (TREE_CODE (expr) == RETURN_EXPR)
14088     {
14089       tree op = TREE_OPERAND (expr, 0);
14090       if (!op || !TREE_SIDE_EFFECTS (op))
14091         return expr;
14092       op = TREE_OPERAND (op, 1);
14093       if (!TREE_SIDE_EFFECTS (op))
14094         return expr;
14095     }
14096 
14097   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14098 }
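
/* For instance, 'return f ();' -- a RETURN_EXPR whose MODIFY_EXPR has
   a side-effecting call on its right-hand side -- gets wrapped, whereas
   'return x;' for a plain variable X is returned unchanged.  */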
14099 
14100 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14101    of an indirection through OP0, or NULL_TREE if no simplification is
14102    possible.  */
14103 
14104 tree
14105 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14106 {
14107   tree sub = op0;
14108   tree subtype;
14109   poly_uint64 const_op01;
14110 
14111   STRIP_NOPS (sub);
14112   subtype = TREE_TYPE (sub);
14113   if (!POINTER_TYPE_P (subtype)
14114       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14115     return NULL_TREE;
14116 
14117   if (TREE_CODE (sub) == ADDR_EXPR)
14118     {
14119       tree op = TREE_OPERAND (sub, 0);
14120       tree optype = TREE_TYPE (op);
14121 
14122       /* *&CONST_DECL -> to the value of the const decl.  */
14123       /* *&CONST_DECL -> the value of the const decl.  */
14124 	return DECL_INITIAL (op);
14125       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14126       if (type == optype)
14127 	{
14128 	  tree fop = fold_read_from_constant_string (op);
14129 	  if (fop)
14130 	    return fop;
14131 	  else
14132 	    return op;
14133 	}
14134       /* *(foo *)&fooarray => fooarray[0] */
14135       else if (TREE_CODE (optype) == ARRAY_TYPE
14136 	       && type == TREE_TYPE (optype)
14137 	       && (!in_gimple_form
14138 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14139 	{
14140 	  tree type_domain = TYPE_DOMAIN (optype);
14141 	  tree min_val = size_zero_node;
14142 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14143 	    min_val = TYPE_MIN_VALUE (type_domain);
14144 	  if (in_gimple_form
14145 	      && TREE_CODE (min_val) != INTEGER_CST)
14146 	    return NULL_TREE;
14147 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14148 			     NULL_TREE, NULL_TREE);
14149 	}
14150       /* *(foo *)&complexfoo => __real__ complexfoo */
14151       else if (TREE_CODE (optype) == COMPLEX_TYPE
14152 	       && type == TREE_TYPE (optype))
14153 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14154       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14155       else if (VECTOR_TYPE_P (optype)
14156 	       && type == TREE_TYPE (optype))
14157 	{
14158 	  tree part_width = TYPE_SIZE (type);
14159 	  tree index = bitsize_int (0);
14160 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14161 				  index);
14162 	}
14163     }
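
/* Concrete instances of the ADDR_EXPR cases above, assuming
   'float f[4]', '_Complex float c' and a 4 x float vector 'v':

     *(float *) &f   =>  f[0]
     *(float *) &c   =>  __real__ c
     *(float *) &v   =>  BIT_FIELD_REF <v, 32, 0>  */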
14164 
14165   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14166       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14167     {
14168       tree op00 = TREE_OPERAND (sub, 0);
14169       tree op01 = TREE_OPERAND (sub, 1);
14170 
14171       STRIP_NOPS (op00);
14172       if (TREE_CODE (op00) == ADDR_EXPR)
14173 	{
14174 	  tree op00type;
14175 	  op00 = TREE_OPERAND (op00, 0);
14176 	  op00type = TREE_TYPE (op00);
14177 
14178 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14179 	  if (VECTOR_TYPE_P (op00type)
14180 	      && type == TREE_TYPE (op00type)
14181 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14182 		 but we want to treat offsets with MSB set as negative.
14183 		 For the code below negative offsets are invalid and
14184 		 TYPE_SIZE of the element is something unsigned, so
14185 		 check whether op01 fits into poly_int64, which implies
14186 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14187 		 then just use poly_uint64 because we want to treat the
14188 		 value as unsigned.  */
14189 	      && tree_fits_poly_int64_p (op01))
14190 	    {
14191 	      tree part_width = TYPE_SIZE (type);
14192 	      poly_uint64 max_offset
14193 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14194 		   * TYPE_VECTOR_SUBPARTS (op00type));
14195 	      if (known_lt (const_op01, max_offset))
14196 		{
14197 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14198 		  return fold_build3_loc (loc,
14199 					  BIT_FIELD_REF, type, op00,
14200 					  part_width, index);
14201 		}
14202 	    }
14203 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14204 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14205 		   && type == TREE_TYPE (op00type))
14206 	    {
14207 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14208 			    const_op01))
14209 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14210 	    }
14211 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14212 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14213 		   && type == TREE_TYPE (op00type))
14214 	    {
14215 	      tree type_domain = TYPE_DOMAIN (op00type);
14216 	      tree min_val = size_zero_node;
14217 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14218 		min_val = TYPE_MIN_VALUE (type_domain);
14219 	      offset_int off = wi::to_offset (op01);
14220 	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14221 	      offset_int remainder;
14222 	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14223 	      if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
14224 		{
14225 		  off = off + wi::to_offset (min_val);
14226 		  op01 = wide_int_to_tree (sizetype, off);
14227 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
14228 				     NULL_TREE, NULL_TREE);
14229 		}
14230 	    }
14231 	}
14232     }
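
/* A concrete instance of the POINTER_PLUS handling above, assuming
   4-byte int and 'int a[4]':

     *((int *) &a + 8)  =>  a[2]   (8 / 4 == 2, remainder 0)

   while an offset of 6 leaves a nonzero remainder, so no
   simplification is done.  */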
14233 
14234   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14235   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14236       && type == TREE_TYPE (TREE_TYPE (subtype))
14237       && (!in_gimple_form
14238 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14239     {
14240       tree type_domain;
14241       tree min_val = size_zero_node;
14242       sub = build_fold_indirect_ref_loc (loc, sub);
14243       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14244       if (type_domain && TYPE_MIN_VALUE (type_domain))
14245 	min_val = TYPE_MIN_VALUE (type_domain);
14246       if (in_gimple_form
14247 	  && TREE_CODE (min_val) != INTEGER_CST)
14248 	return NULL_TREE;
14249       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14250 			 NULL_TREE);
14251     }
14252 
14253   return NULL_TREE;
14254 }
14255 
14256 /* Builds an expression for an indirection through T, simplifying some
14257    cases.  */
14258 
14259 tree
14260 build_fold_indirect_ref_loc (location_t loc, tree t)
14261 {
14262   tree type = TREE_TYPE (TREE_TYPE (t));
14263   tree sub = fold_indirect_ref_1 (loc, type, t);
14264 
14265   if (sub)
14266     return sub;
14267 
14268   return build1_loc (loc, INDIRECT_REF, type, t);
14269 }
14270 
14271 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14272 
14273 tree
14274 fold_indirect_ref_loc (location_t loc, tree t)
14275 {
14276   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14277 
14278   if (sub)
14279     return sub;
14280   else
14281     return t;
14282 }
14283 
14284 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14285    whose result is ignored.  The type of the returned tree need not be
14286    the same as the original expression.  */
14287 
14288 tree
14289 fold_ignored_result (tree t)
14290 {
14291   if (!TREE_SIDE_EFFECTS (t))
14292     return integer_zero_node;
14293 
14294   for (;;)
14295     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14296       {
14297       case tcc_unary:
14298 	t = TREE_OPERAND (t, 0);
14299 	break;
14300 
14301       case tcc_binary:
14302       case tcc_comparison:
14303 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14304 	  t = TREE_OPERAND (t, 0);
14305 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14306 	  t = TREE_OPERAND (t, 1);
14307 	else
14308 	  return t;
14309 	break;
14310 
14311       case tcc_expression:
14312 	switch (TREE_CODE (t))
14313 	  {
14314 	  case COMPOUND_EXPR:
14315 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14316 	      return t;
14317 	    t = TREE_OPERAND (t, 0);
14318 	    break;
14319 
14320 	  case COND_EXPR:
14321 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14322 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14323 	      return t;
14324 	    t = TREE_OPERAND (t, 0);
14325 	    break;
14326 
14327 	  default:
14328 	    return t;
14329 	  }
14330 	break;
14331 
14332       default:
14333 	return t;
14334       }
14335 }
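
/* E.g. for '(void) (x + f ())' only 'f ()' is kept; for
   '(void) (f () + g ())' both operands have side effects, so the
   expression is returned whole; and an expression with no side
   effects at all collapses to integer_zero_node.  */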
14336 
14337 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */
14338 
14339 tree
14340 round_up_loc (location_t loc, tree value, unsigned int divisor)
14341 {
14342   tree div = NULL_TREE;
14343 
14344   if (divisor == 1)
14345     return value;
14346 
14347   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14348      have to do anything.  Only do this check when VALUE is not a
14349      constant, because for a constant it is more expensive than simply
14350      performing the rounding.  */
14351   if (TREE_CODE (value) != INTEGER_CST)
14352     {
14353       div = build_int_cst (TREE_TYPE (value), divisor);
14354 
14355       if (multiple_of_p (TREE_TYPE (value), value, div))
14356 	return value;
14357     }
14358 
14359   /* If divisor is a power of two, simplify this to bit manipulation.  */
14360   if (pow2_or_zerop (divisor))
14361     {
14362       if (TREE_CODE (value) == INTEGER_CST)
14363 	{
14364 	  wide_int val = wi::to_wide (value);
14365 	  bool overflow_p;
14366 
14367 	  if ((val & (divisor - 1)) == 0)
14368 	    return value;
14369 
14370 	  overflow_p = TREE_OVERFLOW (value);
14371 	  val += divisor - 1;
14372 	  val &= (int) -divisor;
14373 	  if (val == 0)
14374 	    overflow_p = true;
14375 
14376 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14377 	}
14378       else
14379 	{
14380 	  tree t;
14381 
14382 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14383 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14384 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14385 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14386 	}
14387     }
14388   else
14389     {
14390       if (!div)
14391 	div = build_int_cst (TREE_TYPE (value), divisor);
14392       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14393       value = size_binop_loc (loc, MULT_EXPR, value, div);
14394     }
14395 
14396   return value;
14397 }
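
/* A standalone sketch of the power-of-two fast path above (plain C
   on unsigned values, not GCC trees; DIVISOR must be a power of two):

     unsigned round_up_pow2 (unsigned value, unsigned divisor)
     {
       return (value + divisor - 1) & -divisor;
     }

   e.g. round_up_pow2 (13, 8) == 16 and round_up_pow2 (16, 8) == 16,
   while a non-power-of-two divisor goes through the CEIL_DIV_EXPR /
   MULT_EXPR pair, e.g. round_up (13, 6) == 18.  */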
14398 
14399 /* Likewise, but round down.  */
14400 
14401 tree
14402 round_down_loc (location_t loc, tree value, int divisor)
14403 {
14404   tree div = NULL_TREE;
14405 
14406   gcc_assert (divisor > 0);
14407   if (divisor == 1)
14408     return value;
14409 
14410   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14411      have to do anything.  Only do this check when VALUE is not a
14412      constant, because for a constant it is more expensive than simply
14413      performing the rounding.  */
14414   if (TREE_CODE (value) != INTEGER_CST)
14415     {
14416       div = build_int_cst (TREE_TYPE (value), divisor);
14417 
14418       if (multiple_of_p (TREE_TYPE (value), value, div))
14419 	return value;
14420     }
14421 
14422   /* If divisor is a power of two, simplify this to bit manipulation.  */
14423   if (pow2_or_zerop (divisor))
14424     {
14425       tree t;
14426 
14427       t = build_int_cst (TREE_TYPE (value), -divisor);
14428       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14429     }
14430   else
14431     {
14432       if (!div)
14433 	div = build_int_cst (TREE_TYPE (value), divisor);
14434       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14435       value = size_binop_loc (loc, MULT_EXPR, value, div);
14436     }
14437 
14438   return value;
14439 }
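
/* Likewise for rounding down, e.g. with DIVISOR == 8:
     round_down (13, 8)  ==  13 & -8  ==  8
   and with the non-power-of-two DIVISOR == 6 the FLOOR_DIV_EXPR /
   MULT_EXPR pair gives (13 / 6) * 6 == 12.  */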
14440 
14441 /* Returns a pointer to the base of the object addressed by EXP and
14442    extracts the information about the offset of the access, storing it
14443    to *PBITPOS and *POFFSET.  */
14444 
14445 static tree
14446 split_address_to_core_and_offset (tree exp,
14447 				  poly_int64_pod *pbitpos, tree *poffset)
14448 {
14449   tree core;
14450   machine_mode mode;
14451   int unsignedp, reversep, volatilep;
14452   poly_int64 bitsize;
14453   location_t loc = EXPR_LOCATION (exp);
14454 
14455   if (TREE_CODE (exp) == ADDR_EXPR)
14456     {
14457       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14458 				  poffset, &mode, &unsignedp, &reversep,
14459 				  &volatilep);
14460       core = build_fold_addr_expr_loc (loc, core);
14461     }
14462   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14463     {
14464       core = TREE_OPERAND (exp, 0);
14465       STRIP_NOPS (core);
14466       *pbitpos = 0;
14467       *poffset = TREE_OPERAND (exp, 1);
14468       if (poly_int_tree_p (*poffset))
14469 	{
14470 	  poly_offset_int tem
14471 	    = wi::sext (wi::to_poly_offset (*poffset),
14472 			TYPE_PRECISION (TREE_TYPE (*poffset)));
14473 	  tem <<= LOG2_BITS_PER_UNIT;
14474 	  if (tem.to_shwi (pbitpos))
14475 	    *poffset = NULL_TREE;
14476 	}
14477     }
14478   else
14479     {
14480       core = exp;
14481       *pbitpos = 0;
14482       *poffset = NULL_TREE;
14483     }
14484 
14485   return core;
14486 }
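
/* E.g. splitting '&s.f' returns the core '&s' with *PBITPOS set to
   the constant bit offset of field F, while splitting 'p + n' for a
   non-constant N returns the core 'p' with *PBITPOS == 0 and
   *POFFSET == n.  */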
14487 
14488 /* Returns true if addresses of E1 and E2 differ by a constant, false
14489    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14490 
14491 bool
14492 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14493 {
14494   tree core1, core2;
14495   poly_int64 bitpos1, bitpos2;
14496   tree toffset1, toffset2, tdiff, type;
14497 
14498   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14499   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14500 
14501   poly_int64 bytepos1, bytepos2;
14502   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14503       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14504       || !operand_equal_p (core1, core2, 0))
14505     return false;
14506 
14507   if (toffset1 && toffset2)
14508     {
14509       type = TREE_TYPE (toffset1);
14510       if (type != TREE_TYPE (toffset2))
14511 	toffset2 = fold_convert (type, toffset2);
14512 
14513       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14514       if (!cst_and_fits_in_hwi (tdiff))
14515 	return false;
14516 
14517       *diff = int_cst_value (tdiff);
14518     }
14519   else if (toffset1 || toffset2)
14520     {
14521       /* If only one of the offsets is non-constant, the difference cannot
14522 	 be a constant.  */
14523       return false;
14524     }
14525   else
14526     *diff = 0;
14527 
14528   *diff += bytepos1 - bytepos2;
14529   return true;
14530 }
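
/* E.g. for 'int a[10]' (4-byte int), ptr_difference_const on &a[3]
   and &a[1] succeeds with *DIFF == 8: both addresses share the core
   &a and differ only in their constant byte positions.  Cores that
   fail operand_equal_p, such as &a versus &b, make it return
   false.  */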
14531 
14532 /* Return OFF converted to a pointer offset type suitable as offset for
14533    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14534 tree
14535 convert_to_ptrofftype_loc (location_t loc, tree off)
14536 {
14537   return fold_convert_loc (loc, sizetype, off);
14538 }
14539 
14540 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14541 tree
14542 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14543 {
14544   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14545 			  ptr, convert_to_ptrofftype_loc (loc, off));
14546 }
14547 
14548 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14549 tree
14550 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14551 {
14552   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14553 			  ptr, size_int (off));
14554 }
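
/* So for 'char *p', fold_build_pointer_plus_hwi_loc (loc, p, 4)
   builds (and folds where possible) POINTER_PLUS_EXPR <p, 4>, with
   the offset already in sizetype as POINTER_PLUS_EXPR requires.  */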
14555 
14556 /* Return a char pointer for a C string if it is a string constant
14557    or a sum of a string constant and an integer constant.  We only
14558    support string constants properly terminated with a '\0' character.
14559    If STRLEN is a valid pointer, the length (including the terminating
14560    character) of the returned string is stored to *STRLEN.  */
14561 
14562 const char *
14563 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14564 {
14565   tree offset_node;
14566 
14567   if (strlen)
14568     *strlen = 0;
14569 
14570   src = string_constant (src, &offset_node);
14571   if (src == 0)
14572     return NULL;
14573 
14574   unsigned HOST_WIDE_INT offset = 0;
14575   if (offset_node != NULL_TREE)
14576     {
14577       if (!tree_fits_uhwi_p (offset_node))
14578 	return NULL;
14579       else
14580 	offset = tree_to_uhwi (offset_node);
14581     }
14582 
14583   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14584   const char *string = TREE_STRING_POINTER (src);
14585 
14586   /* Support only properly null-terminated strings.  */
14587   if (string_length == 0
14588       || string[string_length - 1] != '\0'
14589       || offset >= string_length)
14590     return NULL;
14591 
14592   if (strlen)
14593     *strlen = string_length - offset;
14594   return string + offset;
14595 }
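
/* For example, given the tree for "hello" + 2, c_getstr returns a
   pointer to "llo" and stores 4 in *STRLEN (three characters plus
   the terminating NUL); an offset past the end of the string, or a
   string constant lacking a trailing '\0', yields NULL instead.  */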
14596 
14597 #if CHECKING_P
14598 
14599 namespace selftest {
14600 
14601 /* Helper functions for writing tests of folding trees.  */
14602 
14603 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14604 
14605 static void
14606 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14607 			     tree constant)
14608 {
14609   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14610 }
14611 
14612 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14613    wrapping WRAPPED_EXPR.  */
14614 
14615 static void
14616 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14617 				 tree wrapped_expr)
14618 {
14619   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14620   ASSERT_NE (wrapped_expr, result);
14621   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14622   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14623 }
14624 
14625 /* Verify that various arithmetic binary operations are folded
14626    correctly.  */
14627 
14628 static void
14629 test_arithmetic_folding ()
14630 {
14631   tree type = integer_type_node;
14632   tree x = create_tmp_var_raw (type, "x");
14633   tree zero = build_zero_cst (type);
14634   tree one = build_int_cst (type, 1);
14635 
14636   /* Addition.  */
14637   /* 1 <-- (0 + 1) */
14638   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14639 			       one);
14640   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14641 			       one);
14642 
14643   /* (nonlvalue)x <-- (x + 0) */
14644   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14645 				   x);
14646 
14647   /* Subtraction.  */
14648   /* 0 <-- (x - x) */
14649   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14650 			       zero);
14651   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14652 				   x);
14653 
14654   /* Multiplication.  */
14655   /* 0 <-- (x * 0) */
14656   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14657 			       zero);
14658 
14659   /* (nonlvalue)x <-- (x * 1) */
14660   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14661 				   x);
14662 }
14663 
14664 /* Verify that various binary operations on vectors are folded
14665    correctly.  */
14666 
14667 static void
14668 test_vector_folding ()
14669 {
14670   tree inner_type = integer_type_node;
14671   tree type = build_vector_type (inner_type, 4);
14672   tree zero = build_zero_cst (type);
14673   tree one = build_one_cst (type);
14674 
14675   /* Verify equality tests that return a scalar boolean result.  */
14676   tree res_type = boolean_type_node;
14677   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14678   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14679   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14680   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14681 }
14682 
14683 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
14684 
14685 static void
14686 test_vec_duplicate_folding ()
14687 {
14688   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14689   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14690   /* This will be 1 if VEC_MODE isn't a vector mode.  */
14691   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14692 
14693   tree type = build_vector_type (ssizetype, nunits);
14694   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14695   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14696   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14697 }
14698 
14699 /* Run all of the selftests within this file.  */
14700 
14701 void
14702 fold_const_c_tests ()
14703 {
14704   test_arithmetic_folding ();
14705   test_vector_folding ();
14706   test_vec_duplicate_folding ();
14707 }
14708 
14709 } // namespace selftest
14710 
14711 #endif /* CHECKING_P */
14712