xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/fold-const.c (revision e6c7e151de239c49d2e38720a061ed9d1fa99309)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /*@@ This file should be rewritten to use an arbitrary precision
21   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23   @@ The routines that translate from the ap rep should
24   @@ warn if precision et. al. is lost.
25   @@ This would also make life easier when this technology is used
26   @@ for cross-compilers.  */
27 
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29 
30    fold takes a tree as argument and returns a simplified tree.
31 
32    size_binop takes a tree code for an arithmetic operation
33    and two operands that are trees, and produces a tree for the
34    result, assuming the type comes from `sizetype'.
35 
36    size_int takes an integer value, and creates a tree constant
37    with type from `sizetype'.
38 
39    Note: Since the folders get called on non-gimple code as well as
40    gimple code, we need to handle GIMPLE tuples as well as their
41    corresponding tree equivalents.  */
42 
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 
83 /* Nonzero if we are folding constants inside an initializer; zero
84    otherwise.  */
85 int folding_initializer = 0;
86 
87 /* The following constants represent a bit based encoding of GCC's
88    comparison operators.  This encoding simplifies transformations
89    on relational comparison operators, such as AND and OR.  */
90 enum comparison_code {
91   COMPCODE_FALSE = 0,
92   COMPCODE_LT = 1,
93   COMPCODE_EQ = 2,
94   COMPCODE_LE = 3,
95   COMPCODE_GT = 4,
96   COMPCODE_LTGT = 5,
97   COMPCODE_GE = 6,
98   COMPCODE_ORD = 7,
99   COMPCODE_UNORD = 8,
100   COMPCODE_UNLT = 9,
101   COMPCODE_UNEQ = 10,
102   COMPCODE_UNLE = 11,
103   COMPCODE_UNGT = 12,
104   COMPCODE_NE = 13,
105   COMPCODE_UNGE = 14,
106   COMPCODE_TRUE = 15
107 };
108 
109 static bool negate_expr_p (tree);
110 static tree negate_expr (tree);
111 static tree split_tree (location_t, tree, tree, enum tree_code,
112 			tree *, tree *, tree *, int);
113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
114 static enum comparison_code comparison_to_compcode (enum tree_code);
115 static enum tree_code compcode_to_comparison (enum comparison_code);
116 static int operand_equal_for_comparison_p (tree, tree, tree);
117 static int twoval_comparison_p (tree, tree *, tree *, int *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 					tree, tree, tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 						 enum tree_code, tree,
133 						 tree, tree,
134 						 tree, tree, int);
135 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140 static tree fold_view_convert_expr (tree, tree);
141 static bool vec_cst_ctor_to_array (tree, tree *);
142 static tree fold_negate_expr (location_t, tree);
143 
144 
145 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
146    Otherwise, return LOC.  */
147 
148 static location_t
149 expr_location_or (tree t, location_t loc)
150 {
151   location_t tloc = EXPR_LOCATION (t);
152   return tloc == UNKNOWN_LOCATION ? loc : tloc;
153 }
154 
155 /* Similar to protected_set_expr_location, but never modify x in place,
156    if location can and needs to be set, unshare it.  */
157 
158 static inline tree
159 protected_set_expr_location_unshare (tree x, location_t loc)
160 {
161   if (CAN_HAVE_LOCATION_P (x)
162       && EXPR_LOCATION (x) != loc
163       && !(TREE_CODE (x) == SAVE_EXPR
164 	   || TREE_CODE (x) == TARGET_EXPR
165 	   || TREE_CODE (x) == BIND_EXPR))
166     {
167       x = copy_node (x);
168       SET_EXPR_LOCATION (x, loc);
169     }
170   return x;
171 }
172 
173 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
174    division and returns the quotient.  Otherwise returns
175    NULL_TREE.  */
176 
177 tree
178 div_if_zero_remainder (const_tree arg1, const_tree arg2)
179 {
180   widest_int quo;
181 
182   if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
183 			 SIGNED, &quo))
184     return wide_int_to_tree (TREE_TYPE (arg1), quo);
185 
186   return NULL_TREE;
187 }
188 
189 /* This is nonzero if we should defer warnings about undefined
190    overflow.  This facility exists because these warnings are a
191    special case.  The code to estimate loop iterations does not want
192    to issue any warnings, since it works with expressions which do not
193    occur in user code.  Various bits of cleanup code call fold(), but
194    only use the result if it has certain characteristics (e.g., is a
195    constant); that code only wants to issue a warning if the result is
196    used.  */
197 
198 static int fold_deferring_overflow_warnings;
199 
200 /* If a warning about undefined overflow is deferred, this is the
201    warning.  Note that this may cause us to turn two warnings into
202    one, but that is fine since it is sufficient to only give one
203    warning per expression.  */
204 
205 static const char* fold_deferred_overflow_warning;
206 
207 /* If a warning about undefined overflow is deferred, this is the
208    level at which the warning should be emitted.  */
209 
210 static enum warn_strict_overflow_code fold_deferred_overflow_code;
211 
212 /* Start deferring overflow warnings.  We could use a stack here to
213    permit nested calls, but at present it is not necessary.  */
214 
215 void
216 fold_defer_overflow_warnings (void)
217 {
218   ++fold_deferring_overflow_warnings;
219 }
220 
221 /* Stop deferring overflow warnings.  If there is a pending warning,
222    and ISSUE is true, then issue the warning if appropriate.  STMT is
223    the statement with which the warning should be associated (used for
224    location information); STMT may be NULL.  CODE is the level of the
225    warning--a warn_strict_overflow_code value.  This function will use
226    the smaller of CODE and the deferred code when deciding whether to
227    issue the warning.  CODE may be zero to mean to always use the
228    deferred code.  */
229 
230 void
231 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
232 {
233   const char *warnmsg;
234   location_t locus;
235 
236   gcc_assert (fold_deferring_overflow_warnings > 0);
237   --fold_deferring_overflow_warnings;
238   if (fold_deferring_overflow_warnings > 0)
239     {
240       if (fold_deferred_overflow_warning != NULL
241 	  && code != 0
242 	  && code < (int) fold_deferred_overflow_code)
243 	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
244       return;
245     }
246 
247   warnmsg = fold_deferred_overflow_warning;
248   fold_deferred_overflow_warning = NULL;
249 
250   if (!issue || warnmsg == NULL)
251     return;
252 
253   if (gimple_no_warning_p (stmt))
254     return;
255 
256   /* Use the smallest code level when deciding to issue the
257      warning.  */
258   if (code == 0 || code > (int) fold_deferred_overflow_code)
259     code = fold_deferred_overflow_code;
260 
261   if (!issue_strict_overflow_warning (code))
262     return;
263 
264   if (stmt == NULL)
265     locus = input_location;
266   else
267     locus = gimple_location (stmt);
268   warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
269 }
270 
271 /* Stop deferring overflow warnings, ignoring any deferred
272    warnings.  */
273 
274 void
275 fold_undefer_and_ignore_overflow_warnings (void)
276 {
277   fold_undefer_overflow_warnings (false, NULL, 0);
278 }
279 
280 /* Whether we are deferring overflow warnings.  */
281 
282 bool
283 fold_deferring_overflow_warnings_p (void)
284 {
285   return fold_deferring_overflow_warnings > 0;
286 }
287 
288 /* This is called when we fold something based on the fact that signed
289    overflow is undefined.  */
290 
291 void
292 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
293 {
294   if (fold_deferring_overflow_warnings > 0)
295     {
296       if (fold_deferred_overflow_warning == NULL
297 	  || wc < fold_deferred_overflow_code)
298 	{
299 	  fold_deferred_overflow_warning = gmsgid;
300 	  fold_deferred_overflow_code = wc;
301 	}
302     }
303   else if (issue_strict_overflow_warning (wc))
304     warning (OPT_Wstrict_overflow, gmsgid);
305 }
306 
307 /* Return true if the built-in mathematical function specified by CODE
308    is odd, i.e. -f(x) == f(-x).  */
309 
310 bool
311 negate_mathfn_p (combined_fn fn)
312 {
313   switch (fn)
314     {
315     CASE_CFN_ASIN:
316     CASE_CFN_ASINH:
317     CASE_CFN_ATAN:
318     CASE_CFN_ATANH:
319     CASE_CFN_CASIN:
320     CASE_CFN_CASINH:
321     CASE_CFN_CATAN:
322     CASE_CFN_CATANH:
323     CASE_CFN_CBRT:
324     CASE_CFN_CPROJ:
325     CASE_CFN_CSIN:
326     CASE_CFN_CSINH:
327     CASE_CFN_CTAN:
328     CASE_CFN_CTANH:
329     CASE_CFN_ERF:
330     CASE_CFN_LLROUND:
331     CASE_CFN_LROUND:
332     CASE_CFN_ROUND:
333     CASE_CFN_SIN:
334     CASE_CFN_SINH:
335     CASE_CFN_TAN:
336     CASE_CFN_TANH:
337     CASE_CFN_TRUNC:
338       return true;
339 
340     CASE_CFN_LLRINT:
341     CASE_CFN_LRINT:
342     CASE_CFN_NEARBYINT:
343     CASE_CFN_RINT:
344       return !flag_rounding_math;
345 
346     default:
347       break;
348     }
349   return false;
350 }
351 
352 /* Check whether we may negate an integer constant T without causing
353    overflow.  */
354 
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358   tree type;
359 
360   gcc_assert (TREE_CODE (t) == INTEGER_CST);
361 
362   type = TREE_TYPE (t);
363   if (TYPE_UNSIGNED (type))
364     return false;
365 
366   return !wi::only_sign_bit_p (t);
367 }
368 
369 /* Determine whether an expression T can be cheaply negated using
370    the function negate_expr without introducing undefined overflow.  */
371 
372 static bool
373 negate_expr_p (tree t)
374 {
375   tree type;
376 
377   if (t == 0)
378     return false;
379 
380   type = TREE_TYPE (t);
381 
382   STRIP_SIGN_NOPS (t);
383   switch (TREE_CODE (t))
384     {
385     case INTEGER_CST:
386       if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
387 	return true;
388 
389       /* Check that -CST will not overflow type.  */
390       return may_negate_without_overflow_p (t);
391     case BIT_NOT_EXPR:
392       return (INTEGRAL_TYPE_P (type)
393 	      && TYPE_OVERFLOW_WRAPS (type));
394 
395     case FIXED_CST:
396       return true;
397 
398     case NEGATE_EXPR:
399       return !TYPE_OVERFLOW_SANITIZED (type);
400 
401     case REAL_CST:
402       /* We want to canonicalize to positive real constants.  Pretend
403          that only negative ones can be easily negated.  */
404       return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405 
406     case COMPLEX_CST:
407       return negate_expr_p (TREE_REALPART (t))
408 	     && negate_expr_p (TREE_IMAGPART (t));
409 
410     case VECTOR_CST:
411       {
412 	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 	  return true;
414 
415 	int count = TYPE_VECTOR_SUBPARTS (type), i;
416 
417 	for (i = 0; i < count; i++)
418 	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
419 	    return false;
420 
421 	return true;
422       }
423 
424     case COMPLEX_EXPR:
425       return negate_expr_p (TREE_OPERAND (t, 0))
426 	     && negate_expr_p (TREE_OPERAND (t, 1));
427 
428     case CONJ_EXPR:
429       return negate_expr_p (TREE_OPERAND (t, 0));
430 
431     case PLUS_EXPR:
432       if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 	  || HONOR_SIGNED_ZEROS (element_mode (type))
434 	  || (INTEGRAL_TYPE_P (type)
435 	      && ! TYPE_OVERFLOW_WRAPS (type)))
436 	return false;
437       /* -(A + B) -> (-B) - A.  */
438       if (negate_expr_p (TREE_OPERAND (t, 1)))
439 	return true;
440       /* -(A + B) -> (-A) - B.  */
441       return negate_expr_p (TREE_OPERAND (t, 0));
442 
443     case MINUS_EXPR:
444       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
445       return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 	     && !HONOR_SIGNED_ZEROS (element_mode (type))
447 	     && (! INTEGRAL_TYPE_P (type)
448 		 || TYPE_OVERFLOW_WRAPS (type));
449 
450     case MULT_EXPR:
451       if (TYPE_UNSIGNED (type))
452 	break;
453       /* INT_MIN/n * n doesn't overflow while negating one operand it does
454          if n is a (negative) power of two.  */
455       if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
459 		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
460 		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
461 	break;
462 
463       /* Fall through.  */
464 
465     case RDIV_EXPR:
466       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
467 	return negate_expr_p (TREE_OPERAND (t, 1))
468 	       || negate_expr_p (TREE_OPERAND (t, 0));
469       break;
470 
471     case TRUNC_DIV_EXPR:
472     case ROUND_DIV_EXPR:
473     case EXACT_DIV_EXPR:
474       if (TYPE_UNSIGNED (type))
475 	break;
476       /* In general we can't negate A in A / B, because if A is INT_MIN and
477          B is not 1 we change the sign of the result.  */
478       if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
479 	  && negate_expr_p (TREE_OPERAND (t, 0)))
480 	return true;
481       /* In general we can't negate B in A / B, because if A is INT_MIN and
482 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
483 	 and actually traps on some architectures.  */
484       if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
485 	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
486 	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
487 	      && ! integer_onep (TREE_OPERAND (t, 1))))
488 	return negate_expr_p (TREE_OPERAND (t, 1));
489       break;
490 
491     case NOP_EXPR:
492       /* Negate -((double)float) as (double)(-float).  */
493       if (TREE_CODE (type) == REAL_TYPE)
494 	{
495 	  tree tem = strip_float_extensions (t);
496 	  if (tem != t)
497 	    return negate_expr_p (tem);
498 	}
499       break;
500 
501     case CALL_EXPR:
502       /* Negate -f(x) as f(-x).  */
503       if (negate_mathfn_p (get_call_combined_fn (t)))
504 	return negate_expr_p (CALL_EXPR_ARG (t, 0));
505       break;
506 
507     case RSHIFT_EXPR:
508       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
509       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
510 	{
511 	  tree op1 = TREE_OPERAND (t, 1);
512 	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
513 	    return true;
514 	}
515       break;
516 
517     default:
518       break;
519     }
520   return false;
521 }
522 
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524    simplification is possible.
525    If negate_expr_p would return true for T, NULL_TREE will never be
526    returned.  */
527 
528 static tree
529 fold_negate_expr_1 (location_t loc, tree t)
530 {
531   tree type = TREE_TYPE (t);
532   tree tem;
533 
534   switch (TREE_CODE (t))
535     {
536     /* Convert - (~A) to A + 1.  */
537     case BIT_NOT_EXPR:
538       if (INTEGRAL_TYPE_P (type))
539         return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 				build_one_cst (type));
541       break;
542 
543     case INTEGER_CST:
544       tem = fold_negate_const (t, type);
545       if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 	  || (ANY_INTEGRAL_TYPE_P (type)
547 	      && !TYPE_OVERFLOW_TRAPS (type)
548 	      && TYPE_OVERFLOW_WRAPS (type))
549 	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
550 	return tem;
551       break;
552 
553     case REAL_CST:
554       tem = fold_negate_const (t, type);
555       return tem;
556 
557     case FIXED_CST:
558       tem = fold_negate_const (t, type);
559       return tem;
560 
561     case COMPLEX_CST:
562       {
563 	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 	if (rpart && ipart)
566 	  return build_complex (type, rpart, ipart);
567       }
568       break;
569 
570     case VECTOR_CST:
571       {
572 	int count = TYPE_VECTOR_SUBPARTS (type), i;
573 	tree *elts = XALLOCAVEC (tree, count);
574 
575 	for (i = 0; i < count; i++)
576 	  {
577 	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 	    if (elts[i] == NULL_TREE)
579 	      return NULL_TREE;
580 	  }
581 
582 	return build_vector (type, elts);
583       }
584 
585     case COMPLEX_EXPR:
586       if (negate_expr_p (t))
587 	return fold_build2_loc (loc, COMPLEX_EXPR, type,
588 				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
589 				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
590       break;
591 
592     case CONJ_EXPR:
593       if (negate_expr_p (t))
594 	return fold_build1_loc (loc, CONJ_EXPR, type,
595 				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
596       break;
597 
598     case NEGATE_EXPR:
599       if (!TYPE_OVERFLOW_SANITIZED (type))
600 	return TREE_OPERAND (t, 0);
601       break;
602 
603     case PLUS_EXPR:
604       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
605 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
606 	{
607 	  /* -(A + B) -> (-B) - A.  */
608 	  if (negate_expr_p (TREE_OPERAND (t, 1)))
609 	    {
610 	      tem = negate_expr (TREE_OPERAND (t, 1));
611 	      return fold_build2_loc (loc, MINUS_EXPR, type,
612 				      tem, TREE_OPERAND (t, 0));
613 	    }
614 
615 	  /* -(A + B) -> (-A) - B.  */
616 	  if (negate_expr_p (TREE_OPERAND (t, 0)))
617 	    {
618 	      tem = negate_expr (TREE_OPERAND (t, 0));
619 	      return fold_build2_loc (loc, MINUS_EXPR, type,
620 				      tem, TREE_OPERAND (t, 1));
621 	    }
622 	}
623       break;
624 
625     case MINUS_EXPR:
626       /* - (A - B) -> B - A  */
627       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
628 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
629 	return fold_build2_loc (loc, MINUS_EXPR, type,
630 				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
631       break;
632 
633     case MULT_EXPR:
634       if (TYPE_UNSIGNED (type))
635         break;
636 
637       /* Fall through.  */
638 
639     case RDIV_EXPR:
640       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
641 	{
642 	  tem = TREE_OPERAND (t, 1);
643 	  if (negate_expr_p (tem))
644 	    return fold_build2_loc (loc, TREE_CODE (t), type,
645 				    TREE_OPERAND (t, 0), negate_expr (tem));
646 	  tem = TREE_OPERAND (t, 0);
647 	  if (negate_expr_p (tem))
648 	    return fold_build2_loc (loc, TREE_CODE (t), type,
649 				    negate_expr (tem), TREE_OPERAND (t, 1));
650 	}
651       break;
652 
653     case TRUNC_DIV_EXPR:
654     case ROUND_DIV_EXPR:
655     case EXACT_DIV_EXPR:
656       if (TYPE_UNSIGNED (type))
657 	break;
658       /* In general we can't negate A in A / B, because if A is INT_MIN and
659 	 B is not 1 we change the sign of the result.  */
660       if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
661 	  && negate_expr_p (TREE_OPERAND (t, 0)))
662 	return fold_build2_loc (loc, TREE_CODE (t), type,
663 				negate_expr (TREE_OPERAND (t, 0)),
664 				TREE_OPERAND (t, 1));
665       /* In general we can't negate B in A / B, because if A is INT_MIN and
666 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
667 	 and actually traps on some architectures.  */
668       if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
669 	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
670 	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
671 	       && ! integer_onep (TREE_OPERAND (t, 1))))
672 	  && negate_expr_p (TREE_OPERAND (t, 1)))
673 	return fold_build2_loc (loc, TREE_CODE (t), type,
674 				TREE_OPERAND (t, 0),
675 				negate_expr (TREE_OPERAND (t, 1)));
676       break;
677 
678     case NOP_EXPR:
679       /* Convert -((double)float) into (double)(-float).  */
680       if (TREE_CODE (type) == REAL_TYPE)
681 	{
682 	  tem = strip_float_extensions (t);
683 	  if (tem != t && negate_expr_p (tem))
684 	    return fold_convert_loc (loc, type, negate_expr (tem));
685 	}
686       break;
687 
688     case CALL_EXPR:
689       /* Negate -f(x) as f(-x).  */
690       if (negate_mathfn_p (get_call_combined_fn (t))
691 	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
692 	{
693 	  tree fndecl, arg;
694 
695 	  fndecl = get_callee_fndecl (t);
696 	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
697 	  return build_call_expr_loc (loc, fndecl, 1, arg);
698 	}
699       break;
700 
701     case RSHIFT_EXPR:
702       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
703       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
704 	{
705 	  tree op1 = TREE_OPERAND (t, 1);
706 	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
707 	    {
708 	      tree ntype = TYPE_UNSIGNED (type)
709 			   ? signed_type_for (type)
710 			   : unsigned_type_for (type);
711 	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
712 	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
713 	      return fold_convert_loc (loc, type, temp);
714 	    }
715 	}
716       break;
717 
718     default:
719       break;
720     }
721 
722   return NULL_TREE;
723 }
724 
725 /* A wrapper for fold_negate_expr_1.  */
726 
727 static tree
728 fold_negate_expr (location_t loc, tree t)
729 {
730   tree type = TREE_TYPE (t);
731   STRIP_SIGN_NOPS (t);
732   tree tem = fold_negate_expr_1 (loc, t);
733   if (tem == NULL_TREE)
734     return NULL_TREE;
735   return fold_convert_loc (loc, type, tem);
736 }
737 
738 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
739    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
740    return NULL_TREE. */
741 
742 static tree
743 negate_expr (tree t)
744 {
745   tree type, tem;
746   location_t loc;
747 
748   if (t == NULL_TREE)
749     return NULL_TREE;
750 
751   loc = EXPR_LOCATION (t);
752   type = TREE_TYPE (t);
753   STRIP_SIGN_NOPS (t);
754 
755   tem = fold_negate_expr (loc, t);
756   if (!tem)
757     tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758   return fold_convert_loc (loc, type, tem);
759 }
760 
761 /* Split a tree IN into a constant, literal and variable parts that could be
762    combined with CODE to make IN.  "constant" means an expression with
763    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
764    commutative arithmetic operation.  Store the constant part into *CONP,
765    the literal in *LITP and return the variable part.  If a part isn't
766    present, set it to null.  If the tree does not decompose in this way,
767    return the entire tree as the variable part and the other parts as null.
768 
769    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
770    case, we negate an operand that was subtracted.  Except if it is a
771    literal for which we use *MINUS_LITP instead.
772 
773    If NEGATE_P is true, we are negating all of IN, again except a literal
774    for which we use *MINUS_LITP instead.  If a variable part is of pointer
775    type, it is negated after converting to TYPE.  This prevents us from
776    generating illegal MINUS pointer expression.  LOC is the location of
777    the converted variable part.
778 
779    If IN is itself a literal or constant, return it as appropriate.
780 
781    Note that we do not guarantee that any of the three values will be the
782    same type as IN, but they will have the same signedness and mode.  */
783 
784 static tree
785 split_tree (location_t loc, tree in, tree type, enum tree_code code,
786 	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
787 {
788   tree var = 0;
789 
790   *conp = 0;
791   *litp = 0;
792   *minus_litp = 0;
793 
794   /* Strip any conversions that don't change the machine mode or signedness.  */
795   STRIP_SIGN_NOPS (in);
796 
797   if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
798       || TREE_CODE (in) == FIXED_CST)
799     *litp = in;
800   else if (TREE_CODE (in) == code
801 	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
802 	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
803 	       /* We can associate addition and subtraction together (even
804 		  though the C standard doesn't say so) for integers because
805 		  the value is not affected.  For reals, the value might be
806 		  affected, so we can't.  */
807 	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
808 		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
809     {
810       tree op0 = TREE_OPERAND (in, 0);
811       tree op1 = TREE_OPERAND (in, 1);
812       int neg1_p = TREE_CODE (in) == MINUS_EXPR;
813       int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
814 
815       /* First see if either of the operands is a literal, then a constant.  */
816       if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
817 	  || TREE_CODE (op0) == FIXED_CST)
818 	*litp = op0, op0 = 0;
819       else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
820 	       || TREE_CODE (op1) == FIXED_CST)
821 	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
822 
823       if (op0 != 0 && TREE_CONSTANT (op0))
824 	*conp = op0, op0 = 0;
825       else if (op1 != 0 && TREE_CONSTANT (op1))
826 	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
827 
828       /* If we haven't dealt with either operand, this is not a case we can
829 	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
830       if (op0 != 0 && op1 != 0)
831 	var = in;
832       else if (op0 != 0)
833 	var = op0;
834       else
835 	var = op1, neg_var_p = neg1_p;
836 
837       /* Now do any needed negations.  */
838       if (neg_litp_p)
839 	*minus_litp = *litp, *litp = 0;
840       if (neg_conp_p && *conp)
841 	{
842 	  /* Convert to TYPE before negating.  */
843 	  *conp = fold_convert_loc (loc, type, *conp);
844 	  *conp = negate_expr (*conp);
845 	}
846       if (neg_var_p && var)
847 	{
848 	  /* Convert to TYPE before negating.  */
849 	  var = fold_convert_loc (loc, type, var);
850 	  var = negate_expr (var);
851 	}
852     }
853   else if (TREE_CONSTANT (in))
854     *conp = in;
855   else if (TREE_CODE (in) == BIT_NOT_EXPR
856 	   && code == PLUS_EXPR)
857     {
858       /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
859          when IN is constant.  */
860       *minus_litp = build_one_cst (TREE_TYPE (in));
861       var = negate_expr (TREE_OPERAND (in, 0));
862     }
863   else
864     var = in;
865 
866   if (negate_p)
867     {
868       if (*litp)
869 	*minus_litp = *litp, *litp = 0;
870       else if (*minus_litp)
871 	*litp = *minus_litp, *minus_litp = 0;
872       if (*conp)
873 	{
874 	  /* Convert to TYPE before negating.  */
875 	  *conp = fold_convert_loc (loc, type, *conp);
876 	  *conp = negate_expr (*conp);
877 	}
878       if (var)
879 	{
880 	  /* Convert to TYPE before negating.  */
881 	  var = fold_convert_loc (loc, type, var);
882 	  var = negate_expr (var);
883 	}
884     }
885 
886   if (*litp
887       && TREE_OVERFLOW_P (*litp))
888     *litp = drop_tree_overflow (*litp);
889   if (*minus_litp
890       && TREE_OVERFLOW_P (*minus_litp))
891     *minus_litp = drop_tree_overflow (*minus_litp);
892 
893   return var;
894 }
895 
896 /* Re-associate trees split by the above function.  T1 and T2 are
897    either expressions to associate or null.  Return the new
898    expression, if any.  LOC is the location of the new expression.  If
899    we build an operation, do it in TYPE and with CODE.  */
900 
901 static tree
902 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
903 {
904   if (t1 == 0)
905     return t2;
906   else if (t2 == 0)
907     return t1;
908 
909   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
910      try to fold this since we will have infinite recursion.  But do
911      deal with any NEGATE_EXPRs.  */
912   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
913       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
914     {
915       if (code == PLUS_EXPR)
916 	{
917 	  if (TREE_CODE (t1) == NEGATE_EXPR)
918 	    return build2_loc (loc, MINUS_EXPR, type,
919 			       fold_convert_loc (loc, type, t2),
920 			       fold_convert_loc (loc, type,
921 						 TREE_OPERAND (t1, 0)));
922 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
923 	    return build2_loc (loc, MINUS_EXPR, type,
924 			       fold_convert_loc (loc, type, t1),
925 			       fold_convert_loc (loc, type,
926 						 TREE_OPERAND (t2, 0)));
927 	  else if (integer_zerop (t2))
928 	    return fold_convert_loc (loc, type, t1);
929 	}
930       else if (code == MINUS_EXPR)
931 	{
932 	  if (integer_zerop (t2))
933 	    return fold_convert_loc (loc, type, t1);
934 	}
935 
936       return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
937 			 fold_convert_loc (loc, type, t2));
938     }
939 
940   return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
941 			  fold_convert_loc (loc, type, t2));
942 }
943 
944 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
945    for use in int_const_binop, size_binop and size_diffop.  */
946 
947 static bool
948 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
949 {
950   if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
951     return false;
952   if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
953     return false;
954 
955   switch (code)
956     {
957     case LSHIFT_EXPR:
958     case RSHIFT_EXPR:
959     case LROTATE_EXPR:
960     case RROTATE_EXPR:
961       return true;
962 
963     default:
964       break;
965     }
966 
967   return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
968 	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
969 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
970 }
971 
972 
973 /* Combine two integer constants ARG1 and ARG2 under operation CODE
974    to produce a new constant.  Return NULL_TREE if we don't know how
975    to evaluate CODE at compile-time.  */
976 
977 static tree
978 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
979 		   int overflowable)
980 {
981   wide_int res;
982   tree t;
983   tree type = TREE_TYPE (arg1);
984   signop sign = TYPE_SIGN (type);
985   bool overflow = false;
986 
987   wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
988 
989   switch (code)
990     {
991     case BIT_IOR_EXPR:
992       res = wi::bit_or (arg1, arg2);
993       break;
994 
995     case BIT_XOR_EXPR:
996       res = wi::bit_xor (arg1, arg2);
997       break;
998 
999     case BIT_AND_EXPR:
1000       res = wi::bit_and (arg1, arg2);
1001       break;
1002 
1003     case RSHIFT_EXPR:
1004     case LSHIFT_EXPR:
1005       if (wi::neg_p (arg2))
1006 	{
1007 	  arg2 = -arg2;
1008 	  if (code == RSHIFT_EXPR)
1009 	    code = LSHIFT_EXPR;
1010 	  else
1011 	    code = RSHIFT_EXPR;
1012 	}
1013 
1014       if (code == RSHIFT_EXPR)
1015 	/* It's unclear from the C standard whether shifts can overflow.
1016 	   The following code ignores overflow; perhaps a C standard
1017 	   interpretation ruling is needed.  */
1018 	res = wi::rshift (arg1, arg2, sign);
1019       else
1020 	res = wi::lshift (arg1, arg2);
1021       break;
1022 
1023     case RROTATE_EXPR:
1024     case LROTATE_EXPR:
1025       if (wi::neg_p (arg2))
1026 	{
1027 	  arg2 = -arg2;
1028 	  if (code == RROTATE_EXPR)
1029 	    code = LROTATE_EXPR;
1030 	  else
1031 	    code = RROTATE_EXPR;
1032 	}
1033 
1034       if (code == RROTATE_EXPR)
1035 	res = wi::rrotate (arg1, arg2);
1036       else
1037 	res = wi::lrotate (arg1, arg2);
1038       break;
1039 
1040     case PLUS_EXPR:
1041       res = wi::add (arg1, arg2, sign, &overflow);
1042       break;
1043 
1044     case MINUS_EXPR:
1045       res = wi::sub (arg1, arg2, sign, &overflow);
1046       break;
1047 
1048     case MULT_EXPR:
1049       res = wi::mul (arg1, arg2, sign, &overflow);
1050       break;
1051 
1052     case MULT_HIGHPART_EXPR:
1053       res = wi::mul_high (arg1, arg2, sign);
1054       break;
1055 
1056     case TRUNC_DIV_EXPR:
1057     case EXACT_DIV_EXPR:
1058       if (arg2 == 0)
1059 	return NULL_TREE;
1060       res = wi::div_trunc (arg1, arg2, sign, &overflow);
1061       break;
1062 
1063     case FLOOR_DIV_EXPR:
1064       if (arg2 == 0)
1065 	return NULL_TREE;
1066       res = wi::div_floor (arg1, arg2, sign, &overflow);
1067       break;
1068 
1069     case CEIL_DIV_EXPR:
1070       if (arg2 == 0)
1071 	return NULL_TREE;
1072       res = wi::div_ceil (arg1, arg2, sign, &overflow);
1073       break;
1074 
1075     case ROUND_DIV_EXPR:
1076       if (arg2 == 0)
1077 	return NULL_TREE;
1078       res = wi::div_round (arg1, arg2, sign, &overflow);
1079       break;
1080 
1081     case TRUNC_MOD_EXPR:
1082       if (arg2 == 0)
1083 	return NULL_TREE;
1084       res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1085       break;
1086 
1087     case FLOOR_MOD_EXPR:
1088       if (arg2 == 0)
1089 	return NULL_TREE;
1090       res = wi::mod_floor (arg1, arg2, sign, &overflow);
1091       break;
1092 
1093     case CEIL_MOD_EXPR:
1094       if (arg2 == 0)
1095 	return NULL_TREE;
1096       res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1097       break;
1098 
1099     case ROUND_MOD_EXPR:
1100       if (arg2 == 0)
1101 	return NULL_TREE;
1102       res = wi::mod_round (arg1, arg2, sign, &overflow);
1103       break;
1104 
1105     case MIN_EXPR:
1106       res = wi::min (arg1, arg2, sign);
1107       break;
1108 
1109     case MAX_EXPR:
1110       res = wi::max (arg1, arg2, sign);
1111       break;
1112 
1113     default:
1114       return NULL_TREE;
1115     }
1116 
1117   t = force_fit_type (type, res, overflowable,
1118 		      (((sign == SIGNED || overflowable == -1)
1119 			&& overflow)
1120 		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1121 
1122   return t;
1123 }
1124 
1125 tree
1126 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1127 {
1128   return int_const_binop_1 (code, arg1, arg2, 1);
1129 }
1130 
1131 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1132    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1133    are the same kind of constant and the same machine mode.  Return zero if
1134    combining the constants is not allowed in the current operating mode.  */
1135 
1136 static tree
1137 const_binop (enum tree_code code, tree arg1, tree arg2)
1138 {
1139   /* Sanity check for the recursive cases.  */
1140   if (!arg1 || !arg2)
1141     return NULL_TREE;
1142 
1143   STRIP_NOPS (arg1);
1144   STRIP_NOPS (arg2);
1145 
1146   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1147     {
1148       if (code == POINTER_PLUS_EXPR)
1149 	return int_const_binop (PLUS_EXPR,
1150 				arg1, fold_convert (TREE_TYPE (arg1), arg2));
1151 
1152       return int_const_binop (code, arg1, arg2);
1153     }
1154 
1155   if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1156     {
1157       machine_mode mode;
1158       REAL_VALUE_TYPE d1;
1159       REAL_VALUE_TYPE d2;
1160       REAL_VALUE_TYPE value;
1161       REAL_VALUE_TYPE result;
1162       bool inexact;
1163       tree t, type;
1164 
1165       /* The following codes are handled by real_arithmetic.  */
1166       switch (code)
1167 	{
1168 	case PLUS_EXPR:
1169 	case MINUS_EXPR:
1170 	case MULT_EXPR:
1171 	case RDIV_EXPR:
1172 	case MIN_EXPR:
1173 	case MAX_EXPR:
1174 	  break;
1175 
1176 	default:
1177 	  return NULL_TREE;
1178 	}
1179 
1180       d1 = TREE_REAL_CST (arg1);
1181       d2 = TREE_REAL_CST (arg2);
1182 
1183       type = TREE_TYPE (arg1);
1184       mode = TYPE_MODE (type);
1185 
1186       /* Don't perform operation if we honor signaling NaNs and
1187 	 either operand is a signaling NaN.  */
1188       if (HONOR_SNANS (mode)
1189 	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
1190 	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
1191 	return NULL_TREE;
1192 
1193       /* Don't perform operation if it would raise a division
1194 	 by zero exception.  */
1195       if (code == RDIV_EXPR
1196 	  && real_equal (&d2, &dconst0)
1197 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1198 	return NULL_TREE;
1199 
1200       /* If either operand is a NaN, just return it.  Otherwise, set up
1201 	 for floating-point trap; we return an overflow.  */
1202       if (REAL_VALUE_ISNAN (d1))
1203       {
1204 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1205 	   is off.  */
1206 	d1.signalling = 0;
1207 	t = build_real (type, d1);
1208 	return t;
1209       }
1210       else if (REAL_VALUE_ISNAN (d2))
1211       {
1212 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1213 	   is off.  */
1214 	d2.signalling = 0;
1215 	t = build_real (type, d2);
1216 	return t;
1217       }
1218 
1219       inexact = real_arithmetic (&value, code, &d1, &d2);
1220       real_convert (&result, mode, &value);
1221 
1222       /* Don't constant fold this floating point operation if
1223 	 the result has overflowed and flag_trapping_math.  */
1224       if (flag_trapping_math
1225 	  && MODE_HAS_INFINITIES (mode)
1226 	  && REAL_VALUE_ISINF (result)
1227 	  && !REAL_VALUE_ISINF (d1)
1228 	  && !REAL_VALUE_ISINF (d2))
1229 	return NULL_TREE;
1230 
1231       /* Don't constant fold this floating point operation if the
1232 	 result may dependent upon the run-time rounding mode and
1233 	 flag_rounding_math is set, or if GCC's software emulation
1234 	 is unable to accurately represent the result.  */
1235       if ((flag_rounding_math
1236 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1237 	  && (inexact || !real_identical (&result, &value)))
1238 	return NULL_TREE;
1239 
1240       t = build_real (type, result);
1241 
1242       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1243       return t;
1244     }
1245 
1246   if (TREE_CODE (arg1) == FIXED_CST)
1247     {
1248       FIXED_VALUE_TYPE f1;
1249       FIXED_VALUE_TYPE f2;
1250       FIXED_VALUE_TYPE result;
1251       tree t, type;
1252       int sat_p;
1253       bool overflow_p;
1254 
1255       /* The following codes are handled by fixed_arithmetic.  */
1256       switch (code)
1257         {
1258 	case PLUS_EXPR:
1259 	case MINUS_EXPR:
1260 	case MULT_EXPR:
1261 	case TRUNC_DIV_EXPR:
1262 	  if (TREE_CODE (arg2) != FIXED_CST)
1263 	    return NULL_TREE;
1264 	  f2 = TREE_FIXED_CST (arg2);
1265 	  break;
1266 
1267 	case LSHIFT_EXPR:
1268 	case RSHIFT_EXPR:
1269 	  {
1270 	    if (TREE_CODE (arg2) != INTEGER_CST)
1271 	      return NULL_TREE;
1272 	    wide_int w2 = arg2;
1273 	    f2.data.high = w2.elt (1);
1274 	    f2.data.low = w2.ulow ();
1275 	    f2.mode = SImode;
1276 	  }
1277 	  break;
1278 
1279         default:
1280 	  return NULL_TREE;
1281         }
1282 
1283       f1 = TREE_FIXED_CST (arg1);
1284       type = TREE_TYPE (arg1);
1285       sat_p = TYPE_SATURATING (type);
1286       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1287       t = build_fixed (type, result);
1288       /* Propagate overflow flags.  */
1289       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1290 	TREE_OVERFLOW (t) = 1;
1291       return t;
1292     }
1293 
1294   if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1295     {
1296       tree type = TREE_TYPE (arg1);
1297       tree r1 = TREE_REALPART (arg1);
1298       tree i1 = TREE_IMAGPART (arg1);
1299       tree r2 = TREE_REALPART (arg2);
1300       tree i2 = TREE_IMAGPART (arg2);
1301       tree real, imag;
1302 
1303       switch (code)
1304 	{
1305 	case PLUS_EXPR:
1306 	case MINUS_EXPR:
1307 	  real = const_binop (code, r1, r2);
1308 	  imag = const_binop (code, i1, i2);
1309 	  break;
1310 
1311 	case MULT_EXPR:
1312 	  if (COMPLEX_FLOAT_TYPE_P (type))
1313 	    return do_mpc_arg2 (arg1, arg2, type,
1314 				/* do_nonfinite= */ folding_initializer,
1315 				mpc_mul);
1316 
1317 	  real = const_binop (MINUS_EXPR,
1318 			      const_binop (MULT_EXPR, r1, r2),
1319 			      const_binop (MULT_EXPR, i1, i2));
1320 	  imag = const_binop (PLUS_EXPR,
1321 			      const_binop (MULT_EXPR, r1, i2),
1322 			      const_binop (MULT_EXPR, i1, r2));
1323 	  break;
1324 
1325 	case RDIV_EXPR:
1326 	  if (COMPLEX_FLOAT_TYPE_P (type))
1327 	    return do_mpc_arg2 (arg1, arg2, type,
1328                                 /* do_nonfinite= */ folding_initializer,
1329 				mpc_div);
1330 	  /* Fallthru. */
1331 	case TRUNC_DIV_EXPR:
1332 	case CEIL_DIV_EXPR:
1333 	case FLOOR_DIV_EXPR:
1334 	case ROUND_DIV_EXPR:
1335 	  if (flag_complex_method == 0)
1336 	  {
1337 	    /* Keep this algorithm in sync with
1338 	       tree-complex.c:expand_complex_div_straight().
1339 
1340 	       Expand complex division to scalars, straightforward algorithm.
1341 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1342 	       t = br*br + bi*bi
1343 	    */
1344 	    tree magsquared
1345 	      = const_binop (PLUS_EXPR,
1346 			     const_binop (MULT_EXPR, r2, r2),
1347 			     const_binop (MULT_EXPR, i2, i2));
1348 	    tree t1
1349 	      = const_binop (PLUS_EXPR,
1350 			     const_binop (MULT_EXPR, r1, r2),
1351 			     const_binop (MULT_EXPR, i1, i2));
1352 	    tree t2
1353 	      = const_binop (MINUS_EXPR,
1354 			     const_binop (MULT_EXPR, i1, r2),
1355 			     const_binop (MULT_EXPR, r1, i2));
1356 
1357 	    real = const_binop (code, t1, magsquared);
1358 	    imag = const_binop (code, t2, magsquared);
1359 	  }
1360 	  else
1361 	  {
1362 	    /* Keep this algorithm in sync with
1363                tree-complex.c:expand_complex_div_wide().
1364 
1365 	       Expand complex division to scalars, modified algorithm to minimize
1366 	       overflow with wide input ranges.  */
1367 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1368 					fold_abs_const (r2, TREE_TYPE (type)),
1369 					fold_abs_const (i2, TREE_TYPE (type)));
1370 
1371 	    if (integer_nonzerop (compare))
1372 	      {
1373 		/* In the TRUE branch, we compute
1374 		   ratio = br/bi;
1375 		   div = (br * ratio) + bi;
1376 		   tr = (ar * ratio) + ai;
1377 		   ti = (ai * ratio) - ar;
1378 		   tr = tr / div;
1379 		   ti = ti / div;  */
1380 		tree ratio = const_binop (code, r2, i2);
1381 		tree div = const_binop (PLUS_EXPR, i2,
1382 					const_binop (MULT_EXPR, r2, ratio));
1383 		real = const_binop (MULT_EXPR, r1, ratio);
1384 		real = const_binop (PLUS_EXPR, real, i1);
1385 		real = const_binop (code, real, div);
1386 
1387 		imag = const_binop (MULT_EXPR, i1, ratio);
1388 		imag = const_binop (MINUS_EXPR, imag, r1);
1389 		imag = const_binop (code, imag, div);
1390 	      }
1391 	    else
1392 	      {
1393 		/* In the FALSE branch, we compute
1394 		   ratio = d/c;
1395 		   divisor = (d * ratio) + c;
1396 		   tr = (b * ratio) + a;
1397 		   ti = b - (a * ratio);
1398 		   tr = tr / div;
1399 		   ti = ti / div;  */
1400 		tree ratio = const_binop (code, i2, r2);
1401 		tree div = const_binop (PLUS_EXPR, r2,
1402                                         const_binop (MULT_EXPR, i2, ratio));
1403 
1404 		real = const_binop (MULT_EXPR, i1, ratio);
1405 		real = const_binop (PLUS_EXPR, real, r1);
1406 		real = const_binop (code, real, div);
1407 
1408 		imag = const_binop (MULT_EXPR, r1, ratio);
1409 		imag = const_binop (MINUS_EXPR, i1, imag);
1410 		imag = const_binop (code, imag, div);
1411 	      }
1412 	  }
1413 	  break;
1414 
1415 	default:
1416 	  return NULL_TREE;
1417 	}
1418 
1419       if (real && imag)
1420 	return build_complex (type, real, imag);
1421     }
1422 
1423   if (TREE_CODE (arg1) == VECTOR_CST
1424       && TREE_CODE (arg2) == VECTOR_CST)
1425     {
1426       tree type = TREE_TYPE (arg1);
1427       int count = TYPE_VECTOR_SUBPARTS (type), i;
1428       tree *elts = XALLOCAVEC (tree, count);
1429 
1430       for (i = 0; i < count; i++)
1431 	{
1432 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1433 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1434 
1435 	  elts[i] = const_binop (code, elem1, elem2);
1436 
1437 	  /* It is possible that const_binop cannot handle the given
1438 	     code and return NULL_TREE */
1439 	  if (elts[i] == NULL_TREE)
1440 	    return NULL_TREE;
1441 	}
1442 
1443       return build_vector (type, elts);
1444     }
1445 
1446   /* Shifts allow a scalar offset for a vector.  */
1447   if (TREE_CODE (arg1) == VECTOR_CST
1448       && TREE_CODE (arg2) == INTEGER_CST)
1449     {
1450       tree type = TREE_TYPE (arg1);
1451       int count = TYPE_VECTOR_SUBPARTS (type), i;
1452       tree *elts = XALLOCAVEC (tree, count);
1453 
1454       for (i = 0; i < count; i++)
1455 	{
1456 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1457 
1458 	  elts[i] = const_binop (code, elem1, arg2);
1459 
1460 	  /* It is possible that const_binop cannot handle the given
1461 	     code and return NULL_TREE.  */
1462 	  if (elts[i] == NULL_TREE)
1463 	    return NULL_TREE;
1464 	}
1465 
1466       return build_vector (type, elts);
1467     }
1468   return NULL_TREE;
1469 }
1470 
1471 /* Overload that adds a TYPE parameter to be able to dispatch
1472    to fold_relational_const.  */
1473 
1474 tree
1475 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1476 {
1477   if (TREE_CODE_CLASS (code) == tcc_comparison)
1478     return fold_relational_const (code, type, arg1, arg2);
1479 
1480   /* ???  Until we make the const_binop worker take the type of the
1481      result as argument put those cases that need it here.  */
1482   switch (code)
1483     {
1484     case COMPLEX_EXPR:
1485       if ((TREE_CODE (arg1) == REAL_CST
1486 	   && TREE_CODE (arg2) == REAL_CST)
1487 	  || (TREE_CODE (arg1) == INTEGER_CST
1488 	      && TREE_CODE (arg2) == INTEGER_CST))
1489 	return build_complex (type, arg1, arg2);
1490       return NULL_TREE;
1491 
1492     case VEC_PACK_TRUNC_EXPR:
1493     case VEC_PACK_FIX_TRUNC_EXPR:
1494       {
1495 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1496 	tree *elts;
1497 
1498 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1499 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1500 	if (TREE_CODE (arg1) != VECTOR_CST
1501 	    || TREE_CODE (arg2) != VECTOR_CST)
1502 	  return NULL_TREE;
1503 
1504 	elts = XALLOCAVEC (tree, nelts);
1505 	if (!vec_cst_ctor_to_array (arg1, elts)
1506 	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1507 	  return NULL_TREE;
1508 
1509 	for (i = 0; i < nelts; i++)
1510 	  {
1511 	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1512 					  ? NOP_EXPR : FIX_TRUNC_EXPR,
1513 					  TREE_TYPE (type), elts[i]);
1514 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1515 	      return NULL_TREE;
1516 	  }
1517 
1518 	return build_vector (type, elts);
1519       }
1520 
1521     case VEC_WIDEN_MULT_LO_EXPR:
1522     case VEC_WIDEN_MULT_HI_EXPR:
1523     case VEC_WIDEN_MULT_EVEN_EXPR:
1524     case VEC_WIDEN_MULT_ODD_EXPR:
1525       {
1526 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1527 	unsigned int out, ofs, scale;
1528 	tree *elts;
1529 
1530 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1531 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1532 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1533 	  return NULL_TREE;
1534 
1535 	elts = XALLOCAVEC (tree, nelts * 4);
1536 	if (!vec_cst_ctor_to_array (arg1, elts)
1537 	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1538 	  return NULL_TREE;
1539 
1540 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1541 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1542 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1543 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1544 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1545 	  scale = 1, ofs = 0;
1546 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1547 	  scale = 1, ofs = 1;
1548 
1549 	for (out = 0; out < nelts; out++)
1550 	  {
1551 	    unsigned int in1 = (out << scale) + ofs;
1552 	    unsigned int in2 = in1 + nelts * 2;
1553 	    tree t1, t2;
1554 
1555 	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1556 	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1557 
1558 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1559 	      return NULL_TREE;
1560 	    elts[out] = const_binop (MULT_EXPR, t1, t2);
1561 	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1562 	      return NULL_TREE;
1563 	  }
1564 
1565 	return build_vector (type, elts);
1566       }
1567 
1568     default:;
1569     }
1570 
1571   if (TREE_CODE_CLASS (code) != tcc_binary)
1572     return NULL_TREE;
1573 
1574   /* Make sure type and arg0 have the same saturating flag.  */
1575   gcc_checking_assert (TYPE_SATURATING (type)
1576 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1577 
1578   return const_binop (code, arg1, arg2);
1579 }
1580 
1581 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1582    Return zero if computing the constants is not possible.  */
1583 
1584 tree
1585 const_unop (enum tree_code code, tree type, tree arg0)
1586 {
1587   /* Don't perform the operation, other than NEGATE and ABS, if
1588      flag_signaling_nans is on and the operand is a signaling NaN.  */
1589   if (TREE_CODE (arg0) == REAL_CST
1590       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1591       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1592       && code != NEGATE_EXPR
1593       && code != ABS_EXPR)
1594     return NULL_TREE;
1595 
1596   switch (code)
1597     {
1598     CASE_CONVERT:
1599     case FLOAT_EXPR:
1600     case FIX_TRUNC_EXPR:
1601     case FIXED_CONVERT_EXPR:
1602       return fold_convert_const (code, type, arg0);
1603 
1604     case ADDR_SPACE_CONVERT_EXPR:
1605       /* If the source address is 0, and the source address space
1606 	 cannot have a valid object at 0, fold to dest type null.  */
1607       if (integer_zerop (arg0)
1608 	  && !(targetm.addr_space.zero_address_valid
1609 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1610 	return fold_convert_const (code, type, arg0);
1611       break;
1612 
1613     case VIEW_CONVERT_EXPR:
1614       return fold_view_convert_expr (type, arg0);
1615 
1616     case NEGATE_EXPR:
1617       {
1618 	/* Can't call fold_negate_const directly here as that doesn't
1619 	   handle all cases and we might not be able to negate some
1620 	   constants.  */
1621 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1622 	if (tem && CONSTANT_CLASS_P (tem))
1623 	  return tem;
1624 	break;
1625       }
1626 
1627     case ABS_EXPR:
1628       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1629 	return fold_abs_const (arg0, type);
1630       break;
1631 
1632     case CONJ_EXPR:
1633       if (TREE_CODE (arg0) == COMPLEX_CST)
1634 	{
1635 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1636 					  TREE_TYPE (type));
1637 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1638 	}
1639       break;
1640 
1641     case BIT_NOT_EXPR:
1642       if (TREE_CODE (arg0) == INTEGER_CST)
1643 	return fold_not_const (arg0, type);
1644       /* Perform BIT_NOT_EXPR on each element individually.  */
1645       else if (TREE_CODE (arg0) == VECTOR_CST)
1646 	{
1647 	  tree *elements;
1648 	  tree elem;
1649 	  unsigned count = VECTOR_CST_NELTS (arg0), i;
1650 
1651 	  elements = XALLOCAVEC (tree, count);
1652 	  for (i = 0; i < count; i++)
1653 	    {
1654 	      elem = VECTOR_CST_ELT (arg0, i);
1655 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1656 	      if (elem == NULL_TREE)
1657 		break;
1658 	      elements[i] = elem;
1659 	    }
1660 	  if (i == count)
1661 	    return build_vector (type, elements);
1662 	}
1663       break;
1664 
1665     case TRUTH_NOT_EXPR:
1666       if (TREE_CODE (arg0) == INTEGER_CST)
1667 	return constant_boolean_node (integer_zerop (arg0), type);
1668       break;
1669 
1670     case REALPART_EXPR:
1671       if (TREE_CODE (arg0) == COMPLEX_CST)
1672 	return fold_convert (type, TREE_REALPART (arg0));
1673       break;
1674 
1675     case IMAGPART_EXPR:
1676       if (TREE_CODE (arg0) == COMPLEX_CST)
1677 	return fold_convert (type, TREE_IMAGPART (arg0));
1678       break;
1679 
1680     case VEC_UNPACK_LO_EXPR:
1681     case VEC_UNPACK_HI_EXPR:
1682     case VEC_UNPACK_FLOAT_LO_EXPR:
1683     case VEC_UNPACK_FLOAT_HI_EXPR:
1684       {
1685 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1686 	tree *elts;
1687 	enum tree_code subcode;
1688 
1689 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1690 	if (TREE_CODE (arg0) != VECTOR_CST)
1691 	  return NULL_TREE;
1692 
1693 	elts = XALLOCAVEC (tree, nelts * 2);
1694 	if (!vec_cst_ctor_to_array (arg0, elts))
1695 	  return NULL_TREE;
1696 
1697 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1698 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
1699 	  elts += nelts;
1700 
1701 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1702 	  subcode = NOP_EXPR;
1703 	else
1704 	  subcode = FLOAT_EXPR;
1705 
1706 	for (i = 0; i < nelts; i++)
1707 	  {
1708 	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1709 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1710 	      return NULL_TREE;
1711 	  }
1712 
1713 	return build_vector (type, elts);
1714       }
1715 
1716     case REDUC_MIN_EXPR:
1717     case REDUC_MAX_EXPR:
1718     case REDUC_PLUS_EXPR:
1719       {
1720 	unsigned int nelts, i;
1721 	tree *elts;
1722 	enum tree_code subcode;
1723 
1724 	if (TREE_CODE (arg0) != VECTOR_CST)
1725 	  return NULL_TREE;
1726         nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1727 
1728 	elts = XALLOCAVEC (tree, nelts);
1729 	if (!vec_cst_ctor_to_array (arg0, elts))
1730 	  return NULL_TREE;
1731 
1732 	switch (code)
1733 	  {
1734 	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1735 	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1736 	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1737 	  default: gcc_unreachable ();
1738 	  }
1739 
1740 	for (i = 1; i < nelts; i++)
1741 	  {
1742 	    elts[0] = const_binop (subcode, elts[0], elts[i]);
1743 	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1744 	      return NULL_TREE;
1745 	  }
1746 
1747 	return elts[0];
1748       }
1749 
1750     default:
1751       break;
1752     }
1753 
1754   return NULL_TREE;
1755 }
1756 
1757 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1758    indicates which particular sizetype to create.  */
1759 
1760 tree
1761 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1762 {
1763   return build_int_cst (sizetype_tab[(int) kind], number);
1764 }
1765 
1766 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1767    is a tree code.  The type of the result is taken from the operands.
1768    Both must be equivalent integer types, ala int_binop_types_match_p.
1769    If the operands are constant, so is the result.  */
1770 
1771 tree
1772 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1773 {
1774   tree type = TREE_TYPE (arg0);
1775 
1776   if (arg0 == error_mark_node || arg1 == error_mark_node)
1777     return error_mark_node;
1778 
1779   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1780                                        TREE_TYPE (arg1)));
1781 
1782   /* Handle the special case of two integer constants faster.  */
1783   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1784     {
1785       /* And some specific cases even faster than that.  */
1786       if (code == PLUS_EXPR)
1787 	{
1788 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1789 	    return arg1;
1790 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1791 	    return arg0;
1792 	}
1793       else if (code == MINUS_EXPR)
1794 	{
1795 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1796 	    return arg0;
1797 	}
1798       else if (code == MULT_EXPR)
1799 	{
1800 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1801 	    return arg1;
1802 	}
1803 
1804       /* Handle general case of two integer constants.  For sizetype
1805          constant calculations we always want to know about overflow,
1806 	 even in the unsigned case.  */
1807       return int_const_binop_1 (code, arg0, arg1, -1);
1808     }
1809 
1810   return fold_build2_loc (loc, code, type, arg0, arg1);
1811 }
1812 
1813 /* Given two values, either both of sizetype or both of bitsizetype,
1814    compute the difference between the two values.  Return the value
1815    in signed type corresponding to the type of the operands.  */
1816 
1817 tree
1818 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1819 {
1820   tree type = TREE_TYPE (arg0);
1821   tree ctype;
1822 
1823   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1824 				       TREE_TYPE (arg1)));
1825 
1826   /* If the type is already signed, just do the simple thing.  */
1827   if (!TYPE_UNSIGNED (type))
1828     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1829 
1830   if (type == sizetype)
1831     ctype = ssizetype;
1832   else if (type == bitsizetype)
1833     ctype = sbitsizetype;
1834   else
1835     ctype = signed_type_for (type);
1836 
1837   /* If either operand is not a constant, do the conversions to the signed
1838      type and subtract.  The hardware will do the right thing with any
1839      overflow in the subtraction.  */
1840   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1841     return size_binop_loc (loc, MINUS_EXPR,
1842 			   fold_convert_loc (loc, ctype, arg0),
1843 			   fold_convert_loc (loc, ctype, arg1));
1844 
1845   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1846      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1847      overflow) and negate (which can't either).  Special-case a result
1848      of zero while we're here.  */
1849   if (tree_int_cst_equal (arg0, arg1))
1850     return build_int_cst (ctype, 0);
1851   else if (tree_int_cst_lt (arg1, arg0))
1852     return fold_convert_loc (loc, ctype,
1853 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1854   else
1855     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1856 			   fold_convert_loc (loc, ctype,
1857 					     size_binop_loc (loc,
1858 							     MINUS_EXPR,
1859 							     arg1, arg0)));
1860 }
1861 
1862 /* A subroutine of fold_convert_const handling conversions of an
1863    INTEGER_CST to another integer type.  */
1864 
1865 static tree
1866 fold_convert_const_int_from_int (tree type, const_tree arg1)
1867 {
1868   /* Given an integer constant, make new constant with new type,
1869      appropriately sign-extended or truncated.  Use widest_int
1870      so that any extension is done according ARG1's type.  */
1871   return force_fit_type (type, wi::to_widest (arg1),
1872 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1873 			 TREE_OVERFLOW (arg1));
1874 }
1875 
1876 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1877    to an integer type.  */
1878 
1879 static tree
1880 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1881 {
1882   bool overflow = false;
1883   tree t;
1884 
1885   /* The following code implements the floating point to integer
1886      conversion rules required by the Java Language Specification,
1887      that IEEE NaNs are mapped to zero and values that overflow
1888      the target precision saturate, i.e. values greater than
1889      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1890      are mapped to INT_MIN.  These semantics are allowed by the
1891      C and C++ standards that simply state that the behavior of
1892      FP-to-integer conversion is unspecified upon overflow.  */
1893 
1894   wide_int val;
1895   REAL_VALUE_TYPE r;
1896   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1897 
1898   switch (code)
1899     {
1900     case FIX_TRUNC_EXPR:
1901       real_trunc (&r, VOIDmode, &x);
1902       break;
1903 
1904     default:
1905       gcc_unreachable ();
1906     }
1907 
1908   /* If R is NaN, return zero and show we have an overflow.  */
1909   if (REAL_VALUE_ISNAN (r))
1910     {
1911       overflow = true;
1912       val = wi::zero (TYPE_PRECISION (type));
1913     }
1914 
1915   /* See if R is less than the lower bound or greater than the
1916      upper bound.  */
1917 
1918   if (! overflow)
1919     {
1920       tree lt = TYPE_MIN_VALUE (type);
1921       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1922       if (real_less (&r, &l))
1923 	{
1924 	  overflow = true;
1925 	  val = lt;
1926 	}
1927     }
1928 
1929   if (! overflow)
1930     {
1931       tree ut = TYPE_MAX_VALUE (type);
1932       if (ut)
1933 	{
1934 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1935 	  if (real_less (&u, &r))
1936 	    {
1937 	      overflow = true;
1938 	      val = ut;
1939 	    }
1940 	}
1941     }
1942 
1943   if (! overflow)
1944     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1945 
1946   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1947   return t;
1948 }
1949 
1950 /* A subroutine of fold_convert_const handling conversions of a
1951    FIXED_CST to an integer type.  */
1952 
1953 static tree
1954 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1955 {
1956   tree t;
1957   double_int temp, temp_trunc;
1958   unsigned int mode;
1959 
1960   /* Right shift FIXED_CST to temp by fbit.  */
1961   temp = TREE_FIXED_CST (arg1).data;
1962   mode = TREE_FIXED_CST (arg1).mode;
1963   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1964     {
1965       temp = temp.rshift (GET_MODE_FBIT (mode),
1966 			  HOST_BITS_PER_DOUBLE_INT,
1967 			  SIGNED_FIXED_POINT_MODE_P (mode));
1968 
1969       /* Left shift temp to temp_trunc by fbit.  */
1970       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1971 				HOST_BITS_PER_DOUBLE_INT,
1972 				SIGNED_FIXED_POINT_MODE_P (mode));
1973     }
1974   else
1975     {
1976       temp = double_int_zero;
1977       temp_trunc = double_int_zero;
1978     }
1979 
1980   /* If FIXED_CST is negative, we need to round the value toward 0.
1981      By checking if the fractional bits are not zero to add 1 to temp.  */
1982   if (SIGNED_FIXED_POINT_MODE_P (mode)
1983       && temp_trunc.is_negative ()
1984       && TREE_FIXED_CST (arg1).data != temp_trunc)
1985     temp += double_int_one;
1986 
1987   /* Given a fixed-point constant, make new constant with new type,
1988      appropriately sign-extended or truncated.  */
1989   t = force_fit_type (type, temp, -1,
1990 		      (temp.is_negative ()
1991 		       && (TYPE_UNSIGNED (type)
1992 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1993 		      | TREE_OVERFLOW (arg1));
1994 
1995   return t;
1996 }
1997 
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999    to another floating point type.  */
2000 
2001 static tree
2002 fold_convert_const_real_from_real (tree type, const_tree arg1)
2003 {
2004   REAL_VALUE_TYPE value;
2005   tree t;
2006 
2007   /* Don't perform the operation if flag_signaling_nans is on
2008      and the operand is a signaling NaN.  */
2009   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2010       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2011     return NULL_TREE;
2012 
2013   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2014   t = build_real (type, value);
2015 
2016   /* If converting an infinity or NAN to a representation that doesn't
2017      have one, set the overflow bit so that we can produce some kind of
2018      error message at the appropriate point if necessary.  It's not the
2019      most user-friendly message, but it's better than nothing.  */
2020   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2021       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2022     TREE_OVERFLOW (t) = 1;
2023   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2024 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2025     TREE_OVERFLOW (t) = 1;
2026   /* Regular overflow, conversion produced an infinity in a mode that
2027      can't represent them.  */
2028   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2029 	   && REAL_VALUE_ISINF (value)
2030 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2031     TREE_OVERFLOW (t) = 1;
2032   else
2033     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2034   return t;
2035 }
2036 
2037 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2038    to a floating point type.  */
2039 
2040 static tree
2041 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2042 {
2043   REAL_VALUE_TYPE value;
2044   tree t;
2045 
2046   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2047   t = build_real (type, value);
2048 
2049   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2050   return t;
2051 }
2052 
2053 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2054    to another fixed-point type.  */
2055 
2056 static tree
2057 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2058 {
2059   FIXED_VALUE_TYPE value;
2060   tree t;
2061   bool overflow_p;
2062 
2063   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2064 			      TYPE_SATURATING (type));
2065   t = build_fixed (type, value);
2066 
2067   /* Propagate overflow flags.  */
2068   if (overflow_p | TREE_OVERFLOW (arg1))
2069     TREE_OVERFLOW (t) = 1;
2070   return t;
2071 }
2072 
2073 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2074    to a fixed-point type.  */
2075 
2076 static tree
2077 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2078 {
2079   FIXED_VALUE_TYPE value;
2080   tree t;
2081   bool overflow_p;
2082   double_int di;
2083 
2084   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2085 
2086   di.low = TREE_INT_CST_ELT (arg1, 0);
2087   if (TREE_INT_CST_NUNITS (arg1) == 1)
2088     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2089   else
2090     di.high = TREE_INT_CST_ELT (arg1, 1);
2091 
2092   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2093 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2094 				       TYPE_SATURATING (type));
2095   t = build_fixed (type, value);
2096 
2097   /* Propagate overflow flags.  */
2098   if (overflow_p | TREE_OVERFLOW (arg1))
2099     TREE_OVERFLOW (t) = 1;
2100   return t;
2101 }
2102 
2103 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2104    to a fixed-point type.  */
2105 
2106 static tree
2107 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2108 {
2109   FIXED_VALUE_TYPE value;
2110   tree t;
2111   bool overflow_p;
2112 
2113   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2114 					&TREE_REAL_CST (arg1),
2115 					TYPE_SATURATING (type));
2116   t = build_fixed (type, value);
2117 
2118   /* Propagate overflow flags.  */
2119   if (overflow_p | TREE_OVERFLOW (arg1))
2120     TREE_OVERFLOW (t) = 1;
2121   return t;
2122 }
2123 
2124 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2125    type TYPE.  If no simplification can be done return NULL_TREE.  */
2126 
2127 static tree
2128 fold_convert_const (enum tree_code code, tree type, tree arg1)
2129 {
2130   if (TREE_TYPE (arg1) == type)
2131     return arg1;
2132 
2133   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2134       || TREE_CODE (type) == OFFSET_TYPE)
2135     {
2136       if (TREE_CODE (arg1) == INTEGER_CST)
2137 	return fold_convert_const_int_from_int (type, arg1);
2138       else if (TREE_CODE (arg1) == REAL_CST)
2139 	return fold_convert_const_int_from_real (code, type, arg1);
2140       else if (TREE_CODE (arg1) == FIXED_CST)
2141 	return fold_convert_const_int_from_fixed (type, arg1);
2142     }
2143   else if (TREE_CODE (type) == REAL_TYPE)
2144     {
2145       if (TREE_CODE (arg1) == INTEGER_CST)
2146 	return build_real_from_int_cst (type, arg1);
2147       else if (TREE_CODE (arg1) == REAL_CST)
2148 	return fold_convert_const_real_from_real (type, arg1);
2149       else if (TREE_CODE (arg1) == FIXED_CST)
2150 	return fold_convert_const_real_from_fixed (type, arg1);
2151     }
2152   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2153     {
2154       if (TREE_CODE (arg1) == FIXED_CST)
2155 	return fold_convert_const_fixed_from_fixed (type, arg1);
2156       else if (TREE_CODE (arg1) == INTEGER_CST)
2157 	return fold_convert_const_fixed_from_int (type, arg1);
2158       else if (TREE_CODE (arg1) == REAL_CST)
2159 	return fold_convert_const_fixed_from_real (type, arg1);
2160     }
2161   else if (TREE_CODE (type) == VECTOR_TYPE)
2162     {
2163       if (TREE_CODE (arg1) == VECTOR_CST
2164 	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2165 	{
2166 	  int len = TYPE_VECTOR_SUBPARTS (type);
2167 	  tree elttype = TREE_TYPE (type);
2168 	  tree *v = XALLOCAVEC (tree, len);
2169 	  for (int i = 0; i < len; ++i)
2170 	    {
2171 	      tree elt = VECTOR_CST_ELT (arg1, i);
2172 	      tree cvt = fold_convert_const (code, elttype, elt);
2173 	      if (cvt == NULL_TREE)
2174 		return NULL_TREE;
2175 	      v[i] = cvt;
2176 	    }
2177 	  return build_vector (type, v);
2178 	}
2179     }
2180   return NULL_TREE;
2181 }
2182 
2183 /* Construct a vector of zero elements of vector type TYPE.  */
2184 
2185 static tree
2186 build_zero_vector (tree type)
2187 {
2188   tree t;
2189 
2190   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2191   return build_vector_from_val (type, t);
2192 }
2193 
2194 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
2195 
2196 bool
2197 fold_convertible_p (const_tree type, const_tree arg)
2198 {
2199   tree orig = TREE_TYPE (arg);
2200 
2201   if (type == orig)
2202     return true;
2203 
2204   if (TREE_CODE (arg) == ERROR_MARK
2205       || TREE_CODE (type) == ERROR_MARK
2206       || TREE_CODE (orig) == ERROR_MARK)
2207     return false;
2208 
2209   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2210     return true;
2211 
2212   switch (TREE_CODE (type))
2213     {
2214     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2215     case POINTER_TYPE: case REFERENCE_TYPE:
2216     case OFFSET_TYPE:
2217       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2218 	      || TREE_CODE (orig) == OFFSET_TYPE);
2219 
2220     case REAL_TYPE:
2221     case FIXED_POINT_TYPE:
2222     case VECTOR_TYPE:
2223     case VOID_TYPE:
2224       return TREE_CODE (type) == TREE_CODE (orig);
2225 
2226     default:
2227       return false;
2228     }
2229 }
2230 
2231 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2232    simple conversions in preference to calling the front-end's convert.  */
2233 
2234 tree
2235 fold_convert_loc (location_t loc, tree type, tree arg)
2236 {
2237   tree orig = TREE_TYPE (arg);
2238   tree tem;
2239 
2240   if (type == orig)
2241     return arg;
2242 
2243   if (TREE_CODE (arg) == ERROR_MARK
2244       || TREE_CODE (type) == ERROR_MARK
2245       || TREE_CODE (orig) == ERROR_MARK)
2246     return error_mark_node;
2247 
2248   switch (TREE_CODE (type))
2249     {
2250     case POINTER_TYPE:
2251     case REFERENCE_TYPE:
2252       /* Handle conversions between pointers to different address spaces.  */
2253       if (POINTER_TYPE_P (orig)
2254 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2255 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2256 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2257       /* fall through */
2258 
2259     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2260     case OFFSET_TYPE:
2261       if (TREE_CODE (arg) == INTEGER_CST)
2262 	{
2263 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2264 	  if (tem != NULL_TREE)
2265 	    return tem;
2266 	}
2267       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2268 	  || TREE_CODE (orig) == OFFSET_TYPE)
2269 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2270       if (TREE_CODE (orig) == COMPLEX_TYPE)
2271 	return fold_convert_loc (loc, type,
2272 				 fold_build1_loc (loc, REALPART_EXPR,
2273 						  TREE_TYPE (orig), arg));
2274       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2275 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2276       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2277 
2278     case REAL_TYPE:
2279       if (TREE_CODE (arg) == INTEGER_CST)
2280 	{
2281 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2282 	  if (tem != NULL_TREE)
2283 	    return tem;
2284 	}
2285       else if (TREE_CODE (arg) == REAL_CST)
2286 	{
2287 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2288 	  if (tem != NULL_TREE)
2289 	    return tem;
2290 	}
2291       else if (TREE_CODE (arg) == FIXED_CST)
2292 	{
2293 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2294 	  if (tem != NULL_TREE)
2295 	    return tem;
2296 	}
2297 
2298       switch (TREE_CODE (orig))
2299 	{
2300 	case INTEGER_TYPE:
2301 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2302 	case POINTER_TYPE: case REFERENCE_TYPE:
2303 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2304 
2305 	case REAL_TYPE:
2306 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2307 
2308 	case FIXED_POINT_TYPE:
2309 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2310 
2311 	case COMPLEX_TYPE:
2312 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2313 	  return fold_convert_loc (loc, type, tem);
2314 
2315 	default:
2316 	  gcc_unreachable ();
2317 	}
2318 
2319     case FIXED_POINT_TYPE:
2320       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2321 	  || TREE_CODE (arg) == REAL_CST)
2322 	{
2323 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2324 	  if (tem != NULL_TREE)
2325 	    goto fold_convert_exit;
2326 	}
2327 
2328       switch (TREE_CODE (orig))
2329 	{
2330 	case FIXED_POINT_TYPE:
2331 	case INTEGER_TYPE:
2332 	case ENUMERAL_TYPE:
2333 	case BOOLEAN_TYPE:
2334 	case REAL_TYPE:
2335 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2336 
2337 	case COMPLEX_TYPE:
2338 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2339 	  return fold_convert_loc (loc, type, tem);
2340 
2341 	default:
2342 	  gcc_unreachable ();
2343 	}
2344 
2345     case COMPLEX_TYPE:
2346       switch (TREE_CODE (orig))
2347 	{
2348 	case INTEGER_TYPE:
2349 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2350 	case POINTER_TYPE: case REFERENCE_TYPE:
2351 	case REAL_TYPE:
2352 	case FIXED_POINT_TYPE:
2353 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2354 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2355 			      fold_convert_loc (loc, TREE_TYPE (type),
2356 					    integer_zero_node));
2357 	case COMPLEX_TYPE:
2358 	  {
2359 	    tree rpart, ipart;
2360 
2361 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2362 	      {
2363 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2364 				      TREE_OPERAND (arg, 0));
2365 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2366 				      TREE_OPERAND (arg, 1));
2367 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2368 	      }
2369 
2370 	    arg = save_expr (arg);
2371 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2372 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2373 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2374 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2375 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2376 	  }
2377 
2378 	default:
2379 	  gcc_unreachable ();
2380 	}
2381 
2382     case VECTOR_TYPE:
2383       if (integer_zerop (arg))
2384 	return build_zero_vector (type);
2385       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2386       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2387 		  || TREE_CODE (orig) == VECTOR_TYPE);
2388       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2389 
2390     case VOID_TYPE:
2391       tem = fold_ignored_result (arg);
2392       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2393 
2394     default:
2395       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2396 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2397       gcc_unreachable ();
2398     }
2399  fold_convert_exit:
2400   protected_set_expr_location_unshare (tem, loc);
2401   return tem;
2402 }
2403 
2404 /* Return false if expr can be assumed not to be an lvalue, true
2405    otherwise.  */
2406 
2407 static bool
2408 maybe_lvalue_p (const_tree x)
2409 {
2410   /* We only need to wrap lvalue tree codes.  */
2411   switch (TREE_CODE (x))
2412   {
2413   case VAR_DECL:
2414   case PARM_DECL:
2415   case RESULT_DECL:
2416   case LABEL_DECL:
2417   case FUNCTION_DECL:
2418   case SSA_NAME:
2419 
2420   case COMPONENT_REF:
2421   case MEM_REF:
2422   case INDIRECT_REF:
2423   case ARRAY_REF:
2424   case ARRAY_RANGE_REF:
2425   case BIT_FIELD_REF:
2426   case OBJ_TYPE_REF:
2427 
2428   case REALPART_EXPR:
2429   case IMAGPART_EXPR:
2430   case PREINCREMENT_EXPR:
2431   case PREDECREMENT_EXPR:
2432   case SAVE_EXPR:
2433   case TRY_CATCH_EXPR:
2434   case WITH_CLEANUP_EXPR:
2435   case COMPOUND_EXPR:
2436   case MODIFY_EXPR:
2437   case TARGET_EXPR:
2438   case COND_EXPR:
2439   case BIND_EXPR:
2440     break;
2441 
2442   default:
2443     /* Assume the worst for front-end tree codes.  */
2444     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2445       break;
2446     return false;
2447   }
2448 
2449   return true;
2450 }
2451 
2452 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2453 
2454 tree
2455 non_lvalue_loc (location_t loc, tree x)
2456 {
2457   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2458      us.  */
2459   if (in_gimple_form)
2460     return x;
2461 
2462   if (! maybe_lvalue_p (x))
2463     return x;
2464   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2465 }
2466 
2467 /* When pedantic, return an expr equal to X but certainly not valid as a
2468    pedantic lvalue.  Otherwise, return X.  */
2469 
2470 static tree
2471 pedantic_non_lvalue_loc (location_t loc, tree x)
2472 {
2473   return protected_set_expr_location_unshare (x, loc);
2474 }
2475 
2476 /* Given a tree comparison code, return the code that is the logical inverse.
2477    It is generally not safe to do this for floating-point comparisons, except
2478    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2479    ERROR_MARK in this case.  */
2480 
2481 enum tree_code
2482 invert_tree_comparison (enum tree_code code, bool honor_nans)
2483 {
2484   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2485       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2486     return ERROR_MARK;
2487 
2488   switch (code)
2489     {
2490     case EQ_EXPR:
2491       return NE_EXPR;
2492     case NE_EXPR:
2493       return EQ_EXPR;
2494     case GT_EXPR:
2495       return honor_nans ? UNLE_EXPR : LE_EXPR;
2496     case GE_EXPR:
2497       return honor_nans ? UNLT_EXPR : LT_EXPR;
2498     case LT_EXPR:
2499       return honor_nans ? UNGE_EXPR : GE_EXPR;
2500     case LE_EXPR:
2501       return honor_nans ? UNGT_EXPR : GT_EXPR;
2502     case LTGT_EXPR:
2503       return UNEQ_EXPR;
2504     case UNEQ_EXPR:
2505       return LTGT_EXPR;
2506     case UNGT_EXPR:
2507       return LE_EXPR;
2508     case UNGE_EXPR:
2509       return LT_EXPR;
2510     case UNLT_EXPR:
2511       return GE_EXPR;
2512     case UNLE_EXPR:
2513       return GT_EXPR;
2514     case ORDERED_EXPR:
2515       return UNORDERED_EXPR;
2516     case UNORDERED_EXPR:
2517       return ORDERED_EXPR;
2518     default:
2519       gcc_unreachable ();
2520     }
2521 }
2522 
2523 /* Similar, but return the comparison that results if the operands are
2524    swapped.  This is safe for floating-point.  */
2525 
2526 enum tree_code
2527 swap_tree_comparison (enum tree_code code)
2528 {
2529   switch (code)
2530     {
2531     case EQ_EXPR:
2532     case NE_EXPR:
2533     case ORDERED_EXPR:
2534     case UNORDERED_EXPR:
2535     case LTGT_EXPR:
2536     case UNEQ_EXPR:
2537       return code;
2538     case GT_EXPR:
2539       return LT_EXPR;
2540     case GE_EXPR:
2541       return LE_EXPR;
2542     case LT_EXPR:
2543       return GT_EXPR;
2544     case LE_EXPR:
2545       return GE_EXPR;
2546     case UNGT_EXPR:
2547       return UNLT_EXPR;
2548     case UNGE_EXPR:
2549       return UNLE_EXPR;
2550     case UNLT_EXPR:
2551       return UNGT_EXPR;
2552     case UNLE_EXPR:
2553       return UNGE_EXPR;
2554     default:
2555       gcc_unreachable ();
2556     }
2557 }
2558 
2559 
2560 /* Convert a comparison tree code from an enum tree_code representation
2561    into a compcode bit-based encoding.  This function is the inverse of
2562    compcode_to_comparison.  */
2563 
2564 static enum comparison_code
2565 comparison_to_compcode (enum tree_code code)
2566 {
2567   switch (code)
2568     {
2569     case LT_EXPR:
2570       return COMPCODE_LT;
2571     case EQ_EXPR:
2572       return COMPCODE_EQ;
2573     case LE_EXPR:
2574       return COMPCODE_LE;
2575     case GT_EXPR:
2576       return COMPCODE_GT;
2577     case NE_EXPR:
2578       return COMPCODE_NE;
2579     case GE_EXPR:
2580       return COMPCODE_GE;
2581     case ORDERED_EXPR:
2582       return COMPCODE_ORD;
2583     case UNORDERED_EXPR:
2584       return COMPCODE_UNORD;
2585     case UNLT_EXPR:
2586       return COMPCODE_UNLT;
2587     case UNEQ_EXPR:
2588       return COMPCODE_UNEQ;
2589     case UNLE_EXPR:
2590       return COMPCODE_UNLE;
2591     case UNGT_EXPR:
2592       return COMPCODE_UNGT;
2593     case LTGT_EXPR:
2594       return COMPCODE_LTGT;
2595     case UNGE_EXPR:
2596       return COMPCODE_UNGE;
2597     default:
2598       gcc_unreachable ();
2599     }
2600 }
2601 
2602 /* Convert a compcode bit-based encoding of a comparison operator back
2603    to GCC's enum tree_code representation.  This function is the
2604    inverse of comparison_to_compcode.  */
2605 
2606 static enum tree_code
2607 compcode_to_comparison (enum comparison_code code)
2608 {
2609   switch (code)
2610     {
2611     case COMPCODE_LT:
2612       return LT_EXPR;
2613     case COMPCODE_EQ:
2614       return EQ_EXPR;
2615     case COMPCODE_LE:
2616       return LE_EXPR;
2617     case COMPCODE_GT:
2618       return GT_EXPR;
2619     case COMPCODE_NE:
2620       return NE_EXPR;
2621     case COMPCODE_GE:
2622       return GE_EXPR;
2623     case COMPCODE_ORD:
2624       return ORDERED_EXPR;
2625     case COMPCODE_UNORD:
2626       return UNORDERED_EXPR;
2627     case COMPCODE_UNLT:
2628       return UNLT_EXPR;
2629     case COMPCODE_UNEQ:
2630       return UNEQ_EXPR;
2631     case COMPCODE_UNLE:
2632       return UNLE_EXPR;
2633     case COMPCODE_UNGT:
2634       return UNGT_EXPR;
2635     case COMPCODE_LTGT:
2636       return LTGT_EXPR;
2637     case COMPCODE_UNGE:
2638       return UNGE_EXPR;
2639     default:
2640       gcc_unreachable ();
2641     }
2642 }
2643 
2644 /* Return a tree for the comparison which is the combination of
2645    doing the AND or OR (depending on CODE) of the two operations LCODE
2646    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2647    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2648    if this makes the transformation invalid.  */
2649 
2650 tree
2651 combine_comparisons (location_t loc,
2652 		     enum tree_code code, enum tree_code lcode,
2653 		     enum tree_code rcode, tree truth_type,
2654 		     tree ll_arg, tree lr_arg)
2655 {
2656   bool honor_nans = HONOR_NANS (ll_arg);
2657   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2658   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2659   int compcode;
2660 
2661   switch (code)
2662     {
2663     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2664       compcode = lcompcode & rcompcode;
2665       break;
2666 
2667     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2668       compcode = lcompcode | rcompcode;
2669       break;
2670 
2671     default:
2672       return NULL_TREE;
2673     }
2674 
2675   if (!honor_nans)
2676     {
2677       /* Eliminate unordered comparisons, as well as LTGT and ORD
2678 	 which are not used unless the mode has NaNs.  */
2679       compcode &= ~COMPCODE_UNORD;
2680       if (compcode == COMPCODE_LTGT)
2681 	compcode = COMPCODE_NE;
2682       else if (compcode == COMPCODE_ORD)
2683 	compcode = COMPCODE_TRUE;
2684     }
2685    else if (flag_trapping_math)
2686      {
2687 	/* Check that the original operation and the optimized ones will trap
2688 	   under the same condition.  */
2689 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2690 		     && (lcompcode != COMPCODE_EQ)
2691 		     && (lcompcode != COMPCODE_ORD);
2692 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2693 		     && (rcompcode != COMPCODE_EQ)
2694 		     && (rcompcode != COMPCODE_ORD);
2695 	bool trap = (compcode & COMPCODE_UNORD) == 0
2696 		    && (compcode != COMPCODE_EQ)
2697 		    && (compcode != COMPCODE_ORD);
2698 
2699         /* In a short-circuited boolean expression the LHS might be
2700 	   such that the RHS, if evaluated, will never trap.  For
2701 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2702 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2703 	   example, the expression above will never trap, hence
2704 	   optimizing it to x < y would be invalid).  */
2705         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2706             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2707           rtrap = false;
2708 
2709         /* If the comparison was short-circuited, and only the RHS
2710 	   trapped, we may now generate a spurious trap.  */
2711 	if (rtrap && !ltrap
2712 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2713 	  return NULL_TREE;
2714 
2715 	/* If we changed the conditions that cause a trap, we lose.  */
2716 	if ((ltrap || rtrap) != trap)
2717 	  return NULL_TREE;
2718       }
2719 
2720   if (compcode == COMPCODE_TRUE)
2721     return constant_boolean_node (true, truth_type);
2722   else if (compcode == COMPCODE_FALSE)
2723     return constant_boolean_node (false, truth_type);
2724   else
2725     {
2726       enum tree_code tcode;
2727 
2728       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2729       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2730     }
2731 }
2732 
2733 /* Return nonzero if two operands (typically of the same tree node)
2734    are necessarily equal. FLAGS modifies behavior as follows:
2735 
2736    If OEP_ONLY_CONST is set, only return nonzero for constants.
2737    This function tests whether the operands are indistinguishable;
2738    it does not test whether they are equal using C's == operation.
2739    The distinction is important for IEEE floating point, because
2740    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2741    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2742 
2743    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2744    even though it may hold multiple values during a function.
2745    This is because a GCC tree node guarantees that nothing else is
2746    executed between the evaluation of its "operands" (which may often
2747    be evaluated in arbitrary order).  Hence if the operands themselves
2748    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2749    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2750    unset means assuming isochronic (or instantaneous) tree equivalence.
2751    Unless comparing arbitrary expression trees, such as from different
2752    statements, this flag can usually be left unset.
2753 
2754    If OEP_PURE_SAME is set, then pure functions with identical arguments
2755    are considered the same.  It is used when the caller has other ways
2756    to ensure that global memory is unchanged in between.
2757 
2758    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2759    not values of expressions.
2760 
2761    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2762    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2763 
2764    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2765    any operand with side effect.  This is unnecesarily conservative in the
2766    case we know that arg0 and arg1 are in disjoint code paths (such as in
2767    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2768    addresses with TREE_CONSTANT flag set so we know that &var == &var
2769    even if var is volatile.  */
2770 
2771 int
2772 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2773 {
2774   /* When checking, verify at the outermost operand_equal_p call that
2775      if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
2776      hash value.  */
2777   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2778     {
2779       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2780 	{
2781 	  if (arg0 != arg1)
2782 	    {
2783 	      inchash::hash hstate0 (0), hstate1 (0);
2784 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2785 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2786 	      hashval_t h0 = hstate0.end ();
2787 	      hashval_t h1 = hstate1.end ();
2788 	      gcc_assert (h0 == h1);
2789 	    }
2790 	  return 1;
2791 	}
2792       else
2793 	return 0;
2794     }
2795 
2796   /* If either is ERROR_MARK, they aren't equal.  */
2797   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2798       || TREE_TYPE (arg0) == error_mark_node
2799       || TREE_TYPE (arg1) == error_mark_node)
2800     return 0;
2801 
2802   /* Similar, if either does not have a type (like a released SSA name),
2803      they aren't equal.  */
2804   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2805     return 0;
2806 
2807   /* We cannot consider pointers to different address space equal.  */
2808   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2809       && POINTER_TYPE_P (TREE_TYPE (arg1))
2810       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2811 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2812     return 0;
2813 
2814   /* Check equality of integer constants before bailing out due to
2815      precision differences.  */
2816   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2817     {
2818       /* Address of INTEGER_CST is not defined; check that we did not forget
2819 	 to drop the OEP_ADDRESS_OF flags.  */
2820       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2821       return tree_int_cst_equal (arg0, arg1);
2822     }
2823 
2824   if (!(flags & OEP_ADDRESS_OF))
2825     {
2826       /* If both types don't have the same signedness, then we can't consider
2827 	 them equal.  We must check this before the STRIP_NOPS calls
2828 	 because they may change the signedness of the arguments.  As pointers
2829 	 strictly don't have a signedness, require either two pointers or
2830 	 two non-pointers as well.  */
2831       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2832 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2833 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2834 	return 0;
2835 
2836       /* If both types don't have the same precision, then it is not safe
2837 	 to strip NOPs.  */
2838       if (element_precision (TREE_TYPE (arg0))
2839 	  != element_precision (TREE_TYPE (arg1)))
2840 	return 0;
2841 
2842       STRIP_NOPS (arg0);
2843       STRIP_NOPS (arg1);
2844     }
2845 #if 0
2846   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2847      sanity check once the issue is solved.  */
2848   else
2849     /* Addresses of conversions and SSA_NAMEs (and many other things)
2850        are not defined.  Check that we did not forget to drop the
2851        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2852     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2853 			 && TREE_CODE (arg0) != SSA_NAME);
2854 #endif
2855 
2856   /* In case both args are comparisons but with different comparison
2857      code, try to swap the comparison operands of one arg to produce
2858      a match and compare that variant.  */
2859   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2860       && COMPARISON_CLASS_P (arg0)
2861       && COMPARISON_CLASS_P (arg1))
2862     {
2863       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2864 
2865       if (TREE_CODE (arg0) == swap_code)
2866 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2867 			        TREE_OPERAND (arg1, 1), flags)
2868 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2869 				   TREE_OPERAND (arg1, 0), flags);
2870     }
2871 
2872   if (TREE_CODE (arg0) != TREE_CODE (arg1))
2873     {
2874       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
2875       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2876 	;
2877       else if (flags & OEP_ADDRESS_OF)
2878 	{
2879 	  /* If we are interested in comparing addresses ignore
2880 	     MEM_REF wrappings of the base that can appear just for
2881 	     TBAA reasons.  */
2882 	  if (TREE_CODE (arg0) == MEM_REF
2883 	      && DECL_P (arg1)
2884 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2885 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2886 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
2887 	    return 1;
2888 	  else if (TREE_CODE (arg1) == MEM_REF
2889 		   && DECL_P (arg0)
2890 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2891 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2892 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
2893 	    return 1;
2894 	  return 0;
2895 	}
2896       else
2897 	return 0;
2898     }
2899 
2900   /* When not checking adddresses, this is needed for conversions and for
2901      COMPONENT_REF.  Might as well play it safe and always test this.  */
2902   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2903       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2904       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2905 	  && !(flags & OEP_ADDRESS_OF)))
2906     return 0;
2907 
2908   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2909      We don't care about side effects in that case because the SAVE_EXPR
2910      takes care of that for us. In all other cases, two expressions are
2911      equal if they have no side effects.  If we have two identical
2912      expressions with side effects that should be treated the same due
2913      to the only side effects being identical SAVE_EXPR's, that will
2914      be detected in the recursive calls below.
2915      If we are taking an invariant address of two identical objects
2916      they are necessarily equal as well.  */
2917   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2918       && (TREE_CODE (arg0) == SAVE_EXPR
2919 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
2920 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2921     return 1;
2922 
2923   /* Next handle constant cases, those for which we can return 1 even
2924      if ONLY_CONST is set.  */
2925   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2926     switch (TREE_CODE (arg0))
2927       {
2928       case INTEGER_CST:
2929 	return tree_int_cst_equal (arg0, arg1);
2930 
2931       case FIXED_CST:
2932 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2933 				       TREE_FIXED_CST (arg1));
2934 
2935       case REAL_CST:
2936 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2937 	  return 1;
2938 
2939 
2940 	if (!HONOR_SIGNED_ZEROS (arg0))
2941 	  {
2942 	    /* If we do not distinguish between signed and unsigned zero,
2943 	       consider them equal.  */
2944 	    if (real_zerop (arg0) && real_zerop (arg1))
2945 	      return 1;
2946 	  }
2947 	return 0;
2948 
2949       case VECTOR_CST:
2950 	{
2951 	  unsigned i;
2952 
2953 	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2954 	    return 0;
2955 
2956 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2957 	    {
2958 	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2959 				    VECTOR_CST_ELT (arg1, i), flags))
2960 		return 0;
2961 	    }
2962 	  return 1;
2963 	}
2964 
2965       case COMPLEX_CST:
2966 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2967 				 flags)
2968 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2969 				    flags));
2970 
2971       case STRING_CST:
2972 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2973 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2974 			      TREE_STRING_POINTER (arg1),
2975 			      TREE_STRING_LENGTH (arg0)));
2976 
2977       case ADDR_EXPR:
2978 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2979 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2980 				flags | OEP_ADDRESS_OF
2981 				| OEP_MATCH_SIDE_EFFECTS);
2982       case CONSTRUCTOR:
2983 	/* In GIMPLE empty constructors are allowed in initializers of
2984 	   aggregates.  */
2985 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2986       default:
2987 	break;
2988       }
2989 
2990   if (flags & OEP_ONLY_CONST)
2991     return 0;
2992 
2993 /* Define macros to test an operand from arg0 and arg1 for equality and a
2994    variant that allows null and views null as being different from any
2995    non-null value.  In the latter case, if either is null, the both
2996    must be; otherwise, do the normal comparison.  */
2997 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2998 				    TREE_OPERAND (arg1, N), flags)
2999 
3000 #define OP_SAME_WITH_NULL(N)				\
3001   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3002    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3003 
3004   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3005     {
3006     case tcc_unary:
3007       /* Two conversions are equal only if signedness and modes match.  */
3008       switch (TREE_CODE (arg0))
3009         {
3010 	CASE_CONVERT:
3011         case FIX_TRUNC_EXPR:
3012 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3013 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3014 	    return 0;
3015 	  break;
3016 	default:
3017 	  break;
3018 	}
3019 
3020       return OP_SAME (0);
3021 
3022 
3023     case tcc_comparison:
3024     case tcc_binary:
3025       if (OP_SAME (0) && OP_SAME (1))
3026 	return 1;
3027 
3028       /* For commutative ops, allow the other order.  */
3029       return (commutative_tree_code (TREE_CODE (arg0))
3030 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3031 				  TREE_OPERAND (arg1, 1), flags)
3032 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3033 				  TREE_OPERAND (arg1, 0), flags));
3034 
3035     case tcc_reference:
3036       /* If either of the pointer (or reference) expressions we are
3037 	 dereferencing contain a side effect, these cannot be equal,
3038 	 but their addresses can be.  */
3039       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3040 	  && (TREE_SIDE_EFFECTS (arg0)
3041 	      || TREE_SIDE_EFFECTS (arg1)))
3042 	return 0;
3043 
3044       switch (TREE_CODE (arg0))
3045 	{
3046 	case INDIRECT_REF:
3047 	  if (!(flags & OEP_ADDRESS_OF)
3048 	      && (TYPE_ALIGN (TREE_TYPE (arg0))
3049 		  != TYPE_ALIGN (TREE_TYPE (arg1))))
3050 	    return 0;
3051 	  flags &= ~OEP_ADDRESS_OF;
3052 	  return OP_SAME (0);
3053 
3054 	case IMAGPART_EXPR:
3055 	  /* Require the same offset.  */
3056 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3057 				TYPE_SIZE (TREE_TYPE (arg1)),
3058 				flags & ~OEP_ADDRESS_OF))
3059 	    return 0;
3060 
3061 	/* Fallthru.  */
3062 	case REALPART_EXPR:
3063 	case VIEW_CONVERT_EXPR:
3064 	  return OP_SAME (0);
3065 
3066 	case TARGET_MEM_REF:
3067 	case MEM_REF:
3068 	  if (!(flags & OEP_ADDRESS_OF))
3069 	    {
3070 	      /* Require equal access sizes */
3071 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3072 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3073 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3074 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3075 					   TYPE_SIZE (TREE_TYPE (arg1)),
3076 					   flags)))
3077 		return 0;
3078 	      /* Verify that access happens in similar types.  */
3079 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3080 		return 0;
3081 	      /* Verify that accesses are TBAA compatible.  */
3082 	      if (!alias_ptr_types_compatible_p
3083 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3084 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3085 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3086 		      != MR_DEPENDENCE_CLIQUE (arg1))
3087 		  || (MR_DEPENDENCE_BASE (arg0)
3088 		      != MR_DEPENDENCE_BASE (arg1)))
3089 		return 0;
3090 	     /* Verify that alignment is compatible.  */
3091 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3092 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3093 		return 0;
3094 	    }
3095 	  flags &= ~OEP_ADDRESS_OF;
3096 	  return (OP_SAME (0) && OP_SAME (1)
3097 		  /* TARGET_MEM_REF require equal extra operands.  */
3098 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3099 		      || (OP_SAME_WITH_NULL (2)
3100 			  && OP_SAME_WITH_NULL (3)
3101 			  && OP_SAME_WITH_NULL (4))));
3102 
3103 	case ARRAY_REF:
3104 	case ARRAY_RANGE_REF:
3105 	  if (!OP_SAME (0))
3106 	    return 0;
3107 	  flags &= ~OEP_ADDRESS_OF;
3108 	  /* Compare the array index by value if it is constant first as we
3109 	     may have different types but same value here.  */
3110 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3111 				       TREE_OPERAND (arg1, 1))
3112 		   || OP_SAME (1))
3113 		  && OP_SAME_WITH_NULL (2)
3114 		  && OP_SAME_WITH_NULL (3)
3115 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3116 		     we have to account for the offset of the ref.  */
3117 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3118 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3119 		      || (operand_equal_p (array_ref_low_bound
3120 					     (CONST_CAST_TREE (arg0)),
3121 					   array_ref_low_bound
3122 					     (CONST_CAST_TREE (arg1)), flags)
3123 			  && operand_equal_p (array_ref_element_size
3124 					        (CONST_CAST_TREE (arg0)),
3125 					      array_ref_element_size
3126 					        (CONST_CAST_TREE (arg1)),
3127 					      flags))));
3128 
3129 	case COMPONENT_REF:
3130 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3131 	     may be NULL when we're called to compare MEM_EXPRs.  */
3132 	  if (!OP_SAME_WITH_NULL (0)
3133 	      || !OP_SAME (1))
3134 	    return 0;
3135 	  flags &= ~OEP_ADDRESS_OF;
3136 	  return OP_SAME_WITH_NULL (2);
3137 
3138 	case BIT_FIELD_REF:
3139 	  if (!OP_SAME (0))
3140 	    return 0;
3141 	  flags &= ~OEP_ADDRESS_OF;
3142 	  return OP_SAME (1) && OP_SAME (2);
3143 
3144 	default:
3145 	  return 0;
3146 	}
3147 
3148     case tcc_expression:
3149       switch (TREE_CODE (arg0))
3150 	{
3151 	case ADDR_EXPR:
3152 	  /* Be sure we pass right ADDRESS_OF flag.  */
3153 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3155 				  TREE_OPERAND (arg1, 0),
3156 				  flags | OEP_ADDRESS_OF);
3157 
3158 	case TRUTH_NOT_EXPR:
3159 	  return OP_SAME (0);
3160 
3161 	case TRUTH_ANDIF_EXPR:
3162 	case TRUTH_ORIF_EXPR:
3163 	  return OP_SAME (0) && OP_SAME (1);
3164 
3165 	case FMA_EXPR:
3166 	case WIDEN_MULT_PLUS_EXPR:
3167 	case WIDEN_MULT_MINUS_EXPR:
3168 	  if (!OP_SAME (2))
3169 	    return 0;
3170 	  /* The multiplcation operands are commutative.  */
3171 	  /* FALLTHRU */
3172 
3173 	case TRUTH_AND_EXPR:
3174 	case TRUTH_OR_EXPR:
3175 	case TRUTH_XOR_EXPR:
3176 	  if (OP_SAME (0) && OP_SAME (1))
3177 	    return 1;
3178 
3179 	  /* Otherwise take into account this is a commutative operation.  */
3180 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3181 				   TREE_OPERAND (arg1, 1), flags)
3182 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3183 				      TREE_OPERAND (arg1, 0), flags));
3184 
3185 	case COND_EXPR:
3186 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3187 	    return 0;
3188 	  flags &= ~OEP_ADDRESS_OF;
3189 	  return OP_SAME (0);
3190 
3191 	case VEC_COND_EXPR:
3192 	case DOT_PROD_EXPR:
3193 	case BIT_INSERT_EXPR:
3194 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3195 
3196 	case MODIFY_EXPR:
3197 	case INIT_EXPR:
3198 	case COMPOUND_EXPR:
3199 	case PREDECREMENT_EXPR:
3200 	case PREINCREMENT_EXPR:
3201 	case POSTDECREMENT_EXPR:
3202 	case POSTINCREMENT_EXPR:
3203 	  if (flags & OEP_LEXICOGRAPHIC)
3204 	    return OP_SAME (0) && OP_SAME (1);
3205 	  return 0;
3206 
3207 	case CLEANUP_POINT_EXPR:
3208 	case EXPR_STMT:
3209 	  if (flags & OEP_LEXICOGRAPHIC)
3210 	    return OP_SAME (0);
3211 	  return 0;
3212 
3213 	default:
3214 	  return 0;
3215 	}
3216 
3217     case tcc_vl_exp:
3218       switch (TREE_CODE (arg0))
3219 	{
3220 	case CALL_EXPR:
3221 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3222 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3223 	    /* If not both CALL_EXPRs are either internal or normal function
3224 	       functions, then they are not equal.  */
3225 	    return 0;
3226 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3227 	    {
3228 	      /* If the CALL_EXPRs call different internal functions, then they
3229 		 are not equal.  */
3230 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3231 		return 0;
3232 	    }
3233 	  else
3234 	    {
3235 	      /* If the CALL_EXPRs call different functions, then they are not
3236 		 equal.  */
3237 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3238 				     flags))
3239 		return 0;
3240 	    }
3241 
3242 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3243 	  {
3244 	    unsigned int cef = call_expr_flags (arg0);
3245 	    if (flags & OEP_PURE_SAME)
3246 	      cef &= ECF_CONST | ECF_PURE;
3247 	    else
3248 	      cef &= ECF_CONST;
3249 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3250 	      return 0;
3251 	  }
3252 
3253 	  /* Now see if all the arguments are the same.  */
3254 	  {
3255 	    const_call_expr_arg_iterator iter0, iter1;
3256 	    const_tree a0, a1;
3257 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3258 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3259 		 a0 && a1;
3260 		 a0 = next_const_call_expr_arg (&iter0),
3261 		   a1 = next_const_call_expr_arg (&iter1))
3262 	      if (! operand_equal_p (a0, a1, flags))
3263 		return 0;
3264 
3265 	    /* If we get here and both argument lists are exhausted
3266 	       then the CALL_EXPRs are equal.  */
3267 	    return ! (a0 || a1);
3268 	  }
3269 	default:
3270 	  return 0;
3271 	}
3272 
3273     case tcc_declaration:
3274       /* Consider __builtin_sqrt equal to sqrt.  */
3275       return (TREE_CODE (arg0) == FUNCTION_DECL
3276 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3277 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3278 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3279 
3280     case tcc_exceptional:
3281       if (TREE_CODE (arg0) == CONSTRUCTOR)
3282 	{
3283 	  /* In GIMPLE constructors are used only to build vectors from
3284 	     elements.  Individual elements in the constructor must be
3285 	     indexed in increasing order and form an initial sequence.
3286 
3287 	     We make no effort to compare constructors in generic.
3288 	     (see sem_variable::equals in ipa-icf which can do so for
3289 	      constants).  */
3290 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3291 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3292 	    return 0;
3293 
3294 	  /* Be sure that vectors constructed have the same representation.
3295 	     We only tested element precision and modes to match.
3296 	     Vectors may be BLKmode and thus also check that the number of
3297 	     parts match.  */
3298 	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3299 	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3300 	    return 0;
3301 
3302 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3303 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3304 	  unsigned int len = vec_safe_length (v0);
3305 
3306 	  if (len != vec_safe_length (v1))
3307 	    return 0;
3308 
3309 	  for (unsigned int i = 0; i < len; i++)
3310 	    {
3311 	      constructor_elt *c0 = &(*v0)[i];
3312 	      constructor_elt *c1 = &(*v1)[i];
3313 
3314 	      if (!operand_equal_p (c0->value, c1->value, flags)
3315 		  /* In GIMPLE the indexes can be either NULL or matching i.
3316 		     Double check this so we won't get false
3317 		     positives for GENERIC.  */
3318 		  || (c0->index
3319 		      && (TREE_CODE (c0->index) != INTEGER_CST
3320 			  || !compare_tree_int (c0->index, i)))
3321 		  || (c1->index
3322 		      && (TREE_CODE (c1->index) != INTEGER_CST
3323 			  || !compare_tree_int (c1->index, i))))
3324 		return 0;
3325 	    }
3326 	  return 1;
3327 	}
3328       else if (TREE_CODE (arg0) == STATEMENT_LIST
3329 	       && (flags & OEP_LEXICOGRAPHIC))
3330 	{
3331 	  /* Compare the STATEMENT_LISTs.  */
3332 	  tree_stmt_iterator tsi1, tsi2;
3333 	  tree body1 = CONST_CAST_TREE (arg0);
3334 	  tree body2 = CONST_CAST_TREE (arg1);
3335 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3336 	       tsi_next (&tsi1), tsi_next (&tsi2))
3337 	    {
3338 	      /* The lists don't have the same number of statements.  */
3339 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3340 		return 0;
3341 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3342 		return 1;
3343 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3344 				    OEP_LEXICOGRAPHIC))
3345 		return 0;
3346 	    }
3347 	}
3348       return 0;
3349 
3350     case tcc_statement:
3351       switch (TREE_CODE (arg0))
3352 	{
3353 	case RETURN_EXPR:
3354 	  if (flags & OEP_LEXICOGRAPHIC)
3355 	    return OP_SAME_WITH_NULL (0);
3356 	  return 0;
3357 	default:
3358 	  return 0;
3359 	 }
3360 
3361     default:
3362       return 0;
3363     }
3364 
3365 #undef OP_SAME
3366 #undef OP_SAME_WITH_NULL
3367 }
3368 
3369 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3370    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3371 
3372    When in doubt, return 0.  */
3373 
3374 static int
3375 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3376 {
3377   int unsignedp1, unsignedpo;
3378   tree primarg0, primarg1, primother;
3379   unsigned int correct_width;
3380 
3381   if (operand_equal_p (arg0, arg1, 0))
3382     return 1;
3383 
3384   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3385       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3386     return 0;
3387 
3388   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3389      and see if the inner values are the same.  This removes any
3390      signedness comparison, which doesn't matter here.  */
3391   primarg0 = arg0, primarg1 = arg1;
3392   STRIP_NOPS (primarg0);
3393   STRIP_NOPS (primarg1);
3394   if (operand_equal_p (primarg0, primarg1, 0))
3395     return 1;
3396 
3397   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3398      actual comparison operand, ARG0.
3399 
3400      First throw away any conversions to wider types
3401      already present in the operands.  */
3402 
3403   primarg1 = get_narrower (arg1, &unsignedp1);
3404   primother = get_narrower (other, &unsignedpo);
3405 
3406   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3407   if (unsignedp1 == unsignedpo
3408       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3409       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3410     {
3411       tree type = TREE_TYPE (arg0);
3412 
3413       /* Make sure shorter operand is extended the right way
3414 	 to match the longer operand.  */
3415       primarg1 = fold_convert (signed_or_unsigned_type_for
3416 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3417 
3418       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3419 	return 1;
3420     }
3421 
3422   return 0;
3423 }
3424 
3425 /* See if ARG is an expression that is either a comparison or is performing
3426    arithmetic on comparisons.  The comparisons must only be comparing
3427    two different values, which will be stored in *CVAL1 and *CVAL2; if
3428    they are nonzero it means that some operands have already been found.
3429    No variables may be used anywhere else in the expression except in the
3430    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3431    the expression and save_expr needs to be called with CVAL1 and CVAL2.
3432 
3433    If this is true, return 1.  Otherwise, return zero.  */
3434 
3435 static int
3436 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3437 {
3438   enum tree_code code = TREE_CODE (arg);
3439   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3440 
3441   /* We can handle some of the tcc_expression cases here.  */
3442   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3443     tclass = tcc_unary;
3444   else if (tclass == tcc_expression
3445 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3446 	       || code == COMPOUND_EXPR))
3447     tclass = tcc_binary;
3448 
3449   else if (tclass == tcc_expression && code == SAVE_EXPR
3450 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3451     {
3452       /* If we've already found a CVAL1 or CVAL2, this expression is
3453 	 two complex to handle.  */
3454       if (*cval1 || *cval2)
3455 	return 0;
3456 
3457       tclass = tcc_unary;
3458       *save_p = 1;
3459     }
3460 
3461   switch (tclass)
3462     {
3463     case tcc_unary:
3464       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3465 
3466     case tcc_binary:
3467       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3468 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3469 				      cval1, cval2, save_p));
3470 
3471     case tcc_constant:
3472       return 1;
3473 
3474     case tcc_expression:
3475       if (code == COND_EXPR)
3476 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3477 				     cval1, cval2, save_p)
3478 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3479 					cval1, cval2, save_p)
3480 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3481 					cval1, cval2, save_p));
3482       return 0;
3483 
3484     case tcc_comparison:
3485       /* First see if we can handle the first operand, then the second.  For
3486 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3487 	 one side of the comparison is each of the values; test for the
3488 	 case where this isn't true by failing if the two operands
3489 	 are the same.  */
3490 
3491       if (operand_equal_p (TREE_OPERAND (arg, 0),
3492 			   TREE_OPERAND (arg, 1), 0))
3493 	return 0;
3494 
3495       if (*cval1 == 0)
3496 	*cval1 = TREE_OPERAND (arg, 0);
3497       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3498 	;
3499       else if (*cval2 == 0)
3500 	*cval2 = TREE_OPERAND (arg, 0);
3501       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3502 	;
3503       else
3504 	return 0;
3505 
3506       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3507 	;
3508       else if (*cval2 == 0)
3509 	*cval2 = TREE_OPERAND (arg, 1);
3510       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3511 	;
3512       else
3513 	return 0;
3514 
3515       return 1;
3516 
3517     default:
3518       return 0;
3519     }
3520 }
3521 
3522 /* ARG is a tree that is known to contain just arithmetic operations and
3523    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3524    any occurrence of OLD0 as an operand of a comparison and likewise for
3525    NEW1 and OLD1.  */
3526 
3527 static tree
3528 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3529 	    tree old1, tree new1)
3530 {
3531   tree type = TREE_TYPE (arg);
3532   enum tree_code code = TREE_CODE (arg);
3533   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3534 
3535   /* We can handle some of the tcc_expression cases here.  */
3536   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3537     tclass = tcc_unary;
3538   else if (tclass == tcc_expression
3539 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3540     tclass = tcc_binary;
3541 
3542   switch (tclass)
3543     {
3544     case tcc_unary:
3545       return fold_build1_loc (loc, code, type,
3546 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3547 				      old0, new0, old1, new1));
3548 
3549     case tcc_binary:
3550       return fold_build2_loc (loc, code, type,
3551 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3552 				      old0, new0, old1, new1),
3553 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3554 				      old0, new0, old1, new1));
3555 
3556     case tcc_expression:
3557       switch (code)
3558 	{
3559 	case SAVE_EXPR:
3560 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3561 			     old1, new1);
3562 
3563 	case COMPOUND_EXPR:
3564 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3565 			     old1, new1);
3566 
3567 	case COND_EXPR:
3568 	  return fold_build3_loc (loc, code, type,
3569 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3570 					  old0, new0, old1, new1),
3571 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3572 					  old0, new0, old1, new1),
3573 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3574 					  old0, new0, old1, new1));
3575 	default:
3576 	  break;
3577 	}
3578       /* Fall through - ???  */
3579 
3580     case tcc_comparison:
3581       {
3582 	tree arg0 = TREE_OPERAND (arg, 0);
3583 	tree arg1 = TREE_OPERAND (arg, 1);
3584 
3585 	/* We need to check both for exact equality and tree equality.  The
3586 	   former will be true if the operand has a side-effect.  In that
3587 	   case, we know the operand occurred exactly once.  */
3588 
3589 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3590 	  arg0 = new0;
3591 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3592 	  arg0 = new1;
3593 
3594 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3595 	  arg1 = new0;
3596 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3597 	  arg1 = new1;
3598 
3599 	return fold_build2_loc (loc, code, type, arg0, arg1);
3600       }
3601 
3602     default:
3603       return arg;
3604     }
3605 }
3606 
3607 /* Return a tree for the case when the result of an expression is RESULT
3608    converted to TYPE and OMITTED was previously an operand of the expression
3609    but is now not needed (e.g., we folded OMITTED * 0).
3610 
3611    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3612    the conversion of RESULT to TYPE.  */
3613 
3614 tree
3615 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3616 {
3617   tree t = fold_convert_loc (loc, type, result);
3618 
3619   /* If the resulting operand is an empty statement, just return the omitted
3620      statement casted to void. */
3621   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3622     return build1_loc (loc, NOP_EXPR, void_type_node,
3623 		       fold_ignored_result (omitted));
3624 
3625   if (TREE_SIDE_EFFECTS (omitted))
3626     return build2_loc (loc, COMPOUND_EXPR, type,
3627 		       fold_ignored_result (omitted), t);
3628 
3629   return non_lvalue_loc (loc, t);
3630 }
3631 
3632 /* Return a tree for the case when the result of an expression is RESULT
3633    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3634    of the expression but are now not needed.
3635 
3636    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3637    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3638    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3639    just do the conversion of RESULT to TYPE.  */
3640 
3641 tree
3642 omit_two_operands_loc (location_t loc, tree type, tree result,
3643 		       tree omitted1, tree omitted2)
3644 {
3645   tree t = fold_convert_loc (loc, type, result);
3646 
3647   if (TREE_SIDE_EFFECTS (omitted2))
3648     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3649   if (TREE_SIDE_EFFECTS (omitted1))
3650     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3651 
3652   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3653 }
3654 
3655 
3656 /* Return a simplified tree node for the truth-negation of ARG.  This
3657    never alters ARG itself.  We assume that ARG is an operation that
3658    returns a truth value (0 or 1).
3659 
3660    FIXME: one would think we would fold the result, but it causes
3661    problems with the dominator optimizer.  */
3662 
3663 static tree
3664 fold_truth_not_expr (location_t loc, tree arg)
3665 {
3666   tree type = TREE_TYPE (arg);
3667   enum tree_code code = TREE_CODE (arg);
3668   location_t loc1, loc2;
3669 
3670   /* If this is a comparison, we can simply invert it, except for
3671      floating-point non-equality comparisons, in which case we just
3672      enclose a TRUTH_NOT_EXPR around what we have.  */
3673 
3674   if (TREE_CODE_CLASS (code) == tcc_comparison)
3675     {
3676       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3677       if (FLOAT_TYPE_P (op_type)
3678 	  && flag_trapping_math
3679 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3680 	  && code != NE_EXPR && code != EQ_EXPR)
3681 	return NULL_TREE;
3682 
3683       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3684       if (code == ERROR_MARK)
3685 	return NULL_TREE;
3686 
3687       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3688 			     TREE_OPERAND (arg, 1));
3689       if (TREE_NO_WARNING (arg))
3690 	TREE_NO_WARNING (ret) = 1;
3691       return ret;
3692     }
3693 
3694   switch (code)
3695     {
3696     case INTEGER_CST:
3697       return constant_boolean_node (integer_zerop (arg), type);
3698 
3699     case TRUTH_AND_EXPR:
3700       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3701       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3702       return build2_loc (loc, TRUTH_OR_EXPR, type,
3703 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3704 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3705 
3706     case TRUTH_OR_EXPR:
3707       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3708       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3709       return build2_loc (loc, TRUTH_AND_EXPR, type,
3710 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3711 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3712 
3713     case TRUTH_XOR_EXPR:
3714       /* Here we can invert either operand.  We invert the first operand
3715 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3716 	 result is the XOR of the first operand with the inside of the
3717 	 negation of the second operand.  */
3718 
3719       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3720 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3721 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3722       else
3723 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3724 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3725 			   TREE_OPERAND (arg, 1));
3726 
3727     case TRUTH_ANDIF_EXPR:
3728       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3729       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3730       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3731 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3732 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3733 
3734     case TRUTH_ORIF_EXPR:
3735       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3736       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3737       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3738 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3739 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3740 
3741     case TRUTH_NOT_EXPR:
3742       return TREE_OPERAND (arg, 0);
3743 
3744     case COND_EXPR:
3745       {
3746 	tree arg1 = TREE_OPERAND (arg, 1);
3747 	tree arg2 = TREE_OPERAND (arg, 2);
3748 
3749 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3750 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3751 
3752 	/* A COND_EXPR may have a throw as one operand, which
3753 	   then has void type.  Just leave void operands
3754 	   as they are.  */
3755 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3756 			   VOID_TYPE_P (TREE_TYPE (arg1))
3757 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3758 			   VOID_TYPE_P (TREE_TYPE (arg2))
3759 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3760       }
3761 
3762     case COMPOUND_EXPR:
3763       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3764       return build2_loc (loc, COMPOUND_EXPR, type,
3765 			 TREE_OPERAND (arg, 0),
3766 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3767 
3768     case NON_LVALUE_EXPR:
3769       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3770       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3771 
3772     CASE_CONVERT:
3773       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3774 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3775 
3776       /* fall through */
3777 
3778     case FLOAT_EXPR:
3779       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3780       return build1_loc (loc, TREE_CODE (arg), type,
3781 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3782 
3783     case BIT_AND_EXPR:
3784       if (!integer_onep (TREE_OPERAND (arg, 1)))
3785 	return NULL_TREE;
3786       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3787 
3788     case SAVE_EXPR:
3789       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3790 
3791     case CLEANUP_POINT_EXPR:
3792       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3793       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3794 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3795 
3796     default:
3797       return NULL_TREE;
3798     }
3799 }
3800 
3801 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3802    assume that ARG is an operation that returns a truth value (0 or 1
3803    for scalars, 0 or -1 for vectors).  Return the folded expression if
3804    folding is successful.  Otherwise, return NULL_TREE.  */
3805 
3806 static tree
3807 fold_invert_truthvalue (location_t loc, tree arg)
3808 {
3809   tree type = TREE_TYPE (arg);
3810   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3811 			      ? BIT_NOT_EXPR
3812 			      : TRUTH_NOT_EXPR,
3813 			 type, arg);
3814 }
3815 
3816 /* Return a simplified tree node for the truth-negation of ARG.  This
3817    never alters ARG itself.  We assume that ARG is an operation that
3818    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3819 
3820 tree
3821 invert_truthvalue_loc (location_t loc, tree arg)
3822 {
3823   if (TREE_CODE (arg) == ERROR_MARK)
3824     return arg;
3825 
3826   tree type = TREE_TYPE (arg);
3827   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3828 			       ? BIT_NOT_EXPR
3829 			       : TRUTH_NOT_EXPR,
3830 			  type, arg);
3831 }
3832 
3833 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3834    with code CODE.  This optimization is unsafe.  */
3835 static tree
3836 distribute_real_division (location_t loc, enum tree_code code, tree type,
3837 			  tree arg0, tree arg1)
3838 {
3839   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3840   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3841 
3842   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3843   if (mul0 == mul1
3844       && operand_equal_p (TREE_OPERAND (arg0, 1),
3845 		       TREE_OPERAND (arg1, 1), 0))
3846     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3847 			fold_build2_loc (loc, code, type,
3848 				     TREE_OPERAND (arg0, 0),
3849 				     TREE_OPERAND (arg1, 0)),
3850 			TREE_OPERAND (arg0, 1));
3851 
3852   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3853   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3854 		       TREE_OPERAND (arg1, 0), 0)
3855       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3856       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3857     {
3858       REAL_VALUE_TYPE r0, r1;
3859       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3860       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3861       if (!mul0)
3862 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3863       if (!mul1)
3864         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3865       real_arithmetic (&r0, code, &r0, &r1);
3866       return fold_build2_loc (loc, MULT_EXPR, type,
3867 			  TREE_OPERAND (arg0, 0),
3868 			  build_real (type, r0));
3869     }
3870 
3871   return NULL_TREE;
3872 }
3873 
3874 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3875    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3876    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3877    is the original memory reference used to preserve the alias set of
3878    the access.  */
3879 
3880 static tree
3881 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3882 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3883 		    int unsignedp, int reversep)
3884 {
3885   tree result, bftype;
3886 
3887   /* Attempt not to lose the access path if possible.  */
3888   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3889     {
3890       tree ninner = TREE_OPERAND (orig_inner, 0);
3891       machine_mode nmode;
3892       HOST_WIDE_INT nbitsize, nbitpos;
3893       tree noffset;
3894       int nunsignedp, nreversep, nvolatilep = 0;
3895       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3896 				       &noffset, &nmode, &nunsignedp,
3897 				       &nreversep, &nvolatilep);
3898       if (base == inner
3899 	  && noffset == NULL_TREE
3900 	  && nbitsize >= bitsize
3901 	  && nbitpos <= bitpos
3902 	  && bitpos + bitsize <= nbitpos + nbitsize
3903 	  && !reversep
3904 	  && !nreversep
3905 	  && !nvolatilep)
3906 	{
3907 	  inner = ninner;
3908 	  bitpos -= nbitpos;
3909 	}
3910     }
3911 
3912   alias_set_type iset = get_alias_set (orig_inner);
3913   if (iset == 0 && get_alias_set (inner) != iset)
3914     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3915 			 build_fold_addr_expr (inner),
3916 			 build_int_cst (ptr_type_node, 0));
3917 
3918   if (bitpos == 0 && !reversep)
3919     {
3920       tree size = TYPE_SIZE (TREE_TYPE (inner));
3921       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3922 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3923 	  && tree_fits_shwi_p (size)
3924 	  && tree_to_shwi (size) == bitsize)
3925 	return fold_convert_loc (loc, type, inner);
3926     }
3927 
3928   bftype = type;
3929   if (TYPE_PRECISION (bftype) != bitsize
3930       || TYPE_UNSIGNED (bftype) == !unsignedp)
3931     bftype = build_nonstandard_integer_type (bitsize, 0);
3932 
3933   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3934 		       size_int (bitsize), bitsize_int (bitpos));
3935   REF_REVERSE_STORAGE_ORDER (result) = reversep;
3936 
3937   if (bftype != type)
3938     result = fold_convert_loc (loc, type, result);
3939 
3940   return result;
3941 }
3942 
3943 /* Optimize a bit-field compare.
3944 
3945    There are two cases:  First is a compare against a constant and the
3946    second is a comparison of two items where the fields are at the same
3947    bit position relative to the start of a chunk (byte, halfword, word)
3948    large enough to contain it.  In these cases we can avoid the shift
3949    implicit in bitfield extractions.
3950 
3951    For constants, we emit a compare of the shifted constant with the
3952    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3953    compared.  For two fields at the same position, we do the ANDs with the
3954    similar mask and compare the result of the ANDs.
3955 
3956    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3957    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3958    are the left and right operands of the comparison, respectively.
3959 
3960    If the optimization described above can be done, we return the resulting
3961    tree.  Otherwise we return zero.  */
3962 
3963 static tree
3964 optimize_bit_field_compare (location_t loc, enum tree_code code,
3965 			    tree compare_type, tree lhs, tree rhs)
3966 {
3967   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3968   tree type = TREE_TYPE (lhs);
3969   tree unsigned_type;
3970   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3971   machine_mode lmode, rmode, nmode;
3972   int lunsignedp, runsignedp;
3973   int lreversep, rreversep;
3974   int lvolatilep = 0, rvolatilep = 0;
3975   tree linner, rinner = NULL_TREE;
3976   tree mask;
3977   tree offset;
3978 
3979   /* Get all the information about the extractions being done.  If the bit size
3980      if the same as the size of the underlying object, we aren't doing an
3981      extraction at all and so can do nothing.  We also don't want to
3982      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3983      then will no longer be able to replace it.  */
3984   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3985 				&lunsignedp, &lreversep, &lvolatilep);
3986   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3987       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3988     return 0;
3989 
3990   if (const_p)
3991     rreversep = lreversep;
3992   else
3993    {
3994      /* If this is not a constant, we can only do something if bit positions,
3995 	sizes, signedness and storage order are the same.  */
3996      rinner
3997        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3998 			      &runsignedp, &rreversep, &rvolatilep);
3999 
4000      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4001 	 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
4002 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4003        return 0;
4004    }
4005 
4006   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4007   unsigned HOST_WIDE_INT bitstart = 0;
4008   unsigned HOST_WIDE_INT bitend = 0;
4009   if (TREE_CODE (lhs) == COMPONENT_REF)
4010     {
4011       get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4012       if (offset != NULL_TREE)
4013 	return 0;
4014     }
4015 
4016   /* See if we can find a mode to refer to this field.  We should be able to,
4017      but fail if we can't.  */
4018   nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4019 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4020 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4021 				TYPE_ALIGN (TREE_TYPE (rinner))),
4022 			 word_mode, false);
4023   if (nmode == VOIDmode)
4024     return 0;
4025 
4026   /* Set signed and unsigned types of the precision of this mode for the
4027      shifts below.  */
4028   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4029 
4030   /* Compute the bit position and size for the new reference and our offset
4031      within it. If the new reference is the same size as the original, we
4032      won't optimize anything, so return zero.  */
4033   nbitsize = GET_MODE_BITSIZE (nmode);
4034   nbitpos = lbitpos & ~ (nbitsize - 1);
4035   lbitpos -= nbitpos;
4036   if (nbitsize == lbitsize)
4037     return 0;
4038 
4039   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4040     lbitpos = nbitsize - lbitsize - lbitpos;
4041 
4042   /* Make the mask to be used against the extracted field.  */
4043   mask = build_int_cst_type (unsigned_type, -1);
4044   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4045   mask = const_binop (RSHIFT_EXPR, mask,
4046 		      size_int (nbitsize - lbitsize - lbitpos));
4047 
4048   if (! const_p)
4049     {
4050       if (nbitpos < 0)
4051 	return 0;
4052 
4053       /* If not comparing with constant, just rework the comparison
4054 	 and return.  */
4055       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4056 				    nbitsize, nbitpos, 1, lreversep);
4057       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4058       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4059 				    nbitsize, nbitpos, 1, rreversep);
4060       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4061       return fold_build2_loc (loc, code, compare_type, t1, t2);
4062     }
4063 
4064   /* Otherwise, we are handling the constant case.  See if the constant is too
4065      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4066      this not only for its own sake, but to avoid having to test for this
4067      error case below.  If we didn't, we might generate wrong code.
4068 
4069      For unsigned fields, the constant shifted right by the field length should
4070      be all zero.  For signed fields, the high-order bits should agree with
4071      the sign bit.  */
4072 
4073   if (lunsignedp)
4074     {
4075       if (wi::lrshift (rhs, lbitsize) != 0)
4076 	{
4077 	  warning (0, "comparison is always %d due to width of bit-field",
4078 		   code == NE_EXPR);
4079 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4080 	}
4081     }
4082   else
4083     {
4084       wide_int tem = wi::arshift (rhs, lbitsize - 1);
4085       if (tem != 0 && tem != -1)
4086 	{
4087 	  warning (0, "comparison is always %d due to width of bit-field",
4088 		   code == NE_EXPR);
4089 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4090 	}
4091     }
4092 
4093   if (nbitpos < 0)
4094     return 0;
4095 
4096   /* Single-bit compares should always be against zero.  */
4097   if (lbitsize == 1 && ! integer_zerop (rhs))
4098     {
4099       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4100       rhs = build_int_cst (type, 0);
4101     }
4102 
4103   /* Make a new bitfield reference, shift the constant over the
4104      appropriate number of bits and mask it with the computed mask
4105      (in case this was a signed field).  If we changed it, make a new one.  */
4106   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4107 			    nbitsize, nbitpos, 1, lreversep);
4108 
4109   rhs = const_binop (BIT_AND_EXPR,
4110 		     const_binop (LSHIFT_EXPR,
4111 				  fold_convert_loc (loc, unsigned_type, rhs),
4112 				  size_int (lbitpos)),
4113 		     mask);
4114 
4115   lhs = build2_loc (loc, code, compare_type,
4116 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4117   return lhs;
4118 }
4119 
4120 /* Subroutine for fold_truth_andor_1: decode a field reference.
4121 
4122    If EXP is a comparison reference, we return the innermost reference.
4123 
4124    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4125    set to the starting bit number.
4126 
4127    If the innermost field can be completely contained in a mode-sized
4128    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4129 
4130    *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4131    otherwise it is not changed.
4132 
4133    *PUNSIGNEDP is set to the signedness of the field.
4134 
4135    *PREVERSEP is set to the storage order of the field.
4136 
4137    *PMASK is set to the mask used.  This is either contained in a
4138    BIT_AND_EXPR or derived from the width of the field.
4139 
4140    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4141 
4142    Return 0 if this is not a component reference or is one that we can't
4143    do anything with.  */
4144 
4145 static tree
4146 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4147 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4148 			int *punsignedp, int *preversep, int *pvolatilep,
4149 			tree *pmask, tree *pand_mask)
4150 {
4151   tree exp = *exp_;
4152   tree outer_type = 0;
4153   tree and_mask = 0;
4154   tree mask, inner, offset;
4155   tree unsigned_type;
4156   unsigned int precision;
4157 
4158   /* All the optimizations using this function assume integer fields.
4159      There are problems with FP fields since the type_for_size call
4160      below can fail for, e.g., XFmode.  */
4161   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4162     return 0;
4163 
4164   /* We are interested in the bare arrangement of bits, so strip everything
4165      that doesn't affect the machine mode.  However, record the type of the
4166      outermost expression if it may matter below.  */
4167   if (CONVERT_EXPR_P (exp)
4168       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4169     outer_type = TREE_TYPE (exp);
4170   STRIP_NOPS (exp);
4171 
4172   if (TREE_CODE (exp) == BIT_AND_EXPR)
4173     {
4174       and_mask = TREE_OPERAND (exp, 1);
4175       exp = TREE_OPERAND (exp, 0);
4176       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4177       if (TREE_CODE (and_mask) != INTEGER_CST)
4178 	return 0;
4179     }
4180 
4181   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4182 			       punsignedp, preversep, pvolatilep);
4183   if ((inner == exp && and_mask == 0)
4184       || *pbitsize < 0 || offset != 0
4185       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4186       /* Reject out-of-bound accesses (PR79731).  */
4187       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4188 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4189 			       *pbitpos + *pbitsize) < 0))
4190     return 0;
4191 
4192   *exp_ = exp;
4193 
4194   /* If the number of bits in the reference is the same as the bitsize of
4195      the outer type, then the outer type gives the signedness. Otherwise
4196      (in case of a small bitfield) the signedness is unchanged.  */
4197   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4198     *punsignedp = TYPE_UNSIGNED (outer_type);
4199 
4200   /* Compute the mask to access the bitfield.  */
4201   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4202   precision = TYPE_PRECISION (unsigned_type);
4203 
4204   mask = build_int_cst_type (unsigned_type, -1);
4205 
4206   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4207   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4208 
4209   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4210   if (and_mask != 0)
4211     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4212 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4213 
4214   *pmask = mask;
4215   *pand_mask = and_mask;
4216   return inner;
4217 }
4218 
4219 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4220    bit positions and MASK is SIGNED.  */
4221 
4222 static int
4223 all_ones_mask_p (const_tree mask, unsigned int size)
4224 {
4225   tree type = TREE_TYPE (mask);
4226   unsigned int precision = TYPE_PRECISION (type);
4227 
4228   /* If this function returns true when the type of the mask is
4229      UNSIGNED, then there will be errors.  In particular see
4230      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4231      any documentation paper trail as to why this is so.  But the pre
4232      wide-int worked with that restriction and it has been preserved
4233      here.  */
4234   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4235     return false;
4236 
4237   return wi::mask (size, false, precision) == mask;
4238 }
4239 
4240 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4241    represents the sign bit of EXP's type.  If EXP represents a sign
4242    or zero extension, also test VAL against the unextended type.
4243    The return value is the (sub)expression whose sign bit is VAL,
4244    or NULL_TREE otherwise.  */
4245 
4246 tree
4247 sign_bit_p (tree exp, const_tree val)
4248 {
4249   int width;
4250   tree t;
4251 
4252   /* Tree EXP must have an integral type.  */
4253   t = TREE_TYPE (exp);
4254   if (! INTEGRAL_TYPE_P (t))
4255     return NULL_TREE;
4256 
4257   /* Tree VAL must be an integer constant.  */
4258   if (TREE_CODE (val) != INTEGER_CST
4259       || TREE_OVERFLOW (val))
4260     return NULL_TREE;
4261 
4262   width = TYPE_PRECISION (t);
4263   if (wi::only_sign_bit_p (val, width))
4264     return exp;
4265 
4266   /* Handle extension from a narrower type.  */
4267   if (TREE_CODE (exp) == NOP_EXPR
4268       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4269     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4270 
4271   return NULL_TREE;
4272 }
4273 
4274 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4275    to be evaluated unconditionally.  */
4276 
4277 static int
4278 simple_operand_p (const_tree exp)
4279 {
4280   /* Strip any conversions that don't change the machine mode.  */
4281   STRIP_NOPS (exp);
4282 
4283   return (CONSTANT_CLASS_P (exp)
4284   	  || TREE_CODE (exp) == SSA_NAME
4285 	  || (DECL_P (exp)
4286 	      && ! TREE_ADDRESSABLE (exp)
4287 	      && ! TREE_THIS_VOLATILE (exp)
4288 	      && ! DECL_NONLOCAL (exp)
4289 	      /* Don't regard global variables as simple.  They may be
4290 		 allocated in ways unknown to the compiler (shared memory,
4291 		 #pragma weak, etc).  */
4292 	      && ! TREE_PUBLIC (exp)
4293 	      && ! DECL_EXTERNAL (exp)
4294 	      /* Weakrefs are not safe to be read, since they can be NULL.
4295  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4296 		 have DECL_WEAK flag set.  */
4297 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4298 	      /* Loading a static variable is unduly expensive, but global
4299 		 registers aren't expensive.  */
4300 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4301 }
4302 
4303 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4304    to be evaluated unconditionally.
4305    I addition to simple_operand_p, we assume that comparisons, conversions,
4306    and logic-not operations are simple, if their operands are simple, too.  */
4307 
4308 static bool
4309 simple_operand_p_2 (tree exp)
4310 {
4311   enum tree_code code;
4312 
4313   if (TREE_SIDE_EFFECTS (exp)
4314       || tree_could_trap_p (exp))
4315     return false;
4316 
4317   while (CONVERT_EXPR_P (exp))
4318     exp = TREE_OPERAND (exp, 0);
4319 
4320   code = TREE_CODE (exp);
4321 
4322   if (TREE_CODE_CLASS (code) == tcc_comparison)
4323     return (simple_operand_p (TREE_OPERAND (exp, 0))
4324 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4325 
4326   if (code == TRUTH_NOT_EXPR)
4327       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4328 
4329   return simple_operand_p (exp);
4330 }
4331 
4332 
4333 /* The following functions are subroutines to fold_range_test and allow it to
4334    try to change a logical combination of comparisons into a range test.
4335 
4336    For example, both
4337 	X == 2 || X == 3 || X == 4 || X == 5
4338    and
4339 	X >= 2 && X <= 5
4340    are converted to
4341 	(unsigned) (X - 2) <= 3
4342 
4343    We describe each set of comparisons as being either inside or outside
4344    a range, using a variable named like IN_P, and then describe the
4345    range with a lower and upper bound.  If one of the bounds is omitted,
4346    it represents either the highest or lowest value of the type.
4347 
4348    In the comments below, we represent a range by two numbers in brackets
4349    preceded by a "+" to designate being inside that range, or a "-" to
4350    designate being outside that range, so the condition can be inverted by
4351    flipping the prefix.  An omitted bound is represented by a "-".  For
4352    example, "- [-, 10]" means being outside the range starting at the lowest
4353    possible value and ending at 10, in other words, being greater than 10.
4354    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4355    always false.
4356 
4357    We set up things so that the missing bounds are handled in a consistent
4358    manner so neither a missing bound nor "true" and "false" need to be
4359    handled using a special case.  */
4360 
4361 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4362    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4363    and UPPER1_P are nonzero if the respective argument is an upper bound
4364    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4365    must be specified for a comparison.  ARG1 will be converted to ARG0's
4366    type if both are specified.  */
4367 
4368 static tree
4369 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4370 	     tree arg1, int upper1_p)
4371 {
4372   tree tem;
4373   int result;
4374   int sgn0, sgn1;
4375 
4376   /* If neither arg represents infinity, do the normal operation.
4377      Else, if not a comparison, return infinity.  Else handle the special
4378      comparison rules. Note that most of the cases below won't occur, but
4379      are handled for consistency.  */
4380 
4381   if (arg0 != 0 && arg1 != 0)
4382     {
4383       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4384 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4385       STRIP_NOPS (tem);
4386       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4387     }
4388 
4389   if (TREE_CODE_CLASS (code) != tcc_comparison)
4390     return 0;
4391 
4392   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4393      for neither.  In real maths, we cannot assume open ended ranges are
4394      the same. But, this is computer arithmetic, where numbers are finite.
4395      We can therefore make the transformation of any unbounded range with
4396      the value Z, Z being greater than any representable number. This permits
4397      us to treat unbounded ranges as equal.  */
4398   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4399   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4400   switch (code)
4401     {
4402     case EQ_EXPR:
4403       result = sgn0 == sgn1;
4404       break;
4405     case NE_EXPR:
4406       result = sgn0 != sgn1;
4407       break;
4408     case LT_EXPR:
4409       result = sgn0 < sgn1;
4410       break;
4411     case LE_EXPR:
4412       result = sgn0 <= sgn1;
4413       break;
4414     case GT_EXPR:
4415       result = sgn0 > sgn1;
4416       break;
4417     case GE_EXPR:
4418       result = sgn0 >= sgn1;
4419       break;
4420     default:
4421       gcc_unreachable ();
4422     }
4423 
4424   return constant_boolean_node (result, type);
4425 }
4426 
4427 /* Helper routine for make_range.  Perform one step for it, return
4428    new expression if the loop should continue or NULL_TREE if it should
4429    stop.  */
4430 
4431 tree
4432 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4433 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4434 		 bool *strict_overflow_p)
4435 {
4436   tree arg0_type = TREE_TYPE (arg0);
4437   tree n_low, n_high, low = *p_low, high = *p_high;
4438   int in_p = *p_in_p, n_in_p;
4439 
4440   switch (code)
4441     {
4442     case TRUTH_NOT_EXPR:
4443       /* We can only do something if the range is testing for zero.  */
4444       if (low == NULL_TREE || high == NULL_TREE
4445 	  || ! integer_zerop (low) || ! integer_zerop (high))
4446 	return NULL_TREE;
4447       *p_in_p = ! in_p;
4448       return arg0;
4449 
4450     case EQ_EXPR: case NE_EXPR:
4451     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4452       /* We can only do something if the range is testing for zero
4453 	 and if the second operand is an integer constant.  Note that
4454 	 saying something is "in" the range we make is done by
4455 	 complementing IN_P since it will set in the initial case of
4456 	 being not equal to zero; "out" is leaving it alone.  */
4457       if (low == NULL_TREE || high == NULL_TREE
4458 	  || ! integer_zerop (low) || ! integer_zerop (high)
4459 	  || TREE_CODE (arg1) != INTEGER_CST)
4460 	return NULL_TREE;
4461 
4462       switch (code)
4463 	{
4464 	case NE_EXPR:  /* - [c, c]  */
4465 	  low = high = arg1;
4466 	  break;
4467 	case EQ_EXPR:  /* + [c, c]  */
4468 	  in_p = ! in_p, low = high = arg1;
4469 	  break;
4470 	case GT_EXPR:  /* - [-, c] */
4471 	  low = 0, high = arg1;
4472 	  break;
4473 	case GE_EXPR:  /* + [c, -] */
4474 	  in_p = ! in_p, low = arg1, high = 0;
4475 	  break;
4476 	case LT_EXPR:  /* - [c, -] */
4477 	  low = arg1, high = 0;
4478 	  break;
4479 	case LE_EXPR:  /* + [-, c] */
4480 	  in_p = ! in_p, low = 0, high = arg1;
4481 	  break;
4482 	default:
4483 	  gcc_unreachable ();
4484 	}
4485 
4486       /* If this is an unsigned comparison, we also know that EXP is
4487 	 greater than or equal to zero.  We base the range tests we make
4488 	 on that fact, so we record it here so we can parse existing
4489 	 range tests.  We test arg0_type since often the return type
4490 	 of, e.g. EQ_EXPR, is boolean.  */
4491       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4492 	{
4493 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4494 			      in_p, low, high, 1,
4495 			      build_int_cst (arg0_type, 0),
4496 			      NULL_TREE))
4497 	    return NULL_TREE;
4498 
4499 	  in_p = n_in_p, low = n_low, high = n_high;
4500 
4501 	  /* If the high bound is missing, but we have a nonzero low
4502 	     bound, reverse the range so it goes from zero to the low bound
4503 	     minus 1.  */
4504 	  if (high == 0 && low && ! integer_zerop (low))
4505 	    {
4506 	      in_p = ! in_p;
4507 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4508 				  build_int_cst (TREE_TYPE (low), 1), 0);
4509 	      low = build_int_cst (arg0_type, 0);
4510 	    }
4511 	}
4512 
4513       *p_low = low;
4514       *p_high = high;
4515       *p_in_p = in_p;
4516       return arg0;
4517 
4518     case NEGATE_EXPR:
4519       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4520 	 low and high are non-NULL, then normalize will DTRT.  */
4521       if (!TYPE_UNSIGNED (arg0_type)
4522 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4523 	{
4524 	  if (low == NULL_TREE)
4525 	    low = TYPE_MIN_VALUE (arg0_type);
4526 	  if (high == NULL_TREE)
4527 	    high = TYPE_MAX_VALUE (arg0_type);
4528 	}
4529 
4530       /* (-x) IN [a,b] -> x in [-b, -a]  */
4531       n_low = range_binop (MINUS_EXPR, exp_type,
4532 			   build_int_cst (exp_type, 0),
4533 			   0, high, 1);
4534       n_high = range_binop (MINUS_EXPR, exp_type,
4535 			    build_int_cst (exp_type, 0),
4536 			    0, low, 0);
4537       if (n_high != 0 && TREE_OVERFLOW (n_high))
4538 	return NULL_TREE;
4539       goto normalize;
4540 
4541     case BIT_NOT_EXPR:
4542       /* ~ X -> -X - 1  */
4543       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4544 			 build_int_cst (exp_type, 1));
4545 
4546     case PLUS_EXPR:
4547     case MINUS_EXPR:
4548       if (TREE_CODE (arg1) != INTEGER_CST)
4549 	return NULL_TREE;
4550 
4551       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4552 	 move a constant to the other side.  */
4553       if (!TYPE_UNSIGNED (arg0_type)
4554 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4555 	return NULL_TREE;
4556 
4557       /* If EXP is signed, any overflow in the computation is undefined,
4558 	 so we don't worry about it so long as our computations on
4559 	 the bounds don't overflow.  For unsigned, overflow is defined
4560 	 and this is exactly the right thing.  */
4561       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4562 			   arg0_type, low, 0, arg1, 0);
4563       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4564 			    arg0_type, high, 1, arg1, 0);
4565       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4566 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4567 	return NULL_TREE;
4568 
4569       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4570 	*strict_overflow_p = true;
4571 
4572       normalize:
4573 	/* Check for an unsigned range which has wrapped around the maximum
4574 	   value thus making n_high < n_low, and normalize it.  */
4575 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4576 	  {
4577 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4578 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4579 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4580 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4581 
4582 	    /* If the range is of the form +/- [ x+1, x ], we won't
4583 	       be able to normalize it.  But then, it represents the
4584 	       whole range or the empty set, so make it
4585 	       +/- [ -, - ].  */
4586 	    if (tree_int_cst_equal (n_low, low)
4587 		&& tree_int_cst_equal (n_high, high))
4588 	      low = high = 0;
4589 	    else
4590 	      in_p = ! in_p;
4591 	  }
4592 	else
4593 	  low = n_low, high = n_high;
4594 
4595 	*p_low = low;
4596 	*p_high = high;
4597 	*p_in_p = in_p;
4598 	return arg0;
4599 
4600     CASE_CONVERT:
4601     case NON_LVALUE_EXPR:
4602       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4603 	return NULL_TREE;
4604 
4605       if (! INTEGRAL_TYPE_P (arg0_type)
4606 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4607 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4608 	return NULL_TREE;
4609 
4610       n_low = low, n_high = high;
4611 
4612       if (n_low != 0)
4613 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4614 
4615       if (n_high != 0)
4616 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4617 
4618       /* If we're converting arg0 from an unsigned type, to exp,
4619 	 a signed type,  we will be doing the comparison as unsigned.
4620 	 The tests above have already verified that LOW and HIGH
4621 	 are both positive.
4622 
4623 	 So we have to ensure that we will handle large unsigned
4624 	 values the same way that the current signed bounds treat
4625 	 negative values.  */
4626 
4627       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4628 	{
4629 	  tree high_positive;
4630 	  tree equiv_type;
4631 	  /* For fixed-point modes, we need to pass the saturating flag
4632 	     as the 2nd parameter.  */
4633 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4634 	    equiv_type
4635 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4636 						TYPE_SATURATING (arg0_type));
4637 	  else
4638 	    equiv_type
4639 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4640 
4641 	  /* A range without an upper bound is, naturally, unbounded.
4642 	     Since convert would have cropped a very large value, use
4643 	     the max value for the destination type.  */
4644 	  high_positive
4645 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4646 	      : TYPE_MAX_VALUE (arg0_type);
4647 
4648 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4649 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4650 					     fold_convert_loc (loc, arg0_type,
4651 							       high_positive),
4652 					     build_int_cst (arg0_type, 1));
4653 
4654 	  /* If the low bound is specified, "and" the range with the
4655 	     range for which the original unsigned value will be
4656 	     positive.  */
4657 	  if (low != 0)
4658 	    {
4659 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4660 				  1, fold_convert_loc (loc, arg0_type,
4661 						       integer_zero_node),
4662 				  high_positive))
4663 		return NULL_TREE;
4664 
4665 	      in_p = (n_in_p == in_p);
4666 	    }
4667 	  else
4668 	    {
4669 	      /* Otherwise, "or" the range with the range of the input
4670 		 that will be interpreted as negative.  */
4671 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4672 				  1, fold_convert_loc (loc, arg0_type,
4673 						       integer_zero_node),
4674 				  high_positive))
4675 		return NULL_TREE;
4676 
4677 	      in_p = (in_p != n_in_p);
4678 	    }
4679 	}
4680 
4681       *p_low = n_low;
4682       *p_high = n_high;
4683       *p_in_p = in_p;
4684       return arg0;
4685 
4686     default:
4687       return NULL_TREE;
4688     }
4689 }
4690 
4691 /* Given EXP, a logical expression, set the range it is testing into
4692    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4693    actually being tested.  *PLOW and *PHIGH will be made of the same
4694    type as the returned expression.  If EXP is not a comparison, we
4695    will most likely not be returning a useful value and range.  Set
4696    *STRICT_OVERFLOW_P to true if the return value is only valid
4697    because signed overflow is undefined; otherwise, do not change
4698    *STRICT_OVERFLOW_P.  */
4699 
4700 tree
4701 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4702 	    bool *strict_overflow_p)
4703 {
4704   enum tree_code code;
4705   tree arg0, arg1 = NULL_TREE;
4706   tree exp_type, nexp;
4707   int in_p;
4708   tree low, high;
4709   location_t loc = EXPR_LOCATION (exp);
4710 
4711   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4712      and see if we can refine the range.  Some of the cases below may not
4713      happen, but it doesn't seem worth worrying about this.  We "continue"
4714      the outer loop when we've changed something; otherwise we "break"
4715      the switch, which will "break" the while.  */
4716 
4717   in_p = 0;
4718   low = high = build_int_cst (TREE_TYPE (exp), 0);
4719 
4720   while (1)
4721     {
4722       code = TREE_CODE (exp);
4723       exp_type = TREE_TYPE (exp);
4724       arg0 = NULL_TREE;
4725 
4726       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4727 	{
4728 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4729 	    arg0 = TREE_OPERAND (exp, 0);
4730 	  if (TREE_CODE_CLASS (code) == tcc_binary
4731 	      || TREE_CODE_CLASS (code) == tcc_comparison
4732 	      || (TREE_CODE_CLASS (code) == tcc_expression
4733 		  && TREE_OPERAND_LENGTH (exp) > 1))
4734 	    arg1 = TREE_OPERAND (exp, 1);
4735 	}
4736       if (arg0 == NULL_TREE)
4737 	break;
4738 
4739       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4740 			      &high, &in_p, strict_overflow_p);
4741       if (nexp == NULL_TREE)
4742 	break;
4743       exp = nexp;
4744     }
4745 
4746   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4747   if (TREE_CODE (exp) == INTEGER_CST)
4748     {
4749       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4750 						 exp, 0, low, 0))
4751 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4752 						    exp, 1, high, 1)));
4753       low = high = 0;
4754       exp = 0;
4755     }
4756 
4757   *pin_p = in_p, *plow = low, *phigh = high;
4758   return exp;
4759 }
4760 
4761 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4762    type, TYPE, return an expression to test if EXP is in (or out of, depending
4763    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4764 
4765 tree
4766 build_range_check (location_t loc, tree type, tree exp, int in_p,
4767 		   tree low, tree high)
4768 {
4769   tree etype = TREE_TYPE (exp), value;
4770 
4771   /* Disable this optimization for function pointer expressions
4772      on targets that require function pointer canonicalization.  */
4773   if (targetm.have_canonicalize_funcptr_for_compare ()
4774       && TREE_CODE (etype) == POINTER_TYPE
4775       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4776     return NULL_TREE;
4777 
4778   if (! in_p)
4779     {
4780       value = build_range_check (loc, type, exp, 1, low, high);
4781       if (value != 0)
4782         return invert_truthvalue_loc (loc, value);
4783 
4784       return 0;
4785     }
4786 
4787   if (low == 0 && high == 0)
4788     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4789 
4790   if (low == 0)
4791     return fold_build2_loc (loc, LE_EXPR, type, exp,
4792 			fold_convert_loc (loc, etype, high));
4793 
4794   if (high == 0)
4795     return fold_build2_loc (loc, GE_EXPR, type, exp,
4796 			fold_convert_loc (loc, etype, low));
4797 
4798   if (operand_equal_p (low, high, 0))
4799     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4800 			fold_convert_loc (loc, etype, low));
4801 
4802   if (integer_zerop (low))
4803     {
4804       if (! TYPE_UNSIGNED (etype))
4805 	{
4806 	  etype = unsigned_type_for (etype);
4807 	  high = fold_convert_loc (loc, etype, high);
4808 	  exp = fold_convert_loc (loc, etype, exp);
4809 	}
4810       return build_range_check (loc, type, exp, 1, 0, high);
4811     }
4812 
4813   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4814   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4815     {
4816       int prec = TYPE_PRECISION (etype);
4817 
4818       if (wi::mask (prec - 1, false, prec) == high)
4819 	{
4820 	  if (TYPE_UNSIGNED (etype))
4821 	    {
4822 	      tree signed_etype = signed_type_for (etype);
4823 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4824 		etype
4825 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4826 	      else
4827 		etype = signed_etype;
4828 	      exp = fold_convert_loc (loc, etype, exp);
4829 	    }
4830 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4831 			      build_int_cst (etype, 0));
4832 	}
4833     }
4834 
4835   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4836      This requires wrap-around arithmetics for the type of the expression.
4837      First make sure that arithmetics in this type is valid, then make sure
4838      that it wraps around.  */
4839   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4840     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4841 					    TYPE_UNSIGNED (etype));
4842 
4843   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4844     {
4845       tree utype, minv, maxv;
4846 
4847       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4848 	 for the type in question, as we rely on this here.  */
4849       utype = unsigned_type_for (etype);
4850       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4851       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4852 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4853       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4854 
4855       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4856 				      minv, 1, maxv, 1)))
4857 	etype = utype;
4858       else
4859 	return 0;
4860     }
4861 
4862   high = fold_convert_loc (loc, etype, high);
4863   low = fold_convert_loc (loc, etype, low);
4864   exp = fold_convert_loc (loc, etype, exp);
4865 
4866   value = const_binop (MINUS_EXPR, high, low);
4867 
4868 
4869   if (POINTER_TYPE_P (etype))
4870     {
4871       if (value != 0 && !TREE_OVERFLOW (value))
4872 	{
4873 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4874           return build_range_check (loc, type,
4875 			     	    fold_build_pointer_plus_loc (loc, exp, low),
4876 			            1, build_int_cst (etype, 0), value);
4877 	}
4878       return 0;
4879     }
4880 
4881   if (value != 0 && !TREE_OVERFLOW (value))
4882     return build_range_check (loc, type,
4883 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4884 			      1, build_int_cst (etype, 0), value);
4885 
4886   return 0;
4887 }
4888 
4889 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4890 
4891 static tree
4892 range_predecessor (tree val)
4893 {
4894   tree type = TREE_TYPE (val);
4895 
4896   if (INTEGRAL_TYPE_P (type)
4897       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4898     return 0;
4899   else
4900     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4901 			build_int_cst (TREE_TYPE (val), 1), 0);
4902 }
4903 
4904 /* Return the successor of VAL in its type, handling the infinite case.  */
4905 
4906 static tree
4907 range_successor (tree val)
4908 {
4909   tree type = TREE_TYPE (val);
4910 
4911   if (INTEGRAL_TYPE_P (type)
4912       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4913     return 0;
4914   else
4915     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4916 			build_int_cst (TREE_TYPE (val), 1), 0);
4917 }
4918 
4919 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4920    can, 0 if we can't.  Set the output range into the specified parameters.  */
4921 
4922 bool
4923 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4924 	      tree high0, int in1_p, tree low1, tree high1)
4925 {
4926   int no_overlap;
4927   int subset;
4928   int temp;
4929   tree tem;
4930   int in_p;
4931   tree low, high;
4932   int lowequal = ((low0 == 0 && low1 == 0)
4933 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4934 						low0, 0, low1, 0)));
4935   int highequal = ((high0 == 0 && high1 == 0)
4936 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4937 						 high0, 1, high1, 1)));
4938 
4939   /* Make range 0 be the range that starts first, or ends last if they
4940      start at the same value.  Swap them if it isn't.  */
4941   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4942 				 low0, 0, low1, 0))
4943       || (lowequal
4944 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4945 					high1, 1, high0, 1))))
4946     {
4947       temp = in0_p, in0_p = in1_p, in1_p = temp;
4948       tem = low0, low0 = low1, low1 = tem;
4949       tem = high0, high0 = high1, high1 = tem;
4950     }
4951 
4952   /* Now flag two cases, whether the ranges are disjoint or whether the
4953      second range is totally subsumed in the first.  Note that the tests
4954      below are simplified by the ones above.  */
4955   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4956 					  high0, 1, low1, 0));
4957   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4958 				      high1, 1, high0, 1));
4959 
4960   /* We now have four cases, depending on whether we are including or
4961      excluding the two ranges.  */
4962   if (in0_p && in1_p)
4963     {
4964       /* If they don't overlap, the result is false.  If the second range
4965 	 is a subset it is the result.  Otherwise, the range is from the start
4966 	 of the second to the end of the first.  */
4967       if (no_overlap)
4968 	in_p = 0, low = high = 0;
4969       else if (subset)
4970 	in_p = 1, low = low1, high = high1;
4971       else
4972 	in_p = 1, low = low1, high = high0;
4973     }
4974 
4975   else if (in0_p && ! in1_p)
4976     {
4977       /* If they don't overlap, the result is the first range.  If they are
4978 	 equal, the result is false.  If the second range is a subset of the
4979 	 first, and the ranges begin at the same place, we go from just after
4980 	 the end of the second range to the end of the first.  If the second
4981 	 range is not a subset of the first, or if it is a subset and both
4982 	 ranges end at the same place, the range starts at the start of the
4983 	 first range and ends just before the second range.
4984 	 Otherwise, we can't describe this as a single range.  */
4985       if (no_overlap)
4986 	in_p = 1, low = low0, high = high0;
4987       else if (lowequal && highequal)
4988 	in_p = 0, low = high = 0;
4989       else if (subset && lowequal)
4990 	{
4991 	  low = range_successor (high1);
4992 	  high = high0;
4993 	  in_p = 1;
4994 	  if (low == 0)
4995 	    {
4996 	      /* We are in the weird situation where high0 > high1 but
4997 		 high1 has no successor.  Punt.  */
4998 	      return 0;
4999 	    }
5000 	}
5001       else if (! subset || highequal)
5002 	{
5003 	  low = low0;
5004 	  high = range_predecessor (low1);
5005 	  in_p = 1;
5006 	  if (high == 0)
5007 	    {
5008 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5009 	      return 0;
5010 	    }
5011 	}
5012       else
5013 	return 0;
5014     }
5015 
5016   else if (! in0_p && in1_p)
5017     {
5018       /* If they don't overlap, the result is the second range.  If the second
5019 	 is a subset of the first, the result is false.  Otherwise,
5020 	 the range starts just after the first range and ends at the
5021 	 end of the second.  */
5022       if (no_overlap)
5023 	in_p = 1, low = low1, high = high1;
5024       else if (subset || highequal)
5025 	in_p = 0, low = high = 0;
5026       else
5027 	{
5028 	  low = range_successor (high0);
5029 	  high = high1;
5030 	  in_p = 1;
5031 	  if (low == 0)
5032 	    {
5033 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5034 	      return 0;
5035 	    }
5036 	}
5037     }
5038 
5039   else
5040     {
5041       /* The case where we are excluding both ranges.  Here the complex case
5042 	 is if they don't overlap.  In that case, the only time we have a
5043 	 range is if they are adjacent.  If the second is a subset of the
5044 	 first, the result is the first.  Otherwise, the range to exclude
5045 	 starts at the beginning of the first range and ends at the end of the
5046 	 second.  */
5047       if (no_overlap)
5048 	{
5049 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5050 					 range_successor (high0),
5051 					 1, low1, 0)))
5052 	    in_p = 0, low = low0, high = high1;
5053 	  else
5054 	    {
5055 	      /* Canonicalize - [min, x] into - [-, x].  */
5056 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5057 		switch (TREE_CODE (TREE_TYPE (low0)))
5058 		  {
5059 		  case ENUMERAL_TYPE:
5060 		    if (TYPE_PRECISION (TREE_TYPE (low0))
5061 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5062 		      break;
5063 		    /* FALLTHROUGH */
5064 		  case INTEGER_TYPE:
5065 		    if (tree_int_cst_equal (low0,
5066 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5067 		      low0 = 0;
5068 		    break;
5069 		  case POINTER_TYPE:
5070 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5071 			&& integer_zerop (low0))
5072 		      low0 = 0;
5073 		    break;
5074 		  default:
5075 		    break;
5076 		  }
5077 
5078 	      /* Canonicalize - [x, max] into - [x, -].  */
5079 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5080 		switch (TREE_CODE (TREE_TYPE (high1)))
5081 		  {
5082 		  case ENUMERAL_TYPE:
5083 		    if (TYPE_PRECISION (TREE_TYPE (high1))
5084 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5085 		      break;
5086 		    /* FALLTHROUGH */
5087 		  case INTEGER_TYPE:
5088 		    if (tree_int_cst_equal (high1,
5089 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5090 		      high1 = 0;
5091 		    break;
5092 		  case POINTER_TYPE:
5093 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5094 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5095 						       high1, 1,
5096 						       build_int_cst (TREE_TYPE (high1), 1),
5097 						       1)))
5098 		      high1 = 0;
5099 		    break;
5100 		  default:
5101 		    break;
5102 		  }
5103 
5104 	      /* The ranges might be also adjacent between the maximum and
5105 	         minimum values of the given type.  For
5106 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5107 	         return + [x + 1, y - 1].  */
5108 	      if (low0 == 0 && high1 == 0)
5109 	        {
5110 		  low = range_successor (high0);
5111 		  high = range_predecessor (low1);
5112 		  if (low == 0 || high == 0)
5113 		    return 0;
5114 
5115 		  in_p = 1;
5116 		}
5117 	      else
5118 		return 0;
5119 	    }
5120 	}
5121       else if (subset)
5122 	in_p = 0, low = low0, high = high0;
5123       else
5124 	in_p = 0, low = low0, high = high1;
5125     }
5126 
5127   *pin_p = in_p, *plow = low, *phigh = high;
5128   return 1;
5129 }
5130 
5131 
5132 /* Subroutine of fold, looking inside expressions of the form
5133    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5134    of the COND_EXPR.  This function is being used also to optimize
5135    A op B ? C : A, by reversing the comparison first.
5136 
5137    Return a folded expression whose code is not a COND_EXPR
5138    anymore, or NULL_TREE if no folding opportunity is found.  */
5139 
5140 static tree
5141 fold_cond_expr_with_comparison (location_t loc, tree type,
5142 				tree arg0, tree arg1, tree arg2)
5143 {
5144   enum tree_code comp_code = TREE_CODE (arg0);
5145   tree arg00 = TREE_OPERAND (arg0, 0);
5146   tree arg01 = TREE_OPERAND (arg0, 1);
5147   tree arg1_type = TREE_TYPE (arg1);
5148   tree tem;
5149 
5150   STRIP_NOPS (arg1);
5151   STRIP_NOPS (arg2);
5152 
5153   /* If we have A op 0 ? A : -A, consider applying the following
5154      transformations:
5155 
5156      A == 0? A : -A    same as -A
5157      A != 0? A : -A    same as A
5158      A >= 0? A : -A    same as abs (A)
5159      A > 0?  A : -A    same as abs (A)
5160      A <= 0? A : -A    same as -abs (A)
5161      A < 0?  A : -A    same as -abs (A)
5162 
5163      None of these transformations work for modes with signed
5164      zeros.  If A is +/-0, the first two transformations will
5165      change the sign of the result (from +0 to -0, or vice
5166      versa).  The last four will fix the sign of the result,
5167      even though the original expressions could be positive or
5168      negative, depending on the sign of A.
5169 
5170      Note that all these transformations are correct if A is
5171      NaN, since the two alternatives (A and -A) are also NaNs.  */
5172   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5173       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5174 	  ? real_zerop (arg01)
5175 	  : integer_zerop (arg01))
5176       && ((TREE_CODE (arg2) == NEGATE_EXPR
5177 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5178 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5179 	        have already been folded to Y-X, check for that. */
5180 	  || (TREE_CODE (arg1) == MINUS_EXPR
5181 	      && TREE_CODE (arg2) == MINUS_EXPR
5182 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5183 				  TREE_OPERAND (arg2, 1), 0)
5184 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5185 				  TREE_OPERAND (arg2, 0), 0))))
5186     switch (comp_code)
5187       {
5188       case EQ_EXPR:
5189       case UNEQ_EXPR:
5190 	tem = fold_convert_loc (loc, arg1_type, arg1);
5191 	return fold_convert_loc (loc, type, negate_expr (tem));
5192       case NE_EXPR:
5193       case LTGT_EXPR:
5194 	return fold_convert_loc (loc, type, arg1);
5195       case UNGE_EXPR:
5196       case UNGT_EXPR:
5197 	if (flag_trapping_math)
5198 	  break;
5199 	/* Fall through.  */
5200       case GE_EXPR:
5201       case GT_EXPR:
5202 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5203 	  break;
5204 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5205 	return fold_convert_loc (loc, type, tem);
5206       case UNLE_EXPR:
5207       case UNLT_EXPR:
5208 	if (flag_trapping_math)
5209 	  break;
5210 	/* FALLTHRU */
5211       case LE_EXPR:
5212       case LT_EXPR:
5213 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5214 	  break;
5215 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5216 	return negate_expr (fold_convert_loc (loc, type, tem));
5217       default:
5218 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5219 	break;
5220       }
5221 
5222   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5223      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5224      both transformations are correct when A is NaN: A != 0
5225      is then true, and A == 0 is false.  */
5226 
5227   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5228       && integer_zerop (arg01) && integer_zerop (arg2))
5229     {
5230       if (comp_code == NE_EXPR)
5231 	return fold_convert_loc (loc, type, arg1);
5232       else if (comp_code == EQ_EXPR)
5233 	return build_zero_cst (type);
5234     }
5235 
5236   /* Try some transformations of A op B ? A : B.
5237 
5238      A == B? A : B    same as B
5239      A != B? A : B    same as A
5240      A >= B? A : B    same as max (A, B)
5241      A > B?  A : B    same as max (B, A)
5242      A <= B? A : B    same as min (A, B)
5243      A < B?  A : B    same as min (B, A)
5244 
5245      As above, these transformations don't work in the presence
5246      of signed zeros.  For example, if A and B are zeros of
5247      opposite sign, the first two transformations will change
5248      the sign of the result.  In the last four, the original
5249      expressions give different results for (A=+0, B=-0) and
5250      (A=-0, B=+0), but the transformed expressions do not.
5251 
5252      The first two transformations are correct if either A or B
5253      is a NaN.  In the first transformation, the condition will
5254      be false, and B will indeed be chosen.  In the case of the
5255      second transformation, the condition A != B will be true,
5256      and A will be chosen.
5257 
5258      The conversions to max() and min() are not correct if B is
5259      a number and A is not.  The conditions in the original
5260      expressions will be false, so all four give B.  The min()
5261      and max() versions would give a NaN instead.  */
5262   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5263       && operand_equal_for_comparison_p (arg01, arg2, arg00)
5264       /* Avoid these transformations if the COND_EXPR may be used
5265 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5266       && (in_gimple_form
5267 	  || VECTOR_TYPE_P (type)
5268 	  || (! lang_GNU_CXX ()
5269 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5270 	  || ! maybe_lvalue_p (arg1)
5271 	  || ! maybe_lvalue_p (arg2)))
5272     {
5273       tree comp_op0 = arg00;
5274       tree comp_op1 = arg01;
5275       tree comp_type = TREE_TYPE (comp_op0);
5276 
5277       switch (comp_code)
5278 	{
5279 	case EQ_EXPR:
5280 	  return fold_convert_loc (loc, type, arg2);
5281 	case NE_EXPR:
5282 	  return fold_convert_loc (loc, type, arg1);
5283 	case LE_EXPR:
5284 	case LT_EXPR:
5285 	case UNLE_EXPR:
5286 	case UNLT_EXPR:
5287 	  /* In C++ a ?: expression can be an lvalue, so put the
5288 	     operand which will be used if they are equal first
5289 	     so that we can convert this back to the
5290 	     corresponding COND_EXPR.  */
5291 	  if (!HONOR_NANS (arg1))
5292 	    {
5293 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5294 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5295 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5296 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5297 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5298 				   comp_op1, comp_op0);
5299 	      return fold_convert_loc (loc, type, tem);
5300 	    }
5301 	  break;
5302 	case GE_EXPR:
5303 	case GT_EXPR:
5304 	case UNGE_EXPR:
5305 	case UNGT_EXPR:
5306 	  if (!HONOR_NANS (arg1))
5307 	    {
5308 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5309 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5310 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5311 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5312 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5313 				   comp_op1, comp_op0);
5314 	      return fold_convert_loc (loc, type, tem);
5315 	    }
5316 	  break;
5317 	case UNEQ_EXPR:
5318 	  if (!HONOR_NANS (arg1))
5319 	    return fold_convert_loc (loc, type, arg2);
5320 	  break;
5321 	case LTGT_EXPR:
5322 	  if (!HONOR_NANS (arg1))
5323 	    return fold_convert_loc (loc, type, arg1);
5324 	  break;
5325 	default:
5326 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5327 	  break;
5328 	}
5329     }
5330 
5331   return NULL_TREE;
5332 }
5333 
5334 
5335 
5336 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5337 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5338   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5339 		false) >= 2)
5340 #endif
5341 
5342 /* EXP is some logical combination of boolean tests.  See if we can
5343    merge it into some range test.  Return the new tree if so.  */
5344 
5345 static tree
5346 fold_range_test (location_t loc, enum tree_code code, tree type,
5347 		 tree op0, tree op1)
5348 {
5349   int or_op = (code == TRUTH_ORIF_EXPR
5350 	       || code == TRUTH_OR_EXPR);
5351   int in0_p, in1_p, in_p;
5352   tree low0, low1, low, high0, high1, high;
5353   bool strict_overflow_p = false;
5354   tree tem, lhs, rhs;
5355   const char * const warnmsg = G_("assuming signed overflow does not occur "
5356 				  "when simplifying range test");
5357 
5358   if (!INTEGRAL_TYPE_P (type))
5359     return 0;
5360 
5361   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5362   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5363 
5364   /* If this is an OR operation, invert both sides; we will invert
5365      again at the end.  */
5366   if (or_op)
5367     in0_p = ! in0_p, in1_p = ! in1_p;
5368 
5369   /* If both expressions are the same, if we can merge the ranges, and we
5370      can build the range test, return it or it inverted.  If one of the
5371      ranges is always true or always false, consider it to be the same
5372      expression as the other.  */
5373   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5374       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5375 		       in1_p, low1, high1)
5376       && 0 != (tem = (build_range_check (loc, type,
5377 					 lhs != 0 ? lhs
5378 					 : rhs != 0 ? rhs : integer_zero_node,
5379 					 in_p, low, high))))
5380     {
5381       if (strict_overflow_p)
5382 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5383       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5384     }
5385 
5386   /* On machines where the branch cost is expensive, if this is a
5387      short-circuited branch and the underlying object on both sides
5388      is the same, make a non-short-circuit operation.  */
5389   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5390 	   && lhs != 0 && rhs != 0
5391 	   && (code == TRUTH_ANDIF_EXPR
5392 	       || code == TRUTH_ORIF_EXPR)
5393 	   && operand_equal_p (lhs, rhs, 0))
5394     {
5395       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5396 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5397 	 which cases we can't do this.  */
5398       if (simple_operand_p (lhs))
5399 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5400 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5401 			   type, op0, op1);
5402 
5403       else if (!lang_hooks.decls.global_bindings_p ()
5404 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5405 	{
5406 	  tree common = save_expr (lhs);
5407 
5408 	  if (0 != (lhs = build_range_check (loc, type, common,
5409 					     or_op ? ! in0_p : in0_p,
5410 					     low0, high0))
5411 	      && (0 != (rhs = build_range_check (loc, type, common,
5412 						 or_op ? ! in1_p : in1_p,
5413 						 low1, high1))))
5414 	    {
5415 	      if (strict_overflow_p)
5416 		fold_overflow_warning (warnmsg,
5417 				       WARN_STRICT_OVERFLOW_COMPARISON);
5418 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5419 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5420 				 type, lhs, rhs);
5421 	    }
5422 	}
5423     }
5424 
5425   return 0;
5426 }
5427 
5428 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5429    bit value.  Arrange things so the extra bits will be set to zero if and
5430    only if C is signed-extended to its full width.  If MASK is nonzero,
5431    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5432 
5433 static tree
5434 unextend (tree c, int p, int unsignedp, tree mask)
5435 {
5436   tree type = TREE_TYPE (c);
5437   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5438   tree temp;
5439 
5440   if (p == modesize || unsignedp)
5441     return c;
5442 
5443   /* We work by getting just the sign bit into the low-order bit, then
5444      into the high-order bit, then sign-extend.  We then XOR that value
5445      with C.  */
5446   temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5447 
5448   /* We must use a signed type in order to get an arithmetic right shift.
5449      However, we must also avoid introducing accidental overflows, so that
5450      a subsequent call to integer_zerop will work.  Hence we must
5451      do the type conversion here.  At this point, the constant is either
5452      zero or one, and the conversion to a signed type can never overflow.
5453      We could get an overflow if this conversion is done anywhere else.  */
5454   if (TYPE_UNSIGNED (type))
5455     temp = fold_convert (signed_type_for (type), temp);
5456 
5457   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5458   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5459   if (mask != 0)
5460     temp = const_binop (BIT_AND_EXPR, temp,
5461 			fold_convert (TREE_TYPE (c), mask));
5462   /* If necessary, convert the type back to match the type of C.  */
5463   if (TYPE_UNSIGNED (type))
5464     temp = fold_convert (type, temp);
5465 
5466   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5467 }
5468 
5469 /* For an expression that has the form
5470      (A && B) || ~B
5471    or
5472      (A || B) && ~B,
5473    we can drop one of the inner expressions and simplify to
5474      A || ~B
5475    or
5476      A && ~B
5477    LOC is the location of the resulting expression.  OP is the inner
5478    logical operation; the left-hand side in the examples above, while CMPOP
5479    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5480    removing a condition that guards another, as in
5481      (A != NULL && A->...) || A == NULL
5482    which we must not transform.  If RHS_ONLY is true, only eliminate the
5483    right-most operand of the inner logical operation.  */
5484 
5485 static tree
5486 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5487 				 bool rhs_only)
5488 {
5489   tree type = TREE_TYPE (cmpop);
5490   enum tree_code code = TREE_CODE (cmpop);
5491   enum tree_code truthop_code = TREE_CODE (op);
5492   tree lhs = TREE_OPERAND (op, 0);
5493   tree rhs = TREE_OPERAND (op, 1);
5494   tree orig_lhs = lhs, orig_rhs = rhs;
5495   enum tree_code rhs_code = TREE_CODE (rhs);
5496   enum tree_code lhs_code = TREE_CODE (lhs);
5497   enum tree_code inv_code;
5498 
5499   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5500     return NULL_TREE;
5501 
5502   if (TREE_CODE_CLASS (code) != tcc_comparison)
5503     return NULL_TREE;
5504 
5505   if (rhs_code == truthop_code)
5506     {
5507       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5508       if (newrhs != NULL_TREE)
5509 	{
5510 	  rhs = newrhs;
5511 	  rhs_code = TREE_CODE (rhs);
5512 	}
5513     }
5514   if (lhs_code == truthop_code && !rhs_only)
5515     {
5516       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5517       if (newlhs != NULL_TREE)
5518 	{
5519 	  lhs = newlhs;
5520 	  lhs_code = TREE_CODE (lhs);
5521 	}
5522     }
5523 
5524   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5525   if (inv_code == rhs_code
5526       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5527       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5528     return lhs;
5529   if (!rhs_only && inv_code == lhs_code
5530       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5531       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5532     return rhs;
5533   if (rhs != orig_rhs || lhs != orig_lhs)
5534     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5535 			    lhs, rhs);
5536   return NULL_TREE;
5537 }
5538 
5539 /* Find ways of folding logical expressions of LHS and RHS:
5540    Try to merge two comparisons to the same innermost item.
5541    Look for range tests like "ch >= '0' && ch <= '9'".
5542    Look for combinations of simple terms on machines with expensive branches
5543    and evaluate the RHS unconditionally.
5544 
5545    For example, if we have p->a == 2 && p->b == 4 and we can make an
5546    object large enough to span both A and B, we can do this with a comparison
5547    against the object ANDed with the a mask.
5548 
5549    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5550    operations to do this with one comparison.
5551 
5552    We check for both normal comparisons and the BIT_AND_EXPRs made this by
5553    function and the one above.
5554 
5555    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5556    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5557 
5558    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5559    two operands.
5560 
5561    We return the simplified tree or 0 if no optimization is possible.  */
5562 
5563 static tree
5564 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5565 		    tree lhs, tree rhs)
5566 {
5567   /* If this is the "or" of two comparisons, we can do something if
5568      the comparisons are NE_EXPR.  If this is the "and", we can do something
5569      if the comparisons are EQ_EXPR.  I.e.,
5570 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5571 
5572      WANTED_CODE is this operation code.  For single bit fields, we can
5573      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5574      comparison for one-bit fields.  */
5575 
5576   enum tree_code wanted_code;
5577   enum tree_code lcode, rcode;
5578   tree ll_arg, lr_arg, rl_arg, rr_arg;
5579   tree ll_inner, lr_inner, rl_inner, rr_inner;
5580   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5581   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5582   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5583   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5584   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5585   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5586   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5587   machine_mode lnmode, rnmode;
5588   tree ll_mask, lr_mask, rl_mask, rr_mask;
5589   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5590   tree l_const, r_const;
5591   tree lntype, rntype, result;
5592   HOST_WIDE_INT first_bit, end_bit;
5593   int volatilep;
5594 
5595   /* Start by getting the comparison codes.  Fail if anything is volatile.
5596      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5597      it were surrounded with a NE_EXPR.  */
5598 
5599   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5600     return 0;
5601 
5602   lcode = TREE_CODE (lhs);
5603   rcode = TREE_CODE (rhs);
5604 
5605   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5606     {
5607       lhs = build2 (NE_EXPR, truth_type, lhs,
5608 		    build_int_cst (TREE_TYPE (lhs), 0));
5609       lcode = NE_EXPR;
5610     }
5611 
5612   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5613     {
5614       rhs = build2 (NE_EXPR, truth_type, rhs,
5615 		    build_int_cst (TREE_TYPE (rhs), 0));
5616       rcode = NE_EXPR;
5617     }
5618 
5619   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5620       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5621     return 0;
5622 
5623   ll_arg = TREE_OPERAND (lhs, 0);
5624   lr_arg = TREE_OPERAND (lhs, 1);
5625   rl_arg = TREE_OPERAND (rhs, 0);
5626   rr_arg = TREE_OPERAND (rhs, 1);
5627 
5628   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5629   if (simple_operand_p (ll_arg)
5630       && simple_operand_p (lr_arg))
5631     {
5632       if (operand_equal_p (ll_arg, rl_arg, 0)
5633           && operand_equal_p (lr_arg, rr_arg, 0))
5634 	{
5635           result = combine_comparisons (loc, code, lcode, rcode,
5636 					truth_type, ll_arg, lr_arg);
5637 	  if (result)
5638 	    return result;
5639 	}
5640       else if (operand_equal_p (ll_arg, rr_arg, 0)
5641                && operand_equal_p (lr_arg, rl_arg, 0))
5642 	{
5643           result = combine_comparisons (loc, code, lcode,
5644 					swap_tree_comparison (rcode),
5645 					truth_type, ll_arg, lr_arg);
5646 	  if (result)
5647 	    return result;
5648 	}
5649     }
5650 
5651   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5652 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5653 
5654   /* If the RHS can be evaluated unconditionally and its operands are
5655      simple, it wins to evaluate the RHS unconditionally on machines
5656      with expensive branches.  In this case, this isn't a comparison
5657      that can be merged.  */
5658 
5659   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5660 		   false) >= 2
5661       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5662       && simple_operand_p (rl_arg)
5663       && simple_operand_p (rr_arg))
5664     {
5665       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5666       if (code == TRUTH_OR_EXPR
5667 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5668 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5669 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5670 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5671 	return build2_loc (loc, NE_EXPR, truth_type,
5672 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5673 				   ll_arg, rl_arg),
5674 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5675 
5676       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5677       if (code == TRUTH_AND_EXPR
5678 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5679 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5680 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5681 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5682 	return build2_loc (loc, EQ_EXPR, truth_type,
5683 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5684 				   ll_arg, rl_arg),
5685 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5686     }
5687 
5688   /* See if the comparisons can be merged.  Then get all the parameters for
5689      each side.  */
5690 
5691   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5692       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5693     return 0;
5694 
5695   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5696   volatilep = 0;
5697   ll_inner = decode_field_reference (loc, &ll_arg,
5698 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5699 				     &ll_unsignedp, &ll_reversep, &volatilep,
5700 				     &ll_mask, &ll_and_mask);
5701   lr_inner = decode_field_reference (loc, &lr_arg,
5702 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5703 				     &lr_unsignedp, &lr_reversep, &volatilep,
5704 				     &lr_mask, &lr_and_mask);
5705   rl_inner = decode_field_reference (loc, &rl_arg,
5706 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5707 				     &rl_unsignedp, &rl_reversep, &volatilep,
5708 				     &rl_mask, &rl_and_mask);
5709   rr_inner = decode_field_reference (loc, &rr_arg,
5710 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5711 				     &rr_unsignedp, &rr_reversep, &volatilep,
5712 				     &rr_mask, &rr_and_mask);
5713 
5714   /* It must be true that the inner operation on the lhs of each
5715      comparison must be the same if we are to be able to do anything.
5716      Then see if we have constants.  If not, the same must be true for
5717      the rhs's.  */
5718   if (volatilep
5719       || ll_reversep != rl_reversep
5720       || ll_inner == 0 || rl_inner == 0
5721       || ! operand_equal_p (ll_inner, rl_inner, 0))
5722     return 0;
5723 
5724   if (TREE_CODE (lr_arg) == INTEGER_CST
5725       && TREE_CODE (rr_arg) == INTEGER_CST)
5726     {
5727       l_const = lr_arg, r_const = rr_arg;
5728       lr_reversep = ll_reversep;
5729     }
5730   else if (lr_reversep != rr_reversep
5731 	   || lr_inner == 0 || rr_inner == 0
5732 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5733     return 0;
5734   else
5735     l_const = r_const = 0;
5736 
5737   /* If either comparison code is not correct for our logical operation,
5738      fail.  However, we can convert a one-bit comparison against zero into
5739      the opposite comparison against that bit being set in the field.  */
5740 
5741   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5742   if (lcode != wanted_code)
5743     {
5744       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5745 	{
5746 	  /* Make the left operand unsigned, since we are only interested
5747 	     in the value of one bit.  Otherwise we are doing the wrong
5748 	     thing below.  */
5749 	  ll_unsignedp = 1;
5750 	  l_const = ll_mask;
5751 	}
5752       else
5753 	return 0;
5754     }
5755 
5756   /* This is analogous to the code for l_const above.  */
5757   if (rcode != wanted_code)
5758     {
5759       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5760 	{
5761 	  rl_unsignedp = 1;
5762 	  r_const = rl_mask;
5763 	}
5764       else
5765 	return 0;
5766     }
5767 
5768   /* See if we can find a mode that contains both fields being compared on
5769      the left.  If we can't, fail.  Otherwise, update all constants and masks
5770      to be relative to a field of that size.  */
5771   first_bit = MIN (ll_bitpos, rl_bitpos);
5772   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5773   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5774 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5775 			  volatilep);
5776   if (lnmode == VOIDmode)
5777     return 0;
5778 
5779   lnbitsize = GET_MODE_BITSIZE (lnmode);
5780   lnbitpos = first_bit & ~ (lnbitsize - 1);
5781   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5782   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5783 
5784   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5785     {
5786       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5787       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5788     }
5789 
5790   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5791 			 size_int (xll_bitpos));
5792   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5793 			 size_int (xrl_bitpos));
5794 
5795   if (l_const)
5796     {
5797       l_const = fold_convert_loc (loc, lntype, l_const);
5798       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5799       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5800       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5801 					fold_build1_loc (loc, BIT_NOT_EXPR,
5802 						     lntype, ll_mask))))
5803 	{
5804 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5805 
5806 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5807 	}
5808     }
5809   if (r_const)
5810     {
5811       r_const = fold_convert_loc (loc, lntype, r_const);
5812       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5813       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5814       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5815 					fold_build1_loc (loc, BIT_NOT_EXPR,
5816 						     lntype, rl_mask))))
5817 	{
5818 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5819 
5820 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5821 	}
5822     }
5823 
5824   /* If the right sides are not constant, do the same for it.  Also,
5825      disallow this optimization if a size, signedness or storage order
5826      mismatch occurs between the left and right sides.  */
5827   if (l_const == 0)
5828     {
5829       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5830 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5831 	  || ll_reversep != lr_reversep
5832 	  /* Make sure the two fields on the right
5833 	     correspond to the left without being swapped.  */
5834 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5835 	return 0;
5836 
5837       first_bit = MIN (lr_bitpos, rr_bitpos);
5838       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5839       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5840 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5841 			      volatilep);
5842       if (rnmode == VOIDmode)
5843 	return 0;
5844 
5845       rnbitsize = GET_MODE_BITSIZE (rnmode);
5846       rnbitpos = first_bit & ~ (rnbitsize - 1);
5847       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5848       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5849 
5850       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5851 	{
5852 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5853 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5854 	}
5855 
5856       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5857 							    rntype, lr_mask),
5858 			     size_int (xlr_bitpos));
5859       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5860 							    rntype, rr_mask),
5861 			     size_int (xrr_bitpos));
5862 
5863       /* Make a mask that corresponds to both fields being compared.
5864 	 Do this for both items being compared.  If the operands are the
5865 	 same size and the bits being compared are in the same position
5866 	 then we can do this by masking both and comparing the masked
5867 	 results.  */
5868       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5869       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5870       if (lnbitsize == rnbitsize
5871 	  && xll_bitpos == xlr_bitpos
5872 	  && lnbitpos >= 0
5873 	  && rnbitpos >= 0)
5874 	{
5875 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5876 				    lntype, lnbitsize, lnbitpos,
5877 				    ll_unsignedp || rl_unsignedp, ll_reversep);
5878 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5879 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5880 
5881 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5882 				    rntype, rnbitsize, rnbitpos,
5883 				    lr_unsignedp || rr_unsignedp, lr_reversep);
5884 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5885 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5886 
5887 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5888 	}
5889 
5890       /* There is still another way we can do something:  If both pairs of
5891 	 fields being compared are adjacent, we may be able to make a wider
5892 	 field containing them both.
5893 
5894 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5895 	 the mask must be shifted to account for the shift done by
5896 	 make_bit_field_ref.  */
5897       if (((ll_bitsize + ll_bitpos == rl_bitpos
5898 	    && lr_bitsize + lr_bitpos == rr_bitpos)
5899 	   || (ll_bitpos == rl_bitpos + rl_bitsize
5900 	       && lr_bitpos == rr_bitpos + rr_bitsize))
5901 	  && ll_bitpos >= 0
5902 	  && rl_bitpos >= 0
5903 	  && lr_bitpos >= 0
5904 	  && rr_bitpos >= 0)
5905 	{
5906 	  tree type;
5907 
5908 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5909 				    ll_bitsize + rl_bitsize,
5910 				    MIN (ll_bitpos, rl_bitpos),
5911 				    ll_unsignedp, ll_reversep);
5912 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5913 				    lr_bitsize + rr_bitsize,
5914 				    MIN (lr_bitpos, rr_bitpos),
5915 				    lr_unsignedp, lr_reversep);
5916 
5917 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5918 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5919 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5920 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5921 
5922 	  /* Convert to the smaller type before masking out unwanted bits.  */
5923 	  type = lntype;
5924 	  if (lntype != rntype)
5925 	    {
5926 	      if (lnbitsize > rnbitsize)
5927 		{
5928 		  lhs = fold_convert_loc (loc, rntype, lhs);
5929 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5930 		  type = rntype;
5931 		}
5932 	      else if (lnbitsize < rnbitsize)
5933 		{
5934 		  rhs = fold_convert_loc (loc, lntype, rhs);
5935 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5936 		  type = lntype;
5937 		}
5938 	    }
5939 
5940 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5941 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5942 
5943 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5944 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5945 
5946 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5947 	}
5948 
5949       return 0;
5950     }
5951 
5952   /* Handle the case of comparisons with constants.  If there is something in
5953      common between the masks, those bits of the constants must be the same.
5954      If not, the condition is always false.  Test for this to avoid generating
5955      incorrect code below.  */
5956   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5957   if (! integer_zerop (result)
5958       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5959 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5960     {
5961       if (wanted_code == NE_EXPR)
5962 	{
5963 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5964 	  return constant_boolean_node (true, truth_type);
5965 	}
5966       else
5967 	{
5968 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5969 	  return constant_boolean_node (false, truth_type);
5970 	}
5971     }
5972 
5973   if (lnbitpos < 0)
5974     return 0;
5975 
5976   /* Construct the expression we will return.  First get the component
5977      reference we will make.  Unless the mask is all ones the width of
5978      that field, perform the mask operation.  Then compare with the
5979      merged constant.  */
5980   result = make_bit_field_ref (loc, ll_inner, ll_arg,
5981 			       lntype, lnbitsize, lnbitpos,
5982 			       ll_unsignedp || rl_unsignedp, ll_reversep);
5983 
5984   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5985   if (! all_ones_mask_p (ll_mask, lnbitsize))
5986     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5987 
5988   return build2_loc (loc, wanted_code, truth_type, result,
5989 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5990 }
5991 
5992 /* T is an integer expression that is being multiplied, divided, or taken a
5993    modulus (CODE says which and what kind of divide or modulus) by a
5994    constant C.  See if we can eliminate that operation by folding it with
5995    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5996    should be used for the computation if wider than our type.
5997 
5998    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5999    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6000    expression would not overflow or that overflow is undefined for the type
6001    in the language in question.
6002 
6003    If we return a non-null expression, it is an equivalent form of the
6004    original computation, but need not be in the original type.
6005 
6006    We set *STRICT_OVERFLOW_P to true if the return values depends on
6007    signed overflow being undefined.  Otherwise we do not change
6008    *STRICT_OVERFLOW_P.  */
6009 
6010 static tree
6011 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6012 		bool *strict_overflow_p)
6013 {
6014   /* To avoid exponential search depth, refuse to allow recursion past
6015      three levels.  Beyond that (1) it's highly unlikely that we'll find
6016      something interesting and (2) we've probably processed it before
6017      when we built the inner expression.  */
6018 
6019   static int depth;
6020   tree ret;
6021 
6022   if (depth > 3)
6023     return NULL;
6024 
6025   depth++;
6026   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6027   depth--;
6028 
6029   return ret;
6030 }
6031 
6032 static tree
6033 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6034 		  bool *strict_overflow_p)
6035 {
6036   tree type = TREE_TYPE (t);
6037   enum tree_code tcode = TREE_CODE (t);
6038   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6039 				   > GET_MODE_SIZE (TYPE_MODE (type)))
6040 		? wide_type : type);
6041   tree t1, t2;
6042   int same_p = tcode == code;
6043   tree op0 = NULL_TREE, op1 = NULL_TREE;
6044   bool sub_strict_overflow_p;
6045 
6046   /* Don't deal with constants of zero here; they confuse the code below.  */
6047   if (integer_zerop (c))
6048     return NULL_TREE;
6049 
6050   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6051     op0 = TREE_OPERAND (t, 0);
6052 
6053   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6054     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6055 
6056   /* Note that we need not handle conditional operations here since fold
6057      already handles those cases.  So just do arithmetic here.  */
6058   switch (tcode)
6059     {
6060     case INTEGER_CST:
6061       /* For a constant, we can always simplify if we are a multiply
6062 	 or (for divide and modulus) if it is a multiple of our constant.  */
6063       if (code == MULT_EXPR
6064 	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6065 	{
6066 	  tree tem = const_binop (code, fold_convert (ctype, t),
6067 				  fold_convert (ctype, c));
6068 	  /* If the multiplication overflowed, we lost information on it.
6069 	     See PR68142 and PR69845.  */
6070 	  if (TREE_OVERFLOW (tem))
6071 	    return NULL_TREE;
6072 	  return tem;
6073 	}
6074       break;
6075 
6076     CASE_CONVERT: case NON_LVALUE_EXPR:
6077       /* If op0 is an expression ...  */
6078       if ((COMPARISON_CLASS_P (op0)
6079 	   || UNARY_CLASS_P (op0)
6080 	   || BINARY_CLASS_P (op0)
6081 	   || VL_EXP_CLASS_P (op0)
6082 	   || EXPRESSION_CLASS_P (op0))
6083 	  /* ... and has wrapping overflow, and its type is smaller
6084 	     than ctype, then we cannot pass through as widening.  */
6085 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6086 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6087 	       && (TYPE_PRECISION (ctype)
6088 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6089 	      /* ... or this is a truncation (t is narrower than op0),
6090 		 then we cannot pass through this narrowing.  */
6091 	      || (TYPE_PRECISION (type)
6092 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6093 	      /* ... or signedness changes for division or modulus,
6094 		 then we cannot pass through this conversion.  */
6095 	      || (code != MULT_EXPR
6096 		  && (TYPE_UNSIGNED (ctype)
6097 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6098 	      /* ... or has undefined overflow while the converted to
6099 		 type has not, we cannot do the operation in the inner type
6100 		 as that would introduce undefined overflow.  */
6101 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6102 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6103 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6104 	break;
6105 
6106       /* Pass the constant down and see if we can make a simplification.  If
6107 	 we can, replace this expression with the inner simplification for
6108 	 possible later conversion to our or some other type.  */
6109       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6110 	  && TREE_CODE (t2) == INTEGER_CST
6111 	  && !TREE_OVERFLOW (t2)
6112 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
6113 					 code == MULT_EXPR
6114 					 ? ctype : NULL_TREE,
6115 					 strict_overflow_p))))
6116 	return t1;
6117       break;
6118 
6119     case ABS_EXPR:
6120       /* If widening the type changes it from signed to unsigned, then we
6121          must avoid building ABS_EXPR itself as unsigned.  */
6122       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6123         {
6124           tree cstype = (*signed_type_for) (ctype);
6125           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6126 	      != 0)
6127             {
6128               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6129               return fold_convert (ctype, t1);
6130             }
6131           break;
6132         }
6133       /* If the constant is negative, we cannot simplify this.  */
6134       if (tree_int_cst_sgn (c) == -1)
6135         break;
6136       /* FALLTHROUGH */
6137     case NEGATE_EXPR:
6138       /* For division and modulus, type can't be unsigned, as e.g.
6139 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6140 	 For signed types, even with wrapping overflow, this is fine.  */
6141       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6142 	break;
6143       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6144 	  != 0)
6145 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6146       break;
6147 
6148     case MIN_EXPR:  case MAX_EXPR:
6149       /* If widening the type changes the signedness, then we can't perform
6150 	 this optimization as that changes the result.  */
6151       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6152 	break;
6153 
6154       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6155       sub_strict_overflow_p = false;
6156       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6157 				&sub_strict_overflow_p)) != 0
6158 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6159 				   &sub_strict_overflow_p)) != 0)
6160 	{
6161 	  if (tree_int_cst_sgn (c) < 0)
6162 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6163 	  if (sub_strict_overflow_p)
6164 	    *strict_overflow_p = true;
6165 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6166 			      fold_convert (ctype, t2));
6167 	}
6168       break;
6169 
6170     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6171       /* If the second operand is constant, this is a multiplication
6172 	 or floor division, by a power of two, so we can treat it that
6173 	 way unless the multiplier or divisor overflows.  Signed
6174 	 left-shift overflow is implementation-defined rather than
6175 	 undefined in C90, so do not convert signed left shift into
6176 	 multiplication.  */
6177       if (TREE_CODE (op1) == INTEGER_CST
6178 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6179 	  /* const_binop may not detect overflow correctly,
6180 	     so check for it explicitly here.  */
6181 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6182 	  && 0 != (t1 = fold_convert (ctype,
6183 				      const_binop (LSHIFT_EXPR,
6184 						   size_one_node,
6185 						   op1)))
6186 	  && !TREE_OVERFLOW (t1))
6187 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6188 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6189 				       ctype,
6190 				       fold_convert (ctype, op0),
6191 				       t1),
6192 			       c, code, wide_type, strict_overflow_p);
6193       break;
6194 
6195     case PLUS_EXPR:  case MINUS_EXPR:
6196       /* See if we can eliminate the operation on both sides.  If we can, we
6197 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6198 	 cases where we can do anything are if the second operand is a
6199 	 constant.  */
6200       sub_strict_overflow_p = false;
6201       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6202       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6203       if (t1 != 0 && t2 != 0
6204 	  && TYPE_OVERFLOW_WRAPS (ctype)
6205 	  && (code == MULT_EXPR
6206 	      /* If not multiplication, we can only do this if both operands
6207 		 are divisible by c.  */
6208 	      || (multiple_of_p (ctype, op0, c)
6209 	          && multiple_of_p (ctype, op1, c))))
6210 	{
6211 	  if (sub_strict_overflow_p)
6212 	    *strict_overflow_p = true;
6213 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6214 			      fold_convert (ctype, t2));
6215 	}
6216 
6217       /* If this was a subtraction, negate OP1 and set it to be an addition.
6218 	 This simplifies the logic below.  */
6219       if (tcode == MINUS_EXPR)
6220 	{
6221 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6222 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6223 	  if (TREE_CODE (op0) == INTEGER_CST)
6224 	    {
6225 	      std::swap (op0, op1);
6226 	      std::swap (t1, t2);
6227 	    }
6228 	}
6229 
6230       if (TREE_CODE (op1) != INTEGER_CST)
6231 	break;
6232 
6233       /* If either OP1 or C are negative, this optimization is not safe for
6234 	 some of the division and remainder types while for others we need
6235 	 to change the code.  */
6236       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6237 	{
6238 	  if (code == CEIL_DIV_EXPR)
6239 	    code = FLOOR_DIV_EXPR;
6240 	  else if (code == FLOOR_DIV_EXPR)
6241 	    code = CEIL_DIV_EXPR;
6242 	  else if (code != MULT_EXPR
6243 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6244 	    break;
6245 	}
6246 
6247       /* If it's a multiply or a division/modulus operation of a multiple
6248          of our constant, do the operation and verify it doesn't overflow.  */
6249       if (code == MULT_EXPR
6250 	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6251 	{
6252 	  op1 = const_binop (code, fold_convert (ctype, op1),
6253 			     fold_convert (ctype, c));
6254 	  /* We allow the constant to overflow with wrapping semantics.  */
6255 	  if (op1 == 0
6256 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6257 	    break;
6258 	}
6259       else
6260 	break;
6261 
6262       /* If we have an unsigned type, we cannot widen the operation since it
6263 	 will change the result if the original computation overflowed.  */
6264       if (TYPE_UNSIGNED (ctype) && ctype != type)
6265 	break;
6266 
6267       /* The last case is if we are a multiply.  In that case, we can
6268 	 apply the distributive law to commute the multiply and addition
6269 	 if the multiplication of the constants doesn't overflow
6270 	 and overflow is defined.  With undefined overflow
6271 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6272       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6273 	return fold_build2 (tcode, ctype,
6274 			    fold_build2 (code, ctype,
6275 					 fold_convert (ctype, op0),
6276 					 fold_convert (ctype, c)),
6277 			    op1);
6278 
6279       break;
6280 
6281     case MULT_EXPR:
6282       /* We have a special case here if we are doing something like
6283 	 (C * 8) % 4 since we know that's zero.  */
6284       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6285 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6286 	  /* If the multiplication can overflow we cannot optimize this.  */
6287 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6288 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6289 	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6290 	{
6291 	  *strict_overflow_p = true;
6292 	  return omit_one_operand (type, integer_zero_node, op0);
6293 	}
6294 
6295       /* ... fall through ...  */
6296 
6297     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6298     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6299       /* If we can extract our operation from the LHS, do so and return a
6300 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6301 	 do something only if the second operand is a constant.  */
6302       if (same_p
6303 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6304 				   strict_overflow_p)) != 0)
6305 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6306 			    fold_convert (ctype, op1));
6307       else if (tcode == MULT_EXPR && code == MULT_EXPR
6308 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6309 					strict_overflow_p)) != 0)
6310 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6311 			    fold_convert (ctype, t1));
6312       else if (TREE_CODE (op1) != INTEGER_CST)
6313 	return 0;
6314 
6315       /* If these are the same operation types, we can associate them
6316 	 assuming no overflow.  */
6317       if (tcode == code)
6318 	{
6319 	  bool overflow_p = false;
6320 	  bool overflow_mul_p;
6321 	  signop sign = TYPE_SIGN (ctype);
6322 	  unsigned prec = TYPE_PRECISION (ctype);
6323 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6324 				  wi::to_wide (c, prec),
6325 				  sign, &overflow_mul_p);
6326 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6327 	  if (overflow_mul_p
6328 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6329 	    overflow_p = true;
6330 	  if (!overflow_p)
6331 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6332 				wide_int_to_tree (ctype, mul));
6333 	}
6334 
6335       /* If these operations "cancel" each other, we have the main
6336 	 optimizations of this pass, which occur when either constant is a
6337 	 multiple of the other, in which case we replace this with either an
6338 	 operation or CODE or TCODE.
6339 
6340 	 If we have an unsigned type, we cannot do this since it will change
6341 	 the result if the original computation overflowed.  */
6342       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6343 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6344 	      || (tcode == MULT_EXPR
6345 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6346 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6347 		  && code != MULT_EXPR)))
6348 	{
6349 	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6350 	    {
6351 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6352 		*strict_overflow_p = true;
6353 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6354 				  fold_convert (ctype,
6355 						const_binop (TRUNC_DIV_EXPR,
6356 							     op1, c)));
6357 	    }
6358 	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6359 	    {
6360 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6361 		*strict_overflow_p = true;
6362 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6363 				  fold_convert (ctype,
6364 						const_binop (TRUNC_DIV_EXPR,
6365 							     c, op1)));
6366 	    }
6367 	}
6368       break;
6369 
6370     default:
6371       break;
6372     }
6373 
6374   return 0;
6375 }
6376 
6377 /* Return a node which has the indicated constant VALUE (either 0 or
6378    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6379    and is of the indicated TYPE.  */
6380 
6381 tree
6382 constant_boolean_node (bool value, tree type)
6383 {
6384   if (type == integer_type_node)
6385     return value ? integer_one_node : integer_zero_node;
6386   else if (type == boolean_type_node)
6387     return value ? boolean_true_node : boolean_false_node;
6388   else if (TREE_CODE (type) == VECTOR_TYPE)
6389     return build_vector_from_val (type,
6390 				  build_int_cst (TREE_TYPE (type),
6391 						 value ? -1 : 0));
6392   else
6393     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6394 }
6395 
6396 
6397 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6398    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6399    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6400    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6401    COND is the first argument to CODE; otherwise (as in the example
6402    given here), it is the second argument.  TYPE is the type of the
6403    original expression.  Return NULL_TREE if no simplification is
6404    possible.  */
6405 
6406 static tree
6407 fold_binary_op_with_conditional_arg (location_t loc,
6408 				     enum tree_code code,
6409 				     tree type, tree op0, tree op1,
6410 				     tree cond, tree arg, int cond_first_p)
6411 {
6412   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6413   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6414   tree test, true_value, false_value;
6415   tree lhs = NULL_TREE;
6416   tree rhs = NULL_TREE;
6417   enum tree_code cond_code = COND_EXPR;
6418 
6419   if (TREE_CODE (cond) == COND_EXPR
6420       || TREE_CODE (cond) == VEC_COND_EXPR)
6421     {
6422       test = TREE_OPERAND (cond, 0);
6423       true_value = TREE_OPERAND (cond, 1);
6424       false_value = TREE_OPERAND (cond, 2);
6425       /* If this operand throws an expression, then it does not make
6426 	 sense to try to perform a logical or arithmetic operation
6427 	 involving it.  */
6428       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6429 	lhs = true_value;
6430       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6431 	rhs = false_value;
6432     }
6433   else if (!(TREE_CODE (type) != VECTOR_TYPE
6434 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6435     {
6436       tree testtype = TREE_TYPE (cond);
6437       test = cond;
6438       true_value = constant_boolean_node (true, testtype);
6439       false_value = constant_boolean_node (false, testtype);
6440     }
6441   else
6442     /* Detect the case of mixing vector and scalar types - bail out.  */
6443     return NULL_TREE;
6444 
6445   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6446     cond_code = VEC_COND_EXPR;
6447 
6448   /* This transformation is only worthwhile if we don't have to wrap ARG
6449      in a SAVE_EXPR and the operation can be simplified without recursing
6450      on at least one of the branches once its pushed inside the COND_EXPR.  */
6451   if (!TREE_CONSTANT (arg)
6452       && (TREE_SIDE_EFFECTS (arg)
6453 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6454 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6455     return NULL_TREE;
6456 
6457   arg = fold_convert_loc (loc, arg_type, arg);
6458   if (lhs == 0)
6459     {
6460       true_value = fold_convert_loc (loc, cond_type, true_value);
6461       if (cond_first_p)
6462 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6463       else
6464 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6465     }
6466   if (rhs == 0)
6467     {
6468       false_value = fold_convert_loc (loc, cond_type, false_value);
6469       if (cond_first_p)
6470 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6471       else
6472 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6473     }
6474 
6475   /* Check that we have simplified at least one of the branches.  */
6476   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6477     return NULL_TREE;
6478 
6479   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6480 }
6481 
6482 
6483 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6484 
6485    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6486    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6487    ADDEND is the same as X.
6488 
6489    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6490    and finite.  The problematic cases are when X is zero, and its mode
6491    has signed zeros.  In the case of rounding towards -infinity,
6492    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6493    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6494 
6495 bool
6496 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6497 {
6498   if (!real_zerop (addend))
6499     return false;
6500 
6501   /* Don't allow the fold with -fsignaling-nans.  */
6502   if (HONOR_SNANS (element_mode (type)))
6503     return false;
6504 
6505   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6506   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6507     return true;
6508 
6509   /* In a vector or complex, we would need to check the sign of all zeros.  */
6510   if (TREE_CODE (addend) != REAL_CST)
6511     return false;
6512 
6513   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6514   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6515     negate = !negate;
6516 
6517   /* The mode has signed zeros, and we have to honor their sign.
6518      In this situation, there is only one case we can return true for.
6519      X - 0 is the same as X unless rounding towards -infinity is
6520      supported.  */
6521   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6522 }
6523 
6524 /* Subroutine of fold() that optimizes comparisons of a division by
6525    a nonzero integer constant against an integer constant, i.e.
6526    X/C1 op C2.
6527 
6528    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6529    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6530    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6531 
6532    The function returns the constant folded tree if a simplification
6533    can be made, and NULL_TREE otherwise.  */
6534 
6535 static tree
6536 fold_div_compare (location_t loc,
6537 		  enum tree_code code, tree type, tree arg0, tree arg1)
6538 {
6539   tree prod, tmp, hi, lo;
6540   tree arg00 = TREE_OPERAND (arg0, 0);
6541   tree arg01 = TREE_OPERAND (arg0, 1);
6542   signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6543   bool neg_overflow = false;
6544   bool overflow;
6545 
6546   /* We have to do this the hard way to detect unsigned overflow.
6547      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6548   wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6549   prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6550   neg_overflow = false;
6551 
6552   if (sign == UNSIGNED)
6553     {
6554       tmp = int_const_binop (MINUS_EXPR, arg01,
6555                              build_int_cst (TREE_TYPE (arg01), 1));
6556       lo = prod;
6557 
6558       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6559       val = wi::add (prod, tmp, sign, &overflow);
6560       hi = force_fit_type (TREE_TYPE (arg00), val,
6561 			   -1, overflow | TREE_OVERFLOW (prod));
6562     }
6563   else if (tree_int_cst_sgn (arg01) >= 0)
6564     {
6565       tmp = int_const_binop (MINUS_EXPR, arg01,
6566 			     build_int_cst (TREE_TYPE (arg01), 1));
6567       switch (tree_int_cst_sgn (arg1))
6568 	{
6569 	case -1:
6570 	  neg_overflow = true;
6571 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6572 	  hi = prod;
6573 	  break;
6574 
6575 	case  0:
6576 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6577 	  hi = tmp;
6578 	  break;
6579 
6580 	case  1:
6581           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6582 	  lo = prod;
6583 	  break;
6584 
6585 	default:
6586 	  gcc_unreachable ();
6587 	}
6588     }
6589   else
6590     {
6591       /* A negative divisor reverses the relational operators.  */
6592       code = swap_tree_comparison (code);
6593 
6594       tmp = int_const_binop (PLUS_EXPR, arg01,
6595 			     build_int_cst (TREE_TYPE (arg01), 1));
6596       switch (tree_int_cst_sgn (arg1))
6597 	{
6598 	case -1:
6599 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6600 	  lo = prod;
6601 	  break;
6602 
6603 	case  0:
6604 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6605 	  lo = tmp;
6606 	  break;
6607 
6608 	case  1:
6609 	  neg_overflow = true;
6610 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6611 	  hi = prod;
6612 	  break;
6613 
6614 	default:
6615 	  gcc_unreachable ();
6616 	}
6617     }
6618 
6619   switch (code)
6620     {
6621     case EQ_EXPR:
6622       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6623 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6624       if (TREE_OVERFLOW (hi))
6625 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6626       if (TREE_OVERFLOW (lo))
6627 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6628       return build_range_check (loc, type, arg00, 1, lo, hi);
6629 
6630     case NE_EXPR:
6631       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6632 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6633       if (TREE_OVERFLOW (hi))
6634 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6635       if (TREE_OVERFLOW (lo))
6636 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6637       return build_range_check (loc, type, arg00, 0, lo, hi);
6638 
6639     case LT_EXPR:
6640       if (TREE_OVERFLOW (lo))
6641 	{
6642 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6643 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6644 	}
6645       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6646 
6647     case LE_EXPR:
6648       if (TREE_OVERFLOW (hi))
6649 	{
6650 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6651 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6652 	}
6653       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6654 
6655     case GT_EXPR:
6656       if (TREE_OVERFLOW (hi))
6657 	{
6658 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6659 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6660 	}
6661       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6662 
6663     case GE_EXPR:
6664       if (TREE_OVERFLOW (lo))
6665 	{
6666 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6667 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6668 	}
6669       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6670 
6671     default:
6672       break;
6673     }
6674 
6675   return NULL_TREE;
6676 }
6677 
6678 
6679 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6680    equality/inequality test, then return a simplified form of the test
6681    using a sign testing.  Otherwise return NULL.  TYPE is the desired
6682    result type.  */
6683 
6684 static tree
6685 fold_single_bit_test_into_sign_test (location_t loc,
6686 				     enum tree_code code, tree arg0, tree arg1,
6687 				     tree result_type)
6688 {
6689   /* If this is testing a single bit, we can optimize the test.  */
6690   if ((code == NE_EXPR || code == EQ_EXPR)
6691       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6692       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6693     {
6694       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6695 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6696       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6697 
6698       if (arg00 != NULL_TREE
6699 	  /* This is only a win if casting to a signed type is cheap,
6700 	     i.e. when arg00's type is not a partial mode.  */
6701 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6702 	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6703 	{
6704 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6705 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6706 			      result_type,
6707 			      fold_convert_loc (loc, stype, arg00),
6708 			      build_int_cst (stype, 0));
6709 	}
6710     }
6711 
6712   return NULL_TREE;
6713 }
6714 
6715 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6716    equality/inequality test, then return a simplified form of
6717    the test using shifts and logical operations.  Otherwise return
6718    NULL.  TYPE is the desired result type.  */
6719 
6720 tree
6721 fold_single_bit_test (location_t loc, enum tree_code code,
6722 		      tree arg0, tree arg1, tree result_type)
6723 {
6724   /* If this is testing a single bit, we can optimize the test.  */
6725   if ((code == NE_EXPR || code == EQ_EXPR)
6726       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6727       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6728     {
6729       tree inner = TREE_OPERAND (arg0, 0);
6730       tree type = TREE_TYPE (arg0);
6731       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6732       machine_mode operand_mode = TYPE_MODE (type);
6733       int ops_unsigned;
6734       tree signed_type, unsigned_type, intermediate_type;
6735       tree tem, one;
6736 
6737       /* First, see if we can fold the single bit test into a sign-bit
6738 	 test.  */
6739       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6740 						 result_type);
6741       if (tem)
6742 	return tem;
6743 
6744       /* Otherwise we have (A & C) != 0 where C is a single bit,
6745 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6746 	 Similarly for (A & C) == 0.  */
6747 
6748       /* If INNER is a right shift of a constant and it plus BITNUM does
6749 	 not overflow, adjust BITNUM and INNER.  */
6750       if (TREE_CODE (inner) == RSHIFT_EXPR
6751 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6752 	  && bitnum < TYPE_PRECISION (type)
6753 	  && wi::ltu_p (TREE_OPERAND (inner, 1),
6754 			TYPE_PRECISION (type) - bitnum))
6755 	{
6756 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6757 	  inner = TREE_OPERAND (inner, 0);
6758 	}
6759 
6760       /* If we are going to be able to omit the AND below, we must do our
6761 	 operations as unsigned.  If we must use the AND, we have a choice.
6762 	 Normally unsigned is faster, but for some machines signed is.  */
6763       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6764 		      && !flag_syntax_only) ? 0 : 1;
6765 
6766       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6767       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6768       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6769       inner = fold_convert_loc (loc, intermediate_type, inner);
6770 
6771       if (bitnum != 0)
6772 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6773 			inner, size_int (bitnum));
6774 
6775       one = build_int_cst (intermediate_type, 1);
6776 
6777       if (code == EQ_EXPR)
6778 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6779 
6780       /* Put the AND last so it can combine with more things.  */
6781       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6782 
6783       /* Make sure to return the proper type.  */
6784       inner = fold_convert_loc (loc, result_type, inner);
6785 
6786       return inner;
6787     }
6788   return NULL_TREE;
6789 }
6790 
6791 /* Test whether it is preferable two swap two operands, ARG0 and
6792    ARG1, for example because ARG0 is an integer constant and ARG1
6793    isn't.  */
6794 
6795 bool
6796 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6797 {
6798   if (CONSTANT_CLASS_P (arg1))
6799     return 0;
6800   if (CONSTANT_CLASS_P (arg0))
6801     return 1;
6802 
6803   STRIP_NOPS (arg0);
6804   STRIP_NOPS (arg1);
6805 
6806   if (TREE_CONSTANT (arg1))
6807     return 0;
6808   if (TREE_CONSTANT (arg0))
6809     return 1;
6810 
6811   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6812      for commutative and comparison operators.  Ensuring a canonical
6813      form allows the optimizers to find additional redundancies without
6814      having to explicitly check for both orderings.  */
6815   if (TREE_CODE (arg0) == SSA_NAME
6816       && TREE_CODE (arg1) == SSA_NAME
6817       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6818     return 1;
6819 
6820   /* Put SSA_NAMEs last.  */
6821   if (TREE_CODE (arg1) == SSA_NAME)
6822     return 0;
6823   if (TREE_CODE (arg0) == SSA_NAME)
6824     return 1;
6825 
6826   /* Put variables last.  */
6827   if (DECL_P (arg1))
6828     return 0;
6829   if (DECL_P (arg0))
6830     return 1;
6831 
6832   return 0;
6833 }
6834 
6835 
6836 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6837    means A >= Y && A != MAX, but in this case we know that
6838    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6839 
6840 static tree
6841 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6842 {
6843   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6844 
6845   if (TREE_CODE (bound) == LT_EXPR)
6846     a = TREE_OPERAND (bound, 0);
6847   else if (TREE_CODE (bound) == GT_EXPR)
6848     a = TREE_OPERAND (bound, 1);
6849   else
6850     return NULL_TREE;
6851 
6852   typea = TREE_TYPE (a);
6853   if (!INTEGRAL_TYPE_P (typea)
6854       && !POINTER_TYPE_P (typea))
6855     return NULL_TREE;
6856 
6857   if (TREE_CODE (ineq) == LT_EXPR)
6858     {
6859       a1 = TREE_OPERAND (ineq, 1);
6860       y = TREE_OPERAND (ineq, 0);
6861     }
6862   else if (TREE_CODE (ineq) == GT_EXPR)
6863     {
6864       a1 = TREE_OPERAND (ineq, 0);
6865       y = TREE_OPERAND (ineq, 1);
6866     }
6867   else
6868     return NULL_TREE;
6869 
6870   if (TREE_TYPE (a1) != typea)
6871     return NULL_TREE;
6872 
6873   if (POINTER_TYPE_P (typea))
6874     {
6875       /* Convert the pointer types into integer before taking the difference.  */
6876       tree ta = fold_convert_loc (loc, ssizetype, a);
6877       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6878       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6879     }
6880   else
6881     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6882 
6883   if (!diff || !integer_onep (diff))
6884    return NULL_TREE;
6885 
6886   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6887 }
6888 
6889 /* Fold a sum or difference of at least one multiplication.
6890    Returns the folded tree or NULL if no simplification could be made.  */
6891 
6892 static tree
6893 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6894 			  tree arg0, tree arg1)
6895 {
6896   tree arg00, arg01, arg10, arg11;
6897   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6898 
6899   /* (A * C) +- (B * C) -> (A+-B) * C.
6900      (A * C) +- A -> A * (C+-1).
6901      We are most concerned about the case where C is a constant,
6902      but other combinations show up during loop reduction.  Since
6903      it is not difficult, try all four possibilities.  */
6904 
6905   if (TREE_CODE (arg0) == MULT_EXPR)
6906     {
6907       arg00 = TREE_OPERAND (arg0, 0);
6908       arg01 = TREE_OPERAND (arg0, 1);
6909     }
6910   else if (TREE_CODE (arg0) == INTEGER_CST)
6911     {
6912       arg00 = build_one_cst (type);
6913       arg01 = arg0;
6914     }
6915   else
6916     {
6917       /* We cannot generate constant 1 for fract.  */
6918       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6919 	return NULL_TREE;
6920       arg00 = arg0;
6921       arg01 = build_one_cst (type);
6922     }
6923   if (TREE_CODE (arg1) == MULT_EXPR)
6924     {
6925       arg10 = TREE_OPERAND (arg1, 0);
6926       arg11 = TREE_OPERAND (arg1, 1);
6927     }
6928   else if (TREE_CODE (arg1) == INTEGER_CST)
6929     {
6930       arg10 = build_one_cst (type);
6931       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6932 	 the purpose of this canonicalization.  */
6933       if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6934 	  && negate_expr_p (arg1)
6935 	  && code == PLUS_EXPR)
6936 	{
6937 	  arg11 = negate_expr (arg1);
6938 	  code = MINUS_EXPR;
6939 	}
6940       else
6941 	arg11 = arg1;
6942     }
6943   else
6944     {
6945       /* We cannot generate constant 1 for fract.  */
6946       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6947 	return NULL_TREE;
6948       arg10 = arg1;
6949       arg11 = build_one_cst (type);
6950     }
6951   same = NULL_TREE;
6952 
6953   if (operand_equal_p (arg01, arg11, 0))
6954     same = arg01, alt0 = arg00, alt1 = arg10;
6955   else if (operand_equal_p (arg00, arg10, 0))
6956     same = arg00, alt0 = arg01, alt1 = arg11;
6957   else if (operand_equal_p (arg00, arg11, 0))
6958     same = arg00, alt0 = arg01, alt1 = arg10;
6959   else if (operand_equal_p (arg01, arg10, 0))
6960     same = arg01, alt0 = arg00, alt1 = arg11;
6961 
6962   /* No identical multiplicands; see if we can find a common
6963      power-of-two factor in non-power-of-two multiplies.  This
6964      can help in multi-dimensional array access.  */
6965   else if (tree_fits_shwi_p (arg01)
6966 	   && tree_fits_shwi_p (arg11))
6967     {
6968       HOST_WIDE_INT int01, int11, tmp;
6969       bool swap = false;
6970       tree maybe_same;
6971       int01 = tree_to_shwi (arg01);
6972       int11 = tree_to_shwi (arg11);
6973 
6974       /* Move min of absolute values to int11.  */
6975       if (absu_hwi (int01) < absu_hwi (int11))
6976         {
6977 	  tmp = int01, int01 = int11, int11 = tmp;
6978 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
6979 	  maybe_same = arg01;
6980 	  swap = true;
6981 	}
6982       else
6983 	maybe_same = arg11;
6984 
6985       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6986 	  /* The remainder should not be a constant, otherwise we
6987 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6988 	     increased the number of multiplications necessary.  */
6989 	  && TREE_CODE (arg10) != INTEGER_CST)
6990         {
6991 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6992 			      build_int_cst (TREE_TYPE (arg00),
6993 					     int01 / int11));
6994 	  alt1 = arg10;
6995 	  same = maybe_same;
6996 	  if (swap)
6997 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6998 	}
6999     }
7000 
7001   if (same)
7002     return fold_build2_loc (loc, MULT_EXPR, type,
7003 			fold_build2_loc (loc, code, type,
7004 				     fold_convert_loc (loc, type, alt0),
7005 				     fold_convert_loc (loc, type, alt1)),
7006 			fold_convert_loc (loc, type, same));
7007 
7008   return NULL_TREE;
7009 }
7010 
7011 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7012    specified by EXPR into the buffer PTR of length LEN bytes.
7013    Return the number of bytes placed in the buffer, or zero
7014    upon failure.  */
7015 
7016 static int
7017 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7018 {
7019   tree type = TREE_TYPE (expr);
7020   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021   int byte, offset, word, words;
7022   unsigned char value;
7023 
7024   if ((off == -1 && total_bytes > len)
7025       || off >= total_bytes)
7026     return 0;
7027   if (off == -1)
7028     off = 0;
7029   words = total_bytes / UNITS_PER_WORD;
7030 
7031   for (byte = 0; byte < total_bytes; byte++)
7032     {
7033       int bitpos = byte * BITS_PER_UNIT;
7034       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7035 	 number of bytes.  */
7036       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7037 
7038       if (total_bytes > UNITS_PER_WORD)
7039 	{
7040 	  word = byte / UNITS_PER_WORD;
7041 	  if (WORDS_BIG_ENDIAN)
7042 	    word = (words - 1) - word;
7043 	  offset = word * UNITS_PER_WORD;
7044 	  if (BYTES_BIG_ENDIAN)
7045 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7046 	  else
7047 	    offset += byte % UNITS_PER_WORD;
7048 	}
7049       else
7050 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7051       if (offset >= off
7052 	  && offset - off < len)
7053 	ptr[offset - off] = value;
7054     }
7055   return MIN (len, total_bytes - off);
7056 }
7057 
7058 
7059 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7060    specified by EXPR into the buffer PTR of length LEN bytes.
7061    Return the number of bytes placed in the buffer, or zero
7062    upon failure.  */
7063 
7064 static int
7065 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7066 {
7067   tree type = TREE_TYPE (expr);
7068   machine_mode mode = TYPE_MODE (type);
7069   int total_bytes = GET_MODE_SIZE (mode);
7070   FIXED_VALUE_TYPE value;
7071   tree i_value, i_type;
7072 
7073   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7074     return 0;
7075 
7076   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7077 
7078   if (NULL_TREE == i_type
7079       || TYPE_PRECISION (i_type) != total_bytes)
7080     return 0;
7081 
7082   value = TREE_FIXED_CST (expr);
7083   i_value = double_int_to_tree (i_type, value.data);
7084 
7085   return native_encode_int (i_value, ptr, len, off);
7086 }
7087 
7088 
7089 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7090    specified by EXPR into the buffer PTR of length LEN bytes.
7091    Return the number of bytes placed in the buffer, or zero
7092    upon failure.  */
7093 
7094 static int
7095 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7096 {
7097   tree type = TREE_TYPE (expr);
7098   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7099   int byte, offset, word, words, bitpos;
7100   unsigned char value;
7101 
7102   /* There are always 32 bits in each long, no matter the size of
7103      the hosts long.  We handle floating point representations with
7104      up to 192 bits.  */
7105   long tmp[6];
7106 
7107   if ((off == -1 && total_bytes > len)
7108       || off >= total_bytes)
7109     return 0;
7110   if (off == -1)
7111     off = 0;
7112   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7113 
7114   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7115 
7116   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7117        bitpos += BITS_PER_UNIT)
7118     {
7119       byte = (bitpos / BITS_PER_UNIT) & 3;
7120       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7121 
7122       if (UNITS_PER_WORD < 4)
7123 	{
7124 	  word = byte / UNITS_PER_WORD;
7125 	  if (WORDS_BIG_ENDIAN)
7126 	    word = (words - 1) - word;
7127 	  offset = word * UNITS_PER_WORD;
7128 	  if (BYTES_BIG_ENDIAN)
7129 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7130 	  else
7131 	    offset += byte % UNITS_PER_WORD;
7132 	}
7133       else
7134 	{
7135 	  offset = byte;
7136 	  if (BYTES_BIG_ENDIAN)
7137 	    {
7138 	      /* Reverse bytes within each long, or within the entire float
7139 		 if it's smaller than a long (for HFmode).  */
7140 	      offset = MIN (3, total_bytes - 1) - offset;
7141 	      gcc_assert (offset >= 0);
7142 	    }
7143 	}
7144       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7145       if (offset >= off
7146 	  && offset - off < len)
7147 	ptr[offset - off] = value;
7148     }
7149   return MIN (len, total_bytes - off);
7150 }
7151 
7152 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7153    specified by EXPR into the buffer PTR of length LEN bytes.
7154    Return the number of bytes placed in the buffer, or zero
7155    upon failure.  */
7156 
7157 static int
7158 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7159 {
7160   int rsize, isize;
7161   tree part;
7162 
7163   part = TREE_REALPART (expr);
7164   rsize = native_encode_expr (part, ptr, len, off);
7165   if (off == -1
7166       && rsize == 0)
7167     return 0;
7168   part = TREE_IMAGPART (expr);
7169   if (off != -1)
7170     off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7171   isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7172   if (off == -1
7173       && isize != rsize)
7174     return 0;
7175   return rsize + isize;
7176 }
7177 
7178 
7179 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7180    specified by EXPR into the buffer PTR of length LEN bytes.
7181    Return the number of bytes placed in the buffer, or zero
7182    upon failure.  */
7183 
7184 static int
7185 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7186 {
7187   unsigned i, count;
7188   int size, offset;
7189   tree itype, elem;
7190 
7191   offset = 0;
7192   count = VECTOR_CST_NELTS (expr);
7193   itype = TREE_TYPE (TREE_TYPE (expr));
7194   size = GET_MODE_SIZE (TYPE_MODE (itype));
7195   for (i = 0; i < count; i++)
7196     {
7197       if (off >= size)
7198 	{
7199 	  off -= size;
7200 	  continue;
7201 	}
7202       elem = VECTOR_CST_ELT (expr, i);
7203       int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7204       if ((off == -1 && res != size)
7205 	  || res == 0)
7206 	return 0;
7207       offset += res;
7208       if (offset >= len)
7209 	return (off == -1 && i < count - 1) ? 0 : offset;
7210       if (off != -1)
7211 	off = 0;
7212     }
7213   return offset;
7214 }
7215 
7216 
7217 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7218    specified by EXPR into the buffer PTR of length LEN bytes.
7219    Return the number of bytes placed in the buffer, or zero
7220    upon failure.  */
7221 
7222 static int
7223 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7224 {
7225   if (! can_native_encode_string_p (expr))
7226     return 0;
7227 
7228   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7229   if ((off == -1 && total_bytes > len)
7230       || off >= total_bytes)
7231     return 0;
7232   if (off == -1)
7233     off = 0;
7234   if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7235     {
7236       int written = 0;
7237       if (off < TREE_STRING_LENGTH (expr))
7238 	{
7239 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7240 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7241 	}
7242       memset (ptr + written, 0,
7243 	      MIN (total_bytes - written, len - written));
7244     }
7245   else
7246     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7247   return MIN (total_bytes - off, len);
7248 }
7249 
7250 
7251 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7252    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7253    buffer PTR of length LEN bytes.  If OFF is not -1 then start
7254    the encoding at byte offset OFF and encode at most LEN bytes.
7255    Return the number of bytes placed in the buffer, or zero upon failure.  */
7256 
7257 int
7258 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7259 {
7260   /* We don't support starting at negative offset and -1 is special.  */
7261   if (off < -1)
7262     return 0;
7263 
7264   switch (TREE_CODE (expr))
7265     {
7266     case INTEGER_CST:
7267       return native_encode_int (expr, ptr, len, off);
7268 
7269     case REAL_CST:
7270       return native_encode_real (expr, ptr, len, off);
7271 
7272     case FIXED_CST:
7273       return native_encode_fixed (expr, ptr, len, off);
7274 
7275     case COMPLEX_CST:
7276       return native_encode_complex (expr, ptr, len, off);
7277 
7278     case VECTOR_CST:
7279       return native_encode_vector (expr, ptr, len, off);
7280 
7281     case STRING_CST:
7282       return native_encode_string (expr, ptr, len, off);
7283 
7284     default:
7285       return 0;
7286     }
7287 }
7288 
7289 
7290 /* Subroutine of native_interpret_expr.  Interpret the contents of
7291    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292    If the buffer cannot be interpreted, return NULL_TREE.  */
7293 
7294 static tree
7295 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 {
7297   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 
7299   if (total_bytes > len
7300       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7301     return NULL_TREE;
7302 
7303   wide_int result = wi::from_buffer (ptr, total_bytes);
7304 
7305   return wide_int_to_tree (type, result);
7306 }
7307 
7308 
7309 /* Subroutine of native_interpret_expr.  Interpret the contents of
7310    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311    If the buffer cannot be interpreted, return NULL_TREE.  */
7312 
7313 static tree
7314 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 {
7316   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7317   double_int result;
7318   FIXED_VALUE_TYPE fixed_value;
7319 
7320   if (total_bytes > len
7321       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7322     return NULL_TREE;
7323 
7324   result = double_int::from_buffer (ptr, total_bytes);
7325   fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326 
7327   return build_fixed (type, fixed_value);
7328 }
7329 
7330 
7331 /* Subroutine of native_interpret_expr.  Interpret the contents of
7332    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333    If the buffer cannot be interpreted, return NULL_TREE.  */
7334 
7335 static tree
7336 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 {
7338   machine_mode mode = TYPE_MODE (type);
7339   int total_bytes = GET_MODE_SIZE (mode);
7340   unsigned char value;
7341   /* There are always 32 bits in each long, no matter the size of
7342      the hosts long.  We handle floating point representations with
7343      up to 192 bits.  */
7344   REAL_VALUE_TYPE r;
7345   long tmp[6];
7346 
7347   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348   if (total_bytes > len || total_bytes > 24)
7349     return NULL_TREE;
7350   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7351 
7352   memset (tmp, 0, sizeof (tmp));
7353   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354        bitpos += BITS_PER_UNIT)
7355     {
7356       /* Both OFFSET and BYTE index within a long;
7357 	 bitpos indexes the whole float.  */
7358       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7359       if (UNITS_PER_WORD < 4)
7360 	{
7361 	  int word = byte / UNITS_PER_WORD;
7362 	  if (WORDS_BIG_ENDIAN)
7363 	    word = (words - 1) - word;
7364 	  offset = word * UNITS_PER_WORD;
7365 	  if (BYTES_BIG_ENDIAN)
7366 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7367 	  else
7368 	    offset += byte % UNITS_PER_WORD;
7369 	}
7370       else
7371 	{
7372 	  offset = byte;
7373 	  if (BYTES_BIG_ENDIAN)
7374 	    {
7375 	      /* Reverse bytes within each long, or within the entire float
7376 		 if it's smaller than a long (for HFmode).  */
7377 	      offset = MIN (3, total_bytes - 1) - offset;
7378 	      gcc_assert (offset >= 0);
7379 	    }
7380 	}
7381       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7382 
7383       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7384     }
7385 
7386   real_from_target (&r, tmp, mode);
7387   return build_real (type, r);
7388 }
7389 
7390 
7391 /* Subroutine of native_interpret_expr.  Interpret the contents of
7392    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7393    If the buffer cannot be interpreted, return NULL_TREE.  */
7394 
7395 static tree
7396 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7397 {
7398   tree etype, rpart, ipart;
7399   int size;
7400 
7401   etype = TREE_TYPE (type);
7402   size = GET_MODE_SIZE (TYPE_MODE (etype));
7403   if (size * 2 > len)
7404     return NULL_TREE;
7405   rpart = native_interpret_expr (etype, ptr, size);
7406   if (!rpart)
7407     return NULL_TREE;
7408   ipart = native_interpret_expr (etype, ptr+size, size);
7409   if (!ipart)
7410     return NULL_TREE;
7411   return build_complex (type, rpart, ipart);
7412 }
7413 
7414 
7415 /* Subroutine of native_interpret_expr.  Interpret the contents of
7416    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7417    If the buffer cannot be interpreted, return NULL_TREE.  */
7418 
7419 static tree
7420 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7421 {
7422   tree etype, elem;
7423   int i, size, count;
7424   tree *elements;
7425 
7426   etype = TREE_TYPE (type);
7427   size = GET_MODE_SIZE (TYPE_MODE (etype));
7428   count = TYPE_VECTOR_SUBPARTS (type);
7429   if (size * count > len)
7430     return NULL_TREE;
7431 
7432   elements = XALLOCAVEC (tree, count);
7433   for (i = count - 1; i >= 0; i--)
7434     {
7435       elem = native_interpret_expr (etype, ptr+(i*size), size);
7436       if (!elem)
7437 	return NULL_TREE;
7438       elements[i] = elem;
7439     }
7440   return build_vector (type, elements);
7441 }
7442 
7443 
7444 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7445    the buffer PTR of length LEN as a constant of type TYPE.  For
7446    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7447    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7448    return NULL_TREE.  */
7449 
7450 tree
7451 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7452 {
7453   switch (TREE_CODE (type))
7454     {
7455     case INTEGER_TYPE:
7456     case ENUMERAL_TYPE:
7457     case BOOLEAN_TYPE:
7458     case POINTER_TYPE:
7459     case REFERENCE_TYPE:
7460       return native_interpret_int (type, ptr, len);
7461 
7462     case REAL_TYPE:
7463       return native_interpret_real (type, ptr, len);
7464 
7465     case FIXED_POINT_TYPE:
7466       return native_interpret_fixed (type, ptr, len);
7467 
7468     case COMPLEX_TYPE:
7469       return native_interpret_complex (type, ptr, len);
7470 
7471     case VECTOR_TYPE:
7472       return native_interpret_vector (type, ptr, len);
7473 
7474     default:
7475       return NULL_TREE;
7476     }
7477 }
7478 
7479 /* Returns true if we can interpret the contents of a native encoding
7480    as TYPE.  */
7481 
7482 static bool
7483 can_native_interpret_type_p (tree type)
7484 {
7485   switch (TREE_CODE (type))
7486     {
7487     case INTEGER_TYPE:
7488     case ENUMERAL_TYPE:
7489     case BOOLEAN_TYPE:
7490     case POINTER_TYPE:
7491     case REFERENCE_TYPE:
7492     case FIXED_POINT_TYPE:
7493     case REAL_TYPE:
7494     case COMPLEX_TYPE:
7495     case VECTOR_TYPE:
7496       return true;
7497     default:
7498       return false;
7499     }
7500 }
7501 
7502 /* Return true iff a constant of type TYPE is accepted by
7503    native_encode_expr.  */
7504 
7505 bool
7506 can_native_encode_type_p (tree type)
7507 {
7508   switch (TREE_CODE (type))
7509     {
7510     case INTEGER_TYPE:
7511     case REAL_TYPE:
7512     case FIXED_POINT_TYPE:
7513     case COMPLEX_TYPE:
7514     case VECTOR_TYPE:
7515     case POINTER_TYPE:
7516       return true;
7517     default:
7518       return false;
7519     }
7520 }
7521 
7522 /* Return true iff a STRING_CST S is accepted by
7523    native_encode_expr.  */
7524 
7525 bool
7526 can_native_encode_string_p (const_tree expr)
7527 {
7528   tree type = TREE_TYPE (expr);
7529 
7530   if (TREE_CODE (type) != ARRAY_TYPE
7531       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7532       || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT)
7533       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7534     return false;
7535   return true;
7536 }
7537 
7538 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7539    TYPE at compile-time.  If we're unable to perform the conversion
7540    return NULL_TREE.  */
7541 
7542 static tree
7543 fold_view_convert_expr (tree type, tree expr)
7544 {
7545   /* We support up to 512-bit values (for V8DFmode).  */
7546   unsigned char buffer[64];
7547   int len;
7548 
7549   /* Check that the host and target are sane.  */
7550   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7551     return NULL_TREE;
7552 
7553   len = native_encode_expr (expr, buffer, sizeof (buffer));
7554   if (len == 0)
7555     return NULL_TREE;
7556 
7557   return native_interpret_expr (type, buffer, len);
7558 }
7559 
7560 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7561    to avoid confusing the gimplify process.  */
7562 
7563 tree
7564 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7565 {
7566   /* The size of the object is not relevant when talking about its address.  */
7567   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7568     t = TREE_OPERAND (t, 0);
7569 
7570   if (TREE_CODE (t) == INDIRECT_REF)
7571     {
7572       t = TREE_OPERAND (t, 0);
7573 
7574       if (TREE_TYPE (t) != ptrtype)
7575 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7576     }
7577   else if (TREE_CODE (t) == MEM_REF
7578 	   && integer_zerop (TREE_OPERAND (t, 1)))
7579     return TREE_OPERAND (t, 0);
7580   else if (TREE_CODE (t) == MEM_REF
7581 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7582     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7583 			TREE_OPERAND (t, 0),
7584 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7585   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7586     {
7587       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7588 
7589       if (TREE_TYPE (t) != ptrtype)
7590 	t = fold_convert_loc (loc, ptrtype, t);
7591     }
7592   else
7593     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7594 
7595   return t;
7596 }
7597 
7598 /* Build an expression for the address of T.  */
7599 
7600 tree
7601 build_fold_addr_expr_loc (location_t loc, tree t)
7602 {
7603   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7604 
7605   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7606 }
7607 
7608 /* Fold a unary expression of code CODE and type TYPE with operand
7609    OP0.  Return the folded expression if folding is successful.
7610    Otherwise, return NULL_TREE.  */
7611 
7612 tree
7613 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7614 {
7615   tree tem;
7616   tree arg0;
7617   enum tree_code_class kind = TREE_CODE_CLASS (code);
7618 
7619   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7620 	      && TREE_CODE_LENGTH (code) == 1);
7621 
7622   arg0 = op0;
7623   if (arg0)
7624     {
7625       if (CONVERT_EXPR_CODE_P (code)
7626 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7627 	{
7628 	  /* Don't use STRIP_NOPS, because signedness of argument type
7629 	     matters.  */
7630 	  STRIP_SIGN_NOPS (arg0);
7631 	}
7632       else
7633 	{
7634 	  /* Strip any conversions that don't change the mode.  This
7635 	     is safe for every expression, except for a comparison
7636 	     expression because its signedness is derived from its
7637 	     operands.
7638 
7639 	     Note that this is done as an internal manipulation within
7640 	     the constant folder, in order to find the simplest
7641 	     representation of the arguments so that their form can be
7642 	     studied.  In any cases, the appropriate type conversions
7643 	     should be put back in the tree that will get out of the
7644 	     constant folder.  */
7645 	  STRIP_NOPS (arg0);
7646 	}
7647 
7648       if (CONSTANT_CLASS_P (arg0))
7649 	{
7650 	  tree tem = const_unop (code, type, arg0);
7651 	  if (tem)
7652 	    {
7653 	      if (TREE_TYPE (tem) != type)
7654 		tem = fold_convert_loc (loc, type, tem);
7655 	      return tem;
7656 	    }
7657 	}
7658     }
7659 
7660   tem = generic_simplify (loc, code, type, op0);
7661   if (tem)
7662     return tem;
7663 
7664   if (TREE_CODE_CLASS (code) == tcc_unary)
7665     {
7666       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7667 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7668 		       fold_build1_loc (loc, code, type,
7669 				    fold_convert_loc (loc, TREE_TYPE (op0),
7670 						      TREE_OPERAND (arg0, 1))));
7671       else if (TREE_CODE (arg0) == COND_EXPR)
7672 	{
7673 	  tree arg01 = TREE_OPERAND (arg0, 1);
7674 	  tree arg02 = TREE_OPERAND (arg0, 2);
7675 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7676 	    arg01 = fold_build1_loc (loc, code, type,
7677 				 fold_convert_loc (loc,
7678 						   TREE_TYPE (op0), arg01));
7679 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7680 	    arg02 = fold_build1_loc (loc, code, type,
7681 				 fold_convert_loc (loc,
7682 						   TREE_TYPE (op0), arg02));
7683 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7684 			     arg01, arg02);
7685 
7686 	  /* If this was a conversion, and all we did was to move into
7687 	     inside the COND_EXPR, bring it back out.  But leave it if
7688 	     it is a conversion from integer to integer and the
7689 	     result precision is no wider than a word since such a
7690 	     conversion is cheap and may be optimized away by combine,
7691 	     while it couldn't if it were outside the COND_EXPR.  Then return
7692 	     so we don't get into an infinite recursion loop taking the
7693 	     conversion out and then back in.  */
7694 
7695 	  if ((CONVERT_EXPR_CODE_P (code)
7696 	       || code == NON_LVALUE_EXPR)
7697 	      && TREE_CODE (tem) == COND_EXPR
7698 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7699 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7700 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7701 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7702 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7703 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7704 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7705 		     && (INTEGRAL_TYPE_P
7706 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7707 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7708 		  || flag_syntax_only))
7709 	    tem = build1_loc (loc, code, type,
7710 			      build3 (COND_EXPR,
7711 				      TREE_TYPE (TREE_OPERAND
7712 						 (TREE_OPERAND (tem, 1), 0)),
7713 				      TREE_OPERAND (tem, 0),
7714 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7715 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7716 						    0)));
7717 	  return tem;
7718 	}
7719    }
7720 
7721   switch (code)
7722     {
7723     case NON_LVALUE_EXPR:
7724       if (!maybe_lvalue_p (op0))
7725 	return fold_convert_loc (loc, type, op0);
7726       return NULL_TREE;
7727 
7728     CASE_CONVERT:
7729     case FLOAT_EXPR:
7730     case FIX_TRUNC_EXPR:
7731       if (COMPARISON_CLASS_P (op0))
7732 	{
7733 	  /* If we have (type) (a CMP b) and type is an integral type, return
7734 	     new expression involving the new type.  Canonicalize
7735 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7736 	     non-integral type.
7737 	     Do not fold the result as that would not simplify further, also
7738 	     folding again results in recursions.  */
7739 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7740 	    return build2_loc (loc, TREE_CODE (op0), type,
7741 			       TREE_OPERAND (op0, 0),
7742 			       TREE_OPERAND (op0, 1));
7743 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7744 		   && TREE_CODE (type) != VECTOR_TYPE)
7745 	    return build3_loc (loc, COND_EXPR, type, op0,
7746 			       constant_boolean_node (true, type),
7747 			       constant_boolean_node (false, type));
7748 	}
7749 
7750       /* Handle (T *)&A.B.C for A being of type T and B and C
7751 	 living at offset zero.  This occurs frequently in
7752 	 C++ upcasting and then accessing the base.  */
7753       if (TREE_CODE (op0) == ADDR_EXPR
7754 	  && POINTER_TYPE_P (type)
7755 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7756         {
7757 	  HOST_WIDE_INT bitsize, bitpos;
7758 	  tree offset;
7759 	  machine_mode mode;
7760 	  int unsignedp, reversep, volatilep;
7761 	  tree base
7762 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7763 				   &offset, &mode, &unsignedp, &reversep,
7764 				   &volatilep);
7765 	  /* If the reference was to a (constant) zero offset, we can use
7766 	     the address of the base if it has the same base type
7767 	     as the result type and the pointer type is unqualified.  */
7768 	  if (! offset && bitpos == 0
7769 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7770 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7771 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7772 	    return fold_convert_loc (loc, type,
7773 				     build_fold_addr_expr_loc (loc, base));
7774         }
7775 
7776       if (TREE_CODE (op0) == MODIFY_EXPR
7777 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7778 	  /* Detect assigning a bitfield.  */
7779 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7780 	       && DECL_BIT_FIELD
7781 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7782 	{
7783 	  /* Don't leave an assignment inside a conversion
7784 	     unless assigning a bitfield.  */
7785 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7786 	  /* First do the assignment, then return converted constant.  */
7787 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7788 	  TREE_NO_WARNING (tem) = 1;
7789 	  TREE_USED (tem) = 1;
7790 	  return tem;
7791 	}
7792 
7793       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7794 	 constants (if x has signed type, the sign bit cannot be set
7795 	 in c).  This folds extension into the BIT_AND_EXPR.
7796 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7797 	 very likely don't have maximal range for their precision and this
7798 	 transformation effectively doesn't preserve non-maximal ranges.  */
7799       if (TREE_CODE (type) == INTEGER_TYPE
7800 	  && TREE_CODE (op0) == BIT_AND_EXPR
7801 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7802 	{
7803 	  tree and_expr = op0;
7804 	  tree and0 = TREE_OPERAND (and_expr, 0);
7805 	  tree and1 = TREE_OPERAND (and_expr, 1);
7806 	  int change = 0;
7807 
7808 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7809 	      || (TYPE_PRECISION (type)
7810 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7811 	    change = 1;
7812 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7813 		   <= HOST_BITS_PER_WIDE_INT
7814 		   && tree_fits_uhwi_p (and1))
7815 	    {
7816 	      unsigned HOST_WIDE_INT cst;
7817 
7818 	      cst = tree_to_uhwi (and1);
7819 	      cst &= HOST_WIDE_INT_M1U
7820 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7821 	      change = (cst == 0);
7822 	      if (change
7823 		  && !flag_syntax_only
7824 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7825 		      == ZERO_EXTEND))
7826 		{
7827 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7828 		  and0 = fold_convert_loc (loc, uns, and0);
7829 		  and1 = fold_convert_loc (loc, uns, and1);
7830 		}
7831 	    }
7832 	  if (change)
7833 	    {
7834 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7835 				    TREE_OVERFLOW (and1));
7836 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7837 				      fold_convert_loc (loc, type, and0), tem);
7838 	    }
7839 	}
7840 
7841       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7842 	 cast (T1)X will fold away.  We assume that this happens when X itself
7843 	 is a cast.  */
7844       if (POINTER_TYPE_P (type)
7845 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7846 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7847 	{
7848 	  tree arg00 = TREE_OPERAND (arg0, 0);
7849 	  tree arg01 = TREE_OPERAND (arg0, 1);
7850 
7851 	  return fold_build_pointer_plus_loc
7852 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7853 	}
7854 
7855       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7856 	 of the same precision, and X is an integer type not narrower than
7857 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7858       if (INTEGRAL_TYPE_P (type)
7859 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7860 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7861 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7862 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7863 	{
7864 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7865 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7866 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7867 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7868 				fold_convert_loc (loc, type, tem));
7869 	}
7870 
7871       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7872 	 type of X and Y (integer types only).  */
7873       if (INTEGRAL_TYPE_P (type)
7874 	  && TREE_CODE (op0) == MULT_EXPR
7875 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7876 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7877 	{
7878 	  /* Be careful not to introduce new overflows.  */
7879 	  tree mult_type;
7880           if (TYPE_OVERFLOW_WRAPS (type))
7881 	    mult_type = type;
7882 	  else
7883 	    mult_type = unsigned_type_for (type);
7884 
7885 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7886 	    {
7887 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7888 				 fold_convert_loc (loc, mult_type,
7889 						   TREE_OPERAND (op0, 0)),
7890 				 fold_convert_loc (loc, mult_type,
7891 						   TREE_OPERAND (op0, 1)));
7892 	      return fold_convert_loc (loc, type, tem);
7893 	    }
7894 	}
7895 
7896       return NULL_TREE;
7897 
7898     case VIEW_CONVERT_EXPR:
7899       if (TREE_CODE (op0) == MEM_REF)
7900         {
7901 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7902 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7903 	  tem = fold_build2_loc (loc, MEM_REF, type,
7904 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7905 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7906 	  return tem;
7907 	}
7908 
7909       return NULL_TREE;
7910 
7911     case NEGATE_EXPR:
7912       tem = fold_negate_expr (loc, arg0);
7913       if (tem)
7914 	return fold_convert_loc (loc, type, tem);
7915       return NULL_TREE;
7916 
7917     case ABS_EXPR:
7918       /* Convert fabs((double)float) into (double)fabsf(float).  */
7919       if (TREE_CODE (arg0) == NOP_EXPR
7920 	  && TREE_CODE (type) == REAL_TYPE)
7921 	{
7922 	  tree targ0 = strip_float_extensions (arg0);
7923 	  if (targ0 != arg0)
7924 	    return fold_convert_loc (loc, type,
7925 				     fold_build1_loc (loc, ABS_EXPR,
7926 						  TREE_TYPE (targ0),
7927 						  targ0));
7928 	}
7929       return NULL_TREE;
7930 
7931     case BIT_NOT_EXPR:
7932       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
7933       if (TREE_CODE (arg0) == BIT_XOR_EXPR
7934 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7935 				    fold_convert_loc (loc, type,
7936 						      TREE_OPERAND (arg0, 0)))))
7937 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7938 				fold_convert_loc (loc, type,
7939 						  TREE_OPERAND (arg0, 1)));
7940       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7941 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7942 			       	     fold_convert_loc (loc, type,
7943 						       TREE_OPERAND (arg0, 1)))))
7944 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7945 			    fold_convert_loc (loc, type,
7946 					      TREE_OPERAND (arg0, 0)), tem);
7947 
7948       return NULL_TREE;
7949 
7950     case TRUTH_NOT_EXPR:
7951       /* Note that the operand of this must be an int
7952 	 and its values must be 0 or 1.
7953 	 ("true" is a fixed value perhaps depending on the language,
7954 	 but we don't handle values other than 1 correctly yet.)  */
7955       tem = fold_truth_not_expr (loc, arg0);
7956       if (!tem)
7957 	return NULL_TREE;
7958       return fold_convert_loc (loc, type, tem);
7959 
7960     case INDIRECT_REF:
7961       /* Fold *&X to X if X is an lvalue.  */
7962       if (TREE_CODE (op0) == ADDR_EXPR)
7963 	{
7964 	  tree op00 = TREE_OPERAND (op0, 0);
7965 	  if ((VAR_P (op00)
7966 	       || TREE_CODE (op00) == PARM_DECL
7967 	       || TREE_CODE (op00) == RESULT_DECL)
7968 	      && !TREE_READONLY (op00))
7969 	    return op00;
7970 	}
7971       return NULL_TREE;
7972 
7973     default:
7974       return NULL_TREE;
7975     } /* switch (code) */
7976 }
7977 
7978 
7979 /* If the operation was a conversion do _not_ mark a resulting constant
7980    with TREE_OVERFLOW if the original constant was not.  These conversions
7981    have implementation defined behavior and retaining the TREE_OVERFLOW
7982    flag here would confuse later passes such as VRP.  */
7983 tree
7984 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7985 				tree type, tree op0)
7986 {
7987   tree res = fold_unary_loc (loc, code, type, op0);
7988   if (res
7989       && TREE_CODE (res) == INTEGER_CST
7990       && TREE_CODE (op0) == INTEGER_CST
7991       && CONVERT_EXPR_CODE_P (code))
7992     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7993 
7994   return res;
7995 }
7996 
7997 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7998    operands OP0 and OP1.  LOC is the location of the resulting expression.
7999    ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8000    Return the folded expression if folding is successful.  Otherwise,
8001    return NULL_TREE.  */
8002 static tree
8003 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8004 		  tree arg0, tree arg1, tree op0, tree op1)
8005 {
8006   tree tem;
8007 
8008   /* We only do these simplifications if we are optimizing.  */
8009   if (!optimize)
8010     return NULL_TREE;
8011 
8012   /* Check for things like (A || B) && (A || C).  We can convert this
8013      to A || (B && C).  Note that either operator can be any of the four
8014      truth and/or operations and the transformation will still be
8015      valid.   Also note that we only care about order for the
8016      ANDIF and ORIF operators.  If B contains side effects, this
8017      might change the truth-value of A.  */
8018   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8019       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8020 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8021 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8022 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8023       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8024     {
8025       tree a00 = TREE_OPERAND (arg0, 0);
8026       tree a01 = TREE_OPERAND (arg0, 1);
8027       tree a10 = TREE_OPERAND (arg1, 0);
8028       tree a11 = TREE_OPERAND (arg1, 1);
8029       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8030 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8031 			 && (code == TRUTH_AND_EXPR
8032 			     || code == TRUTH_OR_EXPR));
8033 
8034       if (operand_equal_p (a00, a10, 0))
8035 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8036 			    fold_build2_loc (loc, code, type, a01, a11));
8037       else if (commutative && operand_equal_p (a00, a11, 0))
8038 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8039 			    fold_build2_loc (loc, code, type, a01, a10));
8040       else if (commutative && operand_equal_p (a01, a10, 0))
8041 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8042 			    fold_build2_loc (loc, code, type, a00, a11));
8043 
8044       /* This case if tricky because we must either have commutative
8045 	 operators or else A10 must not have side-effects.  */
8046 
8047       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8048 	       && operand_equal_p (a01, a11, 0))
8049 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8050 			    fold_build2_loc (loc, code, type, a00, a10),
8051 			    a01);
8052     }
8053 
8054   /* See if we can build a range comparison.  */
8055   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8056     return tem;
8057 
8058   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8059       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8060     {
8061       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8062       if (tem)
8063 	return fold_build2_loc (loc, code, type, tem, arg1);
8064     }
8065 
8066   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8067       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8068     {
8069       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8070       if (tem)
8071 	return fold_build2_loc (loc, code, type, arg0, tem);
8072     }
8073 
8074   /* Check for the possibility of merging component references.  If our
8075      lhs is another similar operation, try to merge its rhs with our
8076      rhs.  Then try to merge our lhs and rhs.  */
8077   if (TREE_CODE (arg0) == code
8078       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8079 					 TREE_OPERAND (arg0, 1), arg1)))
8080     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8081 
8082   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8083     return tem;
8084 
8085   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8086       && (code == TRUTH_AND_EXPR
8087           || code == TRUTH_ANDIF_EXPR
8088           || code == TRUTH_OR_EXPR
8089           || code == TRUTH_ORIF_EXPR))
8090     {
8091       enum tree_code ncode, icode;
8092 
8093       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8094 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8095       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8096 
8097       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8098 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8099 	 We don't want to pack more than two leafs to a non-IF AND/OR
8100 	 expression.
8101 	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8102 	 equal to IF-CODE, then we don't want to add right-hand operand.
8103 	 If the inner right-hand side of left-hand operand has
8104 	 side-effects, or isn't simple, then we can't add to it,
8105 	 as otherwise we might destroy if-sequence.  */
8106       if (TREE_CODE (arg0) == icode
8107 	  && simple_operand_p_2 (arg1)
8108 	  /* Needed for sequence points to handle trappings, and
8109 	     side-effects.  */
8110 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8111 	{
8112 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8113 				 arg1);
8114 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8115 				  tem);
8116 	}
8117 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8118 	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
8119       else if (TREE_CODE (arg1) == icode
8120 	  && simple_operand_p_2 (arg0)
8121 	  /* Needed for sequence points to handle trappings, and
8122 	     side-effects.  */
8123 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8124 	{
8125 	  tem = fold_build2_loc (loc, ncode, type,
8126 				 arg0, TREE_OPERAND (arg1, 0));
8127 	  return fold_build2_loc (loc, icode, type, tem,
8128 				  TREE_OPERAND (arg1, 1));
8129 	}
8130       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8131 	 into (A OR B).
8132 	 For sequence point consistancy, we need to check for trapping,
8133 	 and side-effects.  */
8134       else if (code == icode && simple_operand_p_2 (arg0)
8135                && simple_operand_p_2 (arg1))
8136 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8137     }
8138 
8139   return NULL_TREE;
8140 }
8141 
8142 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8143    by changing CODE to reduce the magnitude of constants involved in
8144    ARG0 of the comparison.
8145    Returns a canonicalized comparison tree if a simplification was
8146    possible, otherwise returns NULL_TREE.
8147    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8148    valid if signed overflow is undefined.  */
8149 
8150 static tree
8151 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8152 				 tree arg0, tree arg1,
8153 				 bool *strict_overflow_p)
8154 {
8155   enum tree_code code0 = TREE_CODE (arg0);
8156   tree t, cst0 = NULL_TREE;
8157   int sgn0;
8158 
8159   /* Match A +- CST code arg1.  We can change this only if overflow
8160      is undefined.  */
8161   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8162 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8163 	/* In principle pointers also have undefined overflow behavior,
8164 	   but that causes problems elsewhere.  */
8165 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8166 	&& (code0 == MINUS_EXPR
8167 	    || code0 == PLUS_EXPR)
8168 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8169     return NULL_TREE;
8170 
8171   /* Identify the constant in arg0 and its sign.  */
8172   cst0 = TREE_OPERAND (arg0, 1);
8173   sgn0 = tree_int_cst_sgn (cst0);
8174 
8175   /* Overflowed constants and zero will cause problems.  */
8176   if (integer_zerop (cst0)
8177       || TREE_OVERFLOW (cst0))
8178     return NULL_TREE;
8179 
8180   /* See if we can reduce the magnitude of the constant in
8181      arg0 by changing the comparison code.  */
8182   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8183   if (code == LT_EXPR
8184       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8185     code = LE_EXPR;
8186   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8187   else if (code == GT_EXPR
8188 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8189     code = GE_EXPR;
8190   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8191   else if (code == LE_EXPR
8192 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8193     code = LT_EXPR;
8194   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8195   else if (code == GE_EXPR
8196 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8197     code = GT_EXPR;
8198   else
8199     return NULL_TREE;
8200   *strict_overflow_p = true;
8201 
8202   /* Now build the constant reduced in magnitude.  But not if that
8203      would produce one outside of its types range.  */
8204   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8205       && ((sgn0 == 1
8206 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8207 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8208 	  || (sgn0 == -1
8209 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8210 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8211     return NULL_TREE;
8212 
8213   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8214 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8215   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8216   t = fold_convert (TREE_TYPE (arg1), t);
8217 
8218   return fold_build2_loc (loc, code, type, t, arg1);
8219 }
8220 
8221 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8222    overflow further.  Try to decrease the magnitude of constants involved
8223    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8224    and put sole constants at the second argument position.
8225    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8226 
8227 static tree
8228 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8229 			       tree arg0, tree arg1)
8230 {
8231   tree t;
8232   bool strict_overflow_p;
8233   const char * const warnmsg = G_("assuming signed overflow does not occur "
8234 				  "when reducing constant in comparison");
8235 
8236   /* Try canonicalization by simplifying arg0.  */
8237   strict_overflow_p = false;
8238   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8239 				       &strict_overflow_p);
8240   if (t)
8241     {
8242       if (strict_overflow_p)
8243 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8244       return t;
8245     }
8246 
8247   /* Try canonicalization by simplifying arg1 using the swapped
8248      comparison.  */
8249   code = swap_tree_comparison (code);
8250   strict_overflow_p = false;
8251   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8252 				       &strict_overflow_p);
8253   if (t && strict_overflow_p)
8254     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8255   return t;
8256 }
8257 
8258 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8259    space.  This is used to avoid issuing overflow warnings for
8260    expressions like &p->x which can not wrap.  */
8261 
8262 static bool
8263 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8264 {
8265   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8266     return true;
8267 
8268   if (bitpos < 0)
8269     return true;
8270 
8271   wide_int wi_offset;
8272   int precision = TYPE_PRECISION (TREE_TYPE (base));
8273   if (offset == NULL_TREE)
8274     wi_offset = wi::zero (precision);
8275   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8276     return true;
8277   else
8278     wi_offset = offset;
8279 
8280   bool overflow;
8281   wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8282   wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8283   if (overflow)
8284     return true;
8285 
8286   if (!wi::fits_uhwi_p (total))
8287     return true;
8288 
8289   HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8290   if (size <= 0)
8291     return true;
8292 
8293   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8294      array.  */
8295   if (TREE_CODE (base) == ADDR_EXPR)
8296     {
8297       HOST_WIDE_INT base_size;
8298 
8299       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8300       if (base_size > 0 && size < base_size)
8301 	size = base_size;
8302     }
8303 
8304   return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8305 }
8306 
8307 /* Return a positive integer when the symbol DECL is known to have
8308    a nonzero address, zero when it's known not to (e.g., it's a weak
8309    symbol), and a negative integer when the symbol is not yet in the
8310    symbol table and so whether or not its address is zero is unknown.
8311    For function local objects always return positive integer.  */
8312 static int
8313 maybe_nonzero_address (tree decl)
8314 {
8315   if (DECL_P (decl) && decl_in_symtab_p (decl))
8316     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8317       return symbol->nonzero_address ();
8318 
8319   /* Function local objects are never NULL.  */
8320   if (DECL_P (decl)
8321       && (DECL_CONTEXT (decl)
8322       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8323       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8324     return 1;
8325 
8326   return -1;
8327 }
8328 
8329 /* Subroutine of fold_binary.  This routine performs all of the
8330    transformations that are common to the equality/inequality
8331    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8332    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8333    fold_binary should call fold_binary.  Fold a comparison with
8334    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8335    the folded comparison or NULL_TREE.  */
8336 
8337 static tree
8338 fold_comparison (location_t loc, enum tree_code code, tree type,
8339 		 tree op0, tree op1)
8340 {
8341   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8342   tree arg0, arg1, tem;
8343 
8344   arg0 = op0;
8345   arg1 = op1;
8346 
8347   STRIP_SIGN_NOPS (arg0);
8348   STRIP_SIGN_NOPS (arg1);
8349 
8350   /* For comparisons of pointers we can decompose it to a compile time
8351      comparison of the base objects and the offsets into the object.
8352      This requires at least one operand being an ADDR_EXPR or a
8353      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8354   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8355       && (TREE_CODE (arg0) == ADDR_EXPR
8356 	  || TREE_CODE (arg1) == ADDR_EXPR
8357 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8358 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8359     {
8360       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8361       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8362       machine_mode mode;
8363       int volatilep, reversep, unsignedp;
8364       bool indirect_base0 = false, indirect_base1 = false;
8365 
8366       /* Get base and offset for the access.  Strip ADDR_EXPR for
8367 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8368 	 off the base object if possible.  indirect_baseN will be true
8369 	 if baseN is not an address but refers to the object itself.  */
8370       base0 = arg0;
8371       if (TREE_CODE (arg0) == ADDR_EXPR)
8372 	{
8373 	  base0
8374 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8375 				   &bitsize, &bitpos0, &offset0, &mode,
8376 				   &unsignedp, &reversep, &volatilep);
8377 	  if (TREE_CODE (base0) == INDIRECT_REF)
8378 	    base0 = TREE_OPERAND (base0, 0);
8379 	  else
8380 	    indirect_base0 = true;
8381 	}
8382       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8383 	{
8384 	  base0 = TREE_OPERAND (arg0, 0);
8385 	  STRIP_SIGN_NOPS (base0);
8386 	  if (TREE_CODE (base0) == ADDR_EXPR)
8387 	    {
8388 	      base0
8389 		= get_inner_reference (TREE_OPERAND (base0, 0),
8390 				       &bitsize, &bitpos0, &offset0, &mode,
8391 				       &unsignedp, &reversep, &volatilep);
8392 	      if (TREE_CODE (base0) == INDIRECT_REF)
8393 		base0 = TREE_OPERAND (base0, 0);
8394 	      else
8395 		indirect_base0 = true;
8396 	    }
8397 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8398 	    offset0 = TREE_OPERAND (arg0, 1);
8399 	  else
8400 	    offset0 = size_binop (PLUS_EXPR, offset0,
8401 				  TREE_OPERAND (arg0, 1));
8402 	  if (TREE_CODE (offset0) == INTEGER_CST)
8403 	    {
8404 	      offset_int tem = wi::sext (wi::to_offset (offset0),
8405 					 TYPE_PRECISION (sizetype));
8406 	      tem <<= LOG2_BITS_PER_UNIT;
8407 	      tem += bitpos0;
8408 	      if (wi::fits_shwi_p (tem))
8409 		{
8410 		  bitpos0 = tem.to_shwi ();
8411 		  offset0 = NULL_TREE;
8412 		}
8413 	    }
8414 	}
8415 
8416       base1 = arg1;
8417       if (TREE_CODE (arg1) == ADDR_EXPR)
8418 	{
8419 	  base1
8420 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8421 				   &bitsize, &bitpos1, &offset1, &mode,
8422 				   &unsignedp, &reversep, &volatilep);
8423 	  if (TREE_CODE (base1) == INDIRECT_REF)
8424 	    base1 = TREE_OPERAND (base1, 0);
8425 	  else
8426 	    indirect_base1 = true;
8427 	}
8428       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8429 	{
8430 	  base1 = TREE_OPERAND (arg1, 0);
8431 	  STRIP_SIGN_NOPS (base1);
8432 	  if (TREE_CODE (base1) == ADDR_EXPR)
8433 	    {
8434 	      base1
8435 		= get_inner_reference (TREE_OPERAND (base1, 0),
8436 				       &bitsize, &bitpos1, &offset1, &mode,
8437 				       &unsignedp, &reversep, &volatilep);
8438 	      if (TREE_CODE (base1) == INDIRECT_REF)
8439 		base1 = TREE_OPERAND (base1, 0);
8440 	      else
8441 		indirect_base1 = true;
8442 	    }
8443 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8444 	    offset1 = TREE_OPERAND (arg1, 1);
8445 	  else
8446 	    offset1 = size_binop (PLUS_EXPR, offset1,
8447 				  TREE_OPERAND (arg1, 1));
8448 	  if (TREE_CODE (offset1) == INTEGER_CST)
8449 	    {
8450 	      offset_int tem = wi::sext (wi::to_offset (offset1),
8451 					 TYPE_PRECISION (sizetype));
8452 	      tem <<= LOG2_BITS_PER_UNIT;
8453 	      tem += bitpos1;
8454 	      if (wi::fits_shwi_p (tem))
8455 		{
8456 		  bitpos1 = tem.to_shwi ();
8457 		  offset1 = NULL_TREE;
8458 		}
8459 	    }
8460 	}
8461 
8462       /* If we have equivalent bases we might be able to simplify.  */
8463       if (indirect_base0 == indirect_base1
8464 	  && operand_equal_p (base0, base1,
8465 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8466 	{
8467 	  /* We can fold this expression to a constant if the non-constant
8468 	     offset parts are equal.  */
8469 	  if ((offset0 == offset1
8470 	       || (offset0 && offset1
8471 		   && operand_equal_p (offset0, offset1, 0)))
8472 	      && (equality_code
8473 		  || (indirect_base0
8474 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8475 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
8476 
8477 	    {
8478 	      if (!equality_code
8479 		  && bitpos0 != bitpos1
8480 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8481 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8482 		fold_overflow_warning (("assuming pointer wraparound does not "
8483 					"occur when comparing P +- C1 with "
8484 					"P +- C2"),
8485 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8486 
8487 	      switch (code)
8488 		{
8489 		case EQ_EXPR:
8490 		  return constant_boolean_node (bitpos0 == bitpos1, type);
8491 		case NE_EXPR:
8492 		  return constant_boolean_node (bitpos0 != bitpos1, type);
8493 		case LT_EXPR:
8494 		  return constant_boolean_node (bitpos0 < bitpos1, type);
8495 		case LE_EXPR:
8496 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
8497 		case GE_EXPR:
8498 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
8499 		case GT_EXPR:
8500 		  return constant_boolean_node (bitpos0 > bitpos1, type);
8501 		default:;
8502 		}
8503 	    }
8504 	  /* We can simplify the comparison to a comparison of the variable
8505 	     offset parts if the constant offset parts are equal.
8506 	     Be careful to use signed sizetype here because otherwise we
8507 	     mess with array offsets in the wrong way.  This is possible
8508 	     because pointer arithmetic is restricted to retain within an
8509 	     object and overflow on pointer differences is undefined as of
8510 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8511 	  else if (bitpos0 == bitpos1
8512 		   && (equality_code
8513 		       || (indirect_base0
8514 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8515 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
8516 	    {
8517 	      /* By converting to signed sizetype we cover middle-end pointer
8518 	         arithmetic which operates on unsigned pointer types of size
8519 	         type size and ARRAY_REF offsets which are properly sign or
8520 	         zero extended from their type in case it is narrower than
8521 	         sizetype.  */
8522 	      if (offset0 == NULL_TREE)
8523 		offset0 = build_int_cst (ssizetype, 0);
8524 	      else
8525 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8526 	      if (offset1 == NULL_TREE)
8527 		offset1 = build_int_cst (ssizetype, 0);
8528 	      else
8529 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8530 
8531 	      if (!equality_code
8532 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8533 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8534 		fold_overflow_warning (("assuming pointer wraparound does not "
8535 					"occur when comparing P +- C1 with "
8536 					"P +- C2"),
8537 				       WARN_STRICT_OVERFLOW_COMPARISON);
8538 
8539 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8540 	    }
8541 	}
8542       /* For equal offsets we can simplify to a comparison of the
8543 	 base addresses.  */
8544       else if (bitpos0 == bitpos1
8545 	       && (indirect_base0
8546 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8547 	       && (indirect_base1
8548 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8549 	       && ((offset0 == offset1)
8550 		   || (offset0 && offset1
8551 		       && operand_equal_p (offset0, offset1, 0))))
8552 	{
8553 	  if (indirect_base0)
8554 	    base0 = build_fold_addr_expr_loc (loc, base0);
8555 	  if (indirect_base1)
8556 	    base1 = build_fold_addr_expr_loc (loc, base1);
8557 	  return fold_build2_loc (loc, code, type, base0, base1);
8558 	}
8559       /* Comparison between an ordinary (non-weak) symbol and a null
8560 	 pointer can be eliminated since such symbols must have a non
8561 	 null address.  In C, relational expressions between pointers
8562 	 to objects and null pointers are undefined.  The results
8563 	 below follow the C++ rules with the additional property that
8564 	 every object pointer compares greater than a null pointer.
8565       */
8566       else if (((DECL_P (base0)
8567 		 && maybe_nonzero_address (base0) > 0
8568 		 /* Avoid folding references to struct members at offset 0 to
8569 		    prevent tests like '&ptr->firstmember == 0' from getting
8570 		    eliminated.  When ptr is null, although the -> expression
8571 		    is strictly speaking invalid, GCC retains it as a matter
8572 		    of QoI.  See PR c/44555. */
8573 		 && (offset0 == NULL_TREE && bitpos0 != 0))
8574 		|| CONSTANT_CLASS_P (base0))
8575 	       && indirect_base0
8576 	       /* The caller guarantees that when one of the arguments is
8577 		  constant (i.e., null in this case) it is second.  */
8578 	       && integer_zerop (arg1))
8579 	{
8580 	  switch (code)
8581 	    {
8582 	    case EQ_EXPR:
8583 	    case LE_EXPR:
8584 	    case LT_EXPR:
8585 	      return constant_boolean_node (false, type);
8586 	    case GE_EXPR:
8587 	    case GT_EXPR:
8588 	    case NE_EXPR:
8589 	      return constant_boolean_node (true, type);
8590 	    default:
8591 	      gcc_unreachable ();
8592 	    }
8593 	}
8594     }
8595 
8596   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8597      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8598      the resulting offset is smaller in absolute value than the
8599      original one and has the same sign.  */
8600   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8601       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8602       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8603       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8604 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8605       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8606       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8607 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8608     {
8609       tree const1 = TREE_OPERAND (arg0, 1);
8610       tree const2 = TREE_OPERAND (arg1, 1);
8611       tree variable1 = TREE_OPERAND (arg0, 0);
8612       tree variable2 = TREE_OPERAND (arg1, 0);
8613       tree cst;
8614       const char * const warnmsg = G_("assuming signed overflow does not "
8615 				      "occur when combining constants around "
8616 				      "a comparison");
8617 
8618       /* Put the constant on the side where it doesn't overflow and is
8619 	 of lower absolute value and of same sign than before.  */
8620       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8621 			     ? MINUS_EXPR : PLUS_EXPR,
8622 			     const2, const1);
8623       if (!TREE_OVERFLOW (cst)
8624 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8625 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8626 	{
8627 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8628 	  return fold_build2_loc (loc, code, type,
8629 				  variable1,
8630 				  fold_build2_loc (loc, TREE_CODE (arg1),
8631 						   TREE_TYPE (arg1),
8632 						   variable2, cst));
8633 	}
8634 
8635       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8636 			     ? MINUS_EXPR : PLUS_EXPR,
8637 			     const1, const2);
8638       if (!TREE_OVERFLOW (cst)
8639 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8640 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8641 	{
8642 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8643 	  return fold_build2_loc (loc, code, type,
8644 				  fold_build2_loc (loc, TREE_CODE (arg0),
8645 						   TREE_TYPE (arg0),
8646 						   variable1, cst),
8647 				  variable2);
8648 	}
8649     }
8650 
8651   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8652   if (tem)
8653     return tem;
8654 
8655   /* If we are comparing an expression that just has comparisons
8656      of two integer values, arithmetic expressions of those comparisons,
8657      and constants, we can simplify it.  There are only three cases
8658      to check: the two values can either be equal, the first can be
8659      greater, or the second can be greater.  Fold the expression for
8660      those three values.  Since each value must be 0 or 1, we have
8661      eight possibilities, each of which corresponds to the constant 0
8662      or 1 or one of the six possible comparisons.
8663 
8664      This handles common cases like (a > b) == 0 but also handles
8665      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8666      occur in macroized code.  */
8667 
8668   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8669     {
8670       tree cval1 = 0, cval2 = 0;
8671       int save_p = 0;
8672 
8673       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8674 	  /* Don't handle degenerate cases here; they should already
8675 	     have been handled anyway.  */
8676 	  && cval1 != 0 && cval2 != 0
8677 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8678 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8679 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8680 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8681 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8682 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8683 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8684 	{
8685 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8686 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8687 
8688 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8689 	     was the same as ARG1.  */
8690 
8691 	  tree high_result
8692 		= fold_build2_loc (loc, code, type,
8693 			       eval_subst (loc, arg0, cval1, maxval,
8694 					   cval2, minval),
8695 			       arg1);
8696 	  tree equal_result
8697 		= fold_build2_loc (loc, code, type,
8698 			       eval_subst (loc, arg0, cval1, maxval,
8699 					   cval2, maxval),
8700 			       arg1);
8701 	  tree low_result
8702 		= fold_build2_loc (loc, code, type,
8703 			       eval_subst (loc, arg0, cval1, minval,
8704 					   cval2, maxval),
8705 			       arg1);
8706 
8707 	  /* All three of these results should be 0 or 1.  Confirm they are.
8708 	     Then use those values to select the proper code to use.  */
8709 
8710 	  if (TREE_CODE (high_result) == INTEGER_CST
8711 	      && TREE_CODE (equal_result) == INTEGER_CST
8712 	      && TREE_CODE (low_result) == INTEGER_CST)
8713 	    {
8714 	      /* Make a 3-bit mask with the high-order bit being the
8715 		 value for `>', the next for '=', and the low for '<'.  */
8716 	      switch ((integer_onep (high_result) * 4)
8717 		      + (integer_onep (equal_result) * 2)
8718 		      + integer_onep (low_result))
8719 		{
8720 		case 0:
8721 		  /* Always false.  */
8722 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8723 		case 1:
8724 		  code = LT_EXPR;
8725 		  break;
8726 		case 2:
8727 		  code = EQ_EXPR;
8728 		  break;
8729 		case 3:
8730 		  code = LE_EXPR;
8731 		  break;
8732 		case 4:
8733 		  code = GT_EXPR;
8734 		  break;
8735 		case 5:
8736 		  code = NE_EXPR;
8737 		  break;
8738 		case 6:
8739 		  code = GE_EXPR;
8740 		  break;
8741 		case 7:
8742 		  /* Always true.  */
8743 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8744 		}
8745 
8746 	      if (save_p)
8747 		{
8748 		  tem = save_expr (build2 (code, type, cval1, cval2));
8749 		  protected_set_expr_location (tem, loc);
8750 		  return tem;
8751 		}
8752 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8753 	    }
8754 	}
8755     }
8756 
8757   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8758      into a single range test.  */
8759   if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8760       && TREE_CODE (arg1) == INTEGER_CST
8761       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8762       && !integer_zerop (TREE_OPERAND (arg0, 1))
8763       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8764       && !TREE_OVERFLOW (arg1))
8765     {
8766       tem = fold_div_compare (loc, code, type, arg0, arg1);
8767       if (tem != NULL_TREE)
8768 	return tem;
8769     }
8770 
8771   return NULL_TREE;
8772 }
8773 
8774 
8775 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8776    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8777    argument EXPR represents the expression "z" of type TYPE.  */
8778 
8779 static tree
8780 fold_mult_zconjz (location_t loc, tree type, tree expr)
8781 {
8782   tree itype = TREE_TYPE (type);
8783   tree rpart, ipart, tem;
8784 
8785   if (TREE_CODE (expr) == COMPLEX_EXPR)
8786     {
8787       rpart = TREE_OPERAND (expr, 0);
8788       ipart = TREE_OPERAND (expr, 1);
8789     }
8790   else if (TREE_CODE (expr) == COMPLEX_CST)
8791     {
8792       rpart = TREE_REALPART (expr);
8793       ipart = TREE_IMAGPART (expr);
8794     }
8795   else
8796     {
8797       expr = save_expr (expr);
8798       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8799       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8800     }
8801 
8802   rpart = save_expr (rpart);
8803   ipart = save_expr (ipart);
8804   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8805 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8806 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8807   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8808 			  build_zero_cst (itype));
8809 }
8810 
8811 
8812 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8813    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
8814 
8815 static bool
8816 vec_cst_ctor_to_array (tree arg, tree *elts)
8817 {
8818   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8819 
8820   if (TREE_CODE (arg) == VECTOR_CST)
8821     {
8822       for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8823 	elts[i] = VECTOR_CST_ELT (arg, i);
8824     }
8825   else if (TREE_CODE (arg) == CONSTRUCTOR)
8826     {
8827       constructor_elt *elt;
8828 
8829       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8830 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8831 	  return false;
8832 	else
8833 	  elts[i] = elt->value;
8834     }
8835   else
8836     return false;
8837   for (; i < nelts; i++)
8838     elts[i]
8839       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8840   return true;
8841 }
8842 
8843 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8844    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8845    NULL_TREE otherwise.  */
8846 
8847 static tree
8848 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8849 {
8850   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8851   tree *elts;
8852   bool need_ctor = false;
8853 
8854   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8855 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8856   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8857       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8858     return NULL_TREE;
8859 
8860   elts = XALLOCAVEC (tree, nelts * 3);
8861   if (!vec_cst_ctor_to_array (arg0, elts)
8862       || !vec_cst_ctor_to_array (arg1, elts + nelts))
8863     return NULL_TREE;
8864 
8865   for (i = 0; i < nelts; i++)
8866     {
8867       if (!CONSTANT_CLASS_P (elts[sel[i]]))
8868 	need_ctor = true;
8869       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8870     }
8871 
8872   if (need_ctor)
8873     {
8874       vec<constructor_elt, va_gc> *v;
8875       vec_alloc (v, nelts);
8876       for (i = 0; i < nelts; i++)
8877 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8878       return build_constructor (type, v);
8879     }
8880   else
8881     return build_vector (type, &elts[2 * nelts]);
8882 }
8883 
8884 /* Try to fold a pointer difference of type TYPE two address expressions of
8885    array references AREF0 and AREF1 using location LOC.  Return a
8886    simplified expression for the difference or NULL_TREE.  */
8887 
8888 static tree
8889 fold_addr_of_array_ref_difference (location_t loc, tree type,
8890 				   tree aref0, tree aref1)
8891 {
8892   tree base0 = TREE_OPERAND (aref0, 0);
8893   tree base1 = TREE_OPERAND (aref1, 0);
8894   tree base_offset = build_int_cst (type, 0);
8895 
8896   /* If the bases are array references as well, recurse.  If the bases
8897      are pointer indirections compute the difference of the pointers.
8898      If the bases are equal, we are set.  */
8899   if ((TREE_CODE (base0) == ARRAY_REF
8900        && TREE_CODE (base1) == ARRAY_REF
8901        && (base_offset
8902 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8903       || (INDIRECT_REF_P (base0)
8904 	  && INDIRECT_REF_P (base1)
8905 	  && (base_offset
8906 	        = fold_binary_loc (loc, MINUS_EXPR, type,
8907 				   fold_convert (type, TREE_OPERAND (base0, 0)),
8908 				   fold_convert (type,
8909 						 TREE_OPERAND (base1, 0)))))
8910       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8911     {
8912       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8913       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8914       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8915       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8916       return fold_build2_loc (loc, PLUS_EXPR, type,
8917 			      base_offset,
8918 			      fold_build2_loc (loc, MULT_EXPR, type,
8919 					       diff, esz));
8920     }
8921   return NULL_TREE;
8922 }
8923 
8924 /* If the real or vector real constant CST of type TYPE has an exact
8925    inverse, return it, else return NULL.  */
8926 
8927 tree
8928 exact_inverse (tree type, tree cst)
8929 {
8930   REAL_VALUE_TYPE r;
8931   tree unit_type, *elts;
8932   machine_mode mode;
8933   unsigned vec_nelts, i;
8934 
8935   switch (TREE_CODE (cst))
8936     {
8937     case REAL_CST:
8938       r = TREE_REAL_CST (cst);
8939 
8940       if (exact_real_inverse (TYPE_MODE (type), &r))
8941 	return build_real (type, r);
8942 
8943       return NULL_TREE;
8944 
8945     case VECTOR_CST:
8946       vec_nelts = VECTOR_CST_NELTS (cst);
8947       elts = XALLOCAVEC (tree, vec_nelts);
8948       unit_type = TREE_TYPE (type);
8949       mode = TYPE_MODE (unit_type);
8950 
8951       for (i = 0; i < vec_nelts; i++)
8952 	{
8953 	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8954 	  if (!exact_real_inverse (mode, &r))
8955 	    return NULL_TREE;
8956 	  elts[i] = build_real (unit_type, r);
8957 	}
8958 
8959       return build_vector (type, elts);
8960 
8961     default:
8962       return NULL_TREE;
8963     }
8964 }
8965 
8966 /*  Mask out the tz least significant bits of X of type TYPE where
8967     tz is the number of trailing zeroes in Y.  */
8968 static wide_int
8969 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8970 {
8971   int tz = wi::ctz (y);
8972   if (tz > 0)
8973     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8974   return x;
8975 }
8976 
8977 /* Return true when T is an address and is known to be nonzero.
8978    For floating point we further ensure that T is not denormal.
8979    Similar logic is present in nonzero_address in rtlanal.h.
8980 
8981    If the return value is based on the assumption that signed overflow
8982    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8983    change *STRICT_OVERFLOW_P.  */
8984 
8985 static bool
8986 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8987 {
8988   tree type = TREE_TYPE (t);
8989   enum tree_code code;
8990 
8991   /* Doing something useful for floating point would need more work.  */
8992   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8993     return false;
8994 
8995   code = TREE_CODE (t);
8996   switch (TREE_CODE_CLASS (code))
8997     {
8998     case tcc_unary:
8999       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9000 					      strict_overflow_p);
9001     case tcc_binary:
9002     case tcc_comparison:
9003       return tree_binary_nonzero_warnv_p (code, type,
9004 					       TREE_OPERAND (t, 0),
9005 					       TREE_OPERAND (t, 1),
9006 					       strict_overflow_p);
9007     case tcc_constant:
9008     case tcc_declaration:
9009     case tcc_reference:
9010       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9011 
9012     default:
9013       break;
9014     }
9015 
9016   switch (code)
9017     {
9018     case TRUTH_NOT_EXPR:
9019       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9020 					      strict_overflow_p);
9021 
9022     case TRUTH_AND_EXPR:
9023     case TRUTH_OR_EXPR:
9024     case TRUTH_XOR_EXPR:
9025       return tree_binary_nonzero_warnv_p (code, type,
9026 					       TREE_OPERAND (t, 0),
9027 					       TREE_OPERAND (t, 1),
9028 					       strict_overflow_p);
9029 
9030     case COND_EXPR:
9031     case CONSTRUCTOR:
9032     case OBJ_TYPE_REF:
9033     case ASSERT_EXPR:
9034     case ADDR_EXPR:
9035     case WITH_SIZE_EXPR:
9036     case SSA_NAME:
9037       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9038 
9039     case COMPOUND_EXPR:
9040     case MODIFY_EXPR:
9041     case BIND_EXPR:
9042       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9043 					strict_overflow_p);
9044 
9045     case SAVE_EXPR:
9046       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9047 					strict_overflow_p);
9048 
9049     case CALL_EXPR:
9050       {
9051 	tree fndecl = get_callee_fndecl (t);
9052 	if (!fndecl) return false;
9053 	if (flag_delete_null_pointer_checks && !flag_check_new
9054 	    && DECL_IS_OPERATOR_NEW (fndecl)
9055 	    && !TREE_NOTHROW (fndecl))
9056 	  return true;
9057 	if (flag_delete_null_pointer_checks
9058 	    && lookup_attribute ("returns_nonnull",
9059 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9060 	  return true;
9061 	return alloca_call_p (t);
9062       }
9063 
9064     default:
9065       break;
9066     }
9067   return false;
9068 }
9069 
9070 /* Return true when T is an address and is known to be nonzero.
9071    Handle warnings about undefined signed overflow.  */
9072 
9073 bool
9074 tree_expr_nonzero_p (tree t)
9075 {
9076   bool ret, strict_overflow_p;
9077 
9078   strict_overflow_p = false;
9079   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9080   if (strict_overflow_p)
9081     fold_overflow_warning (("assuming signed overflow does not occur when "
9082 			    "determining that expression is always "
9083 			    "non-zero"),
9084 			   WARN_STRICT_OVERFLOW_MISC);
9085   return ret;
9086 }
9087 
9088 /* Return true if T is known not to be equal to an integer W.  */
9089 
9090 bool
9091 expr_not_equal_to (tree t, const wide_int &w)
9092 {
9093   wide_int min, max, nz;
9094   value_range_type rtype;
9095   switch (TREE_CODE (t))
9096     {
9097     case INTEGER_CST:
9098       return wi::ne_p (t, w);
9099 
9100     case SSA_NAME:
9101       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9102 	return false;
9103       rtype = get_range_info (t, &min, &max);
9104       if (rtype == VR_RANGE)
9105 	{
9106 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9107 	    return true;
9108 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9109 	    return true;
9110 	}
9111       else if (rtype == VR_ANTI_RANGE
9112 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9113 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9114 	return true;
9115       /* If T has some known zero bits and W has any of those bits set,
9116 	 then T is known not to be equal to W.  */
9117       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9118 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9119 	return true;
9120       return false;
9121 
9122     default:
9123       return false;
9124     }
9125 }
9126 
9127 /* Fold a binary expression of code CODE and type TYPE with operands
9128    OP0 and OP1.  LOC is the location of the resulting expression.
9129    Return the folded expression if folding is successful.  Otherwise,
9130    return NULL_TREE.  */
9131 
9132 tree
9133 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9134 		 tree op0, tree op1)
9135 {
9136   enum tree_code_class kind = TREE_CODE_CLASS (code);
9137   tree arg0, arg1, tem;
9138   tree t1 = NULL_TREE;
9139   bool strict_overflow_p;
9140   unsigned int prec;
9141 
9142   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9143 	      && TREE_CODE_LENGTH (code) == 2
9144 	      && op0 != NULL_TREE
9145 	      && op1 != NULL_TREE);
9146 
9147   arg0 = op0;
9148   arg1 = op1;
9149 
9150   /* Strip any conversions that don't change the mode.  This is
9151      safe for every expression, except for a comparison expression
9152      because its signedness is derived from its operands.  So, in
9153      the latter case, only strip conversions that don't change the
9154      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9155      preserved.
9156 
9157      Note that this is done as an internal manipulation within the
9158      constant folder, in order to find the simplest representation
9159      of the arguments so that their form can be studied.  In any
9160      cases, the appropriate type conversions should be put back in
9161      the tree that will get out of the constant folder.  */
9162 
9163   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9164     {
9165       STRIP_SIGN_NOPS (arg0);
9166       STRIP_SIGN_NOPS (arg1);
9167     }
9168   else
9169     {
9170       STRIP_NOPS (arg0);
9171       STRIP_NOPS (arg1);
9172     }
9173 
9174   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9175      constant but we can't do arithmetic on them.  */
9176   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9177     {
9178       tem = const_binop (code, type, arg0, arg1);
9179       if (tem != NULL_TREE)
9180 	{
9181 	  if (TREE_TYPE (tem) != type)
9182 	    tem = fold_convert_loc (loc, type, tem);
9183 	  return tem;
9184 	}
9185     }
9186 
9187   /* If this is a commutative operation, and ARG0 is a constant, move it
9188      to ARG1 to reduce the number of tests below.  */
9189   if (commutative_tree_code (code)
9190       && tree_swap_operands_p (arg0, arg1))
9191     return fold_build2_loc (loc, code, type, op1, op0);
9192 
9193   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9194      to ARG1 to reduce the number of tests below.  */
9195   if (kind == tcc_comparison
9196       && tree_swap_operands_p (arg0, arg1))
9197     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9198 
9199   tem = generic_simplify (loc, code, type, op0, op1);
9200   if (tem)
9201     return tem;
9202 
9203   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9204 
9205      First check for cases where an arithmetic operation is applied to a
9206      compound, conditional, or comparison operation.  Push the arithmetic
9207      operation inside the compound or conditional to see if any folding
9208      can then be done.  Convert comparison to conditional for this purpose.
9209      The also optimizes non-constant cases that used to be done in
9210      expand_expr.
9211 
9212      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9213      one of the operands is a comparison and the other is a comparison, a
9214      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9215      code below would make the expression more complex.  Change it to a
9216      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9217      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9218 
9219   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9220        || code == EQ_EXPR || code == NE_EXPR)
9221       && TREE_CODE (type) != VECTOR_TYPE
9222       && ((truth_value_p (TREE_CODE (arg0))
9223 	   && (truth_value_p (TREE_CODE (arg1))
9224 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9225 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9226 	  || (truth_value_p (TREE_CODE (arg1))
9227 	      && (truth_value_p (TREE_CODE (arg0))
9228 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9229 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9230     {
9231       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9232 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9233 			 : TRUTH_XOR_EXPR,
9234 			 boolean_type_node,
9235 			 fold_convert_loc (loc, boolean_type_node, arg0),
9236 			 fold_convert_loc (loc, boolean_type_node, arg1));
9237 
9238       if (code == EQ_EXPR)
9239 	tem = invert_truthvalue_loc (loc, tem);
9240 
9241       return fold_convert_loc (loc, type, tem);
9242     }
9243 
9244   if (TREE_CODE_CLASS (code) == tcc_binary
9245       || TREE_CODE_CLASS (code) == tcc_comparison)
9246     {
9247       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9248 	{
9249 	  tem = fold_build2_loc (loc, code, type,
9250 			     fold_convert_loc (loc, TREE_TYPE (op0),
9251 					       TREE_OPERAND (arg0, 1)), op1);
9252 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9253 			     tem);
9254 	}
9255       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9256 	{
9257 	  tem = fold_build2_loc (loc, code, type, op0,
9258 			     fold_convert_loc (loc, TREE_TYPE (op1),
9259 					       TREE_OPERAND (arg1, 1)));
9260 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9261 			     tem);
9262 	}
9263 
9264       if (TREE_CODE (arg0) == COND_EXPR
9265 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9266 	  || COMPARISON_CLASS_P (arg0))
9267 	{
9268 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9269 						     arg0, arg1,
9270 						     /*cond_first_p=*/1);
9271 	  if (tem != NULL_TREE)
9272 	    return tem;
9273 	}
9274 
9275       if (TREE_CODE (arg1) == COND_EXPR
9276 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9277 	  || COMPARISON_CLASS_P (arg1))
9278 	{
9279 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9280 						     arg1, arg0,
9281 					             /*cond_first_p=*/0);
9282 	  if (tem != NULL_TREE)
9283 	    return tem;
9284 	}
9285     }
9286 
9287   switch (code)
9288     {
9289     case MEM_REF:
9290       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9291       if (TREE_CODE (arg0) == ADDR_EXPR
9292 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9293 	{
9294 	  tree iref = TREE_OPERAND (arg0, 0);
9295 	  return fold_build2 (MEM_REF, type,
9296 			      TREE_OPERAND (iref, 0),
9297 			      int_const_binop (PLUS_EXPR, arg1,
9298 					       TREE_OPERAND (iref, 1)));
9299 	}
9300 
9301       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9302       if (TREE_CODE (arg0) == ADDR_EXPR
9303 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9304 	{
9305 	  tree base;
9306 	  HOST_WIDE_INT coffset;
9307 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9308 						&coffset);
9309 	  if (!base)
9310 	    return NULL_TREE;
9311 	  return fold_build2 (MEM_REF, type,
9312 			      build_fold_addr_expr (base),
9313 			      int_const_binop (PLUS_EXPR, arg1,
9314 					       size_int (coffset)));
9315 	}
9316 
9317       return NULL_TREE;
9318 
9319     case POINTER_PLUS_EXPR:
9320       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9321       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9322 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9323         return fold_convert_loc (loc, type,
9324 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9325 					      fold_convert_loc (loc, sizetype,
9326 								arg1),
9327 					      fold_convert_loc (loc, sizetype,
9328 								arg0)));
9329 
9330       return NULL_TREE;
9331 
9332     case PLUS_EXPR:
9333       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9334 	{
9335 	  /* X + (X / CST) * -CST is X % CST.  */
9336 	  if (TREE_CODE (arg1) == MULT_EXPR
9337 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9338 	      && operand_equal_p (arg0,
9339 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9340 	    {
9341 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9342 	      tree cst1 = TREE_OPERAND (arg1, 1);
9343 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9344 				      cst1, cst0);
9345 	      if (sum && integer_zerop (sum))
9346 		return fold_convert_loc (loc, type,
9347 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9348 						      TREE_TYPE (arg0), arg0,
9349 						      cst0));
9350 	    }
9351 	}
9352 
9353       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9354 	 one.  Make sure the type is not saturating and has the signedness of
9355 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9356 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9357       if ((TREE_CODE (arg0) == MULT_EXPR
9358 	   || TREE_CODE (arg1) == MULT_EXPR)
9359 	  && !TYPE_SATURATING (type)
9360 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9361 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9362 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9363         {
9364 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9365 	  if (tem)
9366 	    return tem;
9367 	}
9368 
9369       if (! FLOAT_TYPE_P (type))
9370 	{
9371 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9372 	     (plus (plus (mult) (mult)) (foo)) so that we can
9373 	     take advantage of the factoring cases below.  */
9374 	  if (ANY_INTEGRAL_TYPE_P (type)
9375 	      && TYPE_OVERFLOW_WRAPS (type)
9376 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9377 		    || TREE_CODE (arg0) == MINUS_EXPR)
9378 		   && TREE_CODE (arg1) == MULT_EXPR)
9379 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9380 		       || TREE_CODE (arg1) == MINUS_EXPR)
9381 		      && TREE_CODE (arg0) == MULT_EXPR)))
9382 	    {
9383 	      tree parg0, parg1, parg, marg;
9384 	      enum tree_code pcode;
9385 
9386 	      if (TREE_CODE (arg1) == MULT_EXPR)
9387 		parg = arg0, marg = arg1;
9388 	      else
9389 		parg = arg1, marg = arg0;
9390 	      pcode = TREE_CODE (parg);
9391 	      parg0 = TREE_OPERAND (parg, 0);
9392 	      parg1 = TREE_OPERAND (parg, 1);
9393 	      STRIP_NOPS (parg0);
9394 	      STRIP_NOPS (parg1);
9395 
9396 	      if (TREE_CODE (parg0) == MULT_EXPR
9397 		  && TREE_CODE (parg1) != MULT_EXPR)
9398 		return fold_build2_loc (loc, pcode, type,
9399 				    fold_build2_loc (loc, PLUS_EXPR, type,
9400 						 fold_convert_loc (loc, type,
9401 								   parg0),
9402 						 fold_convert_loc (loc, type,
9403 								   marg)),
9404 				    fold_convert_loc (loc, type, parg1));
9405 	      if (TREE_CODE (parg0) != MULT_EXPR
9406 		  && TREE_CODE (parg1) == MULT_EXPR)
9407 		return
9408 		  fold_build2_loc (loc, PLUS_EXPR, type,
9409 			       fold_convert_loc (loc, type, parg0),
9410 			       fold_build2_loc (loc, pcode, type,
9411 					    fold_convert_loc (loc, type, marg),
9412 					    fold_convert_loc (loc, type,
9413 							      parg1)));
9414 	    }
9415 	}
9416       else
9417 	{
9418 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9419 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9420 	     if signed zeros are involved.  */
9421 	  if (!HONOR_SNANS (element_mode (arg0))
9422               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9423 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9424 	    {
9425 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9426 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9427 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9428 	      bool arg0rz = false, arg0iz = false;
9429 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9430 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9431 		{
9432 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9433 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9434 		  if (arg0rz && arg1i && real_zerop (arg1i))
9435 		    {
9436 		      tree rp = arg1r ? arg1r
9437 				  : build1 (REALPART_EXPR, rtype, arg1);
9438 		      tree ip = arg0i ? arg0i
9439 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9440 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9441 		    }
9442 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9443 		    {
9444 		      tree rp = arg0r ? arg0r
9445 				  : build1 (REALPART_EXPR, rtype, arg0);
9446 		      tree ip = arg1i ? arg1i
9447 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9448 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9449 		    }
9450 		}
9451 	    }
9452 
9453 	  if (flag_unsafe_math_optimizations
9454 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9455 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9456 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9457 	    return tem;
9458 
9459           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9460              We associate floats only if the user has specified
9461              -fassociative-math.  */
9462           if (flag_associative_math
9463               && TREE_CODE (arg1) == PLUS_EXPR
9464               && TREE_CODE (arg0) != MULT_EXPR)
9465             {
9466               tree tree10 = TREE_OPERAND (arg1, 0);
9467               tree tree11 = TREE_OPERAND (arg1, 1);
9468               if (TREE_CODE (tree11) == MULT_EXPR
9469 		  && TREE_CODE (tree10) == MULT_EXPR)
9470                 {
9471                   tree tree0;
9472                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9473                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9474                 }
9475             }
9476           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9477              We associate floats only if the user has specified
9478              -fassociative-math.  */
9479           if (flag_associative_math
9480               && TREE_CODE (arg0) == PLUS_EXPR
9481               && TREE_CODE (arg1) != MULT_EXPR)
9482             {
9483               tree tree00 = TREE_OPERAND (arg0, 0);
9484               tree tree01 = TREE_OPERAND (arg0, 1);
9485               if (TREE_CODE (tree01) == MULT_EXPR
9486 		  && TREE_CODE (tree00) == MULT_EXPR)
9487                 {
9488                   tree tree0;
9489                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9490                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9491                 }
9492             }
9493 	}
9494 
9495      bit_rotate:
9496       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9497 	 is a rotate of A by C1 bits.  */
9498       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9499 	 is a rotate of A by B bits.  */
9500       {
9501 	enum tree_code code0, code1;
9502 	tree rtype;
9503 	code0 = TREE_CODE (arg0);
9504 	code1 = TREE_CODE (arg1);
9505 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9506 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9507 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9508 			        TREE_OPERAND (arg1, 0), 0)
9509 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9510 	        TYPE_UNSIGNED (rtype))
9511 	    /* Only create rotates in complete modes.  Other cases are not
9512 	       expanded properly.  */
9513 	    && (element_precision (rtype)
9514 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9515 	  {
9516 	    tree tree01, tree11;
9517 	    enum tree_code code01, code11;
9518 
9519 	    tree01 = TREE_OPERAND (arg0, 1);
9520 	    tree11 = TREE_OPERAND (arg1, 1);
9521 	    STRIP_NOPS (tree01);
9522 	    STRIP_NOPS (tree11);
9523 	    code01 = TREE_CODE (tree01);
9524 	    code11 = TREE_CODE (tree11);
9525 	    if (code01 == INTEGER_CST
9526 		&& code11 == INTEGER_CST
9527 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9528 		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9529 	      {
9530 		tem = build2_loc (loc, LROTATE_EXPR,
9531 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
9532 				  TREE_OPERAND (arg0, 0),
9533 				  code0 == LSHIFT_EXPR
9534 				  ? TREE_OPERAND (arg0, 1)
9535 				  : TREE_OPERAND (arg1, 1));
9536 		return fold_convert_loc (loc, type, tem);
9537 	      }
9538 	    else if (code11 == MINUS_EXPR)
9539 	      {
9540 		tree tree110, tree111;
9541 		tree110 = TREE_OPERAND (tree11, 0);
9542 		tree111 = TREE_OPERAND (tree11, 1);
9543 		STRIP_NOPS (tree110);
9544 		STRIP_NOPS (tree111);
9545 		if (TREE_CODE (tree110) == INTEGER_CST
9546 		    && 0 == compare_tree_int (tree110,
9547 					      element_precision
9548 					      (TREE_TYPE (TREE_OPERAND
9549 							  (arg0, 0))))
9550 		    && operand_equal_p (tree01, tree111, 0))
9551 		  return
9552 		    fold_convert_loc (loc, type,
9553 				      build2 ((code0 == LSHIFT_EXPR
9554 					       ? LROTATE_EXPR
9555 					       : RROTATE_EXPR),
9556 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
9557 					      TREE_OPERAND (arg0, 0),
9558 					      TREE_OPERAND (arg0, 1)));
9559 	      }
9560 	    else if (code01 == MINUS_EXPR)
9561 	      {
9562 		tree tree010, tree011;
9563 		tree010 = TREE_OPERAND (tree01, 0);
9564 		tree011 = TREE_OPERAND (tree01, 1);
9565 		STRIP_NOPS (tree010);
9566 		STRIP_NOPS (tree011);
9567 		if (TREE_CODE (tree010) == INTEGER_CST
9568 		    && 0 == compare_tree_int (tree010,
9569 					      element_precision
9570 					      (TREE_TYPE (TREE_OPERAND
9571 							  (arg0, 0))))
9572 		    && operand_equal_p (tree11, tree011, 0))
9573 		    return fold_convert_loc
9574 		      (loc, type,
9575 		       build2 ((code0 != LSHIFT_EXPR
9576 				? LROTATE_EXPR
9577 				: RROTATE_EXPR),
9578 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
9579 			       TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9580 	      }
9581 	  }
9582       }
9583 
9584     associate:
9585       /* In most languages, can't associate operations on floats through
9586 	 parentheses.  Rather than remember where the parentheses were, we
9587 	 don't associate floats at all, unless the user has specified
9588 	 -fassociative-math.
9589 	 And, we need to make sure type is not saturating.  */
9590 
9591       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9592 	  && !TYPE_SATURATING (type))
9593 	{
9594 	  tree var0, con0, lit0, minus_lit0;
9595 	  tree var1, con1, lit1, minus_lit1;
9596 	  tree atype = type;
9597 	  bool ok = true;
9598 
9599 	  /* Split both trees into variables, constants, and literals.  Then
9600 	     associate each group together, the constants with literals,
9601 	     then the result with variables.  This increases the chances of
9602 	     literals being recombined later and of generating relocatable
9603 	     expressions for the sum of a constant and literal.  */
9604 	  var0 = split_tree (loc, arg0, type, code,
9605 			     &con0, &lit0, &minus_lit0, 0);
9606 	  var1 = split_tree (loc, arg1, type, code,
9607 			     &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9608 
9609 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9610 	  if (code == MINUS_EXPR)
9611 	    code = PLUS_EXPR;
9612 
9613 	  /* With undefined overflow prefer doing association in a type
9614 	     which wraps on overflow, if that is one of the operand types.  */
9615 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9616 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9617 	    {
9618 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9619 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9620 		atype = TREE_TYPE (arg0);
9621 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9622 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9623 		atype = TREE_TYPE (arg1);
9624 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9625 	    }
9626 
9627 	  /* With undefined overflow we can only associate constants with one
9628 	     variable, and constants whose association doesn't overflow.  */
9629 	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9630 	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9631 	    {
9632 	      if (var0 && var1)
9633 		{
9634 		  tree tmp0 = var0;
9635 		  tree tmp1 = var1;
9636 		  bool one_neg = false;
9637 
9638 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9639 		    {
9640 		      tmp0 = TREE_OPERAND (tmp0, 0);
9641 		      one_neg = !one_neg;
9642 		    }
9643 		  if (CONVERT_EXPR_P (tmp0)
9644 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9645 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9646 			  <= TYPE_PRECISION (atype)))
9647 		    tmp0 = TREE_OPERAND (tmp0, 0);
9648 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9649 		    {
9650 		      tmp1 = TREE_OPERAND (tmp1, 0);
9651 		      one_neg = !one_neg;
9652 		    }
9653 		  if (CONVERT_EXPR_P (tmp1)
9654 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9655 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9656 			  <= TYPE_PRECISION (atype)))
9657 		    tmp1 = TREE_OPERAND (tmp1, 0);
9658 		  /* The only case we can still associate with two variables
9659 		     is if they cancel out.  */
9660 		  if (!one_neg
9661 		      || !operand_equal_p (tmp0, tmp1, 0))
9662 		    ok = false;
9663 		}
9664 	    }
9665 
9666 	  /* Only do something if we found more than two objects.  Otherwise,
9667 	     nothing has changed and we risk infinite recursion.  */
9668 	  if (ok
9669 	      && (2 < ((var0 != 0) + (var1 != 0)
9670 		       + (con0 != 0) + (con1 != 0)
9671 		       + (lit0 != 0) + (lit1 != 0)
9672 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
9673 	    {
9674 	      var0 = associate_trees (loc, var0, var1, code, atype);
9675 	      con0 = associate_trees (loc, con0, con1, code, atype);
9676 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9677 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9678 					    code, atype);
9679 
9680 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9681 		 greater than the positive part.  Otherwise, the multiplicative
9682 		 folding code (i.e extract_muldiv) may be fooled in case
9683 		 unsigned constants are subtracted, like in the following
9684 		 example: ((X*2 + 4) - 8U)/2.  */
9685 	      if (minus_lit0 && lit0)
9686 		{
9687 		  if (TREE_CODE (lit0) == INTEGER_CST
9688 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9689 		      && tree_int_cst_lt (lit0, minus_lit0))
9690 		    {
9691 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9692 						    MINUS_EXPR, atype);
9693 		      lit0 = 0;
9694 		    }
9695 		  else
9696 		    {
9697 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9698 					      MINUS_EXPR, atype);
9699 		      minus_lit0 = 0;
9700 		    }
9701 		}
9702 
9703 	      /* Don't introduce overflows through reassociation.  */
9704 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9705 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9706 		return NULL_TREE;
9707 
9708 	      if (minus_lit0)
9709 		{
9710 		  if (con0 == 0)
9711 		    return
9712 		      fold_convert_loc (loc, type,
9713 					associate_trees (loc, var0, minus_lit0,
9714 							 MINUS_EXPR, atype));
9715 		  else
9716 		    {
9717 		      con0 = associate_trees (loc, con0, minus_lit0,
9718 					      MINUS_EXPR, atype);
9719 		      return
9720 			fold_convert_loc (loc, type,
9721 					  associate_trees (loc, var0, con0,
9722 							   PLUS_EXPR, atype));
9723 		    }
9724 		}
9725 
9726 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9727 	      return
9728 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9729 							      code, atype));
9730 	    }
9731 	}
9732 
9733       return NULL_TREE;
9734 
9735     case MINUS_EXPR:
9736       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
9737       if (TREE_CODE (arg0) == NEGATE_EXPR
9738 	  && negate_expr_p (op1)
9739 	  /* If arg0 is e.g. unsigned int and type is int, then this could
9740 	     introduce UB, because if A is INT_MIN at runtime, the original
9741 	     expression can be well defined while the latter is not.
9742 	     See PR83269.  */
9743 	  && !(ANY_INTEGRAL_TYPE_P (type)
9744 	       && TYPE_OVERFLOW_UNDEFINED (type)
9745 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9746 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9747 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9748 			        fold_convert_loc (loc, type,
9749 						  TREE_OPERAND (arg0, 0)));
9750 
9751       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9752 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
9753 	 signed zeros are involved.  */
9754       if (!HONOR_SNANS (element_mode (arg0))
9755 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9756 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9757         {
9758 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9759 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9760 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9761 	  bool arg0rz = false, arg0iz = false;
9762 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
9763 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
9764 	    {
9765 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9766 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9767 	      if (arg0rz && arg1i && real_zerop (arg1i))
9768 	        {
9769 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9770 					 arg1r ? arg1r
9771 					 : build1 (REALPART_EXPR, rtype, arg1));
9772 		  tree ip = arg0i ? arg0i
9773 		    : build1 (IMAGPART_EXPR, rtype, arg0);
9774 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9775 		}
9776 	      else if (arg0iz && arg1r && real_zerop (arg1r))
9777 	        {
9778 		  tree rp = arg0r ? arg0r
9779 		    : build1 (REALPART_EXPR, rtype, arg0);
9780 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9781 					 arg1i ? arg1i
9782 					 : build1 (IMAGPART_EXPR, rtype, arg1));
9783 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9784 		}
9785 	    }
9786 	}
9787 
9788       /* A - B -> A + (-B) if B is easily negatable.  */
9789       if (negate_expr_p (op1)
9790 	  && ! TYPE_OVERFLOW_SANITIZED (type)
9791 	  && ((FLOAT_TYPE_P (type)
9792                /* Avoid this transformation if B is a positive REAL_CST.  */
9793 	       && (TREE_CODE (op1) != REAL_CST
9794 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9795 	      || INTEGRAL_TYPE_P (type)))
9796 	return fold_build2_loc (loc, PLUS_EXPR, type,
9797 				fold_convert_loc (loc, type, arg0),
9798 				negate_expr (op1));
9799 
9800       /* Fold &a[i] - &a[j] to i-j.  */
9801       if (TREE_CODE (arg0) == ADDR_EXPR
9802 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9803 	  && TREE_CODE (arg1) == ADDR_EXPR
9804 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9805         {
9806 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9807 							TREE_OPERAND (arg0, 0),
9808 							TREE_OPERAND (arg1, 0));
9809 	  if (tem)
9810 	    return tem;
9811 	}
9812 
9813       if (FLOAT_TYPE_P (type)
9814 	  && flag_unsafe_math_optimizations
9815 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9816 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9817 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9818 	return tem;
9819 
9820       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9821 	 one.  Make sure the type is not saturating and has the signedness of
9822 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9823 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9824       if ((TREE_CODE (arg0) == MULT_EXPR
9825 	   || TREE_CODE (arg1) == MULT_EXPR)
9826 	  && !TYPE_SATURATING (type)
9827 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9828 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9829 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9830         {
9831 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9832 	  if (tem)
9833 	    return tem;
9834 	}
9835 
9836       goto associate;
9837 
9838     case MULT_EXPR:
9839       if (! FLOAT_TYPE_P (type))
9840 	{
9841 	  /* Transform x * -C into -x * C if x is easily negatable.  */
9842 	  if (TREE_CODE (op1) == INTEGER_CST
9843 	      && tree_int_cst_sgn (op1) == -1
9844 	      && negate_expr_p (op0)
9845 	      && negate_expr_p (op1)
9846 	      && (tem = negate_expr (op1)) != op1
9847 	      && ! TREE_OVERFLOW (tem))
9848 	    return fold_build2_loc (loc, MULT_EXPR, type,
9849 				    fold_convert_loc (loc, type,
9850 						      negate_expr (op0)), tem);
9851 
9852 	  strict_overflow_p = false;
9853 	  if (TREE_CODE (arg1) == INTEGER_CST
9854 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9855 					     &strict_overflow_p)))
9856 	    {
9857 	      if (strict_overflow_p)
9858 		fold_overflow_warning (("assuming signed overflow does not "
9859 					"occur when simplifying "
9860 					"multiplication"),
9861 				       WARN_STRICT_OVERFLOW_MISC);
9862 	      return fold_convert_loc (loc, type, tem);
9863 	    }
9864 
9865 	  /* Optimize z * conj(z) for integer complex numbers.  */
9866 	  if (TREE_CODE (arg0) == CONJ_EXPR
9867 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9868 	    return fold_mult_zconjz (loc, type, arg1);
9869 	  if (TREE_CODE (arg1) == CONJ_EXPR
9870 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9871 	    return fold_mult_zconjz (loc, type, arg0);
9872 	}
9873       else
9874 	{
9875 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9876 	     This is not the same for NaNs or if signed zeros are
9877 	     involved.  */
9878 	  if (!HONOR_NANS (arg0)
9879               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9880 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9881 	      && TREE_CODE (arg1) == COMPLEX_CST
9882 	      && real_zerop (TREE_REALPART (arg1)))
9883 	    {
9884 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9885 	      if (real_onep (TREE_IMAGPART (arg1)))
9886 		return
9887 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
9888 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9889 							     rtype, arg0)),
9890 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9891 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
9892 		return
9893 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
9894 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9895 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9896 							     rtype, arg0)));
9897 	    }
9898 
9899 	  /* Optimize z * conj(z) for floating point complex numbers.
9900 	     Guarded by flag_unsafe_math_optimizations as non-finite
9901 	     imaginary components don't produce scalar results.  */
9902 	  if (flag_unsafe_math_optimizations
9903 	      && TREE_CODE (arg0) == CONJ_EXPR
9904 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9905 	    return fold_mult_zconjz (loc, type, arg1);
9906 	  if (flag_unsafe_math_optimizations
9907 	      && TREE_CODE (arg1) == CONJ_EXPR
9908 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9909 	    return fold_mult_zconjz (loc, type, arg0);
9910 	}
9911       goto associate;
9912 
9913     case BIT_IOR_EXPR:
9914       /* Canonicalize (X & C1) | C2.  */
9915       if (TREE_CODE (arg0) == BIT_AND_EXPR
9916 	  && TREE_CODE (arg1) == INTEGER_CST
9917 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9918 	{
9919 	  int width = TYPE_PRECISION (type), w;
9920 	  wide_int c1 = TREE_OPERAND (arg0, 1);
9921 	  wide_int c2 = arg1;
9922 
9923 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
9924 	  if ((c1 & c2) == c1)
9925 	    return omit_one_operand_loc (loc, type, arg1,
9926 					 TREE_OPERAND (arg0, 0));
9927 
9928 	  wide_int msk = wi::mask (width, false,
9929 				   TYPE_PRECISION (TREE_TYPE (arg1)));
9930 
9931 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
9932 	  if (msk.and_not (c1 | c2) == 0)
9933 	    {
9934 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9935 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9936 	    }
9937 
9938 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9939 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9940 	     mode which allows further optimizations.  */
9941 	  c1 &= msk;
9942 	  c2 &= msk;
9943 	  wide_int c3 = c1.and_not (c2);
9944 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9945 	    {
9946 	      wide_int mask = wi::mask (w, false,
9947 					TYPE_PRECISION (type));
9948 	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9949 		{
9950 		  c3 = mask;
9951 		  break;
9952 		}
9953 	    }
9954 
9955 	  if (c3 != c1)
9956 	    {
9957 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9958 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9959 				     wide_int_to_tree (type, c3));
9960 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9961 	    }
9962 	}
9963 
9964       /* See if this can be simplified into a rotate first.  If that
9965 	 is unsuccessful continue in the association code.  */
9966       goto bit_rotate;
9967 
9968     case BIT_XOR_EXPR:
9969       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
9970       if (TREE_CODE (arg0) == BIT_AND_EXPR
9971 	  && INTEGRAL_TYPE_P (type)
9972 	  && integer_onep (TREE_OPERAND (arg0, 1))
9973 	  && integer_onep (arg1))
9974 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9975 				build_zero_cst (TREE_TYPE (arg0)));
9976 
9977       /* See if this can be simplified into a rotate first.  If that
9978 	 is unsuccessful continue in the association code.  */
9979       goto bit_rotate;
9980 
9981     case BIT_AND_EXPR:
9982       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
9983       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9984 	  && INTEGRAL_TYPE_P (type)
9985 	  && integer_onep (TREE_OPERAND (arg0, 1))
9986 	  && integer_onep (arg1))
9987 	{
9988 	  tree tem2;
9989 	  tem = TREE_OPERAND (arg0, 0);
9990 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9991 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9992 				  tem, tem2);
9993 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9994 				  build_zero_cst (TREE_TYPE (tem)));
9995 	}
9996       /* Fold ~X & 1 as (X & 1) == 0.  */
9997       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9998 	  && INTEGRAL_TYPE_P (type)
9999 	  && integer_onep (arg1))
10000 	{
10001 	  tree tem2;
10002 	  tem = TREE_OPERAND (arg0, 0);
10003 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10004 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10005 				  tem, tem2);
10006 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10007 				  build_zero_cst (TREE_TYPE (tem)));
10008 	}
10009       /* Fold !X & 1 as X == 0.  */
10010       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10011 	  && integer_onep (arg1))
10012 	{
10013 	  tem = TREE_OPERAND (arg0, 0);
10014 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10015 				  build_zero_cst (TREE_TYPE (tem)));
10016 	}
10017 
10018       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10019          multiple of 1 << CST.  */
10020       if (TREE_CODE (arg1) == INTEGER_CST)
10021 	{
10022 	  wide_int cst1 = arg1;
10023 	  wide_int ncst1 = -cst1;
10024 	  if ((cst1 & ncst1) == ncst1
10025 	      && multiple_of_p (type, arg0,
10026 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10027 	    return fold_convert_loc (loc, type, arg0);
10028 	}
10029 
10030       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10031          bits from CST2.  */
10032       if (TREE_CODE (arg1) == INTEGER_CST
10033 	  && TREE_CODE (arg0) == MULT_EXPR
10034 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10035 	{
10036 	  wide_int warg1 = arg1;
10037 	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10038 
10039 	  if (masked == 0)
10040 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10041 	                                  arg0, arg1);
10042 	  else if (masked != warg1)
10043 	    {
10044 	      /* Avoid the transform if arg1 is a mask of some
10045 	         mode which allows further optimizations.  */
10046 	      int pop = wi::popcount (warg1);
10047 	      if (!(pop >= BITS_PER_UNIT
10048 		    && pow2p_hwi (pop)
10049 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10050 		return fold_build2_loc (loc, code, type, op0,
10051 					wide_int_to_tree (type, masked));
10052 	    }
10053 	}
10054 
10055       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10056 	 ((A & N) + B) & M -> (A + B) & M
10057 	 Similarly if (N & M) == 0,
10058 	 ((A | N) + B) & M -> (A + B) & M
10059 	 and for - instead of + (or unary - instead of +)
10060 	 and/or ^ instead of |.
10061 	 If B is constant and (B & M) == 0, fold into A & M.  */
10062       if (TREE_CODE (arg1) == INTEGER_CST)
10063 	{
10064 	  wide_int cst1 = arg1;
10065 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10066 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10067 	      && (TREE_CODE (arg0) == PLUS_EXPR
10068 		  || TREE_CODE (arg0) == MINUS_EXPR
10069 		  || TREE_CODE (arg0) == NEGATE_EXPR)
10070 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10071 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10072 	    {
10073 	      tree pmop[2];
10074 	      int which = 0;
10075 	      wide_int cst0;
10076 
10077 	      /* Now we know that arg0 is (C + D) or (C - D) or
10078 		 -C and arg1 (M) is == (1LL << cst) - 1.
10079 		 Store C into PMOP[0] and D into PMOP[1].  */
10080 	      pmop[0] = TREE_OPERAND (arg0, 0);
10081 	      pmop[1] = NULL;
10082 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
10083 		{
10084 		  pmop[1] = TREE_OPERAND (arg0, 1);
10085 		  which = 1;
10086 		}
10087 
10088 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10089 		which = -1;
10090 
10091 	      for (; which >= 0; which--)
10092 		switch (TREE_CODE (pmop[which]))
10093 		  {
10094 		  case BIT_AND_EXPR:
10095 		  case BIT_IOR_EXPR:
10096 		  case BIT_XOR_EXPR:
10097 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10098 			!= INTEGER_CST)
10099 		      break;
10100 		    cst0 = TREE_OPERAND (pmop[which], 1);
10101 		    cst0 &= cst1;
10102 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10103 		      {
10104 			if (cst0 != cst1)
10105 			  break;
10106 		      }
10107 		    else if (cst0 != 0)
10108 		      break;
10109 		    /* If C or D is of the form (A & N) where
10110 		       (N & M) == M, or of the form (A | N) or
10111 		       (A ^ N) where (N & M) == 0, replace it with A.  */
10112 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
10113 		    break;
10114 		  case INTEGER_CST:
10115 		    /* If C or D is a N where (N & M) == 0, it can be
10116 		       omitted (assumed 0).  */
10117 		    if ((TREE_CODE (arg0) == PLUS_EXPR
10118 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10119 			&& (cst1 & pmop[which]) == 0)
10120 		      pmop[which] = NULL;
10121 		    break;
10122 		  default:
10123 		    break;
10124 		  }
10125 
10126 	      /* Only build anything new if we optimized one or both arguments
10127 		 above.  */
10128 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
10129 		  || (TREE_CODE (arg0) != NEGATE_EXPR
10130 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
10131 		{
10132 		  tree utype = TREE_TYPE (arg0);
10133 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10134 		    {
10135 		      /* Perform the operations in a type that has defined
10136 			 overflow behavior.  */
10137 		      utype = unsigned_type_for (TREE_TYPE (arg0));
10138 		      if (pmop[0] != NULL)
10139 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10140 		      if (pmop[1] != NULL)
10141 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10142 		    }
10143 
10144 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
10145 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10146 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
10147 		    {
10148 		      if (pmop[0] != NULL && pmop[1] != NULL)
10149 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10150 					       pmop[0], pmop[1]);
10151 		      else if (pmop[0] != NULL)
10152 			tem = pmop[0];
10153 		      else if (pmop[1] != NULL)
10154 			tem = pmop[1];
10155 		      else
10156 			return build_int_cst (type, 0);
10157 		    }
10158 		  else if (pmop[0] == NULL)
10159 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10160 		  else
10161 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10162 					   pmop[0], pmop[1]);
10163 		  /* TEM is now the new binary +, - or unary - replacement.  */
10164 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10165 					 fold_convert_loc (loc, utype, arg1));
10166 		  return fold_convert_loc (loc, type, tem);
10167 		}
10168 	    }
10169 	}
10170 
10171       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10172       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10173 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10174 	{
10175 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10176 
10177 	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10178 	  if (mask == -1)
10179 	    return
10180 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10181 	}
10182 
10183       goto associate;
10184 
10185     case RDIV_EXPR:
10186       /* Don't touch a floating-point divide by zero unless the mode
10187 	 of the constant can represent infinity.  */
10188       if (TREE_CODE (arg1) == REAL_CST
10189 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10190 	  && real_zerop (arg1))
10191 	return NULL_TREE;
10192 
10193       /* (-A) / (-B) -> A / B  */
10194       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10195 	return fold_build2_loc (loc, RDIV_EXPR, type,
10196 			    TREE_OPERAND (arg0, 0),
10197 			    negate_expr (arg1));
10198       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10199 	return fold_build2_loc (loc, RDIV_EXPR, type,
10200 			    negate_expr (arg0),
10201 			    TREE_OPERAND (arg1, 0));
10202       return NULL_TREE;
10203 
10204     case TRUNC_DIV_EXPR:
10205       /* Fall through */
10206 
10207     case FLOOR_DIV_EXPR:
10208       /* Simplify A / (B << N) where A and B are positive and B is
10209 	 a power of 2, to A >> (N + log2(B)).  */
10210       strict_overflow_p = false;
10211       if (TREE_CODE (arg1) == LSHIFT_EXPR
10212 	  && (TYPE_UNSIGNED (type)
10213 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10214 	{
10215 	  tree sval = TREE_OPERAND (arg1, 0);
10216 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10217 	    {
10218 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10219 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10220 					 wi::exact_log2 (sval));
10221 
10222 	      if (strict_overflow_p)
10223 		fold_overflow_warning (("assuming signed overflow does not "
10224 					"occur when simplifying A / (B << N)"),
10225 				       WARN_STRICT_OVERFLOW_MISC);
10226 
10227 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10228 					sh_cnt, pow2);
10229 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10230 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10231 	    }
10232 	}
10233 
10234       /* Fall through */
10235 
10236     case ROUND_DIV_EXPR:
10237     case CEIL_DIV_EXPR:
10238     case EXACT_DIV_EXPR:
10239       if (integer_zerop (arg1))
10240 	return NULL_TREE;
10241 
10242       /* Convert -A / -B to A / B when the type is signed and overflow is
10243 	 undefined.  */
10244       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10245 	  && TREE_CODE (op0) == NEGATE_EXPR
10246 	  && negate_expr_p (op1))
10247 	{
10248 	  if (INTEGRAL_TYPE_P (type))
10249 	    fold_overflow_warning (("assuming signed overflow does not occur "
10250 				    "when distributing negation across "
10251 				    "division"),
10252 				   WARN_STRICT_OVERFLOW_MISC);
10253 	  return fold_build2_loc (loc, code, type,
10254 				  fold_convert_loc (loc, type,
10255 						    TREE_OPERAND (arg0, 0)),
10256 				  negate_expr (op1));
10257 	}
10258       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10259 	  && TREE_CODE (arg1) == NEGATE_EXPR
10260 	  && negate_expr_p (op0))
10261 	{
10262 	  if (INTEGRAL_TYPE_P (type))
10263 	    fold_overflow_warning (("assuming signed overflow does not occur "
10264 				    "when distributing negation across "
10265 				    "division"),
10266 				   WARN_STRICT_OVERFLOW_MISC);
10267 	  return fold_build2_loc (loc, code, type,
10268 				  negate_expr (op0),
10269 				  fold_convert_loc (loc, type,
10270 						    TREE_OPERAND (arg1, 0)));
10271 	}
10272 
10273       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10274 	 operation, EXACT_DIV_EXPR.
10275 
10276 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10277 	 At one time others generated faster code, it's not clear if they do
10278 	 after the last round to changes to the DIV code in expmed.c.  */
10279       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10280 	  && multiple_of_p (type, arg0, arg1))
10281 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10282 				fold_convert (type, arg0),
10283 				fold_convert (type, arg1));
10284 
10285       strict_overflow_p = false;
10286       if (TREE_CODE (arg1) == INTEGER_CST
10287 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10288 					 &strict_overflow_p)))
10289 	{
10290 	  if (strict_overflow_p)
10291 	    fold_overflow_warning (("assuming signed overflow does not occur "
10292 				    "when simplifying division"),
10293 				   WARN_STRICT_OVERFLOW_MISC);
10294 	  return fold_convert_loc (loc, type, tem);
10295 	}
10296 
10297       return NULL_TREE;
10298 
10299     case CEIL_MOD_EXPR:
10300     case FLOOR_MOD_EXPR:
10301     case ROUND_MOD_EXPR:
10302     case TRUNC_MOD_EXPR:
10303       strict_overflow_p = false;
10304       if (TREE_CODE (arg1) == INTEGER_CST
10305 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10306 					 &strict_overflow_p)))
10307 	{
10308 	  if (strict_overflow_p)
10309 	    fold_overflow_warning (("assuming signed overflow does not occur "
10310 				    "when simplifying modulus"),
10311 				   WARN_STRICT_OVERFLOW_MISC);
10312 	  return fold_convert_loc (loc, type, tem);
10313 	}
10314 
10315       return NULL_TREE;
10316 
10317     case LROTATE_EXPR:
10318     case RROTATE_EXPR:
10319     case RSHIFT_EXPR:
10320     case LSHIFT_EXPR:
10321       /* Since negative shift count is not well-defined,
10322 	 don't try to compute it in the compiler.  */
10323       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10324 	return NULL_TREE;
10325 
10326       prec = element_precision (type);
10327 
10328       /* If we have a rotate of a bit operation with the rotate count and
10329 	 the second operand of the bit operation both constant,
10330 	 permute the two operations.  */
10331       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10332 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10333 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10334 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10335 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10336 	{
10337 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10338 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10339 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10340 				  fold_build2_loc (loc, code, type,
10341 						   arg00, arg1),
10342 				  fold_build2_loc (loc, code, type,
10343 						   arg01, arg1));
10344 	}
10345 
10346       /* Two consecutive rotates adding up to the some integer
10347 	 multiple of the precision of the type can be ignored.  */
10348       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10349 	  && TREE_CODE (arg0) == RROTATE_EXPR
10350 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10351 	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10352 			     prec) == 0)
10353 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10354 
10355       return NULL_TREE;
10356 
10357     case MIN_EXPR:
10358     case MAX_EXPR:
10359       goto associate;
10360 
10361     case TRUTH_ANDIF_EXPR:
10362       /* Note that the operands of this must be ints
10363 	 and their values must be 0 or 1.
10364 	 ("true" is a fixed value perhaps depending on the language.)  */
10365       /* If first arg is constant zero, return it.  */
10366       if (integer_zerop (arg0))
10367 	return fold_convert_loc (loc, type, arg0);
10368       /* FALLTHRU */
10369     case TRUTH_AND_EXPR:
10370       /* If either arg is constant true, drop it.  */
10371       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10372 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10373       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10374 	  /* Preserve sequence points.  */
10375 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10376 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10377       /* If second arg is constant zero, result is zero, but first arg
10378 	 must be evaluated.  */
10379       if (integer_zerop (arg1))
10380 	return omit_one_operand_loc (loc, type, arg1, arg0);
10381       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10382 	 case will be handled here.  */
10383       if (integer_zerop (arg0))
10384 	return omit_one_operand_loc (loc, type, arg0, arg1);
10385 
10386       /* !X && X is always false.  */
10387       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10388 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10389 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10390       /* X && !X is always false.  */
10391       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10392 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10393 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10394 
10395       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10396 	 means A >= Y && A != MAX, but in this case we know that
10397 	 A < X <= MAX.  */
10398 
10399       if (!TREE_SIDE_EFFECTS (arg0)
10400 	  && !TREE_SIDE_EFFECTS (arg1))
10401 	{
10402 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10403 	  if (tem && !operand_equal_p (tem, arg0, 0))
10404 	    return fold_build2_loc (loc, code, type, tem, arg1);
10405 
10406 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10407 	  if (tem && !operand_equal_p (tem, arg1, 0))
10408 	    return fold_build2_loc (loc, code, type, arg0, tem);
10409 	}
10410 
10411       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10412           != NULL_TREE)
10413         return tem;
10414 
10415       return NULL_TREE;
10416 
10417     case TRUTH_ORIF_EXPR:
10418       /* Note that the operands of this must be ints
10419 	 and their values must be 0 or true.
10420 	 ("true" is a fixed value perhaps depending on the language.)  */
10421       /* If first arg is constant true, return it.  */
10422       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10423 	return fold_convert_loc (loc, type, arg0);
10424       /* FALLTHRU */
10425     case TRUTH_OR_EXPR:
10426       /* If either arg is constant zero, drop it.  */
10427       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10428 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10429       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10430 	  /* Preserve sequence points.  */
10431 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10432 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10433       /* If second arg is constant true, result is true, but we must
10434 	 evaluate first arg.  */
10435       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10436 	return omit_one_operand_loc (loc, type, arg1, arg0);
10437       /* Likewise for first arg, but note this only occurs here for
10438 	 TRUTH_OR_EXPR.  */
10439       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10440 	return omit_one_operand_loc (loc, type, arg0, arg1);
10441 
10442       /* !X || X is always true.  */
10443       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10444 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10445 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10446       /* X || !X is always true.  */
10447       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10448 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10449 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10450 
10451       /* (X && !Y) || (!X && Y) is X ^ Y */
10452       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10453 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10454         {
10455 	  tree a0, a1, l0, l1, n0, n1;
10456 
10457 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10458 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10459 
10460 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10461 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10462 
10463 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10464 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10465 
10466 	  if ((operand_equal_p (n0, a0, 0)
10467 	       && operand_equal_p (n1, a1, 0))
10468 	      || (operand_equal_p (n0, a1, 0)
10469 		  && operand_equal_p (n1, a0, 0)))
10470 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10471 	}
10472 
10473       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10474           != NULL_TREE)
10475         return tem;
10476 
10477       return NULL_TREE;
10478 
10479     case TRUTH_XOR_EXPR:
10480       /* If the second arg is constant zero, drop it.  */
10481       if (integer_zerop (arg1))
10482 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10483       /* If the second arg is constant true, this is a logical inversion.  */
10484       if (integer_onep (arg1))
10485 	{
10486 	  tem = invert_truthvalue_loc (loc, arg0);
10487 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10488 	}
10489       /* Identical arguments cancel to zero.  */
10490       if (operand_equal_p (arg0, arg1, 0))
10491 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10492 
10493       /* !X ^ X is always true.  */
10494       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10495 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10496 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10497 
10498       /* X ^ !X is always true.  */
10499       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10500 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10501 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10502 
10503       return NULL_TREE;
10504 
10505     case EQ_EXPR:
10506     case NE_EXPR:
10507       STRIP_NOPS (arg0);
10508       STRIP_NOPS (arg1);
10509 
10510       tem = fold_comparison (loc, code, type, op0, op1);
10511       if (tem != NULL_TREE)
10512 	return tem;
10513 
10514       /* bool_var != 1 becomes !bool_var. */
10515       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10516           && code == NE_EXPR)
10517         return fold_convert_loc (loc, type,
10518 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10519 						  TREE_TYPE (arg0), arg0));
10520 
10521       /* bool_var == 0 becomes !bool_var. */
10522       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10523           && code == EQ_EXPR)
10524         return fold_convert_loc (loc, type,
10525 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10526 						  TREE_TYPE (arg0), arg0));
10527 
10528       /* !exp != 0 becomes !exp */
10529       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10530 	  && code == NE_EXPR)
10531         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10532 
10533       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
10534       if ((TREE_CODE (arg0) == PLUS_EXPR
10535 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10536 	   || TREE_CODE (arg0) == MINUS_EXPR)
10537 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10538 									0)),
10539 			      arg1, 0)
10540 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10541 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
10542 	{
10543 	  tree val = TREE_OPERAND (arg0, 1);
10544 	  val = fold_build2_loc (loc, code, type, val,
10545 				 build_int_cst (TREE_TYPE (val), 0));
10546 	  return omit_two_operands_loc (loc, type, val,
10547 					TREE_OPERAND (arg0, 0), arg1);
10548 	}
10549 
10550       /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
10551       if ((TREE_CODE (arg1) == PLUS_EXPR
10552 	   || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10553 	   || TREE_CODE (arg1) == MINUS_EXPR)
10554 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10555 									0)),
10556 			      arg0, 0)
10557 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10558 	      || POINTER_TYPE_P (TREE_TYPE (arg1))))
10559 	{
10560 	  tree val = TREE_OPERAND (arg1, 1);
10561 	  val = fold_build2_loc (loc, code, type, val,
10562 				 build_int_cst (TREE_TYPE (val), 0));
10563 	  return omit_two_operands_loc (loc, type, val,
10564 					TREE_OPERAND (arg1, 0), arg0);
10565 	}
10566 
10567       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
10568       if (TREE_CODE (arg0) == MINUS_EXPR
10569 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10570 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10571 									1)),
10572 			      arg1, 0)
10573 	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10574 	return omit_two_operands_loc (loc, type,
10575 				      code == NE_EXPR
10576 				      ? boolean_true_node : boolean_false_node,
10577 				      TREE_OPERAND (arg0, 1), arg1);
10578 
10579       /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
10580       if (TREE_CODE (arg1) == MINUS_EXPR
10581 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10582 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10583 									1)),
10584 			      arg0, 0)
10585 	  && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10586 	return omit_two_operands_loc (loc, type,
10587 				      code == NE_EXPR
10588 				      ? boolean_true_node : boolean_false_node,
10589 				      TREE_OPERAND (arg1, 1), arg0);
10590 
10591       /* If this is an EQ or NE comparison with zero and ARG0 is
10592 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10593 	 two operations, but the latter can be done in one less insn
10594 	 on machines that have only two-operand insns or on which a
10595 	 constant cannot be the first operand.  */
10596       if (TREE_CODE (arg0) == BIT_AND_EXPR
10597 	  && integer_zerop (arg1))
10598 	{
10599 	  tree arg00 = TREE_OPERAND (arg0, 0);
10600 	  tree arg01 = TREE_OPERAND (arg0, 1);
10601 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10602 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10603 	    {
10604 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10605 				      arg01, TREE_OPERAND (arg00, 1));
10606 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10607 				 build_int_cst (TREE_TYPE (arg0), 1));
10608 	      return fold_build2_loc (loc, code, type,
10609 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10610 				  arg1);
10611 	    }
10612 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10613 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10614 	    {
10615 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10616 				      arg00, TREE_OPERAND (arg01, 1));
10617 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10618 				 build_int_cst (TREE_TYPE (arg0), 1));
10619 	      return fold_build2_loc (loc, code, type,
10620 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10621 				  arg1);
10622 	    }
10623 	}
10624 
10625       /* If this is an NE or EQ comparison of zero against the result of a
10626 	 signed MOD operation whose second operand is a power of 2, make
10627 	 the MOD operation unsigned since it is simpler and equivalent.  */
10628       if (integer_zerop (arg1)
10629 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10630 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10631 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10632 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10633 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10634 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10635 	{
10636 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10637 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10638 				     fold_convert_loc (loc, newtype,
10639 						       TREE_OPERAND (arg0, 0)),
10640 				     fold_convert_loc (loc, newtype,
10641 						       TREE_OPERAND (arg0, 1)));
10642 
10643 	  return fold_build2_loc (loc, code, type, newmod,
10644 			      fold_convert_loc (loc, newtype, arg1));
10645 	}
10646 
10647       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10648 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10649 	 a single bit.  */
10650       if (TREE_CODE (arg0) == BIT_AND_EXPR
10651 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10652 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10653 	     == INTEGER_CST
10654 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10655 	  && integer_zerop (arg1))
10656 	{
10657 	  tree itype = TREE_TYPE (arg0);
10658 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10659 	  prec = TYPE_PRECISION (itype);
10660 
10661 	  /* Check for a valid shift count.  */
10662 	  if (wi::ltu_p (arg001, prec))
10663 	    {
10664 	      tree arg01 = TREE_OPERAND (arg0, 1);
10665 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10666 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10667 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10668 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10669 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10670 		{
10671 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10672 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10673 		  return fold_build2_loc (loc, code, type, tem,
10674 					  fold_convert_loc (loc, itype, arg1));
10675 		}
10676 	      /* Otherwise, for signed (arithmetic) shifts,
10677 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10678 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10679 	      else if (!TYPE_UNSIGNED (itype))
10680 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10681 				    arg000, build_int_cst (itype, 0));
10682 	      /* Otherwise, of unsigned (logical) shifts,
10683 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10684 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10685 	      else
10686 		return omit_one_operand_loc (loc, type,
10687 					 code == EQ_EXPR ? integer_one_node
10688 							 : integer_zero_node,
10689 					 arg000);
10690 	    }
10691 	}
10692 
10693       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10694 	 Similarly for NE_EXPR.  */
10695       if (TREE_CODE (arg0) == BIT_AND_EXPR
10696 	  && TREE_CODE (arg1) == INTEGER_CST
10697 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10698 	{
10699 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10700 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
10701 				   TREE_OPERAND (arg0, 1));
10702 	  tree dandnotc
10703 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10704 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10705 			       notc);
10706 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10707 	  if (integer_nonzerop (dandnotc))
10708 	    return omit_one_operand_loc (loc, type, rslt, arg0);
10709 	}
10710 
10711       /* If this is a comparison of a field, we may be able to simplify it.  */
10712       if ((TREE_CODE (arg0) == COMPONENT_REF
10713 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10714 	  /* Handle the constant case even without -O
10715 	     to make sure the warnings are given.  */
10716 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10717 	{
10718 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10719 	  if (t1)
10720 	    return t1;
10721 	}
10722 
10723       /* Optimize comparisons of strlen vs zero to a compare of the
10724 	 first character of the string vs zero.  To wit,
10725 		strlen(ptr) == 0   =>  *ptr == 0
10726 		strlen(ptr) != 0   =>  *ptr != 0
10727 	 Other cases should reduce to one of these two (or a constant)
10728 	 due to the return value of strlen being unsigned.  */
10729       if (TREE_CODE (arg0) == CALL_EXPR
10730 	  && integer_zerop (arg1))
10731 	{
10732 	  tree fndecl = get_callee_fndecl (arg0);
10733 
10734 	  if (fndecl
10735 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10736 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10737 	      && call_expr_nargs (arg0) == 1
10738 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10739 	    {
10740 	      tree iref = build_fold_indirect_ref_loc (loc,
10741 						   CALL_EXPR_ARG (arg0, 0));
10742 	      return fold_build2_loc (loc, code, type, iref,
10743 				  build_int_cst (TREE_TYPE (iref), 0));
10744 	    }
10745 	}
10746 
10747       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10748 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10749       if (TREE_CODE (arg0) == RSHIFT_EXPR
10750 	  && integer_zerop (arg1)
10751 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10752 	{
10753 	  tree arg00 = TREE_OPERAND (arg0, 0);
10754 	  tree arg01 = TREE_OPERAND (arg0, 1);
10755 	  tree itype = TREE_TYPE (arg00);
10756 	  if (wi::eq_p (arg01, element_precision (itype) - 1))
10757 	    {
10758 	      if (TYPE_UNSIGNED (itype))
10759 		{
10760 		  itype = signed_type_for (itype);
10761 		  arg00 = fold_convert_loc (loc, itype, arg00);
10762 		}
10763 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10764 				  type, arg00, build_zero_cst (itype));
10765 	    }
10766 	}
10767 
10768       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10769 	 (X & C) == 0 when C is a single bit.  */
10770       if (TREE_CODE (arg0) == BIT_AND_EXPR
10771 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10772 	  && integer_zerop (arg1)
10773 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10774 	{
10775 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10776 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10777 				 TREE_OPERAND (arg0, 1));
10778 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10779 				  type, tem,
10780 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10781 						    arg1));
10782 	}
10783 
10784       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10785 	 constant C is a power of two, i.e. a single bit.  */
10786       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10787 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10788 	  && integer_zerop (arg1)
10789 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10790 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10791 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10792 	{
10793 	  tree arg00 = TREE_OPERAND (arg0, 0);
10794 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10795 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10796 	}
10797 
10798       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10799 	 when is C is a power of two, i.e. a single bit.  */
10800       if (TREE_CODE (arg0) == BIT_AND_EXPR
10801 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10802 	  && integer_zerop (arg1)
10803 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10804 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10805 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10806 	{
10807 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10808 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10809 			     arg000, TREE_OPERAND (arg0, 1));
10810 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10811 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10812 	}
10813 
10814       if (integer_zerop (arg1)
10815 	  && tree_expr_nonzero_p (arg0))
10816         {
10817 	  tree res = constant_boolean_node (code==NE_EXPR, type);
10818 	  return omit_one_operand_loc (loc, type, res, arg0);
10819 	}
10820 
10821       /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
10822       if (TREE_CODE (arg0) == BIT_AND_EXPR
10823 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10824 	{
10825 	  tree arg00 = TREE_OPERAND (arg0, 0);
10826 	  tree arg01 = TREE_OPERAND (arg0, 1);
10827 	  tree arg10 = TREE_OPERAND (arg1, 0);
10828 	  tree arg11 = TREE_OPERAND (arg1, 1);
10829 	  tree itype = TREE_TYPE (arg0);
10830 
10831 	  if (operand_equal_p (arg01, arg11, 0))
10832 	    return fold_build2_loc (loc, code, type,
10833 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10834 					     fold_build2_loc (loc,
10835 							  BIT_XOR_EXPR, itype,
10836 							  arg00, arg10),
10837 					     arg01),
10838 				build_zero_cst (itype));
10839 
10840 	  if (operand_equal_p (arg01, arg10, 0))
10841 	    return fold_build2_loc (loc, code, type,
10842 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10843 					     fold_build2_loc (loc,
10844 							  BIT_XOR_EXPR, itype,
10845 							  arg00, arg11),
10846 					     arg01),
10847 				build_zero_cst (itype));
10848 
10849 	  if (operand_equal_p (arg00, arg11, 0))
10850 	    return fold_build2_loc (loc, code, type,
10851 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10852 					     fold_build2_loc (loc,
10853 							  BIT_XOR_EXPR, itype,
10854 							  arg01, arg10),
10855 					     arg00),
10856 				build_zero_cst (itype));
10857 
10858 	  if (operand_equal_p (arg00, arg10, 0))
10859 	    return fold_build2_loc (loc, code, type,
10860 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10861 					     fold_build2_loc (loc,
10862 							  BIT_XOR_EXPR, itype,
10863 							  arg01, arg11),
10864 					     arg00),
10865 				build_zero_cst (itype));
10866 	}
10867 
10868       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10869 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10870 	{
10871 	  tree arg00 = TREE_OPERAND (arg0, 0);
10872 	  tree arg01 = TREE_OPERAND (arg0, 1);
10873 	  tree arg10 = TREE_OPERAND (arg1, 0);
10874 	  tree arg11 = TREE_OPERAND (arg1, 1);
10875 	  tree itype = TREE_TYPE (arg0);
10876 
10877 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10878 	     operand_equal_p guarantees no side-effects so we don't need
10879 	     to use omit_one_operand on Z.  */
10880 	  if (operand_equal_p (arg01, arg11, 0))
10881 	    return fold_build2_loc (loc, code, type, arg00,
10882 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10883 						      arg10));
10884 	  if (operand_equal_p (arg01, arg10, 0))
10885 	    return fold_build2_loc (loc, code, type, arg00,
10886 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10887 						      arg11));
10888 	  if (operand_equal_p (arg00, arg11, 0))
10889 	    return fold_build2_loc (loc, code, type, arg01,
10890 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10891 						      arg10));
10892 	  if (operand_equal_p (arg00, arg10, 0))
10893 	    return fold_build2_loc (loc, code, type, arg01,
10894 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10895 						      arg11));
10896 
10897 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10898 	  if (TREE_CODE (arg01) == INTEGER_CST
10899 	      && TREE_CODE (arg11) == INTEGER_CST)
10900 	    {
10901 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10902 				     fold_convert_loc (loc, itype, arg11));
10903 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10904 	      return fold_build2_loc (loc, code, type, tem,
10905 				      fold_convert_loc (loc, itype, arg10));
10906 	    }
10907 	}
10908 
10909       /* Attempt to simplify equality/inequality comparisons of complex
10910 	 values.  Only lower the comparison if the result is known or
10911 	 can be simplified to a single scalar comparison.  */
10912       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10913 	   || TREE_CODE (arg0) == COMPLEX_CST)
10914 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10915 	      || TREE_CODE (arg1) == COMPLEX_CST))
10916 	{
10917 	  tree real0, imag0, real1, imag1;
10918 	  tree rcond, icond;
10919 
10920 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
10921 	    {
10922 	      real0 = TREE_OPERAND (arg0, 0);
10923 	      imag0 = TREE_OPERAND (arg0, 1);
10924 	    }
10925 	  else
10926 	    {
10927 	      real0 = TREE_REALPART (arg0);
10928 	      imag0 = TREE_IMAGPART (arg0);
10929 	    }
10930 
10931 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
10932 	    {
10933 	      real1 = TREE_OPERAND (arg1, 0);
10934 	      imag1 = TREE_OPERAND (arg1, 1);
10935 	    }
10936 	  else
10937 	    {
10938 	      real1 = TREE_REALPART (arg1);
10939 	      imag1 = TREE_IMAGPART (arg1);
10940 	    }
10941 
10942 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
10943 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10944 	    {
10945 	      if (integer_zerop (rcond))
10946 		{
10947 		  if (code == EQ_EXPR)
10948 		    return omit_two_operands_loc (loc, type, boolean_false_node,
10949 					      imag0, imag1);
10950 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10951 		}
10952 	      else
10953 		{
10954 		  if (code == NE_EXPR)
10955 		    return omit_two_operands_loc (loc, type, boolean_true_node,
10956 					      imag0, imag1);
10957 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10958 		}
10959 	    }
10960 
10961 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
10962 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
10963 	    {
10964 	      if (integer_zerop (icond))
10965 		{
10966 		  if (code == EQ_EXPR)
10967 		    return omit_two_operands_loc (loc, type, boolean_false_node,
10968 					      real0, real1);
10969 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10970 		}
10971 	      else
10972 		{
10973 		  if (code == NE_EXPR)
10974 		    return omit_two_operands_loc (loc, type, boolean_true_node,
10975 					      real0, real1);
10976 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10977 		}
10978 	    }
10979 	}
10980 
10981       return NULL_TREE;
10982 
10983     case LT_EXPR:
10984     case GT_EXPR:
10985     case LE_EXPR:
10986     case GE_EXPR:
10987       tem = fold_comparison (loc, code, type, op0, op1);
10988       if (tem != NULL_TREE)
10989 	return tem;
10990 
10991       /* Transform comparisons of the form X +- C CMP X.  */
10992       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10993 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10994 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10995 	       && !HONOR_SNANS (arg0))
10996 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10997 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10998 	{
10999 	  tree arg01 = TREE_OPERAND (arg0, 1);
11000 	  enum tree_code code0 = TREE_CODE (arg0);
11001 	  int is_positive;
11002 
11003 	  if (TREE_CODE (arg01) == REAL_CST)
11004 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11005 	  else
11006 	    is_positive = tree_int_cst_sgn (arg01);
11007 
11008 	  /* (X - c) > X becomes false.  */
11009 	  if (code == GT_EXPR
11010 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11011 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11012 	    {
11013 	      if (TREE_CODE (arg01) == INTEGER_CST
11014 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11015 		fold_overflow_warning (("assuming signed overflow does not "
11016 					"occur when assuming that (X - c) > X "
11017 					"is always false"),
11018 				       WARN_STRICT_OVERFLOW_ALL);
11019 	      return constant_boolean_node (0, type);
11020 	    }
11021 
11022 	  /* Likewise (X + c) < X becomes false.  */
11023 	  if (code == LT_EXPR
11024 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11025 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11026 	    {
11027 	      if (TREE_CODE (arg01) == INTEGER_CST
11028 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 		fold_overflow_warning (("assuming signed overflow does not "
11030 					"occur when assuming that "
11031 					"(X + c) < X is always false"),
11032 				       WARN_STRICT_OVERFLOW_ALL);
11033 	      return constant_boolean_node (0, type);
11034 	    }
11035 
11036 	  /* Convert (X - c) <= X to true.  */
11037 	  if (!HONOR_NANS (arg1)
11038 	      && code == LE_EXPR
11039 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11040 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11041 	    {
11042 	      if (TREE_CODE (arg01) == INTEGER_CST
11043 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11044 		fold_overflow_warning (("assuming signed overflow does not "
11045 					"occur when assuming that "
11046 					"(X - c) <= X is always true"),
11047 				       WARN_STRICT_OVERFLOW_ALL);
11048 	      return constant_boolean_node (1, type);
11049 	    }
11050 
11051 	  /* Convert (X + c) >= X to true.  */
11052 	  if (!HONOR_NANS (arg1)
11053 	      && code == GE_EXPR
11054 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11055 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11056 	    {
11057 	      if (TREE_CODE (arg01) == INTEGER_CST
11058 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11059 		fold_overflow_warning (("assuming signed overflow does not "
11060 					"occur when assuming that "
11061 					"(X + c) >= X is always true"),
11062 				       WARN_STRICT_OVERFLOW_ALL);
11063 	      return constant_boolean_node (1, type);
11064 	    }
11065 
11066 	  if (TREE_CODE (arg01) == INTEGER_CST)
11067 	    {
11068 	      /* Convert X + c > X and X - c < X to true for integers.  */
11069 	      if (code == GT_EXPR
11070 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11071 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11072 		{
11073 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11074 		    fold_overflow_warning (("assuming signed overflow does "
11075 					    "not occur when assuming that "
11076 					    "(X + c) > X is always true"),
11077 					   WARN_STRICT_OVERFLOW_ALL);
11078 		  return constant_boolean_node (1, type);
11079 		}
11080 
11081 	      if (code == LT_EXPR
11082 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11083 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11084 		{
11085 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11086 		    fold_overflow_warning (("assuming signed overflow does "
11087 					    "not occur when assuming that "
11088 					    "(X - c) < X is always true"),
11089 					   WARN_STRICT_OVERFLOW_ALL);
11090 		  return constant_boolean_node (1, type);
11091 		}
11092 
11093 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
11094 	      if (code == LE_EXPR
11095 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11096 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11097 		{
11098 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11099 		    fold_overflow_warning (("assuming signed overflow does "
11100 					    "not occur when assuming that "
11101 					    "(X + c) <= X is always false"),
11102 					   WARN_STRICT_OVERFLOW_ALL);
11103 		  return constant_boolean_node (0, type);
11104 		}
11105 
11106 	      if (code == GE_EXPR
11107 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11108 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11109 		{
11110 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11111 		    fold_overflow_warning (("assuming signed overflow does "
11112 					    "not occur when assuming that "
11113 					    "(X - c) >= X is always false"),
11114 					   WARN_STRICT_OVERFLOW_ALL);
11115 		  return constant_boolean_node (0, type);
11116 		}
11117 	    }
11118 	}
11119 
11120       /* If we are comparing an ABS_EXPR with a constant, we can
11121 	 convert all the cases into explicit comparisons, but they may
11122 	 well not be faster than doing the ABS and one comparison.
11123 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11124 	 and a comparison, and is probably faster.  */
11125       if (code == LE_EXPR
11126 	  && TREE_CODE (arg1) == INTEGER_CST
11127 	  && TREE_CODE (arg0) == ABS_EXPR
11128 	  && ! TREE_SIDE_EFFECTS (arg0)
11129 	  && (0 != (tem = negate_expr (arg1)))
11130 	  && TREE_CODE (tem) == INTEGER_CST
11131 	  && !TREE_OVERFLOW (tem))
11132 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11133 			    build2 (GE_EXPR, type,
11134 				    TREE_OPERAND (arg0, 0), tem),
11135 			    build2 (LE_EXPR, type,
11136 				    TREE_OPERAND (arg0, 0), arg1));
11137 
11138       /* Convert ABS_EXPR<x> >= 0 to true.  */
11139       strict_overflow_p = false;
11140       if (code == GE_EXPR
11141 	  && (integer_zerop (arg1)
11142 	      || (! HONOR_NANS (arg0)
11143 		  && real_zerop (arg1)))
11144 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11145 	{
11146 	  if (strict_overflow_p)
11147 	    fold_overflow_warning (("assuming signed overflow does not occur "
11148 				    "when simplifying comparison of "
11149 				    "absolute value and zero"),
11150 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11151 	  return omit_one_operand_loc (loc, type,
11152 				       constant_boolean_node (true, type),
11153 				       arg0);
11154 	}
11155 
11156       /* Convert ABS_EXPR<x> < 0 to false.  */
11157       strict_overflow_p = false;
11158       if (code == LT_EXPR
11159 	  && (integer_zerop (arg1) || real_zerop (arg1))
11160 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11161 	{
11162 	  if (strict_overflow_p)
11163 	    fold_overflow_warning (("assuming signed overflow does not occur "
11164 				    "when simplifying comparison of "
11165 				    "absolute value and zero"),
11166 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11167 	  return omit_one_operand_loc (loc, type,
11168 				       constant_boolean_node (false, type),
11169 				       arg0);
11170 	}
11171 
11172       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11173 	 and similarly for >= into !=.  */
11174       if ((code == LT_EXPR || code == GE_EXPR)
11175 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11176 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11177 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11178 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11179 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11180 				   TREE_OPERAND (arg1, 1)),
11181 			   build_zero_cst (TREE_TYPE (arg0)));
11182 
11183       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
11184 	 otherwise Y might be >= # of bits in X's type and thus e.g.
11185 	 (unsigned char) (1 << Y) for Y 15 might be 0.
11186 	 If the cast is widening, then 1 << Y should have unsigned type,
11187 	 otherwise if Y is number of bits in the signed shift type minus 1,
11188 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
11189 	 31 might be 0xffffffff80000000.  */
11190       if ((code == LT_EXPR || code == GE_EXPR)
11191 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11192 	  && CONVERT_EXPR_P (arg1)
11193 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11194 	  && (element_precision (TREE_TYPE (arg1))
11195 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11196 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11197 	      || (element_precision (TREE_TYPE (arg1))
11198 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11199 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11200 	{
11201 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11202 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11203 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11204 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11205 			     build_zero_cst (TREE_TYPE (arg0)));
11206 	}
11207 
11208       return NULL_TREE;
11209 
11210     case UNORDERED_EXPR:
11211     case ORDERED_EXPR:
11212     case UNLT_EXPR:
11213     case UNLE_EXPR:
11214     case UNGT_EXPR:
11215     case UNGE_EXPR:
11216     case UNEQ_EXPR:
11217     case LTGT_EXPR:
11218       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11219       {
11220 	tree targ0 = strip_float_extensions (arg0);
11221 	tree targ1 = strip_float_extensions (arg1);
11222 	tree newtype = TREE_TYPE (targ0);
11223 
11224 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11225 	  newtype = TREE_TYPE (targ1);
11226 
11227 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11228 	  return fold_build2_loc (loc, code, type,
11229 			      fold_convert_loc (loc, newtype, targ0),
11230 			      fold_convert_loc (loc, newtype, targ1));
11231       }
11232 
11233       return NULL_TREE;
11234 
11235     case COMPOUND_EXPR:
11236       /* When pedantic, a compound expression can be neither an lvalue
11237 	 nor an integer constant expression.  */
11238       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11239 	return NULL_TREE;
11240       /* Don't let (0, 0) be null pointer constant.  */
11241       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11242 				 : fold_convert_loc (loc, type, arg1);
11243       return pedantic_non_lvalue_loc (loc, tem);
11244 
11245     case ASSERT_EXPR:
11246       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11247       gcc_unreachable ();
11248 
11249     default:
11250       return NULL_TREE;
11251     } /* switch (code) */
11252 }
11253 
11254 /* Used by contains_label_[p1].  */
11255 
11256 struct contains_label_data
11257 {
11258   hash_set<tree> *pset;
11259   bool inside_switch_p;
11260 };
11261 
11262 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11263    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11264    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11265 
11266 static tree
11267 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11268 {
11269   contains_label_data *d = (contains_label_data *) data;
11270   switch (TREE_CODE (*tp))
11271     {
11272     case LABEL_EXPR:
11273       return *tp;
11274 
11275     case CASE_LABEL_EXPR:
11276       if (!d->inside_switch_p)
11277 	return *tp;
11278       return NULL_TREE;
11279 
11280     case SWITCH_EXPR:
11281       if (!d->inside_switch_p)
11282 	{
11283 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11284 	    return *tp;
11285 	  d->inside_switch_p = true;
11286 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11287 	    return *tp;
11288 	  d->inside_switch_p = false;
11289 	  *walk_subtrees = 0;
11290 	}
11291       return NULL_TREE;
11292 
11293     case GOTO_EXPR:
11294       *walk_subtrees = 0;
11295       return NULL_TREE;
11296 
11297     default:
11298       return NULL_TREE;
11299     }
11300 }
11301 
11302 /* Return whether the sub-tree ST contains a label which is accessible from
11303    outside the sub-tree.  */
11304 
11305 static bool
11306 contains_label_p (tree st)
11307 {
11308   hash_set<tree> pset;
11309   contains_label_data data = { &pset, false };
11310   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11311 }
11312 
11313 /* Fold a ternary expression of code CODE and type TYPE with operands
11314    OP0, OP1, and OP2.  Return the folded expression if folding is
11315    successful.  Otherwise, return NULL_TREE.  */
11316 
11317 tree
11318 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11319 		  tree op0, tree op1, tree op2)
11320 {
11321   tree tem;
11322   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11323   enum tree_code_class kind = TREE_CODE_CLASS (code);
11324 
11325   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11326 	      && TREE_CODE_LENGTH (code) == 3);
11327 
11328   /* If this is a commutative operation, and OP0 is a constant, move it
11329      to OP1 to reduce the number of tests below.  */
11330   if (commutative_ternary_tree_code (code)
11331       && tree_swap_operands_p (op0, op1))
11332     return fold_build3_loc (loc, code, type, op1, op0, op2);
11333 
11334   tem = generic_simplify (loc, code, type, op0, op1, op2);
11335   if (tem)
11336     return tem;
11337 
11338   /* Strip any conversions that don't change the mode.  This is safe
11339      for every expression, except for a comparison expression because
11340      its signedness is derived from its operands.  So, in the latter
11341      case, only strip conversions that don't change the signedness.
11342 
11343      Note that this is done as an internal manipulation within the
11344      constant folder, in order to find the simplest representation of
11345      the arguments so that their form can be studied.  In any cases,
11346      the appropriate type conversions should be put back in the tree
11347      that will get out of the constant folder.  */
11348   if (op0)
11349     {
11350       arg0 = op0;
11351       STRIP_NOPS (arg0);
11352     }
11353 
11354   if (op1)
11355     {
11356       arg1 = op1;
11357       STRIP_NOPS (arg1);
11358     }
11359 
11360   if (op2)
11361     {
11362       arg2 = op2;
11363       STRIP_NOPS (arg2);
11364     }
11365 
11366   switch (code)
11367     {
11368     case COMPONENT_REF:
11369       if (TREE_CODE (arg0) == CONSTRUCTOR
11370 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11371 	{
11372 	  unsigned HOST_WIDE_INT idx;
11373 	  tree field, value;
11374 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11375 	    if (field == arg1)
11376 	      return value;
11377 	}
11378       return NULL_TREE;
11379 
11380     case COND_EXPR:
11381     case VEC_COND_EXPR:
11382       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11383 	 so all simple results must be passed through pedantic_non_lvalue.  */
11384       if (TREE_CODE (arg0) == INTEGER_CST)
11385 	{
11386 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11387 	  tem = integer_zerop (arg0) ? op2 : op1;
11388 	  /* Only optimize constant conditions when the selected branch
11389 	     has the same type as the COND_EXPR.  This avoids optimizing
11390              away "c ? x : throw", where the throw has a void type.
11391              Avoid throwing away that operand which contains label.  */
11392           if ((!TREE_SIDE_EFFECTS (unused_op)
11393                || !contains_label_p (unused_op))
11394               && (! VOID_TYPE_P (TREE_TYPE (tem))
11395                   || VOID_TYPE_P (type)))
11396 	    return pedantic_non_lvalue_loc (loc, tem);
11397 	  return NULL_TREE;
11398 	}
11399       else if (TREE_CODE (arg0) == VECTOR_CST)
11400 	{
11401 	  if ((TREE_CODE (arg1) == VECTOR_CST
11402 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11403 	      && (TREE_CODE (arg2) == VECTOR_CST
11404 		  || TREE_CODE (arg2) == CONSTRUCTOR))
11405 	    {
11406 	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11407 	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11408 	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11409 	      for (i = 0; i < nelts; i++)
11410 		{
11411 		  tree val = VECTOR_CST_ELT (arg0, i);
11412 		  if (integer_all_onesp (val))
11413 		    sel[i] = i;
11414 		  else if (integer_zerop (val))
11415 		    sel[i] = nelts + i;
11416 		  else /* Currently unreachable.  */
11417 		    return NULL_TREE;
11418 		}
11419 	      tree t = fold_vec_perm (type, arg1, arg2, sel);
11420 	      if (t != NULL_TREE)
11421 		return t;
11422 	    }
11423 	}
11424 
11425       /* If we have A op B ? A : C, we may be able to convert this to a
11426 	 simpler expression, depending on the operation and the values
11427 	 of B and C.  Signed zeros prevent all of these transformations,
11428 	 for reasons given above each one.
11429 
11430          Also try swapping the arguments and inverting the conditional.  */
11431       if (COMPARISON_CLASS_P (arg0)
11432 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11433 					     arg1, TREE_OPERAND (arg0, 1))
11434 	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11435 	{
11436 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11437 	  if (tem)
11438 	    return tem;
11439 	}
11440 
11441       if (COMPARISON_CLASS_P (arg0)
11442 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11443 					     op2,
11444 					     TREE_OPERAND (arg0, 1))
11445 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11446 	{
11447 	  location_t loc0 = expr_location_or (arg0, loc);
11448 	  tem = fold_invert_truthvalue (loc0, arg0);
11449 	  if (tem && COMPARISON_CLASS_P (tem))
11450 	    {
11451 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11452 	      if (tem)
11453 		return tem;
11454 	    }
11455 	}
11456 
11457       /* If the second operand is simpler than the third, swap them
11458 	 since that produces better jump optimization results.  */
11459       if (truth_value_p (TREE_CODE (arg0))
11460 	  && tree_swap_operands_p (op1, op2))
11461 	{
11462 	  location_t loc0 = expr_location_or (arg0, loc);
11463 	  /* See if this can be inverted.  If it can't, possibly because
11464 	     it was a floating-point inequality comparison, don't do
11465 	     anything.  */
11466 	  tem = fold_invert_truthvalue (loc0, arg0);
11467 	  if (tem)
11468 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11469 	}
11470 
11471       /* Convert A ? 1 : 0 to simply A.  */
11472       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11473 				 : (integer_onep (op1)
11474 				    && !VECTOR_TYPE_P (type)))
11475 	  && integer_zerop (op2)
11476 	  /* If we try to convert OP0 to our type, the
11477 	     call to fold will try to move the conversion inside
11478 	     a COND, which will recurse.  In that case, the COND_EXPR
11479 	     is probably the best choice, so leave it alone.  */
11480 	  && type == TREE_TYPE (arg0))
11481 	return pedantic_non_lvalue_loc (loc, arg0);
11482 
11483       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11484 	 over COND_EXPR in cases such as floating point comparisons.  */
11485       if (integer_zerop (op1)
11486 	  && code == COND_EXPR
11487 	  && integer_onep (op2)
11488 	  && !VECTOR_TYPE_P (type)
11489 	  && truth_value_p (TREE_CODE (arg0)))
11490 	return pedantic_non_lvalue_loc (loc,
11491 				    fold_convert_loc (loc, type,
11492 					      invert_truthvalue_loc (loc,
11493 								     arg0)));
11494 
11495       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11496       if (TREE_CODE (arg0) == LT_EXPR
11497 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11498 	  && integer_zerop (op2)
11499 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11500 	{
11501 	  /* sign_bit_p looks through both zero and sign extensions,
11502 	     but for this optimization only sign extensions are
11503 	     usable.  */
11504 	  tree tem2 = TREE_OPERAND (arg0, 0);
11505 	  while (tem != tem2)
11506 	    {
11507 	      if (TREE_CODE (tem2) != NOP_EXPR
11508 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11509 		{
11510 		  tem = NULL_TREE;
11511 		  break;
11512 		}
11513 	      tem2 = TREE_OPERAND (tem2, 0);
11514 	    }
11515 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11516 	     If <sign bit of A> has wider type than A, bits outside
11517 	     of A's precision in <sign bit of A> need to be checked.
11518 	     If they are all 0, this optimization needs to be done
11519 	     in unsigned A's type, if they are all 1 in signed A's type,
11520 	     otherwise this can't be done.  */
11521 	  if (tem
11522 	      && TYPE_PRECISION (TREE_TYPE (tem))
11523 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11524 	      && TYPE_PRECISION (TREE_TYPE (tem))
11525 		 < TYPE_PRECISION (type))
11526 	    {
11527 	      int inner_width, outer_width;
11528 	      tree tem_type;
11529 
11530 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11531 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11532 	      if (outer_width > TYPE_PRECISION (type))
11533 		outer_width = TYPE_PRECISION (type);
11534 
11535 	      wide_int mask = wi::shifted_mask
11536 		(inner_width, outer_width - inner_width, false,
11537 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11538 
11539 	      wide_int common = mask & arg1;
11540 	      if (common == mask)
11541 		{
11542 		  tem_type = signed_type_for (TREE_TYPE (tem));
11543 		  tem = fold_convert_loc (loc, tem_type, tem);
11544 		}
11545 	      else if (common == 0)
11546 		{
11547 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11548 		  tem = fold_convert_loc (loc, tem_type, tem);
11549 		}
11550 	      else
11551 		tem = NULL;
11552 	    }
11553 
11554 	  if (tem)
11555 	    return
11556 	      fold_convert_loc (loc, type,
11557 				fold_build2_loc (loc, BIT_AND_EXPR,
11558 					     TREE_TYPE (tem), tem,
11559 					     fold_convert_loc (loc,
11560 							       TREE_TYPE (tem),
11561 							       arg1)));
11562 	}
11563 
11564       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11565 	 already handled above.  */
11566       if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 	  && integer_onep (TREE_OPERAND (arg0, 1))
11568 	  && integer_zerop (op2)
11569 	  && integer_pow2p (arg1))
11570 	{
11571 	  tree tem = TREE_OPERAND (arg0, 0);
11572 	  STRIP_NOPS (tem);
11573 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11574 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11575               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11576 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11577 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11578 				    fold_convert_loc (loc, type,
11579 						      TREE_OPERAND (tem, 0)),
11580 				    op1);
11581 	}
11582 
11583       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11584 	 is probably obsolete because the first operand should be a
11585 	 truth value (that's why we have the two cases above), but let's
11586 	 leave it in until we can confirm this for all front-ends.  */
11587       if (integer_zerop (op2)
11588 	  && TREE_CODE (arg0) == NE_EXPR
11589 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11590 	  && integer_pow2p (arg1)
11591 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11592 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11593 			      arg1, OEP_ONLY_CONST)
11594 	  /* operand_equal_p compares just value, not precision, so e.g.
11595 	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
11596 	     second operand 32-bit -128, which is not a power of two (or vice
11597 	     versa.  */
11598 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11599 	return pedantic_non_lvalue_loc (loc,
11600 					fold_convert_loc (loc, type,
11601 							  TREE_OPERAND (arg0,
11602 									0)));
11603 
11604       /* Disable the transformations below for vectors, since
11605 	 fold_binary_op_with_conditional_arg may undo them immediately,
11606 	 yielding an infinite loop.  */
11607       if (code == VEC_COND_EXPR)
11608 	return NULL_TREE;
11609 
11610       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11611       if (integer_zerop (op2)
11612 	  && truth_value_p (TREE_CODE (arg0))
11613 	  && truth_value_p (TREE_CODE (arg1))
11614 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11615 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11616 							   : TRUTH_ANDIF_EXPR,
11617 				type, fold_convert_loc (loc, type, arg0), op1);
11618 
11619       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11620       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11621 	  && truth_value_p (TREE_CODE (arg0))
11622 	  && truth_value_p (TREE_CODE (arg1))
11623 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11624 	{
11625 	  location_t loc0 = expr_location_or (arg0, loc);
11626 	  /* Only perform transformation if ARG0 is easily inverted.  */
11627 	  tem = fold_invert_truthvalue (loc0, arg0);
11628 	  if (tem)
11629 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11630 					 ? BIT_IOR_EXPR
11631 					 : TRUTH_ORIF_EXPR,
11632 				    type, fold_convert_loc (loc, type, tem),
11633 				    op1);
11634 	}
11635 
11636       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11637       if (integer_zerop (arg1)
11638 	  && truth_value_p (TREE_CODE (arg0))
11639 	  && truth_value_p (TREE_CODE (op2))
11640 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11641 	{
11642 	  location_t loc0 = expr_location_or (arg0, loc);
11643 	  /* Only perform transformation if ARG0 is easily inverted.  */
11644 	  tem = fold_invert_truthvalue (loc0, arg0);
11645 	  if (tem)
11646 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11647 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11648 				    type, fold_convert_loc (loc, type, tem),
11649 				    op2);
11650 	}
11651 
11652       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11653       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11654 	  && truth_value_p (TREE_CODE (arg0))
11655 	  && truth_value_p (TREE_CODE (op2))
11656 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11657 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11658 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11659 				type, fold_convert_loc (loc, type, arg0), op2);
11660 
11661       return NULL_TREE;
11662 
11663     case CALL_EXPR:
11664       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11665 	 of fold_ternary on them.  */
11666       gcc_unreachable ();
11667 
11668     case BIT_FIELD_REF:
11669       if (TREE_CODE (arg0) == VECTOR_CST
11670 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11671 	      || (TREE_CODE (type) == VECTOR_TYPE
11672 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11673 	{
11674 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11675 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11676 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11677 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11678 
11679 	  if (n != 0
11680 	      && (idx % width) == 0
11681 	      && (n % width) == 0
11682 	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11683 	    {
11684 	      idx = idx / width;
11685 	      n = n / width;
11686 
11687 	      if (TREE_CODE (arg0) == VECTOR_CST)
11688 		{
11689 		  if (n == 1)
11690 		    return VECTOR_CST_ELT (arg0, idx);
11691 
11692 		  tree *vals = XALLOCAVEC (tree, n);
11693 		  for (unsigned i = 0; i < n; ++i)
11694 		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11695 		  return build_vector (type, vals);
11696 		}
11697 	    }
11698 	}
11699 
11700       /* On constants we can use native encode/interpret to constant
11701          fold (nearly) all BIT_FIELD_REFs.  */
11702       if (CONSTANT_CLASS_P (arg0)
11703 	  && can_native_interpret_type_p (type)
11704 	  && BITS_PER_UNIT == 8)
11705 	{
11706 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11707 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11708 	  /* Limit us to a reasonable amount of work.  To relax the
11709 	     other limitations we need bit-shifting of the buffer
11710 	     and rounding up the size.  */
11711 	  if (bitpos % BITS_PER_UNIT == 0
11712 	      && bitsize % BITS_PER_UNIT == 0
11713 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11714 	    {
11715 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11716 	      unsigned HOST_WIDE_INT len
11717 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11718 				      bitpos / BITS_PER_UNIT);
11719 	      if (len > 0
11720 		  && len * BITS_PER_UNIT >= bitsize)
11721 		{
11722 		  tree v = native_interpret_expr (type, b,
11723 						  bitsize / BITS_PER_UNIT);
11724 		  if (v)
11725 		    return v;
11726 		}
11727 	    }
11728 	}
11729 
11730       return NULL_TREE;
11731 
11732     case FMA_EXPR:
11733       /* For integers we can decompose the FMA if possible.  */
11734       if (TREE_CODE (arg0) == INTEGER_CST
11735 	  && TREE_CODE (arg1) == INTEGER_CST)
11736 	return fold_build2_loc (loc, PLUS_EXPR, type,
11737 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11738       if (integer_zerop (arg2))
11739 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11740 
11741       return fold_fma (loc, type, arg0, arg1, arg2);
11742 
11743     case VEC_PERM_EXPR:
11744       if (TREE_CODE (arg2) == VECTOR_CST)
11745 	{
11746 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11747 	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11748 	  unsigned char *sel2 = sel + nelts;
11749 	  bool need_mask_canon = false;
11750 	  bool need_mask_canon2 = false;
11751 	  bool all_in_vec0 = true;
11752 	  bool all_in_vec1 = true;
11753 	  bool maybe_identity = true;
11754 	  bool single_arg = (op0 == op1);
11755 	  bool changed = false;
11756 
11757 	  mask2 = 2 * nelts - 1;
11758 	  mask = single_arg ? (nelts - 1) : mask2;
11759 	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11760 	  for (i = 0; i < nelts; i++)
11761 	    {
11762 	      tree val = VECTOR_CST_ELT (arg2, i);
11763 	      if (TREE_CODE (val) != INTEGER_CST)
11764 		return NULL_TREE;
11765 
11766 	      /* Make sure that the perm value is in an acceptable
11767 		 range.  */
11768 	      wide_int t = val;
11769 	      need_mask_canon |= wi::gtu_p (t, mask);
11770 	      need_mask_canon2 |= wi::gtu_p (t, mask2);
11771 	      sel[i] = t.to_uhwi () & mask;
11772 	      sel2[i] = t.to_uhwi () & mask2;
11773 
11774 	      if (sel[i] < nelts)
11775 		all_in_vec1 = false;
11776 	      else
11777 		all_in_vec0 = false;
11778 
11779 	      if ((sel[i] & (nelts-1)) != i)
11780 		maybe_identity = false;
11781 	    }
11782 
11783 	  if (maybe_identity)
11784 	    {
11785 	      if (all_in_vec0)
11786 		return op0;
11787 	      if (all_in_vec1)
11788 		return op1;
11789 	    }
11790 
11791 	  if (all_in_vec0)
11792 	    op1 = op0;
11793 	  else if (all_in_vec1)
11794 	    {
11795 	      op0 = op1;
11796 	      for (i = 0; i < nelts; i++)
11797 		sel[i] -= nelts;
11798 	      need_mask_canon = true;
11799 	    }
11800 
11801 	  if ((TREE_CODE (op0) == VECTOR_CST
11802 	       || TREE_CODE (op0) == CONSTRUCTOR)
11803 	      && (TREE_CODE (op1) == VECTOR_CST
11804 		  || TREE_CODE (op1) == CONSTRUCTOR))
11805 	    {
11806 	      tree t = fold_vec_perm (type, op0, op1, sel);
11807 	      if (t != NULL_TREE)
11808 		return t;
11809 	    }
11810 
11811 	  if (op0 == op1 && !single_arg)
11812 	    changed = true;
11813 
11814 	  /* Some targets are deficient and fail to expand a single
11815 	     argument permutation while still allowing an equivalent
11816 	     2-argument version.  */
11817 	  if (need_mask_canon && arg2 == op2
11818 	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11819 	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11820 	    {
11821 	      need_mask_canon = need_mask_canon2;
11822 	      sel = sel2;
11823 	    }
11824 
11825 	  if (need_mask_canon && arg2 == op2)
11826 	    {
11827 	      tree *tsel = XALLOCAVEC (tree, nelts);
11828 	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11829 	      for (i = 0; i < nelts; i++)
11830 		tsel[i] = build_int_cst (eltype, sel[i]);
11831 	      op2 = build_vector (TREE_TYPE (arg2), tsel);
11832 	      changed = true;
11833 	    }
11834 
11835 	  if (changed)
11836 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11837 	}
11838       return NULL_TREE;
11839 
11840     case BIT_INSERT_EXPR:
11841       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
11842       if (TREE_CODE (arg0) == INTEGER_CST
11843 	  && TREE_CODE (arg1) == INTEGER_CST)
11844 	{
11845 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11846 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11847 	  wide_int tem = wi::bit_and (arg0,
11848 				      wi::shifted_mask (bitpos, bitsize, true,
11849 							TYPE_PRECISION (type)));
11850 	  wide_int tem2
11851 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11852 				    bitsize), bitpos);
11853 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11854 	}
11855       else if (TREE_CODE (arg0) == VECTOR_CST
11856 	       && CONSTANT_CLASS_P (arg1)
11857 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11858 				      TREE_TYPE (arg1)))
11859 	{
11860 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11861 	  unsigned HOST_WIDE_INT elsize
11862 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11863 	  if (bitpos % elsize == 0)
11864 	    {
11865 	      unsigned k = bitpos / elsize;
11866 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11867 		return arg0;
11868 	      else
11869 		{
11870 		  tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11871 		  memcpy (elts, VECTOR_CST_ELTS (arg0),
11872 			  sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11873 		  elts[k] = arg1;
11874 		  return build_vector (type, elts);
11875 		}
11876 	    }
11877 	}
11878       return NULL_TREE;
11879 
11880     default:
11881       return NULL_TREE;
11882     } /* switch (code) */
11883 }
11884 
11885 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11886    of an array (or vector).  */
11887 
11888 tree
11889 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11890 {
11891   tree index_type = NULL_TREE;
11892   offset_int low_bound = 0;
11893 
11894   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11895     {
11896       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11897       if (domain_type && TYPE_MIN_VALUE (domain_type))
11898 	{
11899 	  /* Static constructors for variably sized objects makes no sense.  */
11900 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11901 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11902 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11903 	}
11904     }
11905 
11906   if (index_type)
11907     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11908 			    TYPE_SIGN (index_type));
11909 
11910   offset_int index = low_bound - 1;
11911   if (index_type)
11912     index = wi::ext (index, TYPE_PRECISION (index_type),
11913 		     TYPE_SIGN (index_type));
11914 
11915   offset_int max_index;
11916   unsigned HOST_WIDE_INT cnt;
11917   tree cfield, cval;
11918 
11919   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11920     {
11921       /* Array constructor might explicitly set index, or specify a range,
11922 	 or leave index NULL meaning that it is next index after previous
11923 	 one.  */
11924       if (cfield)
11925 	{
11926 	  if (TREE_CODE (cfield) == INTEGER_CST)
11927 	    max_index = index = wi::to_offset (cfield);
11928 	  else
11929 	    {
11930 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11931 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11932 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11933 	    }
11934 	}
11935       else
11936 	{
11937 	  index += 1;
11938 	  if (index_type)
11939 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11940 			     TYPE_SIGN (index_type));
11941 	  max_index = index;
11942 	}
11943 
11944     /* Do we have match?  */
11945     if (wi::cmpu (access_index, index) >= 0
11946 	&& wi::cmpu (access_index, max_index) <= 0)
11947       return cval;
11948   }
11949   return NULL_TREE;
11950 }
11951 
11952 /* Perform constant folding and related simplification of EXPR.
11953    The related simplifications include x*1 => x, x*0 => 0, etc.,
11954    and application of the associative law.
11955    NOP_EXPR conversions may be removed freely (as long as we
11956    are careful not to change the type of the overall expression).
11957    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11958    but we can constant-fold them if they have constant operands.  */
11959 
11960 #ifdef ENABLE_FOLD_CHECKING
11961 # define fold(x) fold_1 (x)
11962 static tree fold_1 (tree);
11963 static
11964 #endif
11965 tree
11966 fold (tree expr)
11967 {
11968   const tree t = expr;
11969   enum tree_code code = TREE_CODE (t);
11970   enum tree_code_class kind = TREE_CODE_CLASS (code);
11971   tree tem;
11972   location_t loc = EXPR_LOCATION (expr);
11973 
11974   /* Return right away if a constant.  */
11975   if (kind == tcc_constant)
11976     return t;
11977 
11978   /* CALL_EXPR-like objects with variable numbers of operands are
11979      treated specially.  */
11980   if (kind == tcc_vl_exp)
11981     {
11982       if (code == CALL_EXPR)
11983 	{
11984 	  tem = fold_call_expr (loc, expr, false);
11985 	  return tem ? tem : expr;
11986 	}
11987       return expr;
11988     }
11989 
11990   if (IS_EXPR_CODE_CLASS (kind))
11991     {
11992       tree type = TREE_TYPE (t);
11993       tree op0, op1, op2;
11994 
11995       switch (TREE_CODE_LENGTH (code))
11996 	{
11997 	case 1:
11998 	  op0 = TREE_OPERAND (t, 0);
11999 	  tem = fold_unary_loc (loc, code, type, op0);
12000 	  return tem ? tem : expr;
12001 	case 2:
12002 	  op0 = TREE_OPERAND (t, 0);
12003 	  op1 = TREE_OPERAND (t, 1);
12004 	  tem = fold_binary_loc (loc, code, type, op0, op1);
12005 	  return tem ? tem : expr;
12006 	case 3:
12007 	  op0 = TREE_OPERAND (t, 0);
12008 	  op1 = TREE_OPERAND (t, 1);
12009 	  op2 = TREE_OPERAND (t, 2);
12010 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12011 	  return tem ? tem : expr;
12012 	default:
12013 	  break;
12014 	}
12015     }
12016 
12017   switch (code)
12018     {
12019     case ARRAY_REF:
12020       {
12021 	tree op0 = TREE_OPERAND (t, 0);
12022 	tree op1 = TREE_OPERAND (t, 1);
12023 
12024 	if (TREE_CODE (op1) == INTEGER_CST
12025 	    && TREE_CODE (op0) == CONSTRUCTOR
12026 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12027 	  {
12028 	    tree val = get_array_ctor_element_at_index (op0,
12029 							wi::to_offset (op1));
12030 	    if (val)
12031 	      return val;
12032 	  }
12033 
12034 	return t;
12035       }
12036 
12037       /* Return a VECTOR_CST if possible.  */
12038     case CONSTRUCTOR:
12039       {
12040 	tree type = TREE_TYPE (t);
12041 	if (TREE_CODE (type) != VECTOR_TYPE)
12042 	  return t;
12043 
12044 	unsigned i;
12045 	tree val;
12046 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12047 	  if (! CONSTANT_CLASS_P (val))
12048 	    return t;
12049 
12050 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12051       }
12052 
12053     case CONST_DECL:
12054       return fold (DECL_INITIAL (t));
12055 
12056     default:
12057       return t;
12058     } /* switch (code) */
12059 }
12060 
12061 #ifdef ENABLE_FOLD_CHECKING
12062 #undef fold
12063 
12064 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12065 				hash_table<nofree_ptr_hash<const tree_node> > *);
12066 static void fold_check_failed (const_tree, const_tree);
12067 void print_fold_checksum (const_tree);
12068 
12069 /* When --enable-checking=fold, compute a digest of expr before
12070    and after actual fold call to see if fold did not accidentally
12071    change original expr.  */
12072 
12073 tree
12074 fold (tree expr)
12075 {
12076   tree ret;
12077   struct md5_ctx ctx;
12078   unsigned char checksum_before[16], checksum_after[16];
12079   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12080 
12081   md5_init_ctx (&ctx);
12082   fold_checksum_tree (expr, &ctx, &ht);
12083   md5_finish_ctx (&ctx, checksum_before);
12084   ht.empty ();
12085 
12086   ret = fold_1 (expr);
12087 
12088   md5_init_ctx (&ctx);
12089   fold_checksum_tree (expr, &ctx, &ht);
12090   md5_finish_ctx (&ctx, checksum_after);
12091 
12092   if (memcmp (checksum_before, checksum_after, 16))
12093     fold_check_failed (expr, ret);
12094 
12095   return ret;
12096 }
12097 
12098 void
12099 print_fold_checksum (const_tree expr)
12100 {
12101   struct md5_ctx ctx;
12102   unsigned char checksum[16], cnt;
12103   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12104 
12105   md5_init_ctx (&ctx);
12106   fold_checksum_tree (expr, &ctx, &ht);
12107   md5_finish_ctx (&ctx, checksum);
12108   for (cnt = 0; cnt < 16; ++cnt)
12109     fprintf (stderr, "%02x", checksum[cnt]);
12110   putc ('\n', stderr);
12111 }
12112 
12113 static void
12114 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12115 {
12116   internal_error ("fold check: original tree changed by fold");
12117 }
12118 
12119 static void
12120 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12121 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12122 {
12123   const tree_node **slot;
12124   enum tree_code code;
12125   union tree_node buf;
12126   int i, len;
12127 
12128  recursive_label:
12129   if (expr == NULL)
12130     return;
12131   slot = ht->find_slot (expr, INSERT);
12132   if (*slot != NULL)
12133     return;
12134   *slot = expr;
12135   code = TREE_CODE (expr);
12136   if (TREE_CODE_CLASS (code) == tcc_declaration
12137       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12138     {
12139       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12140       memcpy ((char *) &buf, expr, tree_size (expr));
12141       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12142       buf.decl_with_vis.symtab_node = NULL;
12143       expr = (tree) &buf;
12144     }
12145   else if (TREE_CODE_CLASS (code) == tcc_type
12146 	   && (TYPE_POINTER_TO (expr)
12147 	       || TYPE_REFERENCE_TO (expr)
12148 	       || TYPE_CACHED_VALUES_P (expr)
12149 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12150 	       || TYPE_NEXT_VARIANT (expr)
12151 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12152     {
12153       /* Allow these fields to be modified.  */
12154       tree tmp;
12155       memcpy ((char *) &buf, expr, tree_size (expr));
12156       expr = tmp = (tree) &buf;
12157       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12158       TYPE_POINTER_TO (tmp) = NULL;
12159       TYPE_REFERENCE_TO (tmp) = NULL;
12160       TYPE_NEXT_VARIANT (tmp) = NULL;
12161       TYPE_ALIAS_SET (tmp) = -1;
12162       if (TYPE_CACHED_VALUES_P (tmp))
12163 	{
12164 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12165 	  TYPE_CACHED_VALUES (tmp) = NULL;
12166 	}
12167     }
12168   md5_process_bytes (expr, tree_size (expr), ctx);
12169   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12170     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12171   if (TREE_CODE_CLASS (code) != tcc_type
12172       && TREE_CODE_CLASS (code) != tcc_declaration
12173       && code != TREE_LIST
12174       && code != SSA_NAME
12175       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12176     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12177   switch (TREE_CODE_CLASS (code))
12178     {
12179     case tcc_constant:
12180       switch (code)
12181 	{
12182 	case STRING_CST:
12183 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12184 			     TREE_STRING_LENGTH (expr), ctx);
12185 	  break;
12186 	case COMPLEX_CST:
12187 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12188 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12189 	  break;
12190 	case VECTOR_CST:
12191 	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12192 	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12193 	  break;
12194 	default:
12195 	  break;
12196 	}
12197       break;
12198     case tcc_exceptional:
12199       switch (code)
12200 	{
12201 	case TREE_LIST:
12202 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12203 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12204 	  expr = TREE_CHAIN (expr);
12205 	  goto recursive_label;
12206 	  break;
12207 	case TREE_VEC:
12208 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12209 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12210 	  break;
12211 	default:
12212 	  break;
12213 	}
12214       break;
12215     case tcc_expression:
12216     case tcc_reference:
12217     case tcc_comparison:
12218     case tcc_unary:
12219     case tcc_binary:
12220     case tcc_statement:
12221     case tcc_vl_exp:
12222       len = TREE_OPERAND_LENGTH (expr);
12223       for (i = 0; i < len; ++i)
12224 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12225       break;
12226     case tcc_declaration:
12227       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12228       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12229       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12230 	{
12231 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12232 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12233 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12234 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12235 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12236 	}
12237 
12238       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12239 	{
12240 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12241 	    {
12242 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12243 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12244 	    }
12245 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12246 	}
12247       break;
12248     case tcc_type:
12249       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12250         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12251       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12252       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12253       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12254       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12255       if (INTEGRAL_TYPE_P (expr)
12256           || SCALAR_FLOAT_TYPE_P (expr))
12257 	{
12258 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12259 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12260 	}
12261       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12262       if (TREE_CODE (expr) == RECORD_TYPE
12263 	  || TREE_CODE (expr) == UNION_TYPE
12264 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12265 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12266       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12267       break;
12268     default:
12269       break;
12270     }
12271 }
12272 
12273 /* Helper function for outputting the checksum of a tree T.  When
12274    debugging with gdb, you can "define mynext" to be "next" followed
12275    by "call debug_fold_checksum (op0)", then just trace down till the
12276    outputs differ.  */
12277 
12278 DEBUG_FUNCTION void
12279 debug_fold_checksum (const_tree t)
12280 {
12281   int i;
12282   unsigned char checksum[16];
12283   struct md5_ctx ctx;
12284   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12285 
12286   md5_init_ctx (&ctx);
12287   fold_checksum_tree (t, &ctx, &ht);
12288   md5_finish_ctx (&ctx, checksum);
12289   ht.empty ();
12290 
12291   for (i = 0; i < 16; i++)
12292     fprintf (stderr, "%d ", checksum[i]);
12293 
12294   fprintf (stderr, "\n");
12295 }
12296 
12297 #endif
12298 
12299 /* Fold a unary tree expression with code CODE of type TYPE with an
12300    operand OP0.  LOC is the location of the resulting expression.
12301    Return a folded expression if successful.  Otherwise, return a tree
12302    expression with code CODE of type TYPE with an operand OP0.  */
12303 
12304 tree
12305 fold_build1_stat_loc (location_t loc,
12306 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12307 {
12308   tree tem;
12309 #ifdef ENABLE_FOLD_CHECKING
12310   unsigned char checksum_before[16], checksum_after[16];
12311   struct md5_ctx ctx;
12312   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12313 
12314   md5_init_ctx (&ctx);
12315   fold_checksum_tree (op0, &ctx, &ht);
12316   md5_finish_ctx (&ctx, checksum_before);
12317   ht.empty ();
12318 #endif
12319 
12320   tem = fold_unary_loc (loc, code, type, op0);
12321   if (!tem)
12322     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12323 
12324 #ifdef ENABLE_FOLD_CHECKING
12325   md5_init_ctx (&ctx);
12326   fold_checksum_tree (op0, &ctx, &ht);
12327   md5_finish_ctx (&ctx, checksum_after);
12328 
12329   if (memcmp (checksum_before, checksum_after, 16))
12330     fold_check_failed (op0, tem);
12331 #endif
12332   return tem;
12333 }
12334 
12335 /* Fold a binary tree expression with code CODE of type TYPE with
12336    operands OP0 and OP1.  LOC is the location of the resulting
12337    expression.  Return a folded expression if successful.  Otherwise,
12338    return a tree expression with code CODE of type TYPE with operands
12339    OP0 and OP1.  */
12340 
12341 tree
12342 fold_build2_stat_loc (location_t loc,
12343 		      enum tree_code code, tree type, tree op0, tree op1
12344 		      MEM_STAT_DECL)
12345 {
12346   tree tem;
12347 #ifdef ENABLE_FOLD_CHECKING
12348   unsigned char checksum_before_op0[16],
12349                 checksum_before_op1[16],
12350 		checksum_after_op0[16],
12351 		checksum_after_op1[16];
12352   struct md5_ctx ctx;
12353   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12354 
12355   md5_init_ctx (&ctx);
12356   fold_checksum_tree (op0, &ctx, &ht);
12357   md5_finish_ctx (&ctx, checksum_before_op0);
12358   ht.empty ();
12359 
12360   md5_init_ctx (&ctx);
12361   fold_checksum_tree (op1, &ctx, &ht);
12362   md5_finish_ctx (&ctx, checksum_before_op1);
12363   ht.empty ();
12364 #endif
12365 
12366   tem = fold_binary_loc (loc, code, type, op0, op1);
12367   if (!tem)
12368     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12369 
12370 #ifdef ENABLE_FOLD_CHECKING
12371   md5_init_ctx (&ctx);
12372   fold_checksum_tree (op0, &ctx, &ht);
12373   md5_finish_ctx (&ctx, checksum_after_op0);
12374   ht.empty ();
12375 
12376   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12377     fold_check_failed (op0, tem);
12378 
12379   md5_init_ctx (&ctx);
12380   fold_checksum_tree (op1, &ctx, &ht);
12381   md5_finish_ctx (&ctx, checksum_after_op1);
12382 
12383   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12384     fold_check_failed (op1, tem);
12385 #endif
12386   return tem;
12387 }
12388 
12389 /* Fold a ternary tree expression with code CODE of type TYPE with
12390    operands OP0, OP1, and OP2.  Return a folded expression if
12391    successful.  Otherwise, return a tree expression with code CODE of
12392    type TYPE with operands OP0, OP1, and OP2.  */
12393 
12394 tree
12395 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12396 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12397 {
12398   tree tem;
12399 #ifdef ENABLE_FOLD_CHECKING
12400   unsigned char checksum_before_op0[16],
12401                 checksum_before_op1[16],
12402                 checksum_before_op2[16],
12403 		checksum_after_op0[16],
12404 		checksum_after_op1[16],
12405 		checksum_after_op2[16];
12406   struct md5_ctx ctx;
12407   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12408 
12409   md5_init_ctx (&ctx);
12410   fold_checksum_tree (op0, &ctx, &ht);
12411   md5_finish_ctx (&ctx, checksum_before_op0);
12412   ht.empty ();
12413 
12414   md5_init_ctx (&ctx);
12415   fold_checksum_tree (op1, &ctx, &ht);
12416   md5_finish_ctx (&ctx, checksum_before_op1);
12417   ht.empty ();
12418 
12419   md5_init_ctx (&ctx);
12420   fold_checksum_tree (op2, &ctx, &ht);
12421   md5_finish_ctx (&ctx, checksum_before_op2);
12422   ht.empty ();
12423 #endif
12424 
12425   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12426   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12427   if (!tem)
12428     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12429 
12430 #ifdef ENABLE_FOLD_CHECKING
12431   md5_init_ctx (&ctx);
12432   fold_checksum_tree (op0, &ctx, &ht);
12433   md5_finish_ctx (&ctx, checksum_after_op0);
12434   ht.empty ();
12435 
12436   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12437     fold_check_failed (op0, tem);
12438 
12439   md5_init_ctx (&ctx);
12440   fold_checksum_tree (op1, &ctx, &ht);
12441   md5_finish_ctx (&ctx, checksum_after_op1);
12442   ht.empty ();
12443 
12444   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12445     fold_check_failed (op1, tem);
12446 
12447   md5_init_ctx (&ctx);
12448   fold_checksum_tree (op2, &ctx, &ht);
12449   md5_finish_ctx (&ctx, checksum_after_op2);
12450 
12451   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12452     fold_check_failed (op2, tem);
12453 #endif
12454   return tem;
12455 }
12456 
12457 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12458    arguments in ARGARRAY, and a null static chain.
12459    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12460    of type TYPE from the given operands as constructed by build_call_array.  */
12461 
12462 tree
12463 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12464 			   int nargs, tree *argarray)
12465 {
12466   tree tem;
12467 #ifdef ENABLE_FOLD_CHECKING
12468   unsigned char checksum_before_fn[16],
12469                 checksum_before_arglist[16],
12470 		checksum_after_fn[16],
12471 		checksum_after_arglist[16];
12472   struct md5_ctx ctx;
12473   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12474   int i;
12475 
12476   md5_init_ctx (&ctx);
12477   fold_checksum_tree (fn, &ctx, &ht);
12478   md5_finish_ctx (&ctx, checksum_before_fn);
12479   ht.empty ();
12480 
12481   md5_init_ctx (&ctx);
12482   for (i = 0; i < nargs; i++)
12483     fold_checksum_tree (argarray[i], &ctx, &ht);
12484   md5_finish_ctx (&ctx, checksum_before_arglist);
12485   ht.empty ();
12486 #endif
12487 
12488   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12489   if (!tem)
12490     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12491 
12492 #ifdef ENABLE_FOLD_CHECKING
12493   md5_init_ctx (&ctx);
12494   fold_checksum_tree (fn, &ctx, &ht);
12495   md5_finish_ctx (&ctx, checksum_after_fn);
12496   ht.empty ();
12497 
12498   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12499     fold_check_failed (fn, tem);
12500 
12501   md5_init_ctx (&ctx);
12502   for (i = 0; i < nargs; i++)
12503     fold_checksum_tree (argarray[i], &ctx, &ht);
12504   md5_finish_ctx (&ctx, checksum_after_arglist);
12505 
12506   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12507     fold_check_failed (NULL_TREE, tem);
12508 #endif
12509   return tem;
12510 }
12511 
12512 /* Perform constant folding and related simplification of initializer
12513    expression EXPR.  These behave identically to "fold_buildN" but ignore
12514    potential run-time traps and exceptions that fold must preserve.  */
12515 
12516 #define START_FOLD_INIT \
12517   int saved_signaling_nans = flag_signaling_nans;\
12518   int saved_trapping_math = flag_trapping_math;\
12519   int saved_rounding_math = flag_rounding_math;\
12520   int saved_trapv = flag_trapv;\
12521   int saved_folding_initializer = folding_initializer;\
12522   flag_signaling_nans = 0;\
12523   flag_trapping_math = 0;\
12524   flag_rounding_math = 0;\
12525   flag_trapv = 0;\
12526   folding_initializer = 1;
12527 
12528 #define END_FOLD_INIT \
12529   flag_signaling_nans = saved_signaling_nans;\
12530   flag_trapping_math = saved_trapping_math;\
12531   flag_rounding_math = saved_rounding_math;\
12532   flag_trapv = saved_trapv;\
12533   folding_initializer = saved_folding_initializer;
12534 
12535 tree
12536 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12537 			     tree type, tree op)
12538 {
12539   tree result;
12540   START_FOLD_INIT;
12541 
12542   result = fold_build1_loc (loc, code, type, op);
12543 
12544   END_FOLD_INIT;
12545   return result;
12546 }
12547 
12548 tree
12549 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12550 			     tree type, tree op0, tree op1)
12551 {
12552   tree result;
12553   START_FOLD_INIT;
12554 
12555   result = fold_build2_loc (loc, code, type, op0, op1);
12556 
12557   END_FOLD_INIT;
12558   return result;
12559 }
12560 
12561 tree
12562 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12563 				       int nargs, tree *argarray)
12564 {
12565   tree result;
12566   START_FOLD_INIT;
12567 
12568   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12569 
12570   END_FOLD_INIT;
12571   return result;
12572 }
12573 
12574 #undef START_FOLD_INIT
12575 #undef END_FOLD_INIT
12576 
12577 /* Determine if first argument is a multiple of second argument.  Return 0 if
12578    it is not, or we cannot easily determined it to be.
12579 
12580    An example of the sort of thing we care about (at this point; this routine
12581    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12582    fold cases do now) is discovering that
12583 
12584      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12585 
12586    is a multiple of
12587 
12588      SAVE_EXPR (J * 8)
12589 
12590    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12591 
12592    This code also handles discovering that
12593 
12594      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12595 
12596    is a multiple of 8 so we don't have to worry about dealing with a
12597    possible remainder.
12598 
12599    Note that we *look* inside a SAVE_EXPR only to determine how it was
12600    calculated; it is not safe for fold to do much of anything else with the
12601    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12602    at run time.  For example, the latter example above *cannot* be implemented
12603    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12604    evaluation time of the original SAVE_EXPR is not necessarily the same at
12605    the time the new expression is evaluated.  The only optimization of this
12606    sort that would be valid is changing
12607 
12608      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12609 
12610    divided by 8 to
12611 
12612      SAVE_EXPR (I) * SAVE_EXPR (J)
12613 
12614    (where the same SAVE_EXPR (J) is used in the original and the
12615    transformed version).  */
12616 
12617 int
12618 multiple_of_p (tree type, const_tree top, const_tree bottom)
12619 {
12620   gimple *stmt;
12621   tree t1, op1, op2;
12622 
12623   if (operand_equal_p (top, bottom, 0))
12624     return 1;
12625 
12626   if (TREE_CODE (type) != INTEGER_TYPE)
12627     return 0;
12628 
12629   switch (TREE_CODE (top))
12630     {
12631     case BIT_AND_EXPR:
12632       /* Bitwise and provides a power of two multiple.  If the mask is
12633 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12634       if (!integer_pow2p (bottom))
12635 	return 0;
12636       /* FALLTHRU */
12637 
12638     case MULT_EXPR:
12639       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12640 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12641 
12642     case MINUS_EXPR:
12643       /* It is impossible to prove if op0 - op1 is multiple of bottom
12644 	 precisely, so be conservative here checking if both op0 and op1
12645 	 are multiple of bottom.  Note we check the second operand first
12646 	 since it's usually simpler.  */
12647       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12648 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12649 
12650     case PLUS_EXPR:
12651       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12652 	 as op0 - 3 if the expression has unsigned type.  For example,
12653 	 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not.  */
12654       op1 = TREE_OPERAND (top, 1);
12655       if (TYPE_UNSIGNED (type)
12656 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12657 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12658       return (multiple_of_p (type, op1, bottom)
12659 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12660 
12661     case LSHIFT_EXPR:
12662       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12663 	{
12664 	  op1 = TREE_OPERAND (top, 1);
12665 	  /* const_binop may not detect overflow correctly,
12666 	     so check for it explicitly here.  */
12667 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12668 	      && 0 != (t1 = fold_convert (type,
12669 					  const_binop (LSHIFT_EXPR,
12670 						       size_one_node,
12671 						       op1)))
12672 	      && !TREE_OVERFLOW (t1))
12673 	    return multiple_of_p (type, t1, bottom);
12674 	}
12675       return 0;
12676 
12677     case NOP_EXPR:
12678       /* Can't handle conversions from non-integral or wider integral type.  */
12679       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12680 	  || (TYPE_PRECISION (type)
12681 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12682 	return 0;
12683 
12684       /* fall through */
12685 
12686     case SAVE_EXPR:
12687       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12688 
12689     case COND_EXPR:
12690       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12691 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12692 
12693     case INTEGER_CST:
12694       if (TREE_CODE (bottom) != INTEGER_CST
12695 	  || integer_zerop (bottom)
12696 	  || (TYPE_UNSIGNED (type)
12697 	      && (tree_int_cst_sgn (top) < 0
12698 		  || tree_int_cst_sgn (bottom) < 0)))
12699 	return 0;
12700       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12701 				SIGNED);
12702 
12703     case SSA_NAME:
12704       if (TREE_CODE (bottom) == INTEGER_CST
12705 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12706 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12707 	{
12708 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12709 
12710 	  /* Check for special cases to see if top is defined as multiple
12711 	     of bottom:
12712 
12713 	       top = (X & ~(bottom - 1) ; bottom is power of 2
12714 
12715 	     or
12716 
12717 	       Y = X % bottom
12718 	       top = X - Y.  */
12719 	  if (code == BIT_AND_EXPR
12720 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12721 	      && TREE_CODE (op2) == INTEGER_CST
12722 	      && integer_pow2p (bottom)
12723 	      && wi::multiple_of_p (wi::to_widest (op2),
12724 				    wi::to_widest (bottom), UNSIGNED))
12725 	    return 1;
12726 
12727 	  op1 = gimple_assign_rhs1 (stmt);
12728 	  if (code == MINUS_EXPR
12729 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12730 	      && TREE_CODE (op2) == SSA_NAME
12731 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12732 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12733 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12734 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12735 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12736 	    return 1;
12737 	}
12738 
12739       /* fall through */
12740 
12741     default:
12742       return 0;
12743     }
12744 }
12745 
12746 #define tree_expr_nonnegative_warnv_p(X, Y) \
12747   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12748 
12749 #define RECURSE(X) \
12750   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12751 
12752 /* Return true if CODE or TYPE is known to be non-negative. */
12753 
12754 static bool
12755 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12756 {
12757   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12758       && truth_value_p (code))
12759     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12760        have a signed:1 type (where the value is -1 and 0).  */
12761     return true;
12762   return false;
12763 }
12764 
12765 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12766    value is based on the assumption that signed overflow is undefined,
12767    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12768    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12769 
12770 bool
12771 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12772 				bool *strict_overflow_p, int depth)
12773 {
12774   if (TYPE_UNSIGNED (type))
12775     return true;
12776 
12777   switch (code)
12778     {
12779     case ABS_EXPR:
12780       /* We can't return 1 if flag_wrapv is set because
12781 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12782       if (!ANY_INTEGRAL_TYPE_P (type))
12783 	return true;
12784       if (TYPE_OVERFLOW_UNDEFINED (type))
12785 	{
12786 	  *strict_overflow_p = true;
12787 	  return true;
12788 	}
12789       break;
12790 
12791     case NON_LVALUE_EXPR:
12792     case FLOAT_EXPR:
12793     case FIX_TRUNC_EXPR:
12794       return RECURSE (op0);
12795 
12796     CASE_CONVERT:
12797       {
12798 	tree inner_type = TREE_TYPE (op0);
12799 	tree outer_type = type;
12800 
12801 	if (TREE_CODE (outer_type) == REAL_TYPE)
12802 	  {
12803 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12804 	      return RECURSE (op0);
12805 	    if (INTEGRAL_TYPE_P (inner_type))
12806 	      {
12807 		if (TYPE_UNSIGNED (inner_type))
12808 		  return true;
12809 		return RECURSE (op0);
12810 	      }
12811 	  }
12812 	else if (INTEGRAL_TYPE_P (outer_type))
12813 	  {
12814 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12815 	      return RECURSE (op0);
12816 	    if (INTEGRAL_TYPE_P (inner_type))
12817 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12818 		      && TYPE_UNSIGNED (inner_type);
12819 	  }
12820       }
12821       break;
12822 
12823     default:
12824       return tree_simple_nonnegative_warnv_p (code, type);
12825     }
12826 
12827   /* We don't know sign of `t', so be conservative and return false.  */
12828   return false;
12829 }
12830 
12831 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12832    value is based on the assumption that signed overflow is undefined,
12833    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12834    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12835 
12836 bool
12837 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12838 				 tree op1, bool *strict_overflow_p,
12839 				 int depth)
12840 {
12841   if (TYPE_UNSIGNED (type))
12842     return true;
12843 
12844   switch (code)
12845     {
12846     case POINTER_PLUS_EXPR:
12847     case PLUS_EXPR:
12848       if (FLOAT_TYPE_P (type))
12849 	return RECURSE (op0) && RECURSE (op1);
12850 
12851       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12852 	 both unsigned and at least 2 bits shorter than the result.  */
12853       if (TREE_CODE (type) == INTEGER_TYPE
12854 	  && TREE_CODE (op0) == NOP_EXPR
12855 	  && TREE_CODE (op1) == NOP_EXPR)
12856 	{
12857 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12858 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12859 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12860 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12861 	    {
12862 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12863 				       TYPE_PRECISION (inner2)) + 1;
12864 	      return prec < TYPE_PRECISION (type);
12865 	    }
12866 	}
12867       break;
12868 
12869     case MULT_EXPR:
12870       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12871 	{
12872 	  /* x * x is always non-negative for floating point x
12873 	     or without overflow.  */
12874 	  if (operand_equal_p (op0, op1, 0)
12875 	      || (RECURSE (op0) && RECURSE (op1)))
12876 	    {
12877 	      if (ANY_INTEGRAL_TYPE_P (type)
12878 		  && TYPE_OVERFLOW_UNDEFINED (type))
12879 		*strict_overflow_p = true;
12880 	      return true;
12881 	    }
12882 	}
12883 
12884       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12885 	 both unsigned and their total bits is shorter than the result.  */
12886       if (TREE_CODE (type) == INTEGER_TYPE
12887 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12888 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12889 	{
12890 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12891 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12892 	    : TREE_TYPE (op0);
12893 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12894 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12895 	    : TREE_TYPE (op1);
12896 
12897 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12898 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12899 
12900 	  if (TREE_CODE (op0) == INTEGER_CST)
12901 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12902 
12903 	  if (TREE_CODE (op1) == INTEGER_CST)
12904 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12905 
12906 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12907 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12908 	    {
12909 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12910 		? tree_int_cst_min_precision (op0, UNSIGNED)
12911 		: TYPE_PRECISION (inner0);
12912 
12913 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12914 		? tree_int_cst_min_precision (op1, UNSIGNED)
12915 		: TYPE_PRECISION (inner1);
12916 
12917 	      return precision0 + precision1 < TYPE_PRECISION (type);
12918 	    }
12919 	}
12920       return false;
12921 
12922     case BIT_AND_EXPR:
12923     case MAX_EXPR:
12924       return RECURSE (op0) || RECURSE (op1);
12925 
12926     case BIT_IOR_EXPR:
12927     case BIT_XOR_EXPR:
12928     case MIN_EXPR:
12929     case RDIV_EXPR:
12930     case TRUNC_DIV_EXPR:
12931     case CEIL_DIV_EXPR:
12932     case FLOOR_DIV_EXPR:
12933     case ROUND_DIV_EXPR:
12934       return RECURSE (op0) && RECURSE (op1);
12935 
12936     case TRUNC_MOD_EXPR:
12937       return RECURSE (op0);
12938 
12939     case FLOOR_MOD_EXPR:
12940       return RECURSE (op1);
12941 
12942     case CEIL_MOD_EXPR:
12943     case ROUND_MOD_EXPR:
12944     default:
12945       return tree_simple_nonnegative_warnv_p (code, type);
12946     }
12947 
12948   /* We don't know sign of `t', so be conservative and return false.  */
12949   return false;
12950 }
12951 
12952 /* Return true if T is known to be non-negative.  If the return
12953    value is based on the assumption that signed overflow is undefined,
12954    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12955    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12956 
12957 bool
12958 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12959 {
12960   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12961     return true;
12962 
12963   switch (TREE_CODE (t))
12964     {
12965     case INTEGER_CST:
12966       return tree_int_cst_sgn (t) >= 0;
12967 
12968     case REAL_CST:
12969       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12970 
12971     case FIXED_CST:
12972       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12973 
12974     case COND_EXPR:
12975       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12976 
12977     case SSA_NAME:
12978       /* Limit the depth of recursion to avoid quadratic behavior.
12979 	 This is expected to catch almost all occurrences in practice.
12980 	 If this code misses important cases that unbounded recursion
12981 	 would not, passes that need this information could be revised
12982 	 to provide it through dataflow propagation.  */
12983       return (!name_registered_for_update_p (t)
12984 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12985 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12986 						  strict_overflow_p, depth));
12987 
12988     default:
12989       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12990     }
12991 }
12992 
12993 /* Return true if T is known to be non-negative.  If the return
12994    value is based on the assumption that signed overflow is undefined,
12995    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12996    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12997 
12998 bool
12999 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13000 			       bool *strict_overflow_p, int depth)
13001 {
13002   switch (fn)
13003     {
13004     CASE_CFN_ACOS:
13005     CASE_CFN_ACOSH:
13006     CASE_CFN_CABS:
13007     CASE_CFN_COSH:
13008     CASE_CFN_ERFC:
13009     CASE_CFN_EXP:
13010     CASE_CFN_EXP10:
13011     CASE_CFN_EXP2:
13012     CASE_CFN_FABS:
13013     CASE_CFN_FDIM:
13014     CASE_CFN_HYPOT:
13015     CASE_CFN_POW10:
13016     CASE_CFN_FFS:
13017     CASE_CFN_PARITY:
13018     CASE_CFN_POPCOUNT:
13019     CASE_CFN_CLZ:
13020     CASE_CFN_CLRSB:
13021     case CFN_BUILT_IN_BSWAP32:
13022     case CFN_BUILT_IN_BSWAP64:
13023       /* Always true.  */
13024       return true;
13025 
13026     CASE_CFN_SQRT:
13027       /* sqrt(-0.0) is -0.0.  */
13028       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13029 	return true;
13030       return RECURSE (arg0);
13031 
13032     CASE_CFN_ASINH:
13033     CASE_CFN_ATAN:
13034     CASE_CFN_ATANH:
13035     CASE_CFN_CBRT:
13036     CASE_CFN_CEIL:
13037     CASE_CFN_ERF:
13038     CASE_CFN_EXPM1:
13039     CASE_CFN_FLOOR:
13040     CASE_CFN_FMOD:
13041     CASE_CFN_FREXP:
13042     CASE_CFN_ICEIL:
13043     CASE_CFN_IFLOOR:
13044     CASE_CFN_IRINT:
13045     CASE_CFN_IROUND:
13046     CASE_CFN_LCEIL:
13047     CASE_CFN_LDEXP:
13048     CASE_CFN_LFLOOR:
13049     CASE_CFN_LLCEIL:
13050     CASE_CFN_LLFLOOR:
13051     CASE_CFN_LLRINT:
13052     CASE_CFN_LLROUND:
13053     CASE_CFN_LRINT:
13054     CASE_CFN_LROUND:
13055     CASE_CFN_MODF:
13056     CASE_CFN_NEARBYINT:
13057     CASE_CFN_RINT:
13058     CASE_CFN_ROUND:
13059     CASE_CFN_SCALB:
13060     CASE_CFN_SCALBLN:
13061     CASE_CFN_SCALBN:
13062     CASE_CFN_SIGNBIT:
13063     CASE_CFN_SIGNIFICAND:
13064     CASE_CFN_SINH:
13065     CASE_CFN_TANH:
13066     CASE_CFN_TRUNC:
13067       /* True if the 1st argument is nonnegative.  */
13068       return RECURSE (arg0);
13069 
13070     CASE_CFN_FMAX:
13071       /* True if the 1st OR 2nd arguments are nonnegative.  */
13072       return RECURSE (arg0) || RECURSE (arg1);
13073 
13074     CASE_CFN_FMIN:
13075       /* True if the 1st AND 2nd arguments are nonnegative.  */
13076       return RECURSE (arg0) && RECURSE (arg1);
13077 
13078     CASE_CFN_COPYSIGN:
13079       /* True if the 2nd argument is nonnegative.  */
13080       return RECURSE (arg1);
13081 
13082     CASE_CFN_POWI:
13083       /* True if the 1st argument is nonnegative or the second
13084 	 argument is an even integer.  */
13085       if (TREE_CODE (arg1) == INTEGER_CST
13086 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13087 	return true;
13088       return RECURSE (arg0);
13089 
13090     CASE_CFN_POW:
13091       /* True if the 1st argument is nonnegative or the second
13092 	 argument is an even integer valued real.  */
13093       if (TREE_CODE (arg1) == REAL_CST)
13094 	{
13095 	  REAL_VALUE_TYPE c;
13096 	  HOST_WIDE_INT n;
13097 
13098 	  c = TREE_REAL_CST (arg1);
13099 	  n = real_to_integer (&c);
13100 	  if ((n & 1) == 0)
13101 	    {
13102 	      REAL_VALUE_TYPE cint;
13103 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13104 	      if (real_identical (&c, &cint))
13105 		return true;
13106 	    }
13107 	}
13108       return RECURSE (arg0);
13109 
13110     default:
13111       break;
13112     }
13113   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13114 }
13115 
13116 /* Return true if T is known to be non-negative.  If the return
13117    value is based on the assumption that signed overflow is undefined,
13118    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13119    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13120 
13121 static bool
13122 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13123 {
13124   enum tree_code code = TREE_CODE (t);
13125   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13126     return true;
13127 
13128   switch (code)
13129     {
13130     case TARGET_EXPR:
13131       {
13132 	tree temp = TARGET_EXPR_SLOT (t);
13133 	t = TARGET_EXPR_INITIAL (t);
13134 
13135 	/* If the initializer is non-void, then it's a normal expression
13136 	   that will be assigned to the slot.  */
13137 	if (!VOID_TYPE_P (t))
13138 	  return RECURSE (t);
13139 
13140 	/* Otherwise, the initializer sets the slot in some way.  One common
13141 	   way is an assignment statement at the end of the initializer.  */
13142 	while (1)
13143 	  {
13144 	    if (TREE_CODE (t) == BIND_EXPR)
13145 	      t = expr_last (BIND_EXPR_BODY (t));
13146 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13147 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13148 	      t = expr_last (TREE_OPERAND (t, 0));
13149 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13150 	      t = expr_last (t);
13151 	    else
13152 	      break;
13153 	  }
13154 	if (TREE_CODE (t) == MODIFY_EXPR
13155 	    && TREE_OPERAND (t, 0) == temp)
13156 	  return RECURSE (TREE_OPERAND (t, 1));
13157 
13158 	return false;
13159       }
13160 
13161     case CALL_EXPR:
13162       {
13163 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
13164 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
13165 
13166 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13167 					      get_call_combined_fn (t),
13168 					      arg0,
13169 					      arg1,
13170 					      strict_overflow_p, depth);
13171       }
13172     case COMPOUND_EXPR:
13173     case MODIFY_EXPR:
13174       return RECURSE (TREE_OPERAND (t, 1));
13175 
13176     case BIND_EXPR:
13177       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13178 
13179     case SAVE_EXPR:
13180       return RECURSE (TREE_OPERAND (t, 0));
13181 
13182     default:
13183       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13184     }
13185 }
13186 
13187 #undef RECURSE
13188 #undef tree_expr_nonnegative_warnv_p
13189 
13190 /* Return true if T is known to be non-negative.  If the return
13191    value is based on the assumption that signed overflow is undefined,
13192    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13193    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13194 
13195 bool
13196 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13197 {
13198   enum tree_code code;
13199   if (t == error_mark_node)
13200     return false;
13201 
13202   code = TREE_CODE (t);
13203   switch (TREE_CODE_CLASS (code))
13204     {
13205     case tcc_binary:
13206     case tcc_comparison:
13207       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13208 					      TREE_TYPE (t),
13209 					      TREE_OPERAND (t, 0),
13210 					      TREE_OPERAND (t, 1),
13211 					      strict_overflow_p, depth);
13212 
13213     case tcc_unary:
13214       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13215 					     TREE_TYPE (t),
13216 					     TREE_OPERAND (t, 0),
13217 					     strict_overflow_p, depth);
13218 
13219     case tcc_constant:
13220     case tcc_declaration:
13221     case tcc_reference:
13222       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13223 
13224     default:
13225       break;
13226     }
13227 
13228   switch (code)
13229     {
13230     case TRUTH_AND_EXPR:
13231     case TRUTH_OR_EXPR:
13232     case TRUTH_XOR_EXPR:
13233       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13234 					      TREE_TYPE (t),
13235 					      TREE_OPERAND (t, 0),
13236 					      TREE_OPERAND (t, 1),
13237 					      strict_overflow_p, depth);
13238     case TRUTH_NOT_EXPR:
13239       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13240 					     TREE_TYPE (t),
13241 					     TREE_OPERAND (t, 0),
13242 					     strict_overflow_p, depth);
13243 
13244     case COND_EXPR:
13245     case CONSTRUCTOR:
13246     case OBJ_TYPE_REF:
13247     case ASSERT_EXPR:
13248     case ADDR_EXPR:
13249     case WITH_SIZE_EXPR:
13250     case SSA_NAME:
13251       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13252 
13253     default:
13254       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13255     }
13256 }
13257 
13258 /* Return true if `t' is known to be non-negative.  Handle warnings
13259    about undefined signed overflow.  */
13260 
13261 bool
13262 tree_expr_nonnegative_p (tree t)
13263 {
13264   bool ret, strict_overflow_p;
13265 
13266   strict_overflow_p = false;
13267   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13268   if (strict_overflow_p)
13269     fold_overflow_warning (("assuming signed overflow does not occur when "
13270 			    "determining that expression is always "
13271 			    "non-negative"),
13272 			   WARN_STRICT_OVERFLOW_MISC);
13273   return ret;
13274 }
13275 
13276 
13277 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13278    For floating point we further ensure that T is not denormal.
13279    Similar logic is present in nonzero_address in rtlanal.h.
13280 
13281    If the return value is based on the assumption that signed overflow
13282    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13283    change *STRICT_OVERFLOW_P.  */
13284 
13285 bool
13286 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13287 				 bool *strict_overflow_p)
13288 {
13289   switch (code)
13290     {
13291     case ABS_EXPR:
13292       return tree_expr_nonzero_warnv_p (op0,
13293 					strict_overflow_p);
13294 
13295     case NOP_EXPR:
13296       {
13297 	tree inner_type = TREE_TYPE (op0);
13298 	tree outer_type = type;
13299 
13300 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13301 		&& tree_expr_nonzero_warnv_p (op0,
13302 					      strict_overflow_p));
13303       }
13304       break;
13305 
13306     case NON_LVALUE_EXPR:
13307       return tree_expr_nonzero_warnv_p (op0,
13308 					strict_overflow_p);
13309 
13310     default:
13311       break;
13312   }
13313 
13314   return false;
13315 }
13316 
13317 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13318    For floating point we further ensure that T is not denormal.
13319    Similar logic is present in nonzero_address in rtlanal.h.
13320 
13321    If the return value is based on the assumption that signed overflow
13322    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13323    change *STRICT_OVERFLOW_P.  */
13324 
13325 bool
13326 tree_binary_nonzero_warnv_p (enum tree_code code,
13327 			     tree type,
13328 			     tree op0,
13329 			     tree op1, bool *strict_overflow_p)
13330 {
13331   bool sub_strict_overflow_p;
13332   switch (code)
13333     {
13334     case POINTER_PLUS_EXPR:
13335     case PLUS_EXPR:
13336       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13337 	{
13338 	  /* With the presence of negative values it is hard
13339 	     to say something.  */
13340 	  sub_strict_overflow_p = false;
13341 	  if (!tree_expr_nonnegative_warnv_p (op0,
13342 					      &sub_strict_overflow_p)
13343 	      || !tree_expr_nonnegative_warnv_p (op1,
13344 						 &sub_strict_overflow_p))
13345 	    return false;
13346 	  /* One of operands must be positive and the other non-negative.  */
13347 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13348 	     overflows, on a twos-complement machine the sum of two
13349 	     nonnegative numbers can never be zero.  */
13350 	  return (tree_expr_nonzero_warnv_p (op0,
13351 					     strict_overflow_p)
13352 		  || tree_expr_nonzero_warnv_p (op1,
13353 						strict_overflow_p));
13354 	}
13355       break;
13356 
13357     case MULT_EXPR:
13358       if (TYPE_OVERFLOW_UNDEFINED (type))
13359 	{
13360 	  if (tree_expr_nonzero_warnv_p (op0,
13361 					 strict_overflow_p)
13362 	      && tree_expr_nonzero_warnv_p (op1,
13363 					    strict_overflow_p))
13364 	    {
13365 	      *strict_overflow_p = true;
13366 	      return true;
13367 	    }
13368 	}
13369       break;
13370 
13371     case MIN_EXPR:
13372       sub_strict_overflow_p = false;
13373       if (tree_expr_nonzero_warnv_p (op0,
13374 				     &sub_strict_overflow_p)
13375 	  && tree_expr_nonzero_warnv_p (op1,
13376 					&sub_strict_overflow_p))
13377 	{
13378 	  if (sub_strict_overflow_p)
13379 	    *strict_overflow_p = true;
13380 	}
13381       break;
13382 
13383     case MAX_EXPR:
13384       sub_strict_overflow_p = false;
13385       if (tree_expr_nonzero_warnv_p (op0,
13386 				     &sub_strict_overflow_p))
13387 	{
13388 	  if (sub_strict_overflow_p)
13389 	    *strict_overflow_p = true;
13390 
13391 	  /* When both operands are nonzero, then MAX must be too.  */
13392 	  if (tree_expr_nonzero_warnv_p (op1,
13393 					 strict_overflow_p))
13394 	    return true;
13395 
13396 	  /* MAX where operand 0 is positive is positive.  */
13397 	  return tree_expr_nonnegative_warnv_p (op0,
13398 					       strict_overflow_p);
13399 	}
13400       /* MAX where operand 1 is positive is positive.  */
13401       else if (tree_expr_nonzero_warnv_p (op1,
13402 					  &sub_strict_overflow_p)
13403 	       && tree_expr_nonnegative_warnv_p (op1,
13404 						 &sub_strict_overflow_p))
13405 	{
13406 	  if (sub_strict_overflow_p)
13407 	    *strict_overflow_p = true;
13408 	  return true;
13409 	}
13410       break;
13411 
13412     case BIT_IOR_EXPR:
13413       return (tree_expr_nonzero_warnv_p (op1,
13414 					 strict_overflow_p)
13415 	      || tree_expr_nonzero_warnv_p (op0,
13416 					    strict_overflow_p));
13417 
13418     default:
13419       break;
13420   }
13421 
13422   return false;
13423 }
13424 
13425 /* Return true when T is an address and is known to be nonzero.
13426    For floating point we further ensure that T is not denormal.
13427    Similar logic is present in nonzero_address in rtlanal.h.
13428 
13429    If the return value is based on the assumption that signed overflow
13430    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13431    change *STRICT_OVERFLOW_P.  */
13432 
13433 bool
13434 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13435 {
13436   bool sub_strict_overflow_p;
13437   switch (TREE_CODE (t))
13438     {
13439     case INTEGER_CST:
13440       return !integer_zerop (t);
13441 
13442     case ADDR_EXPR:
13443       {
13444 	tree base = TREE_OPERAND (t, 0);
13445 
13446 	if (!DECL_P (base))
13447 	  base = get_base_address (base);
13448 
13449 	if (base && TREE_CODE (base) == TARGET_EXPR)
13450 	  base = TARGET_EXPR_SLOT (base);
13451 
13452 	if (!base)
13453 	  return false;
13454 
13455 	/* For objects in symbol table check if we know they are non-zero.
13456 	   Don't do anything for variables and functions before symtab is built;
13457 	   it is quite possible that they will be declared weak later.  */
13458 	int nonzero_addr = maybe_nonzero_address (base);
13459 	if (nonzero_addr >= 0)
13460 	  return nonzero_addr;
13461 
13462 	/* Constants are never weak.  */
13463 	if (CONSTANT_CLASS_P (base))
13464 	  return true;
13465 
13466 	return false;
13467       }
13468 
13469     case COND_EXPR:
13470       sub_strict_overflow_p = false;
13471       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13472 				     &sub_strict_overflow_p)
13473 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13474 					&sub_strict_overflow_p))
13475 	{
13476 	  if (sub_strict_overflow_p)
13477 	    *strict_overflow_p = true;
13478 	  return true;
13479 	}
13480       break;
13481 
13482     default:
13483       break;
13484     }
13485   return false;
13486 }
13487 
13488 #define integer_valued_real_p(X) \
13489   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13490 
13491 #define RECURSE(X) \
13492   ((integer_valued_real_p) (X, depth + 1))
13493 
13494 /* Return true if the floating point result of (CODE OP0) has an
13495    integer value.  We also allow +Inf, -Inf and NaN to be considered
13496    integer values. Return false for signaling NaN.
13497 
13498    DEPTH is the current nesting depth of the query.  */
13499 
13500 bool
13501 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13502 {
13503   switch (code)
13504     {
13505     case FLOAT_EXPR:
13506       return true;
13507 
13508     case ABS_EXPR:
13509       return RECURSE (op0);
13510 
13511     CASE_CONVERT:
13512       {
13513 	tree type = TREE_TYPE (op0);
13514 	if (TREE_CODE (type) == INTEGER_TYPE)
13515 	  return true;
13516 	if (TREE_CODE (type) == REAL_TYPE)
13517 	  return RECURSE (op0);
13518 	break;
13519       }
13520 
13521     default:
13522       break;
13523     }
13524   return false;
13525 }
13526 
13527 /* Return true if the floating point result of (CODE OP0 OP1) has an
13528    integer value.  We also allow +Inf, -Inf and NaN to be considered
13529    integer values. Return false for signaling NaN.
13530 
13531    DEPTH is the current nesting depth of the query.  */
13532 
13533 bool
13534 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13535 {
13536   switch (code)
13537     {
13538     case PLUS_EXPR:
13539     case MINUS_EXPR:
13540     case MULT_EXPR:
13541     case MIN_EXPR:
13542     case MAX_EXPR:
13543       return RECURSE (op0) && RECURSE (op1);
13544 
13545     default:
13546       break;
13547     }
13548   return false;
13549 }
13550 
13551 /* Return true if the floating point result of calling FNDECL with arguments
13552    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13553    considered integer values. Return false for signaling NaN.  If FNDECL
13554    takes fewer than 2 arguments, the remaining ARGn are null.
13555 
13556    DEPTH is the current nesting depth of the query.  */
13557 
13558 bool
13559 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13560 {
13561   switch (fn)
13562     {
13563     CASE_CFN_CEIL:
13564     CASE_CFN_FLOOR:
13565     CASE_CFN_NEARBYINT:
13566     CASE_CFN_RINT:
13567     CASE_CFN_ROUND:
13568     CASE_CFN_TRUNC:
13569       return true;
13570 
13571     CASE_CFN_FMIN:
13572     CASE_CFN_FMAX:
13573       return RECURSE (arg0) && RECURSE (arg1);
13574 
13575     default:
13576       break;
13577     }
13578   return false;
13579 }
13580 
13581 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13582    has an integer value.  We also allow +Inf, -Inf and NaN to be
13583    considered integer values. Return false for signaling NaN.
13584 
13585    DEPTH is the current nesting depth of the query.  */
13586 
13587 bool
13588 integer_valued_real_single_p (tree t, int depth)
13589 {
13590   switch (TREE_CODE (t))
13591     {
13592     case REAL_CST:
13593       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13594 
13595     case COND_EXPR:
13596       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13597 
13598     case SSA_NAME:
13599       /* Limit the depth of recursion to avoid quadratic behavior.
13600 	 This is expected to catch almost all occurrences in practice.
13601 	 If this code misses important cases that unbounded recursion
13602 	 would not, passes that need this information could be revised
13603 	 to provide it through dataflow propagation.  */
13604       return (!name_registered_for_update_p (t)
13605 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13606 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13607 						    depth));
13608 
13609     default:
13610       break;
13611     }
13612   return false;
13613 }
13614 
13615 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13616    has an integer value.  We also allow +Inf, -Inf and NaN to be
13617    considered integer values. Return false for signaling NaN.
13618 
13619    DEPTH is the current nesting depth of the query.  */
13620 
13621 static bool
13622 integer_valued_real_invalid_p (tree t, int depth)
13623 {
13624   switch (TREE_CODE (t))
13625     {
13626     case COMPOUND_EXPR:
13627     case MODIFY_EXPR:
13628     case BIND_EXPR:
13629       return RECURSE (TREE_OPERAND (t, 1));
13630 
13631     case SAVE_EXPR:
13632       return RECURSE (TREE_OPERAND (t, 0));
13633 
13634     default:
13635       break;
13636     }
13637   return false;
13638 }
13639 
13640 #undef RECURSE
13641 #undef integer_valued_real_p
13642 
13643 /* Return true if the floating point expression T has an integer value.
13644    We also allow +Inf, -Inf and NaN to be considered integer values.
13645    Return false for signaling NaN.
13646 
13647    DEPTH is the current nesting depth of the query.  */
13648 
13649 bool
13650 integer_valued_real_p (tree t, int depth)
13651 {
13652   if (t == error_mark_node)
13653     return false;
13654 
13655   tree_code code = TREE_CODE (t);
13656   switch (TREE_CODE_CLASS (code))
13657     {
13658     case tcc_binary:
13659     case tcc_comparison:
13660       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13661 					   TREE_OPERAND (t, 1), depth);
13662 
13663     case tcc_unary:
13664       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13665 
13666     case tcc_constant:
13667     case tcc_declaration:
13668     case tcc_reference:
13669       return integer_valued_real_single_p (t, depth);
13670 
13671     default:
13672       break;
13673     }
13674 
13675   switch (code)
13676     {
13677     case COND_EXPR:
13678     case SSA_NAME:
13679       return integer_valued_real_single_p (t, depth);
13680 
13681     case CALL_EXPR:
13682       {
13683 	tree arg0 = (call_expr_nargs (t) > 0
13684 		     ? CALL_EXPR_ARG (t, 0)
13685 		     : NULL_TREE);
13686 	tree arg1 = (call_expr_nargs (t) > 1
13687 		     ? CALL_EXPR_ARG (t, 1)
13688 		     : NULL_TREE);
13689 	return integer_valued_real_call_p (get_call_combined_fn (t),
13690 					   arg0, arg1, depth);
13691       }
13692 
13693     default:
13694       return integer_valued_real_invalid_p (t, depth);
13695     }
13696 }
13697 
13698 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13699    attempt to fold the expression to a constant without modifying TYPE,
13700    OP0 or OP1.
13701 
13702    If the expression could be simplified to a constant, then return
13703    the constant.  If the expression would not be simplified to a
13704    constant, then return NULL_TREE.  */
13705 
13706 tree
13707 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13708 {
13709   tree tem = fold_binary (code, type, op0, op1);
13710   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13711 }
13712 
13713 /* Given the components of a unary expression CODE, TYPE and OP0,
13714    attempt to fold the expression to a constant without modifying
13715    TYPE or OP0.
13716 
13717    If the expression could be simplified to a constant, then return
13718    the constant.  If the expression would not be simplified to a
13719    constant, then return NULL_TREE.  */
13720 
13721 tree
13722 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13723 {
13724   tree tem = fold_unary (code, type, op0);
13725   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13726 }
13727 
13728 /* If EXP represents referencing an element in a constant string
13729    (either via pointer arithmetic or array indexing), return the
13730    tree representing the value accessed, otherwise return NULL.  */
13731 
13732 tree
13733 fold_read_from_constant_string (tree exp)
13734 {
13735   if ((TREE_CODE (exp) == INDIRECT_REF
13736        || TREE_CODE (exp) == ARRAY_REF)
13737       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13738     {
13739       tree exp1 = TREE_OPERAND (exp, 0);
13740       tree index;
13741       tree string;
13742       location_t loc = EXPR_LOCATION (exp);
13743 
13744       if (TREE_CODE (exp) == INDIRECT_REF)
13745 	string = string_constant (exp1, &index);
13746       else
13747 	{
13748 	  tree low_bound = array_ref_low_bound (exp);
13749 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13750 
13751 	  /* Optimize the special-case of a zero lower bound.
13752 
13753 	     We convert the low_bound to sizetype to avoid some problems
13754 	     with constant folding.  (E.g. suppose the lower bound is 1,
13755 	     and its mode is QI.  Without the conversion,l (ARRAY
13756 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13757 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13758 	  if (! integer_zerop (low_bound))
13759 	    index = size_diffop_loc (loc, index,
13760 				 fold_convert_loc (loc, sizetype, low_bound));
13761 
13762 	  string = exp1;
13763 	}
13764 
13765       if (string
13766 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13767 	  && TREE_CODE (string) == STRING_CST
13768 	  && TREE_CODE (index) == INTEGER_CST
13769 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13770 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13771 	      == MODE_INT)
13772 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13773 	return build_int_cst_type (TREE_TYPE (exp),
13774 				   (TREE_STRING_POINTER (string)
13775 				    [TREE_INT_CST_LOW (index)]));
13776     }
13777   return NULL;
13778 }
13779 
13780 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13781    an integer constant, real, or fixed-point constant.
13782 
13783    TYPE is the type of the result.  */
13784 
13785 static tree
13786 fold_negate_const (tree arg0, tree type)
13787 {
13788   tree t = NULL_TREE;
13789 
13790   switch (TREE_CODE (arg0))
13791     {
13792     case INTEGER_CST:
13793       {
13794 	bool overflow;
13795 	wide_int val = wi::neg (arg0, &overflow);
13796 	t = force_fit_type (type, val, 1,
13797 			    (overflow && ! TYPE_UNSIGNED (type))
13798 			    || TREE_OVERFLOW (arg0));
13799 	break;
13800       }
13801 
13802     case REAL_CST:
13803       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13804       break;
13805 
13806     case FIXED_CST:
13807       {
13808         FIXED_VALUE_TYPE f;
13809         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13810 					    &(TREE_FIXED_CST (arg0)), NULL,
13811 					    TYPE_SATURATING (type));
13812 	t = build_fixed (type, f);
13813 	/* Propagate overflow flags.  */
13814 	if (overflow_p | TREE_OVERFLOW (arg0))
13815 	  TREE_OVERFLOW (t) = 1;
13816 	break;
13817       }
13818 
13819     default:
13820       gcc_unreachable ();
13821     }
13822 
13823   return t;
13824 }
13825 
13826 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13827    an integer constant or real constant.
13828 
13829    TYPE is the type of the result.  */
13830 
13831 tree
13832 fold_abs_const (tree arg0, tree type)
13833 {
13834   tree t = NULL_TREE;
13835 
13836   switch (TREE_CODE (arg0))
13837     {
13838     case INTEGER_CST:
13839       {
13840         /* If the value is unsigned or non-negative, then the absolute value
13841 	   is the same as the ordinary value.  */
13842 	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13843 	  t = arg0;
13844 
13845 	/* If the value is negative, then the absolute value is
13846 	   its negation.  */
13847 	else
13848 	  {
13849 	    bool overflow;
13850 	    wide_int val = wi::neg (arg0, &overflow);
13851 	    t = force_fit_type (type, val, -1,
13852 				overflow | TREE_OVERFLOW (arg0));
13853 	  }
13854       }
13855       break;
13856 
13857     case REAL_CST:
13858       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13859 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13860       else
13861 	t =  arg0;
13862       break;
13863 
13864     default:
13865       gcc_unreachable ();
13866     }
13867 
13868   return t;
13869 }
13870 
13871 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13872    constant.  TYPE is the type of the result.  */
13873 
13874 static tree
13875 fold_not_const (const_tree arg0, tree type)
13876 {
13877   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13878 
13879   return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13880 }
13881 
13882 /* Given CODE, a relational operator, the target type, TYPE and two
13883    constant operands OP0 and OP1, return the result of the
13884    relational operation.  If the result is not a compile time
13885    constant, then return NULL_TREE.  */
13886 
13887 static tree
13888 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13889 {
13890   int result, invert;
13891 
13892   /* From here on, the only cases we handle are when the result is
13893      known to be a constant.  */
13894 
13895   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13896     {
13897       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13898       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13899 
13900       /* Handle the cases where either operand is a NaN.  */
13901       if (real_isnan (c0) || real_isnan (c1))
13902 	{
13903 	  switch (code)
13904 	    {
13905 	    case EQ_EXPR:
13906 	    case ORDERED_EXPR:
13907 	      result = 0;
13908 	      break;
13909 
13910 	    case NE_EXPR:
13911 	    case UNORDERED_EXPR:
13912 	    case UNLT_EXPR:
13913 	    case UNLE_EXPR:
13914 	    case UNGT_EXPR:
13915 	    case UNGE_EXPR:
13916 	    case UNEQ_EXPR:
13917               result = 1;
13918 	      break;
13919 
13920 	    case LT_EXPR:
13921 	    case LE_EXPR:
13922 	    case GT_EXPR:
13923 	    case GE_EXPR:
13924 	    case LTGT_EXPR:
13925 	      if (flag_trapping_math)
13926 		return NULL_TREE;
13927 	      result = 0;
13928 	      break;
13929 
13930 	    default:
13931 	      gcc_unreachable ();
13932 	    }
13933 
13934 	  return constant_boolean_node (result, type);
13935 	}
13936 
13937       return constant_boolean_node (real_compare (code, c0, c1), type);
13938     }
13939 
13940   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13941     {
13942       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13943       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13944       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13945     }
13946 
13947   /* Handle equality/inequality of complex constants.  */
13948   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13949     {
13950       tree rcond = fold_relational_const (code, type,
13951 					  TREE_REALPART (op0),
13952 					  TREE_REALPART (op1));
13953       tree icond = fold_relational_const (code, type,
13954 					  TREE_IMAGPART (op0),
13955 					  TREE_IMAGPART (op1));
13956       if (code == EQ_EXPR)
13957 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13958       else if (code == NE_EXPR)
13959 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13960       else
13961 	return NULL_TREE;
13962     }
13963 
13964   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13965     {
13966       if (!VECTOR_TYPE_P (type))
13967 	{
13968 	  /* Have vector comparison with scalar boolean result.  */
13969 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13970 		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13971 	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13972 	    {
13973 	      tree elem0 = VECTOR_CST_ELT (op0, i);
13974 	      tree elem1 = VECTOR_CST_ELT (op1, i);
13975 	      tree tmp = fold_relational_const (code, type, elem0, elem1);
13976 	      if (tmp == NULL_TREE)
13977 		return NULL_TREE;
13978 	      if (integer_zerop (tmp))
13979 		return constant_boolean_node (false, type);
13980 	    }
13981 	  return constant_boolean_node (true, type);
13982 	}
13983       unsigned count = VECTOR_CST_NELTS (op0);
13984       tree *elts =  XALLOCAVEC (tree, count);
13985       gcc_assert (VECTOR_CST_NELTS (op1) == count
13986 		  && TYPE_VECTOR_SUBPARTS (type) == count);
13987 
13988       for (unsigned i = 0; i < count; i++)
13989 	{
13990 	  tree elem_type = TREE_TYPE (type);
13991 	  tree elem0 = VECTOR_CST_ELT (op0, i);
13992 	  tree elem1 = VECTOR_CST_ELT (op1, i);
13993 
13994 	  tree tem = fold_relational_const (code, elem_type,
13995 					    elem0, elem1);
13996 
13997 	  if (tem == NULL_TREE)
13998 	    return NULL_TREE;
13999 
14000 	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14001 	}
14002 
14003       return build_vector (type, elts);
14004     }
14005 
14006   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14007 
14008      To compute GT, swap the arguments and do LT.
14009      To compute GE, do LT and invert the result.
14010      To compute LE, swap the arguments, do LT and invert the result.
14011      To compute NE, do EQ and invert the result.
14012 
14013      Therefore, the code below must handle only EQ and LT.  */
14014 
14015   if (code == LE_EXPR || code == GT_EXPR)
14016     {
14017       std::swap (op0, op1);
14018       code = swap_tree_comparison (code);
14019     }
14020 
14021   /* Note that it is safe to invert for real values here because we
14022      have already handled the one case that it matters.  */
14023 
14024   invert = 0;
14025   if (code == NE_EXPR || code == GE_EXPR)
14026     {
14027       invert = 1;
14028       code = invert_tree_comparison (code, false);
14029     }
14030 
14031   /* Compute a result for LT or EQ if args permit;
14032      Otherwise return T.  */
14033   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14034     {
14035       if (code == EQ_EXPR)
14036 	result = tree_int_cst_equal (op0, op1);
14037       else
14038 	result = tree_int_cst_lt (op0, op1);
14039     }
14040   else
14041     return NULL_TREE;
14042 
14043   if (invert)
14044     result ^= 1;
14045   return constant_boolean_node (result, type);
14046 }
14047 
14048 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14049    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14050    itself.  */
14051 
14052 tree
14053 fold_build_cleanup_point_expr (tree type, tree expr)
14054 {
14055   /* If the expression does not have side effects then we don't have to wrap
14056      it with a cleanup point expression.  */
14057   if (!TREE_SIDE_EFFECTS (expr))
14058     return expr;
14059 
14060   /* If the expression is a return, check to see if the expression inside the
14061      return has no side effects or the right hand side of the modify expression
14062      inside the return. If either don't have side effects set we don't need to
14063      wrap the expression in a cleanup point expression.  Note we don't check the
14064      left hand side of the modify because it should always be a return decl.  */
14065   if (TREE_CODE (expr) == RETURN_EXPR)
14066     {
14067       tree op = TREE_OPERAND (expr, 0);
14068       if (!op || !TREE_SIDE_EFFECTS (op))
14069         return expr;
14070       op = TREE_OPERAND (op, 1);
14071       if (!TREE_SIDE_EFFECTS (op))
14072         return expr;
14073     }
14074 
14075   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14076 }
14077 
14078 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14079    of an indirection through OP0, or NULL_TREE if no simplification is
14080    possible.  */
14081 
14082 tree
14083 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14084 {
14085   tree sub = op0;
14086   tree subtype;
14087 
14088   STRIP_NOPS (sub);
14089   subtype = TREE_TYPE (sub);
14090   if (!POINTER_TYPE_P (subtype)
14091       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14092     return NULL_TREE;
14093 
14094   if (TREE_CODE (sub) == ADDR_EXPR)
14095     {
14096       tree op = TREE_OPERAND (sub, 0);
14097       tree optype = TREE_TYPE (op);
14098 
14099       /* *&CONST_DECL -> to the value of the const decl.  */
14100       if (TREE_CODE (op) == CONST_DECL)
14101 	return DECL_INITIAL (op);
14102       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14103       if (type == optype)
14104 	{
14105 	  tree fop = fold_read_from_constant_string (op);
14106 	  if (fop)
14107 	    return fop;
14108 	  else
14109 	    return op;
14110 	}
14111       /* *(foo *)&fooarray => fooarray[0] */
14112       else if (TREE_CODE (optype) == ARRAY_TYPE
14113 	       && type == TREE_TYPE (optype)
14114 	       && (!in_gimple_form
14115 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14116 	{
14117 	  tree type_domain = TYPE_DOMAIN (optype);
14118 	  tree min_val = size_zero_node;
14119 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14120 	    min_val = TYPE_MIN_VALUE (type_domain);
14121 	  if (in_gimple_form
14122 	      && TREE_CODE (min_val) != INTEGER_CST)
14123 	    return NULL_TREE;
14124 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14125 			     NULL_TREE, NULL_TREE);
14126 	}
14127       /* *(foo *)&complexfoo => __real__ complexfoo */
14128       else if (TREE_CODE (optype) == COMPLEX_TYPE
14129 	       && type == TREE_TYPE (optype))
14130 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14131       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14132       else if (VECTOR_TYPE_P (optype)
14133 	       && type == TREE_TYPE (optype))
14134 	{
14135 	  tree part_width = TYPE_SIZE (type);
14136 	  tree index = bitsize_int (0);
14137 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14138 				  index);
14139 	}
14140     }
14141 
14142   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14143       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14144     {
14145       tree op00 = TREE_OPERAND (sub, 0);
14146       tree op01 = TREE_OPERAND (sub, 1);
14147 
14148       STRIP_NOPS (op00);
14149       if (TREE_CODE (op00) == ADDR_EXPR)
14150 	{
14151 	  tree op00type;
14152 	  op00 = TREE_OPERAND (op00, 0);
14153 	  op00type = TREE_TYPE (op00);
14154 
14155 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14156 	  if (VECTOR_TYPE_P (op00type)
14157 	      && type == TREE_TYPE (op00type)
14158 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14159 		 but we want to treat offsets with MSB set as negative.
14160 		 For the code below negative offsets are invalid and
14161 		 TYPE_SIZE of the element is something unsigned, so
14162 		 check whether op01 fits into HOST_WIDE_INT, which
14163 		 implies it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14164 		 then just use unsigned HOST_WIDE_INT because we want to treat
14165 		 the value as unsigned.  */
14166 	      && tree_fits_shwi_p (op01))
14167 	    {
14168 	      tree part_width = TYPE_SIZE (type);
14169 	      unsigned HOST_WIDE_INT max_offset
14170 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14171 		   * TYPE_VECTOR_SUBPARTS (op00type));
14172 	      if (tree_int_cst_sign_bit (op01) == 0
14173 		  && compare_tree_int (op01, max_offset) == -1)
14174 		{
14175 		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14176 		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14177 		  tree index = bitsize_int (indexi);
14178 		  return fold_build3_loc (loc,
14179 					  BIT_FIELD_REF, type, op00,
14180 					  part_width, index);
14181 		}
14182 	    }
14183 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14184 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14185 		   && type == TREE_TYPE (op00type))
14186 	    {
14187 	      tree size = TYPE_SIZE_UNIT (type);
14188 	      if (tree_int_cst_equal (size, op01))
14189 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14190 	    }
14191 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14192 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14193 		   && type == TREE_TYPE (op00type))
14194 	    {
14195 	      tree type_domain = TYPE_DOMAIN (op00type);
14196 	      tree min_val = size_zero_node;
14197 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14198 		min_val = TYPE_MIN_VALUE (type_domain);
14199 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14200 				     TYPE_SIZE_UNIT (type));
14201 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14202 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
14203 				 NULL_TREE, NULL_TREE);
14204 	    }
14205 	}
14206     }
14207 
14208   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14209   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14210       && type == TREE_TYPE (TREE_TYPE (subtype))
14211       && (!in_gimple_form
14212 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14213     {
14214       tree type_domain;
14215       tree min_val = size_zero_node;
14216       sub = build_fold_indirect_ref_loc (loc, sub);
14217       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14218       if (type_domain && TYPE_MIN_VALUE (type_domain))
14219 	min_val = TYPE_MIN_VALUE (type_domain);
14220       if (in_gimple_form
14221 	  && TREE_CODE (min_val) != INTEGER_CST)
14222 	return NULL_TREE;
14223       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14224 			 NULL_TREE);
14225     }
14226 
14227   return NULL_TREE;
14228 }
14229 
14230 /* Builds an expression for an indirection through T, simplifying some
14231    cases.  */
14232 
14233 tree
14234 build_fold_indirect_ref_loc (location_t loc, tree t)
14235 {
14236   tree type = TREE_TYPE (TREE_TYPE (t));
14237   tree sub = fold_indirect_ref_1 (loc, type, t);
14238 
14239   if (sub)
14240     return sub;
14241 
14242   return build1_loc (loc, INDIRECT_REF, type, t);
14243 }
14244 
14245 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14246 
14247 tree
14248 fold_indirect_ref_loc (location_t loc, tree t)
14249 {
14250   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14251 
14252   if (sub)
14253     return sub;
14254   else
14255     return t;
14256 }
14257 
14258 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14259    whose result is ignored.  The type of the returned tree need not be
14260    the same as the original expression.  */
14261 
14262 tree
14263 fold_ignored_result (tree t)
14264 {
14265   if (!TREE_SIDE_EFFECTS (t))
14266     return integer_zero_node;
14267 
14268   for (;;)
14269     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14270       {
14271       case tcc_unary:
14272 	t = TREE_OPERAND (t, 0);
14273 	break;
14274 
14275       case tcc_binary:
14276       case tcc_comparison:
14277 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14278 	  t = TREE_OPERAND (t, 0);
14279 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14280 	  t = TREE_OPERAND (t, 1);
14281 	else
14282 	  return t;
14283 	break;
14284 
14285       case tcc_expression:
14286 	switch (TREE_CODE (t))
14287 	  {
14288 	  case COMPOUND_EXPR:
14289 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14290 	      return t;
14291 	    t = TREE_OPERAND (t, 0);
14292 	    break;
14293 
14294 	  case COND_EXPR:
14295 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14296 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14297 	      return t;
14298 	    t = TREE_OPERAND (t, 0);
14299 	    break;
14300 
14301 	  default:
14302 	    return t;
14303 	  }
14304 	break;
14305 
14306       default:
14307 	return t;
14308       }
14309 }
14310 
14311 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14312 
14313 tree
14314 round_up_loc (location_t loc, tree value, unsigned int divisor)
14315 {
14316   tree div = NULL_TREE;
14317 
14318   if (divisor == 1)
14319     return value;
14320 
14321   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14322      have to do anything.  Only do this when we are not given a const,
14323      because in that case, this check is more expensive than just
14324      doing it.  */
14325   if (TREE_CODE (value) != INTEGER_CST)
14326     {
14327       div = build_int_cst (TREE_TYPE (value), divisor);
14328 
14329       if (multiple_of_p (TREE_TYPE (value), value, div))
14330 	return value;
14331     }
14332 
14333   /* If divisor is a power of two, simplify this to bit manipulation.  */
14334   if (pow2_or_zerop (divisor))
14335     {
14336       if (TREE_CODE (value) == INTEGER_CST)
14337 	{
14338 	  wide_int val = value;
14339 	  bool overflow_p;
14340 
14341 	  if ((val & (divisor - 1)) == 0)
14342 	    return value;
14343 
14344 	  overflow_p = TREE_OVERFLOW (value);
14345 	  val += divisor - 1;
14346 	  val &= (int) -divisor;
14347 	  if (val == 0)
14348 	    overflow_p = true;
14349 
14350 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14351 	}
14352       else
14353 	{
14354 	  tree t;
14355 
14356 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14357 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14358 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14359 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14360 	}
14361     }
14362   else
14363     {
14364       if (!div)
14365 	div = build_int_cst (TREE_TYPE (value), divisor);
14366       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14367       value = size_binop_loc (loc, MULT_EXPR, value, div);
14368     }
14369 
14370   return value;
14371 }
14372 
14373 /* Likewise, but round down.  */
14374 
14375 tree
14376 round_down_loc (location_t loc, tree value, int divisor)
14377 {
14378   tree div = NULL_TREE;
14379 
14380   gcc_assert (divisor > 0);
14381   if (divisor == 1)
14382     return value;
14383 
14384   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14385      have to do anything.  Only do this when we are not given a const,
14386      because in that case, this check is more expensive than just
14387      doing it.  */
14388   if (TREE_CODE (value) != INTEGER_CST)
14389     {
14390       div = build_int_cst (TREE_TYPE (value), divisor);
14391 
14392       if (multiple_of_p (TREE_TYPE (value), value, div))
14393 	return value;
14394     }
14395 
14396   /* If divisor is a power of two, simplify this to bit manipulation.  */
14397   if (pow2_or_zerop (divisor))
14398     {
14399       tree t;
14400 
14401       t = build_int_cst (TREE_TYPE (value), -divisor);
14402       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14403     }
14404   else
14405     {
14406       if (!div)
14407 	div = build_int_cst (TREE_TYPE (value), divisor);
14408       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14409       value = size_binop_loc (loc, MULT_EXPR, value, div);
14410     }
14411 
14412   return value;
14413 }
14414 
14415 /* Returns the pointer to the base of the object addressed by EXP and
14416    extracts the information about the offset of the access, storing it
14417    to PBITPOS and POFFSET.  */
14418 
14419 static tree
14420 split_address_to_core_and_offset (tree exp,
14421 				  HOST_WIDE_INT *pbitpos, tree *poffset)
14422 {
14423   tree core;
14424   machine_mode mode;
14425   int unsignedp, reversep, volatilep;
14426   HOST_WIDE_INT bitsize;
14427   location_t loc = EXPR_LOCATION (exp);
14428 
14429   if (TREE_CODE (exp) == ADDR_EXPR)
14430     {
14431       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14432 				  poffset, &mode, &unsignedp, &reversep,
14433 				  &volatilep);
14434       core = build_fold_addr_expr_loc (loc, core);
14435     }
14436   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14437     {
14438       core = TREE_OPERAND (exp, 0);
14439       STRIP_NOPS (core);
14440       *pbitpos = 0;
14441       *poffset = TREE_OPERAND (exp, 1);
14442       if (TREE_CODE (*poffset) == INTEGER_CST)
14443 	{
14444 	  offset_int tem = wi::sext (wi::to_offset (*poffset),
14445 				     TYPE_PRECISION (TREE_TYPE (*poffset)));
14446 	  tem <<= LOG2_BITS_PER_UNIT;
14447 	  if (wi::fits_shwi_p (tem))
14448 	    {
14449 	      *pbitpos = tem.to_shwi ();
14450 	      *poffset = NULL_TREE;
14451 	    }
14452 	}
14453     }
14454   else
14455     {
14456       core = exp;
14457       *pbitpos = 0;
14458       *poffset = NULL_TREE;
14459     }
14460 
14461   return core;
14462 }
14463 
14464 /* Returns true if addresses of E1 and E2 differ by a constant, false
14465    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14466 
14467 bool
14468 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14469 {
14470   tree core1, core2;
14471   HOST_WIDE_INT bitpos1, bitpos2;
14472   tree toffset1, toffset2, tdiff, type;
14473 
14474   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14475   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14476 
14477   if (bitpos1 % BITS_PER_UNIT != 0
14478       || bitpos2 % BITS_PER_UNIT != 0
14479       || !operand_equal_p (core1, core2, 0))
14480     return false;
14481 
14482   if (toffset1 && toffset2)
14483     {
14484       type = TREE_TYPE (toffset1);
14485       if (type != TREE_TYPE (toffset2))
14486 	toffset2 = fold_convert (type, toffset2);
14487 
14488       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14489       if (!cst_and_fits_in_hwi (tdiff))
14490 	return false;
14491 
14492       *diff = int_cst_value (tdiff);
14493     }
14494   else if (toffset1 || toffset2)
14495     {
14496       /* If only one of the offsets is non-constant, the difference cannot
14497 	 be a constant.  */
14498       return false;
14499     }
14500   else
14501     *diff = 0;
14502 
14503   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14504   return true;
14505 }
14506 
14507 /* Return OFF converted to a pointer offset type suitable as offset for
14508    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14509 tree
14510 convert_to_ptrofftype_loc (location_t loc, tree off)
14511 {
14512   return fold_convert_loc (loc, sizetype, off);
14513 }
14514 
14515 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14516 tree
14517 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14518 {
14519   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14520 			  ptr, convert_to_ptrofftype_loc (loc, off));
14521 }
14522 
14523 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14524 tree
14525 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14526 {
14527   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14528 			  ptr, size_int (off));
14529 }
14530 
14531 /* Return a char pointer for a C string if it is a string constant
14532    or sum of string constant and integer constant.  We only support
14533    string constants properly terminated with '\0' character.
14534    If STRLEN is a valid pointer, length (including terminating character)
14535    of returned string is stored to the argument.  */
14536 
14537 const char *
14538 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14539 {
14540   tree offset_node;
14541 
14542   if (strlen)
14543     *strlen = 0;
14544 
14545   src = string_constant (src, &offset_node);
14546   if (src == 0)
14547     return NULL;
14548 
14549   unsigned HOST_WIDE_INT offset = 0;
14550   if (offset_node != NULL_TREE)
14551     {
14552       if (!tree_fits_uhwi_p (offset_node))
14553 	return NULL;
14554       else
14555 	offset = tree_to_uhwi (offset_node);
14556     }
14557 
14558   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14559   const char *string = TREE_STRING_POINTER (src);
14560 
14561   /* Support only properly null-terminated strings.  */
14562   if (string_length == 0
14563       || string[string_length - 1] != '\0'
14564       || offset >= string_length)
14565     return NULL;
14566 
14567   if (strlen)
14568     *strlen = string_length - offset;
14569   return string + offset;
14570 }
14571 
14572 #if CHECKING_P
14573 
14574 namespace selftest {
14575 
14576 /* Helper functions for writing tests of folding trees.  */
14577 
14578 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14579 
14580 static void
14581 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14582 			     tree constant)
14583 {
14584   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14585 }
14586 
14587 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14588    wrapping WRAPPED_EXPR.  */
14589 
14590 static void
14591 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14592 				 tree wrapped_expr)
14593 {
14594   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14595   ASSERT_NE (wrapped_expr, result);
14596   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14597   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14598 }
14599 
14600 /* Verify that various arithmetic binary operations are folded
14601    correctly.  */
14602 
14603 static void
14604 test_arithmetic_folding ()
14605 {
14606   tree type = integer_type_node;
14607   tree x = create_tmp_var_raw (type, "x");
14608   tree zero = build_zero_cst (type);
14609   tree one = build_int_cst (type, 1);
14610 
14611   /* Addition.  */
14612   /* 1 <-- (0 + 1) */
14613   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14614 			       one);
14615   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14616 			       one);
14617 
14618   /* (nonlvalue)x <-- (x + 0) */
14619   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14620 				   x);
14621 
14622   /* Subtraction.  */
14623   /* 0 <-- (x - x) */
14624   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14625 			       zero);
14626   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14627 				   x);
14628 
14629   /* Multiplication.  */
14630   /* 0 <-- (x * 0) */
14631   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14632 			       zero);
14633 
14634   /* (nonlvalue)x <-- (x * 1) */
14635   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14636 				   x);
14637 }
14638 
14639 /* Verify that various binary operations on vectors are folded
14640    correctly.  */
14641 
14642 static void
14643 test_vector_folding ()
14644 {
14645   tree inner_type = integer_type_node;
14646   tree type = build_vector_type (inner_type, 4);
14647   tree zero = build_zero_cst (type);
14648   tree one = build_one_cst (type);
14649 
14650   /* Verify equality tests that return a scalar boolean result.  */
14651   tree res_type = boolean_type_node;
14652   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14653   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14654   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14655   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14656 }
14657 
14658 /* Run all of the selftests within this file.  */
14659 
14660 void
14661 fold_const_c_tests ()
14662 {
14663   test_arithmetic_folding ();
14664   test_vector_folding ();
14665 }
14666 
14667 } // namespace selftest
14668 
14669 #endif /* CHECKING_P */
14670