/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
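
/* Reading the encoding above as four bits (UNORD, GT, EQ, LT), each
   value is the bitwise OR of the relations it admits; for instance
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and COMPCODE_NE
   == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_LT).  Combining two
   comparisons of the same operands then reduces to bit operations,
   e.g. (an illustrative sketch, not code from this file):

     a < b || a == b   =>   COMPCODE_LT | COMPCODE_EQ   =>   a <= b
     a <= b && a >= b  =>   COMPCODE_LE & COMPCODE_GE   =>   a == b  */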

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
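
/* For instance (an illustrative sketch, not code from this file):
   with ARG1 and ARG2 the INTEGER_CSTs 12 and 4, the result is the
   INTEGER_CST 3; with 13 and 4 the remainder is nonzero, so the
   result is NULL_TREE.  */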

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
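
/* A typical caller brackets speculative folding with this machinery
   along the following lines (an illustrative sketch; the statement
   and the acceptance condition are made up for the example):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (code, type, op0, op1);
     if (res != NULL_TREE && TREE_CODE (res) == INTEGER_CST)
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();

   so a -Wstrict-overflow warning is only emitted when the folded
   result is actually used.  */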

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
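
/* For example, sin is odd since sin(-x) == -sin(x), so fold may
   rewrite -sin(x) as sin(-x).  rint and friends qualify only when
   -frounding-math is off, since rounding towards +inf or -inf is
   not symmetric about zero.  */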

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* The product INT_MIN/n * n does not overflow, but negating one
	 of its operands makes it overflow if n is a (negative) power
	 of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given an expression T, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

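/* For instance (illustrative): negate_expr on the tree for a - b
   returns the tree for b - a when signed zeros and sign-dependent
   rounding do not matter, and on an expression it cannot simplify
   it returns a freshly built NEGATE_EXPR of its argument.  */
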
/* Split a tree IN into constant, literal, and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
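
/* For instance (an illustrative decomposition): splitting IN = a - 4
   with CODE == PLUS_EXPR sets *MINUS_LITP to 4 and returns the
   variable part A, while splitting IN = b + 3 sets *LITP to 3 and
   returns B.  */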

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

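/* Combine the integer constants ARG1 and ARG2 under operation CODE,
   as int_const_binop_1 above, with overflow of interest for signed
   results only.  */
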
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
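
/* For example (an illustrative sketch, not code from this file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   yields the INTEGER_CST 5 of type int, while an unsupported CODE
   would yield NULL_TREE.  */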

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
1203       {
1204 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1205 	   is off.  */
1206 	d1.signalling = 0;
1207 	t = build_real (type, d1);
1208 	return t;
1209       }
1210       else if (REAL_VALUE_ISNAN (d2))
1211       {
1212 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1213 	   is off.  */
1214 	d2.signalling = 0;
1215 	t = build_real (type, d2);
1216 	return t;
1217       }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru. */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
   Return NULL_TREE if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
1765 
1766 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1767    is a tree code.  The type of the result is taken from the operands.
1768    Both must be equivalent integer types, a la int_binop_types_match_p.
1769    If the operands are constant, so is the result.  */
1770 
1771 tree
1772 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1773 {
1774   tree type = TREE_TYPE (arg0);
1775 
1776   if (arg0 == error_mark_node || arg1 == error_mark_node)
1777     return error_mark_node;
1778 
1779   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1780                                        TREE_TYPE (arg1)));
1781 
1782   /* Handle the special case of two integer constants faster.  */
1783   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1784     {
1785       /* And some specific cases even faster than that.  */
1786       if (code == PLUS_EXPR)
1787 	{
1788 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1789 	    return arg1;
1790 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1791 	    return arg0;
1792 	}
1793       else if (code == MINUS_EXPR)
1794 	{
1795 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1796 	    return arg0;
1797 	}
1798       else if (code == MULT_EXPR)
1799 	{
1800 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1801 	    return arg1;
1802 	}
1803 
1804       /* Handle general case of two integer constants.  For sizetype
1805          constant calculations we always want to know about overflow,
1806 	 even in the unsigned case.  */
1807       return int_const_binop_1 (code, arg0, arg1, -1);
1808     }
1809 
1810   return fold_build2_loc (loc, code, type, arg0, arg1);
1811 }
1812 
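/* Illustrative sketch (not part of the original source): exercising the
   constant fast path above through the size_int/size_binop convenience
   macros, with hypothetical operand values.

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   Both operands are sizetype INTEGER_CSTs and neither is zero, so SUM
   folds directly to the sizetype constant 12 via int_const_binop_1,
   with overflow tracked even though sizetype is unsigned.  */
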
1813 /* Given two values, either both of sizetype or both of bitsizetype,
1814    compute the difference between the two values.  Return the value
1815    in the signed type corresponding to the type of the operands.  */
1816 
1817 tree
1818 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1819 {
1820   tree type = TREE_TYPE (arg0);
1821   tree ctype;
1822 
1823   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1824 				       TREE_TYPE (arg1)));
1825 
1826   /* If the type is already signed, just do the simple thing.  */
1827   if (!TYPE_UNSIGNED (type))
1828     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1829 
1830   if (type == sizetype)
1831     ctype = ssizetype;
1832   else if (type == bitsizetype)
1833     ctype = sbitsizetype;
1834   else
1835     ctype = signed_type_for (type);
1836 
1837   /* If either operand is not a constant, do the conversions to the signed
1838      type and subtract.  The hardware will do the right thing with any
1839      overflow in the subtraction.  */
1840   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1841     return size_binop_loc (loc, MINUS_EXPR,
1842 			   fold_convert_loc (loc, ctype, arg0),
1843 			   fold_convert_loc (loc, ctype, arg1));
1844 
1845   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1846      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1847      overflow) and negate (which can't either).  Special-case a result
1848      of zero while we're here.  */
1849   if (tree_int_cst_equal (arg0, arg1))
1850     return build_int_cst (ctype, 0);
1851   else if (tree_int_cst_lt (arg1, arg0))
1852     return fold_convert_loc (loc, ctype,
1853 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1854   else
1855     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1856 			   fold_convert_loc (loc, ctype,
1857 					     size_binop_loc (loc,
1858 							     MINUS_EXPR,
1859 							     arg1, arg0)));
1860 }
1861 
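/* Worked example (illustrative): for sizetype constants ARG0 = 2 and
   ARG1 = 5, the operands are not equal and ARG1 is not less than ARG0,
   so the last branch above computes 5 - 2 = 3 in sizetype, converts
   that to ssizetype, and subtracts it from zero, yielding the
   ssizetype constant -3 with no spurious overflow.  */
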
1862 /* A subroutine of fold_convert_const handling conversions of an
1863    INTEGER_CST to another integer type.  */
1864 
1865 static tree
1866 fold_convert_const_int_from_int (tree type, const_tree arg1)
1867 {
1868   /* Given an integer constant, make new constant with new type,
1869      appropriately sign-extended or truncated.  Use widest_int
1870      so that any extension is done according ARG1's type.  */
1871   return force_fit_type (type, wi::to_widest (arg1),
1872 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1873 			 TREE_OVERFLOW (arg1));
1874 }
1875 
1876 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1877    to an integer type.  */
1878 
1879 static tree
1880 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1881 {
1882   bool overflow = false;
1883   tree t;
1884 
1885   /* The following code implements the floating point to integer
1886      conversion rules required by the Java Language Specification,
1887      that IEEE NaNs are mapped to zero and values that overflow
1888      the target precision saturate, i.e. values greater than
1889      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1890      are mapped to INT_MIN.  These semantics are allowed by the
1891      C and C++ standards that simply state that the behavior of
1892      FP-to-integer conversion is unspecified upon overflow.  */
1893 
1894   wide_int val;
1895   REAL_VALUE_TYPE r;
1896   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1897 
1898   switch (code)
1899     {
1900     case FIX_TRUNC_EXPR:
1901       real_trunc (&r, VOIDmode, &x);
1902       break;
1903 
1904     default:
1905       gcc_unreachable ();
1906     }
1907 
1908   /* If R is NaN, return zero and show we have an overflow.  */
1909   if (REAL_VALUE_ISNAN (r))
1910     {
1911       overflow = true;
1912       val = wi::zero (TYPE_PRECISION (type));
1913     }
1914 
1915   /* See if R is less than the lower bound or greater than the
1916      upper bound.  */
1917 
1918   if (! overflow)
1919     {
1920       tree lt = TYPE_MIN_VALUE (type);
1921       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1922       if (real_less (&r, &l))
1923 	{
1924 	  overflow = true;
1925 	  val = lt;
1926 	}
1927     }
1928 
1929   if (! overflow)
1930     {
1931       tree ut = TYPE_MAX_VALUE (type);
1932       if (ut)
1933 	{
1934 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1935 	  if (real_less (&u, &r))
1936 	    {
1937 	      overflow = true;
1938 	      val = ut;
1939 	    }
1940 	}
1941     }
1942 
1943   if (! overflow)
1944     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1945 
1946   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1947   return t;
1948 }
1949 
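/* Worked examples of the saturating semantics above (illustrative,
   assuming a 32-bit int target type): FIX_TRUNC_EXPR on the REAL_CST
   3.75 truncates to 3 with no overflow; 1.0e30 exceeds TYPE_MAX_VALUE,
   so the result is 2147483647 with TREE_OVERFLOW set; a NaN operand
   yields 0, also with TREE_OVERFLOW set.  */
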
1950 /* A subroutine of fold_convert_const handling conversions of a
1951    FIXED_CST to an integer type.  */
1952 
1953 static tree
1954 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1955 {
1956   tree t;
1957   double_int temp, temp_trunc;
1958   unsigned int mode;
1959 
1960   /* Right shift FIXED_CST to temp by fbit.  */
1961   temp = TREE_FIXED_CST (arg1).data;
1962   mode = TREE_FIXED_CST (arg1).mode;
1963   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1964     {
1965       temp = temp.rshift (GET_MODE_FBIT (mode),
1966 			  HOST_BITS_PER_DOUBLE_INT,
1967 			  SIGNED_FIXED_POINT_MODE_P (mode));
1968 
1969       /* Left shift temp to temp_trunc by fbit.  */
1970       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1971 				HOST_BITS_PER_DOUBLE_INT,
1972 				SIGNED_FIXED_POINT_MODE_P (mode));
1973     }
1974   else
1975     {
1976       temp = double_int_zero;
1977       temp_trunc = double_int_zero;
1978     }
1979 
1980   /* If FIXED_CST is negative, we need to round the value toward 0:
1981      if the fractional bits are not zero, add 1 to temp.  */
1982   if (SIGNED_FIXED_POINT_MODE_P (mode)
1983       && temp_trunc.is_negative ()
1984       && TREE_FIXED_CST (arg1).data != temp_trunc)
1985     temp += double_int_one;
1986 
1987   /* Given a fixed-point constant, make new constant with new type,
1988      appropriately sign-extended or truncated.  */
1989   t = force_fit_type (type, temp, -1,
1990 		      (temp.is_negative ()
1991 		       && (TYPE_UNSIGNED (type)
1992 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1993 		      | TREE_OVERFLOW (arg1));
1994 
1995   return t;
1996 }
1997 
1998 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1999    to another floating point type.  */
2000 
2001 static tree
2002 fold_convert_const_real_from_real (tree type, const_tree arg1)
2003 {
2004   REAL_VALUE_TYPE value;
2005   tree t;
2006 
2007   /* Don't perform the operation if flag_signaling_nans is on
2008      and the operand is a signaling NaN.  */
2009   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2010       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2011     return NULL_TREE;
2012 
2013   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2014   t = build_real (type, value);
2015 
2016   /* If converting an infinity or NAN to a representation that doesn't
2017      have one, set the overflow bit so that we can produce some kind of
2018      error message at the appropriate point if necessary.  It's not the
2019      most user-friendly message, but it's better than nothing.  */
2020   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2021       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2022     TREE_OVERFLOW (t) = 1;
2023   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2024 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2025     TREE_OVERFLOW (t) = 1;
2026   /* Regular overflow, conversion produced an infinity in a mode that
2027      can't represent them.  */
2028   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2029 	   && REAL_VALUE_ISINF (value)
2030 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2031     TREE_OVERFLOW (t) = 1;
2032   else
2033     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2034   return t;
2035 }
2036 
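/* Example (illustrative): narrowing the double REAL_CST 1.0e300 to
   float yields +Inf with no overflow flag, because SFmode has
   infinities; the TREE_OVERFLOW branches above fire only when the
   target mode cannot represent the infinity or NaN involved.  A
   signaling NaN operand instead makes the function return NULL_TREE
   when SNaNs are honored, leaving the conversion for run time.  */
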
2037 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2038    to a floating point type.  */
2039 
2040 static tree
2041 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2042 {
2043   REAL_VALUE_TYPE value;
2044   tree t;
2045 
2046   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2047   t = build_real (type, value);
2048 
2049   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2050   return t;
2051 }
2052 
2053 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2054    to another fixed-point type.  */
2055 
2056 static tree
2057 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2058 {
2059   FIXED_VALUE_TYPE value;
2060   tree t;
2061   bool overflow_p;
2062 
2063   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2064 			      TYPE_SATURATING (type));
2065   t = build_fixed (type, value);
2066 
2067   /* Propagate overflow flags.  */
2068   if (overflow_p | TREE_OVERFLOW (arg1))
2069     TREE_OVERFLOW (t) = 1;
2070   return t;
2071 }
2072 
2073 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2074    to a fixed-point type.  */
2075 
2076 static tree
2077 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2078 {
2079   FIXED_VALUE_TYPE value;
2080   tree t;
2081   bool overflow_p;
2082   double_int di;
2083 
2084   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2085 
2086   di.low = TREE_INT_CST_ELT (arg1, 0);
2087   if (TREE_INT_CST_NUNITS (arg1) == 1)
2088     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2089   else
2090     di.high = TREE_INT_CST_ELT (arg1, 1);
2091 
2092   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2093 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2094 				       TYPE_SATURATING (type));
2095   t = build_fixed (type, value);
2096 
2097   /* Propagate overflow flags.  */
2098   if (overflow_p | TREE_OVERFLOW (arg1))
2099     TREE_OVERFLOW (t) = 1;
2100   return t;
2101 }
2102 
2103 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2104    to a fixed-point type.  */
2105 
2106 static tree
2107 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2108 {
2109   FIXED_VALUE_TYPE value;
2110   tree t;
2111   bool overflow_p;
2112 
2113   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2114 					&TREE_REAL_CST (arg1),
2115 					TYPE_SATURATING (type));
2116   t = build_fixed (type, value);
2117 
2118   /* Propagate overflow flags.  */
2119   if (overflow_p | TREE_OVERFLOW (arg1))
2120     TREE_OVERFLOW (t) = 1;
2121   return t;
2122 }
2123 
2124 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2125    type TYPE.  If no simplification can be done return NULL_TREE.  */
2126 
2127 static tree
2128 fold_convert_const (enum tree_code code, tree type, tree arg1)
2129 {
2130   if (TREE_TYPE (arg1) == type)
2131     return arg1;
2132 
2133   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2134       || TREE_CODE (type) == OFFSET_TYPE)
2135     {
2136       if (TREE_CODE (arg1) == INTEGER_CST)
2137 	return fold_convert_const_int_from_int (type, arg1);
2138       else if (TREE_CODE (arg1) == REAL_CST)
2139 	return fold_convert_const_int_from_real (code, type, arg1);
2140       else if (TREE_CODE (arg1) == FIXED_CST)
2141 	return fold_convert_const_int_from_fixed (type, arg1);
2142     }
2143   else if (TREE_CODE (type) == REAL_TYPE)
2144     {
2145       if (TREE_CODE (arg1) == INTEGER_CST)
2146 	return build_real_from_int_cst (type, arg1);
2147       else if (TREE_CODE (arg1) == REAL_CST)
2148 	return fold_convert_const_real_from_real (type, arg1);
2149       else if (TREE_CODE (arg1) == FIXED_CST)
2150 	return fold_convert_const_real_from_fixed (type, arg1);
2151     }
2152   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2153     {
2154       if (TREE_CODE (arg1) == FIXED_CST)
2155 	return fold_convert_const_fixed_from_fixed (type, arg1);
2156       else if (TREE_CODE (arg1) == INTEGER_CST)
2157 	return fold_convert_const_fixed_from_int (type, arg1);
2158       else if (TREE_CODE (arg1) == REAL_CST)
2159 	return fold_convert_const_fixed_from_real (type, arg1);
2160     }
2161   else if (TREE_CODE (type) == VECTOR_TYPE)
2162     {
2163       if (TREE_CODE (arg1) == VECTOR_CST
2164 	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2165 	{
2166 	  int len = TYPE_VECTOR_SUBPARTS (type);
2167 	  tree elttype = TREE_TYPE (type);
2168 	  tree *v = XALLOCAVEC (tree, len);
2169 	  for (int i = 0; i < len; ++i)
2170 	    {
2171 	      tree elt = VECTOR_CST_ELT (arg1, i);
2172 	      tree cvt = fold_convert_const (code, elttype, elt);
2173 	      if (cvt == NULL_TREE)
2174 		return NULL_TREE;
2175 	      v[i] = cvt;
2176 	    }
2177 	  return build_vector (type, v);
2178 	}
2179     }
2180   return NULL_TREE;
2181 }
2182 
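/* Dispatch sketch (illustrative): fold_convert_const (NOP_EXPR,
   long_integer_type_node, <int 7>) takes the INTEGRAL_TYPE_P branch
   and returns a long INTEGER_CST 7 via fold_convert_const_int_from_int.
   A VECTOR_CST is converted element by element, and NULL_TREE is
   returned as soon as any element fails to fold to a constant.  */
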
2183 /* Construct a vector of zero elements of vector type TYPE.  */
2184 
2185 static tree
2186 build_zero_vector (tree type)
2187 {
2188   tree t;
2189 
2190   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2191   return build_vector_from_val (type, t);
2192 }
2193 
2194 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
2195 
2196 bool
2197 fold_convertible_p (const_tree type, const_tree arg)
2198 {
2199   tree orig = TREE_TYPE (arg);
2200 
2201   if (type == orig)
2202     return true;
2203 
2204   if (TREE_CODE (arg) == ERROR_MARK
2205       || TREE_CODE (type) == ERROR_MARK
2206       || TREE_CODE (orig) == ERROR_MARK)
2207     return false;
2208 
2209   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2210     return true;
2211 
2212   switch (TREE_CODE (type))
2213     {
2214     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2215     case POINTER_TYPE: case REFERENCE_TYPE:
2216     case OFFSET_TYPE:
2217       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2218 	      || TREE_CODE (orig) == OFFSET_TYPE);
2219 
2220     case REAL_TYPE:
2221     case FIXED_POINT_TYPE:
2222     case VECTOR_TYPE:
2223     case VOID_TYPE:
2224       return TREE_CODE (type) == TREE_CODE (orig);
2225 
2226     default:
2227       return false;
2228     }
2229 }
2230 
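/* Examples (illustrative): an expression of type int is convertible to
   an enumeral or pointer type with a NOP_EXPR, so fold_convertible_p
   returns true for those targets; for a REAL_TYPE target it returns
   true only if ARG already has some REAL_TYPE, since an int-to-float
   conversion needs a FLOAT_EXPR rather than a NOP_EXPR.  */
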
2231 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2232    simple conversions in preference to calling the front-end's convert.  */
2233 
2234 tree
2235 fold_convert_loc (location_t loc, tree type, tree arg)
2236 {
2237   tree orig = TREE_TYPE (arg);
2238   tree tem;
2239 
2240   if (type == orig)
2241     return arg;
2242 
2243   if (TREE_CODE (arg) == ERROR_MARK
2244       || TREE_CODE (type) == ERROR_MARK
2245       || TREE_CODE (orig) == ERROR_MARK)
2246     return error_mark_node;
2247 
2248   switch (TREE_CODE (type))
2249     {
2250     case POINTER_TYPE:
2251     case REFERENCE_TYPE:
2252       /* Handle conversions between pointers to different address spaces.  */
2253       if (POINTER_TYPE_P (orig)
2254 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2255 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2256 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2257       /* fall through */
2258 
2259     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2260     case OFFSET_TYPE:
2261       if (TREE_CODE (arg) == INTEGER_CST)
2262 	{
2263 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2264 	  if (tem != NULL_TREE)
2265 	    return tem;
2266 	}
2267       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2268 	  || TREE_CODE (orig) == OFFSET_TYPE)
2269 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2270       if (TREE_CODE (orig) == COMPLEX_TYPE)
2271 	return fold_convert_loc (loc, type,
2272 				 fold_build1_loc (loc, REALPART_EXPR,
2273 						  TREE_TYPE (orig), arg));
2274       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2275 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2276       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2277 
2278     case REAL_TYPE:
2279       if (TREE_CODE (arg) == INTEGER_CST)
2280 	{
2281 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2282 	  if (tem != NULL_TREE)
2283 	    return tem;
2284 	}
2285       else if (TREE_CODE (arg) == REAL_CST)
2286 	{
2287 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2288 	  if (tem != NULL_TREE)
2289 	    return tem;
2290 	}
2291       else if (TREE_CODE (arg) == FIXED_CST)
2292 	{
2293 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2294 	  if (tem != NULL_TREE)
2295 	    return tem;
2296 	}
2297 
2298       switch (TREE_CODE (orig))
2299 	{
2300 	case INTEGER_TYPE:
2301 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2302 	case POINTER_TYPE: case REFERENCE_TYPE:
2303 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2304 
2305 	case REAL_TYPE:
2306 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2307 
2308 	case FIXED_POINT_TYPE:
2309 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2310 
2311 	case COMPLEX_TYPE:
2312 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2313 	  return fold_convert_loc (loc, type, tem);
2314 
2315 	default:
2316 	  gcc_unreachable ();
2317 	}
2318 
2319     case FIXED_POINT_TYPE:
2320       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2321 	  || TREE_CODE (arg) == REAL_CST)
2322 	{
2323 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2324 	  if (tem != NULL_TREE)
2325 	    goto fold_convert_exit;
2326 	}
2327 
2328       switch (TREE_CODE (orig))
2329 	{
2330 	case FIXED_POINT_TYPE:
2331 	case INTEGER_TYPE:
2332 	case ENUMERAL_TYPE:
2333 	case BOOLEAN_TYPE:
2334 	case REAL_TYPE:
2335 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2336 
2337 	case COMPLEX_TYPE:
2338 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2339 	  return fold_convert_loc (loc, type, tem);
2340 
2341 	default:
2342 	  gcc_unreachable ();
2343 	}
2344 
2345     case COMPLEX_TYPE:
2346       switch (TREE_CODE (orig))
2347 	{
2348 	case INTEGER_TYPE:
2349 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2350 	case POINTER_TYPE: case REFERENCE_TYPE:
2351 	case REAL_TYPE:
2352 	case FIXED_POINT_TYPE:
2353 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2354 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2355 			      fold_convert_loc (loc, TREE_TYPE (type),
2356 					    integer_zero_node));
2357 	case COMPLEX_TYPE:
2358 	  {
2359 	    tree rpart, ipart;
2360 
2361 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2362 	      {
2363 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2364 				      TREE_OPERAND (arg, 0));
2365 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2366 				      TREE_OPERAND (arg, 1));
2367 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2368 	      }
2369 
2370 	    arg = save_expr (arg);
2371 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2372 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2373 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2374 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2375 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2376 	  }
2377 
2378 	default:
2379 	  gcc_unreachable ();
2380 	}
2381 
2382     case VECTOR_TYPE:
2383       if (integer_zerop (arg))
2384 	return build_zero_vector (type);
2385       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2386       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2387 		  || TREE_CODE (orig) == VECTOR_TYPE);
2388       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2389 
2390     case VOID_TYPE:
2391       tem = fold_ignored_result (arg);
2392       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2393 
2394     default:
2395       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2396 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2397       gcc_unreachable ();
2398     }
2399  fold_convert_exit:
2400   protected_set_expr_location_unshare (tem, loc);
2401   return tem;
2402 }
2403 
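/* Conversion sketches (illustrative): converting a COMPLEX_TYPE value
   to a REAL_TYPE extracts the real part first, producing
   fold_convert (type, REALPART_EXPR <arg>); converting a scalar to a
   COMPLEX_TYPE builds COMPLEX_EXPR <(T) arg, (T) 0>, where T is the
   complex element type TREE_TYPE (type).  */
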
2404 /* Return false if expr can be assumed not to be an lvalue, true
2405    otherwise.  */
2406 
2407 static bool
2408 maybe_lvalue_p (const_tree x)
2409 {
2410   /* We only need to wrap lvalue tree codes.  */
2411   switch (TREE_CODE (x))
2412   {
2413   case VAR_DECL:
2414   case PARM_DECL:
2415   case RESULT_DECL:
2416   case LABEL_DECL:
2417   case FUNCTION_DECL:
2418   case SSA_NAME:
2419 
2420   case COMPONENT_REF:
2421   case MEM_REF:
2422   case INDIRECT_REF:
2423   case ARRAY_REF:
2424   case ARRAY_RANGE_REF:
2425   case BIT_FIELD_REF:
2426   case OBJ_TYPE_REF:
2427 
2428   case REALPART_EXPR:
2429   case IMAGPART_EXPR:
2430   case PREINCREMENT_EXPR:
2431   case PREDECREMENT_EXPR:
2432   case SAVE_EXPR:
2433   case TRY_CATCH_EXPR:
2434   case WITH_CLEANUP_EXPR:
2435   case COMPOUND_EXPR:
2436   case MODIFY_EXPR:
2437   case TARGET_EXPR:
2438   case COND_EXPR:
2439   case BIND_EXPR:
2440     break;
2441 
2442   default:
2443     /* Assume the worst for front-end tree codes.  */
2444     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2445       break;
2446     return false;
2447   }
2448 
2449   return true;
2450 }
2451 
2452 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2453 
2454 tree
2455 non_lvalue_loc (location_t loc, tree x)
2456 {
2457   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2458      us.  */
2459   if (in_gimple_form)
2460     return x;
2461 
2462   if (! maybe_lvalue_p (x))
2463     return x;
2464   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2465 }
2466 
2467 /* When pedantic, return an expr equal to X but certainly not valid as a
2468    pedantic lvalue.  Otherwise, return X.  */
2469 
2470 static tree
2471 pedantic_non_lvalue_loc (location_t loc, tree x)
2472 {
2473   return protected_set_expr_location_unshare (x, loc);
2474 }
2475 
2476 /* Given a tree comparison code, return the code that is the logical inverse.
2477    It is generally not safe to do this for floating-point comparisons, except
2478    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2479    ERROR_MARK in this case.  */
2480 
2481 enum tree_code
2482 invert_tree_comparison (enum tree_code code, bool honor_nans)
2483 {
2484   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2485       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2486     return ERROR_MARK;
2487 
2488   switch (code)
2489     {
2490     case EQ_EXPR:
2491       return NE_EXPR;
2492     case NE_EXPR:
2493       return EQ_EXPR;
2494     case GT_EXPR:
2495       return honor_nans ? UNLE_EXPR : LE_EXPR;
2496     case GE_EXPR:
2497       return honor_nans ? UNLT_EXPR : LT_EXPR;
2498     case LT_EXPR:
2499       return honor_nans ? UNGE_EXPR : GE_EXPR;
2500     case LE_EXPR:
2501       return honor_nans ? UNGT_EXPR : GT_EXPR;
2502     case LTGT_EXPR:
2503       return UNEQ_EXPR;
2504     case UNEQ_EXPR:
2505       return LTGT_EXPR;
2506     case UNGT_EXPR:
2507       return LE_EXPR;
2508     case UNGE_EXPR:
2509       return LT_EXPR;
2510     case UNLT_EXPR:
2511       return GE_EXPR;
2512     case UNLE_EXPR:
2513       return GT_EXPR;
2514     case ORDERED_EXPR:
2515       return UNORDERED_EXPR;
2516     case UNORDERED_EXPR:
2517       return ORDERED_EXPR;
2518     default:
2519       gcc_unreachable ();
2520     }
2521 }
2522 
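/* Example (illustrative): with NaNs honored the inverse of x < y is
   x UNGE y rather than x >= y, because both x < y and x >= y are false
   when either operand is a NaN.  If -ftrapping-math is also in effect,
   LT_EXPR gets no inverse at all and ERROR_MARK is returned, since the
   inverted form would trap under different conditions.  */
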
2523 /* Similar, but return the comparison that results if the operands are
2524    swapped.  This is safe for floating-point.  */
2525 
2526 enum tree_code
2527 swap_tree_comparison (enum tree_code code)
2528 {
2529   switch (code)
2530     {
2531     case EQ_EXPR:
2532     case NE_EXPR:
2533     case ORDERED_EXPR:
2534     case UNORDERED_EXPR:
2535     case LTGT_EXPR:
2536     case UNEQ_EXPR:
2537       return code;
2538     case GT_EXPR:
2539       return LT_EXPR;
2540     case GE_EXPR:
2541       return LE_EXPR;
2542     case LT_EXPR:
2543       return GT_EXPR;
2544     case LE_EXPR:
2545       return GE_EXPR;
2546     case UNGT_EXPR:
2547       return UNLT_EXPR;
2548     case UNGE_EXPR:
2549       return UNLE_EXPR;
2550     case UNLT_EXPR:
2551       return UNGT_EXPR;
2552     case UNLE_EXPR:
2553       return UNGE_EXPR;
2554     default:
2555       gcc_unreachable ();
2556     }
2557 }
2558 
2559 
2560 /* Convert a comparison tree code from an enum tree_code representation
2561    into a compcode bit-based encoding.  This function is the inverse of
2562    compcode_to_comparison.  */
2563 
2564 static enum comparison_code
2565 comparison_to_compcode (enum tree_code code)
2566 {
2567   switch (code)
2568     {
2569     case LT_EXPR:
2570       return COMPCODE_LT;
2571     case EQ_EXPR:
2572       return COMPCODE_EQ;
2573     case LE_EXPR:
2574       return COMPCODE_LE;
2575     case GT_EXPR:
2576       return COMPCODE_GT;
2577     case NE_EXPR:
2578       return COMPCODE_NE;
2579     case GE_EXPR:
2580       return COMPCODE_GE;
2581     case ORDERED_EXPR:
2582       return COMPCODE_ORD;
2583     case UNORDERED_EXPR:
2584       return COMPCODE_UNORD;
2585     case UNLT_EXPR:
2586       return COMPCODE_UNLT;
2587     case UNEQ_EXPR:
2588       return COMPCODE_UNEQ;
2589     case UNLE_EXPR:
2590       return COMPCODE_UNLE;
2591     case UNGT_EXPR:
2592       return COMPCODE_UNGT;
2593     case LTGT_EXPR:
2594       return COMPCODE_LTGT;
2595     case UNGE_EXPR:
2596       return COMPCODE_UNGE;
2597     default:
2598       gcc_unreachable ();
2599     }
2600 }
2601 
2602 /* Convert a compcode bit-based encoding of a comparison operator back
2603    to GCC's enum tree_code representation.  This function is the
2604    inverse of comparison_to_compcode.  */
2605 
2606 static enum tree_code
2607 compcode_to_comparison (enum comparison_code code)
2608 {
2609   switch (code)
2610     {
2611     case COMPCODE_LT:
2612       return LT_EXPR;
2613     case COMPCODE_EQ:
2614       return EQ_EXPR;
2615     case COMPCODE_LE:
2616       return LE_EXPR;
2617     case COMPCODE_GT:
2618       return GT_EXPR;
2619     case COMPCODE_NE:
2620       return NE_EXPR;
2621     case COMPCODE_GE:
2622       return GE_EXPR;
2623     case COMPCODE_ORD:
2624       return ORDERED_EXPR;
2625     case COMPCODE_UNORD:
2626       return UNORDERED_EXPR;
2627     case COMPCODE_UNLT:
2628       return UNLT_EXPR;
2629     case COMPCODE_UNEQ:
2630       return UNEQ_EXPR;
2631     case COMPCODE_UNLE:
2632       return UNLE_EXPR;
2633     case COMPCODE_UNGT:
2634       return UNGT_EXPR;
2635     case COMPCODE_LTGT:
2636       return LTGT_EXPR;
2637     case COMPCODE_UNGE:
2638       return UNGE_EXPR;
2639     default:
2640       gcc_unreachable ();
2641     }
2642 }
2643 
2644 /* Return a tree for the comparison which is the combination of
2645    doing the AND or OR (depending on CODE) of the two operations LCODE
2646    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2647    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2648    if this makes the transformation invalid.  */
2649 
2650 tree
2651 combine_comparisons (location_t loc,
2652 		     enum tree_code code, enum tree_code lcode,
2653 		     enum tree_code rcode, tree truth_type,
2654 		     tree ll_arg, tree lr_arg)
2655 {
2656   bool honor_nans = HONOR_NANS (ll_arg);
2657   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2658   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2659   int compcode;
2660 
2661   switch (code)
2662     {
2663     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2664       compcode = lcompcode & rcompcode;
2665       break;
2666 
2667     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2668       compcode = lcompcode | rcompcode;
2669       break;
2670 
2671     default:
2672       return NULL_TREE;
2673     }
2674 
2675   if (!honor_nans)
2676     {
2677       /* Eliminate unordered comparisons, as well as LTGT and ORD
2678 	 which are not used unless the mode has NaNs.  */
2679       compcode &= ~COMPCODE_UNORD;
2680       if (compcode == COMPCODE_LTGT)
2681 	compcode = COMPCODE_NE;
2682       else if (compcode == COMPCODE_ORD)
2683 	compcode = COMPCODE_TRUE;
2684     }
2685    else if (flag_trapping_math)
2686      {
2687 	/* Check that the original operation and the optimized ones will trap
2688 	   under the same condition.  */
2689 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2690 		     && (lcompcode != COMPCODE_EQ)
2691 		     && (lcompcode != COMPCODE_ORD);
2692 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2693 		     && (rcompcode != COMPCODE_EQ)
2694 		     && (rcompcode != COMPCODE_ORD);
2695 	bool trap = (compcode & COMPCODE_UNORD) == 0
2696 		    && (compcode != COMPCODE_EQ)
2697 		    && (compcode != COMPCODE_ORD);
2698 
2699         /* In a short-circuited boolean expression the LHS might be
2700 	   such that the RHS, if evaluated, will never trap.  For
2701 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2702 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2703 	   example, the expression above will never trap, hence
2704 	   optimizing it to x < y would be invalid).  */
2705         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2706             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2707           rtrap = false;
2708 
2709         /* If the comparison was short-circuited, and only the RHS
2710 	   trapped, we may now generate a spurious trap.  */
2711 	if (rtrap && !ltrap
2712 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2713 	  return NULL_TREE;
2714 
2715 	/* If we changed the conditions that cause a trap, we lose.  */
2716 	if ((ltrap || rtrap) != trap)
2717 	  return NULL_TREE;
2718       }
2719 
2720   if (compcode == COMPCODE_TRUE)
2721     return constant_boolean_node (true, truth_type);
2722   else if (compcode == COMPCODE_FALSE)
2723     return constant_boolean_node (false, truth_type);
2724   else
2725     {
2726       enum tree_code tcode;
2727 
2728       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2729       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2730     }
2731 }
2732 
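/* Worked example (illustrative): for (x < y) || (x == y) the bit
   encoding gives COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3, which is
   COMPCODE_LE, so the combination folds to x <= y.  This stays valid
   even under -ftrapping-math, because x < y already traps on unordered
   operands in exactly the cases where x <= y would.  */
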
2733 /* Return nonzero if two operands (typically of the same tree node)
2734    are necessarily equal.  FLAGS modifies behavior as follows:
2735 
2736    If OEP_ONLY_CONST is set, only return nonzero for constants.
2737    This function tests whether the operands are indistinguishable;
2738    it does not test whether they are equal using C's == operation.
2739    The distinction is important for IEEE floating point, because
2740    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2741    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2742 
2743    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2744    even though it may hold multiple values during a function.
2745    This is because a GCC tree node guarantees that nothing else is
2746    executed between the evaluation of its "operands" (which may often
2747    be evaluated in arbitrary order).  Hence if the operands themselves
2748    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2749    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2750    unset means assuming isochronic (or instantaneous) tree equivalence.
2751    Unless comparing arbitrary expression trees, such as from different
2752    statements, this flag can usually be left unset.
2753 
2754    If OEP_PURE_SAME is set, then pure functions with identical arguments
2755    are considered the same.  It is used when the caller has other ways
2756    to ensure that global memory is unchanged in between.
2757 
2758    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2759    not values of expressions.
2760 
2761    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2762    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2763 
2764    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2765    any operand with a side effect.  This is unnecessarily conservative in the
2766    case where we know that arg0 and arg1 are in disjoint code paths (such as in
2767    the ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2768    addresses with TREE_CONSTANT flag set so we know that &var == &var
2769    even if var is volatile.  */
2770 
2771 int
2772 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2773 {
2774   /* When checking, verify at the outermost operand_equal_p call that
2775      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2776      hash value.  */
2777   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2778     {
2779       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2780 	{
2781 	  if (arg0 != arg1)
2782 	    {
2783 	      inchash::hash hstate0 (0), hstate1 (0);
2784 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2785 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2786 	      hashval_t h0 = hstate0.end ();
2787 	      hashval_t h1 = hstate1.end ();
2788 	      gcc_assert (h0 == h1);
2789 	    }
2790 	  return 1;
2791 	}
2792       else
2793 	return 0;
2794     }
2795 
2796   /* If either is ERROR_MARK, they aren't equal.  */
2797   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2798       || TREE_TYPE (arg0) == error_mark_node
2799       || TREE_TYPE (arg1) == error_mark_node)
2800     return 0;
2801 
2802   /* Similar, if either does not have a type (like a released SSA name),
2803      they aren't equal.  */
2804   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2805     return 0;
2806 
2807   /* We cannot consider pointers to different address spaces equal.  */
2808   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2809       && POINTER_TYPE_P (TREE_TYPE (arg1))
2810       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2811 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2812     return 0;
2813 
2814   /* Check equality of integer constants before bailing out due to
2815      precision differences.  */
2816   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2817     {
2818       /* Address of INTEGER_CST is not defined; check that we did not forget
2819 	 to drop the OEP_ADDRESS_OF flag.  */
2820       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2821       return tree_int_cst_equal (arg0, arg1);
2822     }
2823 
2824   if (!(flags & OEP_ADDRESS_OF))
2825     {
2826       /* If both types don't have the same signedness, then we can't consider
2827 	 them equal.  We must check this before the STRIP_NOPS calls
2828 	 because they may change the signedness of the arguments.  As pointers
2829 	 strictly don't have a signedness, require either two pointers or
2830 	 two non-pointers as well.  */
2831       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2832 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2833 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2834 	return 0;
2835 
2836       /* If both types don't have the same precision, then it is not safe
2837 	 to strip NOPs.  */
2838       if (element_precision (TREE_TYPE (arg0))
2839 	  != element_precision (TREE_TYPE (arg1)))
2840 	return 0;
2841 
2842       STRIP_NOPS (arg0);
2843       STRIP_NOPS (arg1);
2844     }
2845 #if 0
2846      FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2847      sanity check once the issue is solved.  */
2848   else
2849     /* Addresses of conversions and SSA_NAMEs (and many other things)
2850        are not defined.  Check that we did not forget to drop the
2851        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2852     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2853 			 && TREE_CODE (arg0) != SSA_NAME);
2854 #endif
2855 
2856   /* In case both args are comparisons but with different comparison
2857      code, try to swap the comparison operands of one arg to produce
2858      a match and compare that variant.  */
2859   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2860       && COMPARISON_CLASS_P (arg0)
2861       && COMPARISON_CLASS_P (arg1))
2862     {
2863       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2864 
2865       if (TREE_CODE (arg0) == swap_code)
2866 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2867 			        TREE_OPERAND (arg1, 1), flags)
2868 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2869 				   TREE_OPERAND (arg1, 0), flags);
2870     }
2871 
2872   if (TREE_CODE (arg0) != TREE_CODE (arg1))
2873     {
2874       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
2875       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2876 	;
2877       else if (flags & OEP_ADDRESS_OF)
2878 	{
2879 	  /* If we are interested in comparing addresses ignore
2880 	     MEM_REF wrappings of the base that can appear just for
2881 	     TBAA reasons.  */
2882 	  if (TREE_CODE (arg0) == MEM_REF
2883 	      && DECL_P (arg1)
2884 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2885 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2886 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
2887 	    return 1;
2888 	  else if (TREE_CODE (arg1) == MEM_REF
2889 		   && DECL_P (arg0)
2890 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2891 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2892 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
2893 	    return 1;
2894 	  return 0;
2895 	}
2896       else
2897 	return 0;
2898     }
2899 
2900   /* When not checking addresses, this is needed for conversions and for
2901      COMPONENT_REF.  Might as well play it safe and always test this.  */
2902   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2903       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2904       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2905 	  && !(flags & OEP_ADDRESS_OF)))
2906     return 0;
2907 
2908   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2909      We don't care about side effects in that case because the SAVE_EXPR
2910      takes care of that for us. In all other cases, two expressions are
2911      equal if they have no side effects.  If we have two identical
2912      expressions with side effects that should be treated the same due
2913      to the only side effects being identical SAVE_EXPR's, that will
2914      be detected in the recursive calls below.
2915      If we are taking an invariant address of two identical objects
2916      they are necessarily equal as well.  */
2917   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2918       && (TREE_CODE (arg0) == SAVE_EXPR
2919 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
2920 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2921     return 1;
2922 
2923   /* Next handle constant cases, those for which we can return 1 even
2924      if ONLY_CONST is set.  */
2925   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2926     switch (TREE_CODE (arg0))
2927       {
2928       case INTEGER_CST:
2929 	return tree_int_cst_equal (arg0, arg1);
2930 
2931       case FIXED_CST:
2932 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2933 				       TREE_FIXED_CST (arg1));
2934 
2935       case REAL_CST:
2936 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2937 	  return 1;
2938 
2939 
2940 	if (!HONOR_SIGNED_ZEROS (arg0))
2941 	  {
2942 	    /* If we do not distinguish between signed and unsigned zero,
2943 	       consider them equal.  */
2944 	    if (real_zerop (arg0) && real_zerop (arg1))
2945 	      return 1;
2946 	  }
2947 	return 0;
2948 
2949       case VECTOR_CST:
2950 	{
2951 	  unsigned i;
2952 
2953 	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2954 	    return 0;
2955 
2956 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2957 	    {
2958 	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2959 				    VECTOR_CST_ELT (arg1, i), flags))
2960 		return 0;
2961 	    }
2962 	  return 1;
2963 	}
2964 
2965       case COMPLEX_CST:
2966 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2967 				 flags)
2968 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2969 				    flags));
2970 
2971       case STRING_CST:
2972 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2973 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2974 			      TREE_STRING_POINTER (arg1),
2975 			      TREE_STRING_LENGTH (arg0)));
2976 
2977       case ADDR_EXPR:
2978 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2979 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2980 				flags | OEP_ADDRESS_OF
2981 				| OEP_MATCH_SIDE_EFFECTS);
2982       case CONSTRUCTOR:
2983 	/* In GIMPLE empty constructors are allowed in initializers of
2984 	   aggregates.  */
2985 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2986       default:
2987 	break;
2988       }
2989 
2990   if (flags & OEP_ONLY_CONST)
2991     return 0;
2992 
2993 /* Define macros to test an operand from arg0 and arg1 for equality and a
2994    variant that allows null and views null as being different from any
2995    non-null value.  In the latter case, if either is null, then both
2996    must be; otherwise, do the normal comparison.  */
2997 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2998 				    TREE_OPERAND (arg1, N), flags)
2999 
3000 #define OP_SAME_WITH_NULL(N)				\
3001   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3002    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3003 
3004   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3005     {
3006     case tcc_unary:
3007       /* Two conversions are equal only if signedness and modes match.  */
3008       switch (TREE_CODE (arg0))
3009         {
3010 	CASE_CONVERT:
3011         case FIX_TRUNC_EXPR:
3012 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3013 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3014 	    return 0;
3015 	  break;
3016 	default:
3017 	  break;
3018 	}
3019 
3020       return OP_SAME (0);
3021 
3022 
3023     case tcc_comparison:
3024     case tcc_binary:
3025       if (OP_SAME (0) && OP_SAME (1))
3026 	return 1;
3027 
3028       /* For commutative ops, allow the other order.  */
3029       return (commutative_tree_code (TREE_CODE (arg0))
3030 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3031 				  TREE_OPERAND (arg1, 1), flags)
3032 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3033 				  TREE_OPERAND (arg1, 0), flags));
3034 
3035     case tcc_reference:
3036       /* If either of the pointer (or reference) expressions we are
3037 	 dereferencing contain a side effect, these cannot be equal,
3038 	 but their addresses can be.  */
3039       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3040 	  && (TREE_SIDE_EFFECTS (arg0)
3041 	      || TREE_SIDE_EFFECTS (arg1)))
3042 	return 0;
3043 
3044       switch (TREE_CODE (arg0))
3045 	{
3046 	case INDIRECT_REF:
3047 	  if (!(flags & OEP_ADDRESS_OF))
3048 	    {
3049 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3050 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3051 		return 0;
3052 	      /* Verify that the access types are compatible.  */
3053 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3054 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3055 		return 0;
3056 	    }
3057 	  flags &= ~OEP_ADDRESS_OF;
3058 	  return OP_SAME (0);
3059 
3060 	case IMAGPART_EXPR:
3061 	  /* Require the same offset.  */
3062 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3063 				TYPE_SIZE (TREE_TYPE (arg1)),
3064 				flags & ~OEP_ADDRESS_OF))
3065 	    return 0;
3066 
3067 	/* Fallthru.  */
3068 	case REALPART_EXPR:
3069 	case VIEW_CONVERT_EXPR:
3070 	  return OP_SAME (0);
3071 
3072 	case TARGET_MEM_REF:
3073 	case MEM_REF:
3074 	  if (!(flags & OEP_ADDRESS_OF))
3075 	    {
3076 	      /* Require equal access sizes.  */
3077 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3078 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3079 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3080 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3081 					   TYPE_SIZE (TREE_TYPE (arg1)),
3082 					   flags)))
3083 		return 0;
3084 	      /* Verify that access happens in similar types.  */
3085 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3086 		return 0;
3087 	      /* Verify that accesses are TBAA compatible.  */
3088 	      if (!alias_ptr_types_compatible_p
3089 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3090 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3091 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3092 		      != MR_DEPENDENCE_CLIQUE (arg1))
3093 		  || (MR_DEPENDENCE_BASE (arg0)
3094 		      != MR_DEPENDENCE_BASE (arg1)))
3095 		return 0;
3096 	     /* Verify that alignment is compatible.  */
3097 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3098 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3099 		return 0;
3100 	    }
3101 	  flags &= ~OEP_ADDRESS_OF;
3102 	  return (OP_SAME (0) && OP_SAME (1)
3103 		  /* TARGET_MEM_REFs require equal extra operands.  */
3104 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3105 		      || (OP_SAME_WITH_NULL (2)
3106 			  && OP_SAME_WITH_NULL (3)
3107 			  && OP_SAME_WITH_NULL (4))));
3108 
3109 	case ARRAY_REF:
3110 	case ARRAY_RANGE_REF:
3111 	  if (!OP_SAME (0))
3112 	    return 0;
3113 	  flags &= ~OEP_ADDRESS_OF;
3114 	  /* Compare the array index by value first if it is constant, as we
3115 	     may have different types but the same value here.  */
3116 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3117 				       TREE_OPERAND (arg1, 1))
3118 		   || OP_SAME (1))
3119 		  && OP_SAME_WITH_NULL (2)
3120 		  && OP_SAME_WITH_NULL (3)
3121 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3122 		     we have to account for the offset of the ref.  */
3123 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3124 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3125 		      || (operand_equal_p (array_ref_low_bound
3126 					     (CONST_CAST_TREE (arg0)),
3127 					   array_ref_low_bound
3128 					     (CONST_CAST_TREE (arg1)), flags)
3129 			  && operand_equal_p (array_ref_element_size
3130 					        (CONST_CAST_TREE (arg0)),
3131 					      array_ref_element_size
3132 					        (CONST_CAST_TREE (arg1)),
3133 					      flags))));
3134 
3135 	case COMPONENT_REF:
3136 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3137 	     may be NULL when we're called to compare MEM_EXPRs.  */
3138 	  if (!OP_SAME_WITH_NULL (0)
3139 	      || !OP_SAME (1))
3140 	    return 0;
3141 	  flags &= ~OEP_ADDRESS_OF;
3142 	  return OP_SAME_WITH_NULL (2);
3143 
3144 	case BIT_FIELD_REF:
3145 	  if (!OP_SAME (0))
3146 	    return 0;
3147 	  flags &= ~OEP_ADDRESS_OF;
3148 	  return OP_SAME (1) && OP_SAME (2);
3149 
3150 	default:
3151 	  return 0;
3152 	}
3153 
3154     case tcc_expression:
3155       switch (TREE_CODE (arg0))
3156 	{
3157 	case ADDR_EXPR:
3158 	  /* Be sure we pass right ADDRESS_OF flag.  */
3159 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3160 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3161 				  TREE_OPERAND (arg1, 0),
3162 				  flags | OEP_ADDRESS_OF);
3163 
3164 	case TRUTH_NOT_EXPR:
3165 	  return OP_SAME (0);
3166 
3167 	case TRUTH_ANDIF_EXPR:
3168 	case TRUTH_ORIF_EXPR:
3169 	  return OP_SAME (0) && OP_SAME (1);
3170 
3171 	case FMA_EXPR:
3172 	case WIDEN_MULT_PLUS_EXPR:
3173 	case WIDEN_MULT_MINUS_EXPR:
3174 	  if (!OP_SAME (2))
3175 	    return 0;
3176 	  /* The multiplication operands are commutative.  */
3177 	  /* FALLTHRU */
3178 
3179 	case TRUTH_AND_EXPR:
3180 	case TRUTH_OR_EXPR:
3181 	case TRUTH_XOR_EXPR:
3182 	  if (OP_SAME (0) && OP_SAME (1))
3183 	    return 1;
3184 
3185 	  /* Otherwise, take into account that this is a commutative operation.  */
3186 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3187 				   TREE_OPERAND (arg1, 1), flags)
3188 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3189 				      TREE_OPERAND (arg1, 0), flags));
3190 
3191 	case COND_EXPR:
3192 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3193 	    return 0;
3194 	  flags &= ~OEP_ADDRESS_OF;
3195 	  return OP_SAME (0);
3196 
3197 	case VEC_COND_EXPR:
3198 	case DOT_PROD_EXPR:
3199 	case BIT_INSERT_EXPR:
3200 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3201 
3202 	case MODIFY_EXPR:
3203 	case INIT_EXPR:
3204 	case COMPOUND_EXPR:
3205 	case PREDECREMENT_EXPR:
3206 	case PREINCREMENT_EXPR:
3207 	case POSTDECREMENT_EXPR:
3208 	case POSTINCREMENT_EXPR:
3209 	  if (flags & OEP_LEXICOGRAPHIC)
3210 	    return OP_SAME (0) && OP_SAME (1);
3211 	  return 0;
3212 
3213 	case CLEANUP_POINT_EXPR:
3214 	case EXPR_STMT:
3215 	  if (flags & OEP_LEXICOGRAPHIC)
3216 	    return OP_SAME (0);
3217 	  return 0;
3218 
3219 	default:
3220 	  return 0;
3221 	}
3222 
3223     case tcc_vl_exp:
3224       switch (TREE_CODE (arg0))
3225 	{
3226 	case CALL_EXPR:
3227 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3228 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3229 	    /* If the two CALL_EXPRs are not both internal or both normal
3230 	       function calls, then they are not equal.  */
3231 	    return 0;
3232 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3233 	    {
3234 	      /* If the CALL_EXPRs call different internal functions, then they
3235 		 are not equal.  */
3236 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3237 		return 0;
3238 	    }
3239 	  else
3240 	    {
3241 	      /* If the CALL_EXPRs call different functions, then they are not
3242 		 equal.  */
3243 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3244 				     flags))
3245 		return 0;
3246 	    }
3247 
3248 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3249 	  {
3250 	    unsigned int cef = call_expr_flags (arg0);
3251 	    if (flags & OEP_PURE_SAME)
3252 	      cef &= ECF_CONST | ECF_PURE;
3253 	    else
3254 	      cef &= ECF_CONST;
3255 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3256 	      return 0;
3257 	  }
3258 
3259 	  /* Now see if all the arguments are the same.  */
3260 	  {
3261 	    const_call_expr_arg_iterator iter0, iter1;
3262 	    const_tree a0, a1;
3263 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3264 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3265 		 a0 && a1;
3266 		 a0 = next_const_call_expr_arg (&iter0),
3267 		   a1 = next_const_call_expr_arg (&iter1))
3268 	      if (! operand_equal_p (a0, a1, flags))
3269 		return 0;
3270 
3271 	    /* If we get here and both argument lists are exhausted
3272 	       then the CALL_EXPRs are equal.  */
3273 	    return ! (a0 || a1);
3274 	  }
3275 	default:
3276 	  return 0;
3277 	}
3278 
3279     case tcc_declaration:
3280       /* Consider __builtin_sqrt equal to sqrt.  */
3281       return (TREE_CODE (arg0) == FUNCTION_DECL
3282 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3283 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3284 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3285 
3286     case tcc_exceptional:
3287       if (TREE_CODE (arg0) == CONSTRUCTOR)
3288 	{
3289 	  /* In GIMPLE constructors are used only to build vectors from
3290 	     elements.  Individual elements in the constructor must be
3291 	     indexed in increasing order and form an initial sequence.
3292 
3293 	     We make no effort to compare constructors in GENERIC.
3294 	     (see sem_variable::equals in ipa-icf which can do so for
3295 	      constants).  */
3296 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3297 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3298 	    return 0;
3299 
3300 	  /* Be sure that vectors constructed have the same representation.
3301 	     So far we have only checked that element precision and modes match.
3302 	     Vectors may be BLKmode, so also check that the number of
3303 	     parts matches.  */
3304 	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3305 	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3306 	    return 0;
3307 
3308 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3309 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3310 	  unsigned int len = vec_safe_length (v0);
3311 
3312 	  if (len != vec_safe_length (v1))
3313 	    return 0;
3314 
3315 	  for (unsigned int i = 0; i < len; i++)
3316 	    {
3317 	      constructor_elt *c0 = &(*v0)[i];
3318 	      constructor_elt *c1 = &(*v1)[i];
3319 
3320 	      if (!operand_equal_p (c0->value, c1->value, flags)
3321 		  /* In GIMPLE the indexes can be either NULL or matching i.
3322 		     Double check this so we won't get false
3323 		     positives for GENERIC.  */
3324 		  || (c0->index
3325 		      && (TREE_CODE (c0->index) != INTEGER_CST
3326 			  || !compare_tree_int (c0->index, i)))
3327 		  || (c1->index
3328 		      && (TREE_CODE (c1->index) != INTEGER_CST
3329 			  || !compare_tree_int (c1->index, i))))
3330 		return 0;
3331 	    }
3332 	  return 1;
3333 	}
3334       else if (TREE_CODE (arg0) == STATEMENT_LIST
3335 	       && (flags & OEP_LEXICOGRAPHIC))
3336 	{
3337 	  /* Compare the STATEMENT_LISTs.  */
3338 	  tree_stmt_iterator tsi1, tsi2;
3339 	  tree body1 = CONST_CAST_TREE (arg0);
3340 	  tree body2 = CONST_CAST_TREE (arg1);
3341 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3342 	       tsi_next (&tsi1), tsi_next (&tsi2))
3343 	    {
3344 	      /* The lists don't have the same number of statements.  */
3345 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3346 		return 0;
3347 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3348 		return 1;
3349 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3350 				    OEP_LEXICOGRAPHIC))
3351 		return 0;
3352 	    }
3353 	}
3354       return 0;
3355 
3356     case tcc_statement:
3357       switch (TREE_CODE (arg0))
3358 	{
3359 	case RETURN_EXPR:
3360 	  if (flags & OEP_LEXICOGRAPHIC)
3361 	    return OP_SAME_WITH_NULL (0);
3362 	  return 0;
3363 	default:
3364 	  return 0;
3365 	 }
3366 
3367     default:
3368       return 0;
3369     }
3370 
3371 #undef OP_SAME
3372 #undef OP_SAME_WITH_NULL
3373 }
3374 
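/* Usage sketches (illustrative, for side-effect-free operands A and B
   of matching type): operand_equal_p (A + B, B + A, 0) returns 1
   because PLUS_EXPR is commutative; REAL_CSTs for -0.0 and 0.0 compare
   equal only when signed zeros are not honored; and with
   OEP_ADDRESS_OF set, a bare decl compares equal to a zero-offset
   MEM_REF around its address, per the TBAA-wrapping case above.  */
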
3375 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3376    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3377 
3378    When in doubt, return 0.  */
3379 
3380 static int
3381 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3382 {
3383   int unsignedp1, unsignedpo;
3384   tree primarg0, primarg1, primother;
3385   unsigned int correct_width;
3386 
3387   if (operand_equal_p (arg0, arg1, 0))
3388     return 1;
3389 
3390   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3391       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3392     return 0;
3393 
3394   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3395      and see if the inner values are the same.  This removes any
3396      signedness comparison, which doesn't matter here.  */
3397   primarg0 = arg0, primarg1 = arg1;
3398   STRIP_NOPS (primarg0);
3399   STRIP_NOPS (primarg1);
3400   if (operand_equal_p (primarg0, primarg1, 0))
3401     return 1;
3402 
3403   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3404      actual comparison operand, ARG0.
3405 
3406      First throw away any conversions to wider types
3407      already present in the operands.  */
3408 
3409   primarg1 = get_narrower (arg1, &unsignedp1);
3410   primother = get_narrower (other, &unsignedpo);
3411 
3412   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3413   if (unsignedp1 == unsignedpo
3414       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3415       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3416     {
3417       tree type = TREE_TYPE (arg0);
3418 
3419       /* Make sure shorter operand is extended the right way
3420 	 to match the longer operand.  */
3421       primarg1 = fold_convert (signed_or_unsigned_type_for
3422 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3423 
3424       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3425 	return 1;
3426     }
3427 
3428   return 0;
3429 }
3430 
3431 /* See if ARG is an expression that is either a comparison or is performing
3432    arithmetic on comparisons.  The comparisons must only be comparing
3433    two different values, which will be stored in *CVAL1 and *CVAL2; if
3434    they are nonzero it means that some operands have already been found.
3435    No variables may be used anywhere else in the expression except in the
3436    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3437    the expression and save_expr needs to be called with CVAL1 and CVAL2.
3438 
3439    If this is true, return 1.  Otherwise, return zero.  */
3440 
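/* For example (x, y and z standing for arbitrary operands), for
   ARG = (x < y) || (x == y) this returns 1 with *CVAL1 = x and
   *CVAL2 = y, whereas for (x < y) && (z > 0) it returns 0, since
   three distinct values take part in the comparisons.  */
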
3441 static int
3442 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3443 {
3444   enum tree_code code = TREE_CODE (arg);
3445   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3446 
3447   /* We can handle some of the tcc_expression cases here.  */
3448   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3449     tclass = tcc_unary;
3450   else if (tclass == tcc_expression
3451 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3452 	       || code == COMPOUND_EXPR))
3453     tclass = tcc_binary;
3454 
3455   else if (tclass == tcc_expression && code == SAVE_EXPR
3456 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3457     {
3458       /* If we've already found a CVAL1 or CVAL2, this expression is
3459 	 too complex to handle.  */
3460       if (*cval1 || *cval2)
3461 	return 0;
3462 
3463       tclass = tcc_unary;
3464       *save_p = 1;
3465     }
3466 
3467   switch (tclass)
3468     {
3469     case tcc_unary:
3470       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3471 
3472     case tcc_binary:
3473       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3474 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3475 				      cval1, cval2, save_p));
3476 
3477     case tcc_constant:
3478       return 1;
3479 
3480     case tcc_expression:
3481       if (code == COND_EXPR)
3482 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3483 				     cval1, cval2, save_p)
3484 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3485 					cval1, cval2, save_p)
3486 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3487 					cval1, cval2, save_p));
3488       return 0;
3489 
3490     case tcc_comparison:
3491       /* First see if we can handle the first operand, then the second.  For
3492 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3493 	 one side of the comparison is each of the values; test for the
3494 	 case where this isn't true by failing if the two operands
3495 	 are the same.  */
3496 
3497       if (operand_equal_p (TREE_OPERAND (arg, 0),
3498 			   TREE_OPERAND (arg, 1), 0))
3499 	return 0;
3500 
3501       if (*cval1 == 0)
3502 	*cval1 = TREE_OPERAND (arg, 0);
3503       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3504 	;
3505       else if (*cval2 == 0)
3506 	*cval2 = TREE_OPERAND (arg, 0);
3507       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3508 	;
3509       else
3510 	return 0;
3511 
3512       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3513 	;
3514       else if (*cval2 == 0)
3515 	*cval2 = TREE_OPERAND (arg, 1);
3516       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3517 	;
3518       else
3519 	return 0;
3520 
3521       return 1;
3522 
3523     default:
3524       return 0;
3525     }
3526 }
3527 
3528 /* ARG is a tree that is known to contain just arithmetic operations and
3529    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3530    any occurrence of OLD0 as an operand of a comparison and likewise for
3531    NEW1 and OLD1.  */
3532 
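/* For example, with OLD0 = a, NEW0 = p, OLD1 = b and NEW1 = q
   (arbitrary trees), ARG = (a < b) || (b == a) is rewritten into
   (p < q) || (q == p): every comparison operand equal to OLD0 or OLD1
   is substituted and the expression is rebuilt with fold_build*.  */
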
3533 static tree
3534 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3535 	    tree old1, tree new1)
3536 {
3537   tree type = TREE_TYPE (arg);
3538   enum tree_code code = TREE_CODE (arg);
3539   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3540 
3541   /* We can handle some of the tcc_expression cases here.  */
3542   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3543     tclass = tcc_unary;
3544   else if (tclass == tcc_expression
3545 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3546     tclass = tcc_binary;
3547 
3548   switch (tclass)
3549     {
3550     case tcc_unary:
3551       return fold_build1_loc (loc, code, type,
3552 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3553 				      old0, new0, old1, new1));
3554 
3555     case tcc_binary:
3556       return fold_build2_loc (loc, code, type,
3557 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3558 				      old0, new0, old1, new1),
3559 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3560 				      old0, new0, old1, new1));
3561 
3562     case tcc_expression:
3563       switch (code)
3564 	{
3565 	case SAVE_EXPR:
3566 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3567 			     old1, new1);
3568 
3569 	case COMPOUND_EXPR:
3570 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3571 			     old1, new1);
3572 
3573 	case COND_EXPR:
3574 	  return fold_build3_loc (loc, code, type,
3575 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3576 					  old0, new0, old1, new1),
3577 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3578 					  old0, new0, old1, new1),
3579 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3580 					  old0, new0, old1, new1));
3581 	default:
3582 	  break;
3583 	}
3584       /* Fall through - ???  */
3585 
3586     case tcc_comparison:
3587       {
3588 	tree arg0 = TREE_OPERAND (arg, 0);
3589 	tree arg1 = TREE_OPERAND (arg, 1);
3590 
3591 	/* We need to check both for exact equality and tree equality.  The
3592 	   former will be true if the operand has a side-effect.  In that
3593 	   case, we know the operand occurred exactly once.  */
3594 
3595 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3596 	  arg0 = new0;
3597 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3598 	  arg0 = new1;
3599 
3600 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3601 	  arg1 = new0;
3602 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3603 	  arg1 = new1;
3604 
3605 	return fold_build2_loc (loc, code, type, arg0, arg1);
3606       }
3607 
3608     default:
3609       return arg;
3610     }
3611 }
3612 
3613 /* Return a tree for the case when the result of an expression is RESULT
3614    converted to TYPE and OMITTED was previously an operand of the expression
3615    but is now not needed (e.g., we folded OMITTED * 0).
3616 
3617    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3618    the conversion of RESULT to TYPE.  */
3619 
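/* For example, after folding OMITTED * 0 where OMITTED is a call f ()
   with side effects, this returns the COMPOUND_EXPR (f (), 0) converted
   to TYPE, so the call is still evaluated; if OMITTED is a plain
   variable, the result is simply the constant 0.  */
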
3620 tree
3621 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3622 {
3623   tree t = fold_convert_loc (loc, type, result);
3624 
3625   /* If the resulting operand is an empty statement, just return the omitted
3626      statement cast to void.  */
3627   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3628     return build1_loc (loc, NOP_EXPR, void_type_node,
3629 		       fold_ignored_result (omitted));
3630 
3631   if (TREE_SIDE_EFFECTS (omitted))
3632     return build2_loc (loc, COMPOUND_EXPR, type,
3633 		       fold_ignored_result (omitted), t);
3634 
3635   return non_lvalue_loc (loc, t);
3636 }
3637 
3638 /* Return a tree for the case when the result of an expression is RESULT
3639    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3640    of the expression but are now not needed.
3641 
3642    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3643    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3644    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3645    just do the conversion of RESULT to TYPE.  */
3646 
3647 tree
3648 omit_two_operands_loc (location_t loc, tree type, tree result,
3649 		       tree omitted1, tree omitted2)
3650 {
3651   tree t = fold_convert_loc (loc, type, result);
3652 
3653   if (TREE_SIDE_EFFECTS (omitted2))
3654     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3655   if (TREE_SIDE_EFFECTS (omitted1))
3656     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3657 
3658   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3659 }
3660 
3661 
3662 /* Return a simplified tree node for the truth-negation of ARG.  This
3663    never alters ARG itself.  We assume that ARG is an operation that
3664    returns a truth value (0 or 1).
3665 
3666    FIXME: one would think we would fold the result, but it causes
3667    problems with the dominator optimizer.  */
3668 
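/* For example, ! (a < b) becomes a >= b for integer operands.  For
   floating-point operands the inverse of LT_EXPR when NaNs must be
   honored is the unordered UNGE_EXPR, and under -ftrapping-math we
   return NULL_TREE instead of replacing a trapping comparison with a
   non-trapping one.  */
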
3669 static tree
3670 fold_truth_not_expr (location_t loc, tree arg)
3671 {
3672   tree type = TREE_TYPE (arg);
3673   enum tree_code code = TREE_CODE (arg);
3674   location_t loc1, loc2;
3675 
3676   /* If this is a comparison, we can simply invert it, except for
3677      floating-point non-equality comparisons, in which case we just
3678      enclose a TRUTH_NOT_EXPR around what we have.  */
3679 
3680   if (TREE_CODE_CLASS (code) == tcc_comparison)
3681     {
3682       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3683       if (FLOAT_TYPE_P (op_type)
3684 	  && flag_trapping_math
3685 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3686 	  && code != NE_EXPR && code != EQ_EXPR)
3687 	return NULL_TREE;
3688 
3689       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3690       if (code == ERROR_MARK)
3691 	return NULL_TREE;
3692 
3693       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3694 			     TREE_OPERAND (arg, 1));
3695       if (TREE_NO_WARNING (arg))
3696 	TREE_NO_WARNING (ret) = 1;
3697       return ret;
3698     }
3699 
3700   switch (code)
3701     {
3702     case INTEGER_CST:
3703       return constant_boolean_node (integer_zerop (arg), type);
3704 
3705     case TRUTH_AND_EXPR:
3706       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3707       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3708       return build2_loc (loc, TRUTH_OR_EXPR, type,
3709 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3710 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3711 
3712     case TRUTH_OR_EXPR:
3713       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3714       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3715       return build2_loc (loc, TRUTH_AND_EXPR, type,
3716 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3717 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3718 
3719     case TRUTH_XOR_EXPR:
3720       /* Here we can invert either operand.  We invert the first operand
3721 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3722 	 result is the XOR of the first operand with the inside of the
3723 	 negation of the second operand.  */
3724 
3725       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3726 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3727 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3728       else
3729 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3730 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3731 			   TREE_OPERAND (arg, 1));
3732 
3733     case TRUTH_ANDIF_EXPR:
3734       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3735       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3736       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3737 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3738 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3739 
3740     case TRUTH_ORIF_EXPR:
3741       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3742       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3743       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3744 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3745 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3746 
3747     case TRUTH_NOT_EXPR:
3748       return TREE_OPERAND (arg, 0);
3749 
3750     case COND_EXPR:
3751       {
3752 	tree arg1 = TREE_OPERAND (arg, 1);
3753 	tree arg2 = TREE_OPERAND (arg, 2);
3754 
3755 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3756 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3757 
3758 	/* A COND_EXPR may have a throw as one operand, which
3759 	   then has void type.  Just leave void operands
3760 	   as they are.  */
3761 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3762 			   VOID_TYPE_P (TREE_TYPE (arg1))
3763 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3764 			   VOID_TYPE_P (TREE_TYPE (arg2))
3765 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3766       }
3767 
3768     case COMPOUND_EXPR:
3769       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3770       return build2_loc (loc, COMPOUND_EXPR, type,
3771 			 TREE_OPERAND (arg, 0),
3772 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3773 
3774     case NON_LVALUE_EXPR:
3775       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3776       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3777 
3778     CASE_CONVERT:
3779       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3780 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3781 
3782       /* fall through */
3783 
3784     case FLOAT_EXPR:
3785       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3786       return build1_loc (loc, TREE_CODE (arg), type,
3787 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3788 
3789     case BIT_AND_EXPR:
3790       if (!integer_onep (TREE_OPERAND (arg, 1)))
3791 	return NULL_TREE;
3792       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3793 
3794     case SAVE_EXPR:
3795       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3796 
3797     case CLEANUP_POINT_EXPR:
3798       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3799       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3800 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3801 
3802     default:
3803       return NULL_TREE;
3804     }
3805 }
3806 
3807 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3808    assume that ARG is an operation that returns a truth value (0 or 1
3809    for scalars, 0 or -1 for vectors).  Return the folded expression if
3810    folding is successful.  Otherwise, return NULL_TREE.  */
3811 
3812 static tree
3813 fold_invert_truthvalue (location_t loc, tree arg)
3814 {
3815   tree type = TREE_TYPE (arg);
3816   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3817 			      ? BIT_NOT_EXPR
3818 			      : TRUTH_NOT_EXPR,
3819 			 type, arg);
3820 }
3821 
3822 /* Return a simplified tree node for the truth-negation of ARG.  This
3823    never alters ARG itself.  We assume that ARG is an operation that
3824    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3825 
3826 tree
3827 invert_truthvalue_loc (location_t loc, tree arg)
3828 {
3829   if (TREE_CODE (arg) == ERROR_MARK)
3830     return arg;
3831 
3832   tree type = TREE_TYPE (arg);
3833   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3834 			       ? BIT_NOT_EXPR
3835 			       : TRUTH_NOT_EXPR,
3836 			  type, arg);
3837 }
3838 
3839 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3840    with code CODE.  This optimization is unsafe because it changes rounding.  */
3841 static tree
3842 distribute_real_division (location_t loc, enum tree_code code, tree type,
3843 			  tree arg0, tree arg1)
3844 {
3845   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3846   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3847 
3848   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3849   if (mul0 == mul1
3850       && operand_equal_p (TREE_OPERAND (arg0, 1),
3851 		       TREE_OPERAND (arg1, 1), 0))
3852     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3853 			fold_build2_loc (loc, code, type,
3854 				     TREE_OPERAND (arg0, 0),
3855 				     TREE_OPERAND (arg1, 0)),
3856 			TREE_OPERAND (arg0, 1));
3857 
3858   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3859   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3860 		       TREE_OPERAND (arg1, 0), 0)
3861       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3862       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3863     {
3864       REAL_VALUE_TYPE r0, r1;
3865       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3866       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3867       if (!mul0)
3868 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3869       if (!mul1)
3870         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3871       real_arithmetic (&r0, code, &r0, &r1);
3872       return fold_build2_loc (loc, MULT_EXPR, type,
3873 			  TREE_OPERAND (arg0, 0),
3874 			  build_real (type, r0));
3875     }
3876 
3877   return NULL_TREE;
3878 }
3879 
3880 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3881    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3882    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3883    is the original memory reference used to preserve the alias set of
3884    the access.  */
3885 
3886 static tree
3887 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3888 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3889 		    int unsignedp, int reversep)
3890 {
3891   tree result, bftype;
3892 
3893   /* Attempt not to lose the access path if possible.  */
3894   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3895     {
3896       tree ninner = TREE_OPERAND (orig_inner, 0);
3897       machine_mode nmode;
3898       HOST_WIDE_INT nbitsize, nbitpos;
3899       tree noffset;
3900       int nunsignedp, nreversep, nvolatilep = 0;
3901       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3902 				       &noffset, &nmode, &nunsignedp,
3903 				       &nreversep, &nvolatilep);
3904       if (base == inner
3905 	  && noffset == NULL_TREE
3906 	  && nbitsize >= bitsize
3907 	  && nbitpos <= bitpos
3908 	  && bitpos + bitsize <= nbitpos + nbitsize
3909 	  && !reversep
3910 	  && !nreversep
3911 	  && !nvolatilep)
3912 	{
3913 	  inner = ninner;
3914 	  bitpos -= nbitpos;
3915 	}
3916     }
3917 
3918   alias_set_type iset = get_alias_set (orig_inner);
3919   if (iset == 0 && get_alias_set (inner) != iset)
3920     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3921 			 build_fold_addr_expr (inner),
3922 			 build_int_cst (ptr_type_node, 0));
3923 
3924   if (bitpos == 0 && !reversep)
3925     {
3926       tree size = TYPE_SIZE (TREE_TYPE (inner));
3927       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3928 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3929 	  && tree_fits_shwi_p (size)
3930 	  && tree_to_shwi (size) == bitsize)
3931 	return fold_convert_loc (loc, type, inner);
3932     }
3933 
3934   bftype = type;
3935   if (TYPE_PRECISION (bftype) != bitsize
3936       || TYPE_UNSIGNED (bftype) == !unsignedp)
3937     bftype = build_nonstandard_integer_type (bitsize, 0);
3938 
3939   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3940 		       size_int (bitsize), bitsize_int (bitpos));
3941   REF_REVERSE_STORAGE_ORDER (result) = reversep;
3942 
3943   if (bftype != type)
3944     result = fold_convert_loc (loc, type, result);
3945 
3946   return result;
3947 }
3948 
3949 /* Optimize a bit-field compare.
3950 
3951    There are two cases:  the first is a compare against a constant and the
3952    second is a comparison of two items where the fields are at the same
3953    bit position relative to the start of a chunk (byte, halfword, word)
3954    large enough to contain it.  In these cases we can avoid the shift
3955    implicit in bitfield extractions.
3956 
3957    For constants, we emit a compare of the shifted constant with the
3958    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3959    compared.  For two fields at the same position, we do the ANDs with the
3960    similar mask and compare the result of the ANDs.
3961 
3962    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3963    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3964    are the left and right operands of the comparison, respectively.
3965 
3966    If the optimization described above can be done, we return the resulting
3967    tree.  Otherwise we return zero.  */
3968 
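/* For example (schematically), given

     struct S { unsigned a : 4; unsigned b : 4; } s;

   the test s.b == 3 can be done without a shift as

     (w & 0xf0) == 0x30

   where w is a byte-sized load of the chunk containing s.b; the exact
   mask and shift depend on endianness and on the mode chosen.  */
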
3969 static tree
3970 optimize_bit_field_compare (location_t loc, enum tree_code code,
3971 			    tree compare_type, tree lhs, tree rhs)
3972 {
3973   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3974   tree type = TREE_TYPE (lhs);
3975   tree unsigned_type;
3976   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3977   machine_mode lmode, rmode, nmode;
3978   int lunsignedp, runsignedp;
3979   int lreversep, rreversep;
3980   int lvolatilep = 0, rvolatilep = 0;
3981   tree linner, rinner = NULL_TREE;
3982   tree mask;
3983   tree offset;
3984 
3985   /* Get all the information about the extractions being done.  If the bit size
3986      is the same as the size of the underlying object, we aren't doing an
3987      extraction at all and so can do nothing.  We also don't want to
3988      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3989      then will no longer be able to replace it.  */
3990   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3991 				&lunsignedp, &lreversep, &lvolatilep);
3992   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3993       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3994     return 0;
3995 
3996   if (const_p)
3997     rreversep = lreversep;
3998   else
3999    {
4000      /* If this is not a constant, we can only do something if bit positions,
4001 	sizes, signedness and storage order are the same.  */
4002      rinner
4003        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4004 			      &runsignedp, &rreversep, &rvolatilep);
4005 
4006      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4007 	 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
4008 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4009        return 0;
4010    }
4011 
4012   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4013   unsigned HOST_WIDE_INT bitstart = 0;
4014   unsigned HOST_WIDE_INT bitend = 0;
4015   if (TREE_CODE (lhs) == COMPONENT_REF)
4016     {
4017       get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4018       if (offset != NULL_TREE)
4019 	return 0;
4020     }
4021 
4022   /* See if we can find a mode to refer to this field.  We should be able to,
4023      but fail if we can't.  */
4024   nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4025 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4026 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4027 				TYPE_ALIGN (TREE_TYPE (rinner))),
4028 			 word_mode, false);
4029   if (nmode == VOIDmode)
4030     return 0;
4031 
4032   /* Set an unsigned type of the precision of this mode for the
4033      shifts below.  */
4034   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4035 
4036   /* Compute the bit position and size for the new reference and our offset
4037      within it. If the new reference is the same size as the original, we
4038      won't optimize anything, so return zero.  */
4039   nbitsize = GET_MODE_BITSIZE (nmode);
4040   nbitpos = lbitpos & ~ (nbitsize - 1);
4041   lbitpos -= nbitpos;
4042   if (nbitsize == lbitsize)
4043     return 0;
4044 
4045   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4046     lbitpos = nbitsize - lbitsize - lbitpos;
4047 
4048   /* Make the mask to be used against the extracted field.  */
4049   mask = build_int_cst_type (unsigned_type, -1);
4050   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4051   mask = const_binop (RSHIFT_EXPR, mask,
4052 		      size_int (nbitsize - lbitsize - lbitpos));
4053 
4054   if (! const_p)
4055     {
4056       if (nbitpos < 0)
4057 	return 0;
4058 
4059       /* If not comparing with constant, just rework the comparison
4060 	 and return.  */
4061       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4062 				    nbitsize, nbitpos, 1, lreversep);
4063       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4064       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4065 				    nbitsize, nbitpos, 1, rreversep);
4066       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4067       return fold_build2_loc (loc, code, compare_type, t1, t2);
4068     }
4069 
4070   /* Otherwise, we are handling the constant case.  See if the constant is too
4071      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4072      this not only for its own sake, but to avoid having to test for this
4073      error case below.  If we didn't, we might generate wrong code.
4074 
4075      For unsigned fields, the constant shifted right by the field length should
4076      be all zero.  For signed fields, the high-order bits should agree with
4077      the sign bit.  */
4078 
4079   if (lunsignedp)
4080     {
4081       if (wi::lrshift (rhs, lbitsize) != 0)
4082 	{
4083 	  warning (0, "comparison is always %d due to width of bit-field",
4084 		   code == NE_EXPR);
4085 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4086 	}
4087     }
4088   else
4089     {
4090       wide_int tem = wi::arshift (rhs, lbitsize - 1);
4091       if (tem != 0 && tem != -1)
4092 	{
4093 	  warning (0, "comparison is always %d due to width of bit-field",
4094 		   code == NE_EXPR);
4095 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4096 	}
4097     }
4098 
4099   if (nbitpos < 0)
4100     return 0;
4101 
4102   /* Single-bit compares should always be against zero.  */
4103   if (lbitsize == 1 && ! integer_zerop (rhs))
4104     {
4105       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4106       rhs = build_int_cst (type, 0);
4107     }
4108 
4109   /* Make a new bitfield reference, shift the constant over the
4110      appropriate number of bits and mask it with the computed mask
4111      (in case this was a signed field).  If we changed it, make a new one.  */
4112   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4113 			    nbitsize, nbitpos, 1, lreversep);
4114 
4115   rhs = const_binop (BIT_AND_EXPR,
4116 		     const_binop (LSHIFT_EXPR,
4117 				  fold_convert_loc (loc, unsigned_type, rhs),
4118 				  size_int (lbitpos)),
4119 		     mask);
4120 
4121   lhs = build2_loc (loc, code, compare_type,
4122 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4123   return lhs;
4124 }
4125 
4126 /* Subroutine for fold_truth_andor_1: decode a field reference.
4127 
4128    If EXP is a comparison reference, we return the innermost reference.
4129 
4130    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4131    set to the starting bit number.
4132 
4133    If the innermost field can be completely contained in a mode-sized
4134    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4135 
4136    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4137    otherwise it is not changed.
4138 
4139    *PUNSIGNEDP is set to the signedness of the field.
4140 
4141    *PREVERSEP is set to the storage order of the field.
4142 
4143    *PMASK is set to the mask used.  This is either contained in a
4144    BIT_AND_EXPR or derived from the width of the field.
4145 
4146    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4147 
4148    Return 0 if this is not a component reference or is one that we can't
4149    do anything with.  */
4150 
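/* For example, for EXP = s.b & 3, with s.b a hypothetical unsigned
   8-bit field starting at bit 16, this returns the containing object
   with *PBITSIZE = 8, *PBITPOS = 16, *PAND_MASK = 3 and *PMASK the
   value 3 in the 8-bit unsigned type.  */
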
4151 static tree
4152 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4153 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4154 			int *punsignedp, int *preversep, int *pvolatilep,
4155 			tree *pmask, tree *pand_mask)
4156 {
4157   tree exp = *exp_;
4158   tree outer_type = 0;
4159   tree and_mask = 0;
4160   tree mask, inner, offset;
4161   tree unsigned_type;
4162   unsigned int precision;
4163 
4164   /* All the optimizations using this function assume integer fields.
4165      There are problems with FP fields since the type_for_size call
4166      below can fail for, e.g., XFmode.  */
4167   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4168     return NULL_TREE;
4169 
4170   /* We are interested in the bare arrangement of bits, so strip everything
4171      that doesn't affect the machine mode.  However, record the type of the
4172      outermost expression if it may matter below.  */
4173   if (CONVERT_EXPR_P (exp)
4174       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4175     outer_type = TREE_TYPE (exp);
4176   STRIP_NOPS (exp);
4177 
4178   if (TREE_CODE (exp) == BIT_AND_EXPR)
4179     {
4180       and_mask = TREE_OPERAND (exp, 1);
4181       exp = TREE_OPERAND (exp, 0);
4182       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4183       if (TREE_CODE (and_mask) != INTEGER_CST)
4184 	return NULL_TREE;
4185     }
4186 
4187   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4188 			       punsignedp, preversep, pvolatilep);
4189   if ((inner == exp && and_mask == 0)
4190       || *pbitsize < 0 || offset != 0
4191       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4192       /* Reject out-of-bound accesses (PR79731).  */
4193       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4194 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4195 			       *pbitpos + *pbitsize) < 0))
4196     return NULL_TREE;
4197 
4198   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4199   if (unsigned_type == NULL_TREE)
4200     return NULL_TREE;
4201 
4202   *exp_ = exp;
4203 
4204   /* If the number of bits in the reference is the same as the bitsize of
4205      the outer type, then the outer type gives the signedness. Otherwise
4206      (in case of a small bitfield) the signedness is unchanged.  */
4207   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4208     *punsignedp = TYPE_UNSIGNED (outer_type);
4209 
4210   /* Compute the mask to access the bitfield.  */
4211   precision = TYPE_PRECISION (unsigned_type);
4212 
4213   mask = build_int_cst_type (unsigned_type, -1);
4214 
4215   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4216   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4217 
4218   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4219   if (and_mask != 0)
4220     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4221 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4222 
4223   *pmask = mask;
4224   *pand_mask = and_mask;
4225   return inner;
4226 }
4227 
4228 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4229    bit positions and the type of MASK is signed.  */
4230 
4231 static int
4232 all_ones_mask_p (const_tree mask, unsigned int size)
4233 {
4234   tree type = TREE_TYPE (mask);
4235   unsigned int precision = TYPE_PRECISION (type);
4236 
4237   /* If this function returns true when the type of the mask is
4238      UNSIGNED, then there will be errors.  In particular see
4239      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4240      any documentation paper trail as to why this is so.  But the
4241      pre-wide-int code worked with that restriction and it has been preserved
4242      here.  */
4243   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4244     return false;
4245 
4246   return wi::mask (size, false, precision) == mask;
4247 }
4248 
4249 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4250    represents the sign bit of EXP's type.  If EXP represents a sign
4251    or zero extension, also test VAL against the unextended type.
4252    The return value is the (sub)expression whose sign bit is VAL,
4253    or NULL_TREE otherwise.  */
4254 
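/* For example, for a 32-bit int EXP, VAL = 0x80000000 (only the sign
   bit set) makes this return EXP itself, while VAL = 0x40000000 yields
   NULL_TREE.  For EXP = (int) c, with c a signed 8-bit char, VAL = 0x80
   returns the inner c via the narrower type.  */
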
4255 tree
4256 sign_bit_p (tree exp, const_tree val)
4257 {
4258   int width;
4259   tree t;
4260 
4261   /* Tree EXP must have an integral type.  */
4262   t = TREE_TYPE (exp);
4263   if (! INTEGRAL_TYPE_P (t))
4264     return NULL_TREE;
4265 
4266   /* Tree VAL must be an integer constant.  */
4267   if (TREE_CODE (val) != INTEGER_CST
4268       || TREE_OVERFLOW (val))
4269     return NULL_TREE;
4270 
4271   width = TYPE_PRECISION (t);
4272   if (wi::only_sign_bit_p (val, width))
4273     return exp;
4274 
4275   /* Handle extension from a narrower type.  */
4276   if (TREE_CODE (exp) == NOP_EXPR
4277       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4278     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4279 
4280   return NULL_TREE;
4281 }
4282 
4283 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4284    to be evaluated unconditionally.  */
4285 
4286 static int
4287 simple_operand_p (const_tree exp)
4288 {
4289   /* Strip any conversions that don't change the machine mode.  */
4290   STRIP_NOPS (exp);
4291 
4292   return (CONSTANT_CLASS_P (exp)
4293   	  || TREE_CODE (exp) == SSA_NAME
4294 	  || (DECL_P (exp)
4295 	      && ! TREE_ADDRESSABLE (exp)
4296 	      && ! TREE_THIS_VOLATILE (exp)
4297 	      && ! DECL_NONLOCAL (exp)
4298 	      /* Don't regard global variables as simple.  They may be
4299 		 allocated in ways unknown to the compiler (shared memory,
4300 		 #pragma weak, etc).  */
4301 	      && ! TREE_PUBLIC (exp)
4302 	      && ! DECL_EXTERNAL (exp)
4303 	      /* Weakrefs are not safe to be read, since they can be NULL.
4304  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4305 		 have DECL_WEAK flag set.  */
4306 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4307 	      /* Loading a static variable is unduly expensive, but global
4308 		 registers aren't expensive.  */
4309 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4310 }
4311 
4312 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4313    to be evaluated unconditionally.
4314    In addition to simple_operand_p, we assume that comparisons, conversions,
4315    and logic-not operations are simple if their operands are simple, too.  */
4316 
4317 static bool
4318 simple_operand_p_2 (tree exp)
4319 {
4320   enum tree_code code;
4321 
4322   if (TREE_SIDE_EFFECTS (exp)
4323       || tree_could_trap_p (exp))
4324     return false;
4325 
4326   while (CONVERT_EXPR_P (exp))
4327     exp = TREE_OPERAND (exp, 0);
4328 
4329   code = TREE_CODE (exp);
4330 
4331   if (TREE_CODE_CLASS (code) == tcc_comparison)
4332     return (simple_operand_p (TREE_OPERAND (exp, 0))
4333 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4334 
4335   if (code == TRUTH_NOT_EXPR)
4336       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4337 
4338   return simple_operand_p (exp);
4339 }
4340 
4341 
4342 /* The following functions are subroutines to fold_range_test and allow it to
4343    try to change a logical combination of comparisons into a range test.
4344 
4345    For example, both
4346 	X == 2 || X == 3 || X == 4 || X == 5
4347    and
4348 	X >= 2 && X <= 5
4349    are converted to
4350 	(unsigned) (X - 2) <= 3
4351 
4352    We describe each set of comparisons as being either inside or outside
4353    a range, using a variable named like IN_P, and then describe the
4354    range with a lower and upper bound.  If one of the bounds is omitted,
4355    it represents either the highest or lowest value of the type.
4356 
4357    In the comments below, we represent a range by two numbers in brackets
4358    preceded by a "+" to designate being inside that range, or a "-" to
4359    designate being outside that range, so the condition can be inverted by
4360    flipping the prefix.  An omitted bound is represented by a "-".  For
4361    example, "- [-, 10]" means being outside the range starting at the lowest
4362    possible value and ending at 10, in other words, being greater than 10.
4363    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4364    always false.
4365 
4366    We set up things so that the missing bounds are handled in a consistent
4367    manner so neither a missing bound nor "true" and "false" need to be
4368    handled using a special case.  */
4369 
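/* For instance, X >= 2 && X <= 5 is the range "+ [2, 5]"; subtracting
   the low bound turns it into the single unsigned test
   (unsigned) (X - 2) <= 3, since any X below 2 wraps around to a value
   greater than 3.  Its complement, X < 2 || X > 5, is just "- [2, 5]".  */
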
4370 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4371    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4372    and UPPER1_P are nonzero if the respective argument is an upper bound
4373    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4374    must be specified for a comparison.  ARG1 will be converted to ARG0's
4375    type if both are specified.  */
4376 
4377 static tree
4378 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4379 	     tree arg1, int upper1_p)
4380 {
4381   tree tem;
4382   int result;
4383   int sgn0, sgn1;
4384 
4385   /* If neither arg represents infinity, do the normal operation.
4386      Else, if not a comparison, return infinity.  Else handle the special
4387      comparison rules. Note that most of the cases below won't occur, but
4388      are handled for consistency.  */
4389 
4390   if (arg0 != 0 && arg1 != 0)
4391     {
4392       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4393 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4394       STRIP_NOPS (tem);
4395       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4396     }
4397 
4398   if (TREE_CODE_CLASS (code) != tcc_comparison)
4399     return 0;
4400 
4401   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4402      for neither.  In real mathematics we could not assume that open-ended
4403      ranges compare equal.  But this is computer arithmetic, where numbers
4404      are finite, so any missing bound can stand for a value Z greater than
4405      any representable number.  This permits us to treat unbounded
4406      ranges as equal.  */
4407   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4408   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4409   switch (code)
4410     {
4411     case EQ_EXPR:
4412       result = sgn0 == sgn1;
4413       break;
4414     case NE_EXPR:
4415       result = sgn0 != sgn1;
4416       break;
4417     case LT_EXPR:
4418       result = sgn0 < sgn1;
4419       break;
4420     case LE_EXPR:
4421       result = sgn0 <= sgn1;
4422       break;
4423     case GT_EXPR:
4424       result = sgn0 > sgn1;
4425       break;
4426     case GE_EXPR:
4427       result = sgn0 >= sgn1;
4428       break;
4429     default:
4430       gcc_unreachable ();
4431     }
4432 
4433   return constant_boolean_node (result, type);
4434 }
4435 
4436 /* Helper routine for make_range.  Perform one step for it, return
4437    new expression if the loop should continue or NULL_TREE if it should
4438    stop.  */
4439 
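/* For example, one step on EXP = x > 10 with signed x and the initial
   range "- [0, 0]" (i.e. EXP != 0) returns x and rewrites the range to
   "- [-, 10]", which is x > 10; for unsigned x an additional merge
   with the range [0, -] happens inside the same step.  */
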
4440 tree
4441 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4442 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4443 		 bool *strict_overflow_p)
4444 {
4445   tree arg0_type = TREE_TYPE (arg0);
4446   tree n_low, n_high, low = *p_low, high = *p_high;
4447   int in_p = *p_in_p, n_in_p;
4448 
4449   switch (code)
4450     {
4451     case TRUTH_NOT_EXPR:
4452       /* We can only do something if the range is testing for zero.  */
4453       if (low == NULL_TREE || high == NULL_TREE
4454 	  || ! integer_zerop (low) || ! integer_zerop (high))
4455 	return NULL_TREE;
4456       *p_in_p = ! in_p;
4457       return arg0;
4458 
4459     case EQ_EXPR: case NE_EXPR:
4460     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4461       /* We can only do something if the range is testing for zero
4462 	 and if the second operand is an integer constant.  Note that
4463 	 saying something is "in" the range we make is done by
4464 	 complementing IN_P, since IN_P is initially set for the case of
4465 	 being not equal to zero; "out" means leaving it alone.  */
4466       if (low == NULL_TREE || high == NULL_TREE
4467 	  || ! integer_zerop (low) || ! integer_zerop (high)
4468 	  || TREE_CODE (arg1) != INTEGER_CST)
4469 	return NULL_TREE;
4470 
4471       switch (code)
4472 	{
4473 	case NE_EXPR:  /* - [c, c]  */
4474 	  low = high = arg1;
4475 	  break;
4476 	case EQ_EXPR:  /* + [c, c]  */
4477 	  in_p = ! in_p, low = high = arg1;
4478 	  break;
4479 	case GT_EXPR:  /* - [-, c] */
4480 	  low = 0, high = arg1;
4481 	  break;
4482 	case GE_EXPR:  /* + [c, -] */
4483 	  in_p = ! in_p, low = arg1, high = 0;
4484 	  break;
4485 	case LT_EXPR:  /* - [c, -] */
4486 	  low = arg1, high = 0;
4487 	  break;
4488 	case LE_EXPR:  /* + [-, c] */
4489 	  in_p = ! in_p, low = 0, high = arg1;
4490 	  break;
4491 	default:
4492 	  gcc_unreachable ();
4493 	}
4494 
4495       /* If this is an unsigned comparison, we also know that EXP is
4496 	 greater than or equal to zero.  We base the range tests we make
4497 	 on that fact, so we record it here so we can parse existing
4498 	 range tests.  We test arg0_type since often the return type
4499 	 of, e.g. EQ_EXPR, is boolean.  */
4500       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4501 	{
4502 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4503 			      in_p, low, high, 1,
4504 			      build_int_cst (arg0_type, 0),
4505 			      NULL_TREE))
4506 	    return NULL_TREE;
4507 
4508 	  in_p = n_in_p, low = n_low, high = n_high;
4509 
4510 	  /* If the high bound is missing, but we have a nonzero low
4511 	     bound, reverse the range so it goes from zero to the low bound
4512 	     minus 1.  */
4513 	  if (high == 0 && low && ! integer_zerop (low))
4514 	    {
4515 	      in_p = ! in_p;
4516 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4517 				  build_int_cst (TREE_TYPE (low), 1), 0);
4518 	      low = build_int_cst (arg0_type, 0);
4519 	    }
4520 	}
4521 
4522       *p_low = low;
4523       *p_high = high;
4524       *p_in_p = in_p;
4525       return arg0;
4526 
4527     case NEGATE_EXPR:
4528       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4529 	 low and high are non-NULL, then normalize will DTRT.  */
4530       if (!TYPE_UNSIGNED (arg0_type)
4531 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4532 	{
4533 	  if (low == NULL_TREE)
4534 	    low = TYPE_MIN_VALUE (arg0_type);
4535 	  if (high == NULL_TREE)
4536 	    high = TYPE_MAX_VALUE (arg0_type);
4537 	}
4538 
4539       /* (-x) IN [a,b] -> x in [-b, -a]  */
4540       n_low = range_binop (MINUS_EXPR, exp_type,
4541 			   build_int_cst (exp_type, 0),
4542 			   0, high, 1);
4543       n_high = range_binop (MINUS_EXPR, exp_type,
4544 			    build_int_cst (exp_type, 0),
4545 			    0, low, 0);
4546       if (n_high != 0 && TREE_OVERFLOW (n_high))
4547 	return NULL_TREE;
4548       goto normalize;
4549 
4550     case BIT_NOT_EXPR:
4551       /* ~ X -> -X - 1  */
4552       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4553 			 build_int_cst (exp_type, 1));
4554 
4555     case PLUS_EXPR:
4556     case MINUS_EXPR:
4557       if (TREE_CODE (arg1) != INTEGER_CST)
4558 	return NULL_TREE;
4559 
4560       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4561 	 move a constant to the other side.  */
4562       if (!TYPE_UNSIGNED (arg0_type)
4563 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4564 	return NULL_TREE;
4565 
4566       /* If EXP is signed, any overflow in the computation is undefined,
4567 	 so we don't worry about it so long as our computations on
4568 	 the bounds don't overflow.  For unsigned, overflow is defined
4569 	 and this is exactly the right thing.  */
4570       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4571 			   arg0_type, low, 0, arg1, 0);
4572       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4573 			    arg0_type, high, 1, arg1, 0);
4574       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4575 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4576 	return NULL_TREE;
4577 
4578       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4579 	*strict_overflow_p = true;
4580 
4581       normalize:
4582 	/* Check for an unsigned range which has wrapped around the maximum
4583 	   value thus making n_high < n_low, and normalize it.  */
4584 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4585 	  {
4586 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4587 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4588 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4589 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4590 
4591 	    /* If the range is of the form +/- [ x+1, x ], we won't
4592 	       be able to normalize it.  But then, it represents the
4593 	       whole range or the empty set, so make it
4594 	       +/- [ -, - ].  */
4595 	    if (tree_int_cst_equal (n_low, low)
4596 		&& tree_int_cst_equal (n_high, high))
4597 	      low = high = 0;
4598 	    else
4599 	      in_p = ! in_p;
4600 	  }
4601 	else
4602 	  low = n_low, high = n_high;
4603 
4604 	*p_low = low;
4605 	*p_high = high;
4606 	*p_in_p = in_p;
4607 	return arg0;
4608 
4609     CASE_CONVERT:
4610     case NON_LVALUE_EXPR:
4611       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4612 	return NULL_TREE;
4613 
4614       if (! INTEGRAL_TYPE_P (arg0_type)
4615 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4616 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4617 	return NULL_TREE;
4618 
4619       n_low = low, n_high = high;
4620 
4621       if (n_low != 0)
4622 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4623 
4624       if (n_high != 0)
4625 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4626 
4627       /* If we're converting arg0 from an unsigned type to the signed
4628 	 type of exp, we will be doing the comparison as unsigned.
4629 	 The tests above have already verified that LOW and HIGH
4630 	 are both positive.
4631 
4632 	 So we have to ensure that we will handle large unsigned
4633 	 values the same way that the current signed bounds treat
4634 	 negative values.  */
4635 
4636       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4637 	{
4638 	  tree high_positive;
4639 	  tree equiv_type;
4640 	  /* For fixed-point modes, we need to pass the saturating flag
4641 	     as the 2nd parameter.  */
4642 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4643 	    equiv_type
4644 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4645 						TYPE_SATURATING (arg0_type));
4646 	  else
4647 	    equiv_type
4648 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4649 
4650 	  /* A range without an upper bound is, naturally, unbounded.
4651 	     Since convert would have cropped a very large value, use
4652 	     the max value for the destination type.  */
4653 	  high_positive
4654 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4655 	      : TYPE_MAX_VALUE (arg0_type);
4656 
4657 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4658 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4659 					     fold_convert_loc (loc, arg0_type,
4660 							       high_positive),
4661 					     build_int_cst (arg0_type, 1));
4662 
4663 	  /* If the low bound is specified, "and" the range with the
4664 	     range for which the original unsigned value will be
4665 	     positive.  */
4666 	  if (low != 0)
4667 	    {
4668 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4669 				  1, fold_convert_loc (loc, arg0_type,
4670 						       integer_zero_node),
4671 				  high_positive))
4672 		return NULL_TREE;
4673 
4674 	      in_p = (n_in_p == in_p);
4675 	    }
4676 	  else
4677 	    {
4678 	      /* Otherwise, "or" the range with the range of the input
4679 		 that will be interpreted as negative.  */
4680 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4681 				  1, fold_convert_loc (loc, arg0_type,
4682 						       integer_zero_node),
4683 				  high_positive))
4684 		return NULL_TREE;
4685 
4686 	      in_p = (in_p != n_in_p);
4687 	    }
4688 	}
4689 
4690       *p_low = n_low;
4691       *p_high = n_high;
4692       *p_in_p = in_p;
4693       return arg0;
4694 
4695     default:
4696       return NULL_TREE;
4697     }
4698 }
4699 
4700 /* Given EXP, a logical expression, set the range it is testing into
4701    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4702    actually being tested.  *PLOW and *PHIGH will be made of the same
4703    type as the returned expression.  If EXP is not a comparison, we
4704    will most likely not be returning a useful value and range.  Set
4705    *STRICT_OVERFLOW_P to true if the return value is only valid
4706    because signed overflow is undefined; otherwise, do not change
4707    *STRICT_OVERFLOW_P.  */
4708 
4709 tree
4710 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4711 	    bool *strict_overflow_p)
4712 {
4713   enum tree_code code;
4714   tree arg0, arg1 = NULL_TREE;
4715   tree exp_type, nexp;
4716   int in_p;
4717   tree low, high;
4718   location_t loc = EXPR_LOCATION (exp);
4719 
4720   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4721      and see if we can refine the range.  Some of the cases below may not
4722      happen, but it doesn't seem worth worrying about this.  We keep
4723      iterating as long as make_range_step refines the range and stop
4724      as soon as it returns NULL_TREE.  */
4725 
4726   in_p = 0;
4727   low = high = build_int_cst (TREE_TYPE (exp), 0);
4728 
4729   while (1)
4730     {
4731       code = TREE_CODE (exp);
4732       exp_type = TREE_TYPE (exp);
4733       arg0 = NULL_TREE;
4734 
4735       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4736 	{
4737 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4738 	    arg0 = TREE_OPERAND (exp, 0);
4739 	  if (TREE_CODE_CLASS (code) == tcc_binary
4740 	      || TREE_CODE_CLASS (code) == tcc_comparison
4741 	      || (TREE_CODE_CLASS (code) == tcc_expression
4742 		  && TREE_OPERAND_LENGTH (exp) > 1))
4743 	    arg1 = TREE_OPERAND (exp, 1);
4744 	}
4745       if (arg0 == NULL_TREE)
4746 	break;
4747 
4748       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4749 			      &high, &in_p, strict_overflow_p);
4750       if (nexp == NULL_TREE)
4751 	break;
4752       exp = nexp;
4753     }
4754 
4755   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4756   if (TREE_CODE (exp) == INTEGER_CST)
4757     {
4758       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4759 						 exp, 0, low, 0))
4760 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4761 						    exp, 1, high, 1)));
4762       low = high = 0;
4763       exp = 0;
4764     }
4765 
4766   *pin_p = in_p, *plow = low, *phigh = high;
4767   return exp;
4768 }
4769 
4770 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4771    type, TYPE, return an expression to test if EXP is in (or out of, depending
4772    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4773 
4774 tree
4775 build_range_check (location_t loc, tree type, tree exp, int in_p,
4776 		   tree low, tree high)
4777 {
4778   tree etype = TREE_TYPE (exp), value;
4779 
4780   /* Disable this optimization for function pointer expressions
4781      on targets that require function pointer canonicalization.  */
4782   if (targetm.have_canonicalize_funcptr_for_compare ()
4783       && TREE_CODE (etype) == POINTER_TYPE
4784       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4785     return NULL_TREE;
4786 
4787   if (! in_p)
4788     {
4789       value = build_range_check (loc, type, exp, 1, low, high);
4790       if (value != 0)
4791         return invert_truthvalue_loc (loc, value);
4792 
4793       return 0;
4794     }
4795 
4796   if (low == 0 && high == 0)
4797     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4798 
4799   if (low == 0)
4800     return fold_build2_loc (loc, LE_EXPR, type, exp,
4801 			fold_convert_loc (loc, etype, high));
4802 
4803   if (high == 0)
4804     return fold_build2_loc (loc, GE_EXPR, type, exp,
4805 			fold_convert_loc (loc, etype, low));
4806 
4807   if (operand_equal_p (low, high, 0))
4808     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4809 			fold_convert_loc (loc, etype, low));
4810 
4811   if (integer_zerop (low))
4812     {
4813       if (! TYPE_UNSIGNED (etype))
4814 	{
4815 	  etype = unsigned_type_for (etype);
4816 	  high = fold_convert_loc (loc, etype, high);
4817 	  exp = fold_convert_loc (loc, etype, exp);
4818 	}
4819       return build_range_check (loc, type, exp, 1, 0, high);
4820     }
4821 
4822   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4823   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4824     {
4825       int prec = TYPE_PRECISION (etype);
4826 
4827       if (wi::mask (prec - 1, false, prec) == high)
4828 	{
4829 	  if (TYPE_UNSIGNED (etype))
4830 	    {
4831 	      tree signed_etype = signed_type_for (etype);
4832 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4833 		etype
4834 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4835 	      else
4836 		etype = signed_etype;
4837 	      exp = fold_convert_loc (loc, etype, exp);
4838 	    }
4839 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4840 			      build_int_cst (etype, 0));
4841 	}
4842     }
4843 
4844   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4845      This requires wrap-around arithmetic for the type of the expression.
4846      First make sure that arithmetic in this type is valid, then make sure
4847      that it wraps around.  */
4848   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4849     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4850 					    TYPE_UNSIGNED (etype));
4851 
4852   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4853     {
4854       tree utype, minv, maxv;
4855 
4856       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4857 	 for the type in question, as we rely on this here.  */
4858       utype = unsigned_type_for (etype);
4859       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4860       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4861 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4862       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4863 
4864       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4865 				      minv, 1, maxv, 1)))
4866 	etype = utype;
4867       else
4868 	return 0;
4869     }
4870 
4871   high = fold_convert_loc (loc, etype, high);
4872   low = fold_convert_loc (loc, etype, low);
4873   exp = fold_convert_loc (loc, etype, exp);
4874 
4875   value = const_binop (MINUS_EXPR, high, low);
4876 
4877 
4878   if (POINTER_TYPE_P (etype))
4879     {
4880       if (value != 0 && !TREE_OVERFLOW (value))
4881 	{
4882 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4883 	  return build_range_check (loc, type,
4884 				    fold_build_pointer_plus_loc (loc, exp, low),
4885 				    1, build_int_cst (etype, 0), value);
4886 	}
4887       return 0;
4888     }
4889 
4890   if (value != 0 && !TREE_OVERFLOW (value))
4891     return build_range_check (loc, type,
4892 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4893 			      1, build_int_cst (etype, 0), value);
4894 
4895   return 0;
4896 }
4897 
4898 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4899 
4900 static tree
4901 range_predecessor (tree val)
4902 {
4903   tree type = TREE_TYPE (val);
4904 
4905   if (INTEGRAL_TYPE_P (type)
4906       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4907     return 0;
4908   else
4909     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4910 			build_int_cst (TREE_TYPE (val), 1), 0);
4911 }
4912 
4913 /* Return the successor of VAL in its type, handling the infinite case.  */
4914 
4915 static tree
4916 range_successor (tree val)
4917 {
4918   tree type = TREE_TYPE (val);
4919 
4920   if (INTEGRAL_TYPE_P (type)
4921       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4922     return 0;
4923   else
4924     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4925 			build_int_cst (TREE_TYPE (val), 1), 0);
4926 }
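
/* Editorial note (not part of the original source): for example, with
   an unsigned char VAL equal to 255 (TYPE_MAX_VALUE), range_successor
   returns 0 to signal that no successor exists; callers such as
   merge_ranges below punt when they get that answer.  */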
4927 
4928 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4929    can, 0 if we can't.  Set the output range into the specified parameters.  */
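
/* Editorial illustration (not part of the original source): merging
   the two "in" ranges + [0, 10] and + [5, 20] (e.g. from
   x >= 0 && x <= 10 && x >= 5 && x <= 20) yields the intersection
   + [5, 10]; merging the "in" range + [0, 10] with the "out" range
   - [5, 20] yields + [0, 4].  */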
4930 
4931 bool
4932 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4933 	      tree high0, int in1_p, tree low1, tree high1)
4934 {
4935   int no_overlap;
4936   int subset;
4937   int temp;
4938   tree tem;
4939   int in_p;
4940   tree low, high;
4941   int lowequal = ((low0 == 0 && low1 == 0)
4942 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4943 						low0, 0, low1, 0)));
4944   int highequal = ((high0 == 0 && high1 == 0)
4945 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4946 						 high0, 1, high1, 1)));
4947 
4948   /* Make range 0 be the range that starts first, or ends last if they
4949      start at the same value.  Swap them if that is not the case.  */
4950   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4951 				 low0, 0, low1, 0))
4952       || (lowequal
4953 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4954 					high1, 1, high0, 1))))
4955     {
4956       temp = in0_p, in0_p = in1_p, in1_p = temp;
4957       tem = low0, low0 = low1, low1 = tem;
4958       tem = high0, high0 = high1, high1 = tem;
4959     }
4960 
4961   /* Now flag two cases, whether the ranges are disjoint or whether the
4962      second range is totally subsumed in the first.  Note that the tests
4963      below are simplified by the ones above.  */
4964   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4965 					  high0, 1, low1, 0));
4966   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4967 				      high1, 1, high0, 1));
4968 
4969   /* We now have four cases, depending on whether we are including or
4970      excluding the two ranges.  */
4971   if (in0_p && in1_p)
4972     {
4973       /* If they don't overlap, the result is false.  If the second range
4974 	 is a subset it is the result.  Otherwise, the range is from the start
4975 	 of the second to the end of the first.  */
4976       if (no_overlap)
4977 	in_p = 0, low = high = 0;
4978       else if (subset)
4979 	in_p = 1, low = low1, high = high1;
4980       else
4981 	in_p = 1, low = low1, high = high0;
4982     }
4983 
4984   else if (in0_p && ! in1_p)
4985     {
4986       /* If they don't overlap, the result is the first range.  If they are
4987 	 equal, the result is false.  If the second range is a subset of the
4988 	 first, and the ranges begin at the same place, we go from just after
4989 	 the end of the second range to the end of the first.  If the second
4990 	 range is not a subset of the first, or if it is a subset and both
4991 	 ranges end at the same place, the range starts at the start of the
4992 	 first range and ends just before the second range.
4993 	 Otherwise, we can't describe this as a single range.  */
4994       if (no_overlap)
4995 	in_p = 1, low = low0, high = high0;
4996       else if (lowequal && highequal)
4997 	in_p = 0, low = high = 0;
4998       else if (subset && lowequal)
4999 	{
5000 	  low = range_successor (high1);
5001 	  high = high0;
5002 	  in_p = 1;
5003 	  if (low == 0)
5004 	    {
5005 	      /* We are in the weird situation where high0 > high1 but
5006 		 high1 has no successor.  Punt.  */
5007 	      return 0;
5008 	    }
5009 	}
5010       else if (! subset || highequal)
5011 	{
5012 	  low = low0;
5013 	  high = range_predecessor (low1);
5014 	  in_p = 1;
5015 	  if (high == 0)
5016 	    {
5017 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5018 	      return 0;
5019 	    }
5020 	}
5021       else
5022 	return 0;
5023     }
5024 
5025   else if (! in0_p && in1_p)
5026     {
5027       /* If they don't overlap, the result is the second range.  If the second
5028 	 is a subset of the first, the result is false.  Otherwise,
5029 	 the range starts just after the first range and ends at the
5030 	 end of the second.  */
5031       if (no_overlap)
5032 	in_p = 1, low = low1, high = high1;
5033       else if (subset || highequal)
5034 	in_p = 0, low = high = 0;
5035       else
5036 	{
5037 	  low = range_successor (high0);
5038 	  high = high1;
5039 	  in_p = 1;
5040 	  if (low == 0)
5041 	    {
5042 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5043 	      return 0;
5044 	    }
5045 	}
5046     }
5047 
5048   else
5049     {
5050       /* The case where we are excluding both ranges.  Here the complex case
5051 	 is if they don't overlap.  In that case, the only time we have a
5052 	 range is if they are adjacent.  If the second is a subset of the
5053 	 first, the result is the first.  Otherwise, the range to exclude
5054 	 starts at the beginning of the first range and ends at the end of the
5055 	 second.  */
5056       if (no_overlap)
5057 	{
5058 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5059 					 range_successor (high0),
5060 					 1, low1, 0)))
5061 	    in_p = 0, low = low0, high = high1;
5062 	  else
5063 	    {
5064 	      /* Canonicalize - [min, x] into - [-, x].  */
5065 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5066 		switch (TREE_CODE (TREE_TYPE (low0)))
5067 		  {
5068 		  case ENUMERAL_TYPE:
5069 		    if (TYPE_PRECISION (TREE_TYPE (low0))
5070 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5071 		      break;
5072 		    /* FALLTHROUGH */
5073 		  case INTEGER_TYPE:
5074 		    if (tree_int_cst_equal (low0,
5075 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5076 		      low0 = 0;
5077 		    break;
5078 		  case POINTER_TYPE:
5079 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5080 			&& integer_zerop (low0))
5081 		      low0 = 0;
5082 		    break;
5083 		  default:
5084 		    break;
5085 		  }
5086 
5087 	      /* Canonicalize - [x, max] into - [x, -].  */
5088 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5089 		switch (TREE_CODE (TREE_TYPE (high1)))
5090 		  {
5091 		  case ENUMERAL_TYPE:
5092 		    if (TYPE_PRECISION (TREE_TYPE (high1))
5093 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5094 		      break;
5095 		    /* FALLTHROUGH */
5096 		  case INTEGER_TYPE:
5097 		    if (tree_int_cst_equal (high1,
5098 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5099 		      high1 = 0;
5100 		    break;
5101 		  case POINTER_TYPE:
5102 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5103 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5104 						       high1, 1,
5105 						       build_int_cst (TREE_TYPE (high1), 1),
5106 						       1)))
5107 		      high1 = 0;
5108 		    break;
5109 		  default:
5110 		    break;
5111 		  }
5112 
5113 	      /* The ranges might also be adjacent between the maximum and
5114 	         minimum values of the given type.  For
5115 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5116 	         return + [x + 1, y - 1].  */
5117 	      if (low0 == 0 && high1 == 0)
5118 	        {
5119 		  low = range_successor (high0);
5120 		  high = range_predecessor (low1);
5121 		  if (low == 0 || high == 0)
5122 		    return 0;
5123 
5124 		  in_p = 1;
5125 		}
5126 	      else
5127 		return 0;
5128 	    }
5129 	}
5130       else if (subset)
5131 	in_p = 0, low = low0, high = high0;
5132       else
5133 	in_p = 0, low = low0, high = high1;
5134     }
5135 
5136   *pin_p = in_p, *plow = low, *phigh = high;
5137   return 1;
5138 }
5139 
5140 
5141 /* Subroutine of fold, looking inside expressions of the form
5142    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5143    of the COND_EXPR.  This function is being used also to optimize
5144    A op B ? C : A, by reversing the comparison first.
5145 
5146    Return a folded expression whose code is not a COND_EXPR
5147    anymore, or NULL_TREE if no folding opportunity is found.  */
5148 
5149 static tree
5150 fold_cond_expr_with_comparison (location_t loc, tree type,
5151 				tree arg0, tree arg1, tree arg2)
5152 {
5153   enum tree_code comp_code = TREE_CODE (arg0);
5154   tree arg00 = TREE_OPERAND (arg0, 0);
5155   tree arg01 = TREE_OPERAND (arg0, 1);
5156   tree arg1_type = TREE_TYPE (arg1);
5157   tree tem;
5158 
5159   STRIP_NOPS (arg1);
5160   STRIP_NOPS (arg2);
5161 
5162   /* If we have A op 0 ? A : -A, consider applying the following
5163      transformations:
5164 
5165      A == 0? A : -A    same as -A
5166      A != 0? A : -A    same as A
5167      A >= 0? A : -A    same as abs (A)
5168      A > 0?  A : -A    same as abs (A)
5169      A <= 0? A : -A    same as -abs (A)
5170      A < 0?  A : -A    same as -abs (A)
5171 
5172      None of these transformations work for modes with signed
5173      zeros.  If A is +/-0, the first two transformations will
5174      change the sign of the result (from +0 to -0, or vice
5175      versa).  The last four will fix the sign of the result,
5176      even though the original expressions could be positive or
5177      negative, depending on the sign of A.
5178 
5179      Note that all these transformations are correct if A is
5180      NaN, since the two alternatives (A and -A) are also NaNs.  */
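
  /* Editorial illustration (not part of the original source): under
     -fno-signed-zeros, the C expression

       a >= 0.0 ? a : -a

     matches the GE_EXPR case below and folds to ABS_EXPR <a>, the
     equivalent of fabs (a).  */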
5181   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5182       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5183 	  ? real_zerop (arg01)
5184 	  : integer_zerop (arg01))
5185       && ((TREE_CODE (arg2) == NEGATE_EXPR
5186 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5187 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5188 	        have already been folded to Y-X; check for that.  */
5189 	  || (TREE_CODE (arg1) == MINUS_EXPR
5190 	      && TREE_CODE (arg2) == MINUS_EXPR
5191 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5192 				  TREE_OPERAND (arg2, 1), 0)
5193 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5194 				  TREE_OPERAND (arg2, 0), 0))))
5195     switch (comp_code)
5196       {
5197       case EQ_EXPR:
5198       case UNEQ_EXPR:
5199 	tem = fold_convert_loc (loc, arg1_type, arg1);
5200 	return fold_convert_loc (loc, type, negate_expr (tem));
5201       case NE_EXPR:
5202       case LTGT_EXPR:
5203 	return fold_convert_loc (loc, type, arg1);
5204       case UNGE_EXPR:
5205       case UNGT_EXPR:
5206 	if (flag_trapping_math)
5207 	  break;
5208 	/* Fall through.  */
5209       case GE_EXPR:
5210       case GT_EXPR:
5211 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5212 	  break;
5213 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5214 	return fold_convert_loc (loc, type, tem);
5215       case UNLE_EXPR:
5216       case UNLT_EXPR:
5217 	if (flag_trapping_math)
5218 	  break;
5219 	/* FALLTHRU */
5220       case LE_EXPR:
5221       case LT_EXPR:
5222 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5223 	  break;
5224 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5225 	return negate_expr (fold_convert_loc (loc, type, tem));
5226       default:
5227 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5228 	break;
5229       }
5230 
5231   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5232      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5233      both transformations are correct when A is NaN: A != 0
5234      is then true, and A == 0 is false.  */
5235 
5236   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5237       && integer_zerop (arg01) && integer_zerop (arg2))
5238     {
5239       if (comp_code == NE_EXPR)
5240 	return fold_convert_loc (loc, type, arg1);
5241       else if (comp_code == EQ_EXPR)
5242 	return build_zero_cst (type);
5243     }
5244 
5245   /* Try some transformations of A op B ? A : B.
5246 
5247      A == B? A : B    same as B
5248      A != B? A : B    same as A
5249      A >= B? A : B    same as max (A, B)
5250      A > B?  A : B    same as max (B, A)
5251      A <= B? A : B    same as min (A, B)
5252      A < B?  A : B    same as min (B, A)
5253 
5254      As above, these transformations don't work in the presence
5255      of signed zeros.  For example, if A and B are zeros of
5256      opposite sign, the first two transformations will change
5257      the sign of the result.  In the last four, the original
5258      expressions give different results for (A=+0, B=-0) and
5259      (A=-0, B=+0), but the transformed expressions do not.
5260 
5261      The first two transformations are correct if either A or B
5262      is a NaN.  In the first transformation, the condition will
5263      be false, and B will indeed be chosen.  In the case of the
5264      second transformation, the condition A != B will be true,
5265      and A will be chosen.
5266 
5267      The conversions to max() and min() are not correct if B is
5268      a number and A is not.  The conditions in the original
5269      expressions will be false, so all four give B.  The min()
5270      and max() versions would give a NaN instead.  */
5271   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5272       && operand_equal_for_comparison_p (arg01, arg2, arg00)
5273       /* Avoid these transformations if the COND_EXPR may be used
5274 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5275       && (in_gimple_form
5276 	  || VECTOR_TYPE_P (type)
5277 	  || (! lang_GNU_CXX ()
5278 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5279 	  || ! maybe_lvalue_p (arg1)
5280 	  || ! maybe_lvalue_p (arg2)))
5281     {
5282       tree comp_op0 = arg00;
5283       tree comp_op1 = arg01;
5284       tree comp_type = TREE_TYPE (comp_op0);
5285 
5286       switch (comp_code)
5287 	{
5288 	case EQ_EXPR:
5289 	  return fold_convert_loc (loc, type, arg2);
5290 	case NE_EXPR:
5291 	  return fold_convert_loc (loc, type, arg1);
5292 	case LE_EXPR:
5293 	case LT_EXPR:
5294 	case UNLE_EXPR:
5295 	case UNLT_EXPR:
5296 	  /* In C++ a ?: expression can be an lvalue, so put the
5297 	     operand which will be used if they are equal first
5298 	     so that we can convert this back to the
5299 	     corresponding COND_EXPR.  */
5300 	  if (!HONOR_NANS (arg1))
5301 	    {
5302 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5303 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5304 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5305 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5306 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5307 				   comp_op1, comp_op0);
5308 	      return fold_convert_loc (loc, type, tem);
5309 	    }
5310 	  break;
5311 	case GE_EXPR:
5312 	case GT_EXPR:
5313 	case UNGE_EXPR:
5314 	case UNGT_EXPR:
5315 	  if (!HONOR_NANS (arg1))
5316 	    {
5317 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5318 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5319 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5320 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5321 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5322 				   comp_op1, comp_op0);
5323 	      return fold_convert_loc (loc, type, tem);
5324 	    }
5325 	  break;
5326 	case UNEQ_EXPR:
5327 	  if (!HONOR_NANS (arg1))
5328 	    return fold_convert_loc (loc, type, arg2);
5329 	  break;
5330 	case LTGT_EXPR:
5331 	  if (!HONOR_NANS (arg1))
5332 	    return fold_convert_loc (loc, type, arg1);
5333 	  break;
5334 	default:
5335 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5336 	  break;
5337 	}
5338     }
5339 
5340   return NULL_TREE;
5341 }
5342 
5343 
5344 
5345 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5346 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5347   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5348 		false) >= 2)
5349 #endif
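
/* Editorial note (not part of the original source): when the macro
   above is true (branches cost at least 2 on the target),
   fold_range_test below may rewrite a short-circuit form such as

     x == 0 || x == 10

   into the branch-free (x == 0) | (x == 10), evaluating both simple
   operands unconditionally.  */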
5350 
5351 /* EXP is some logical combination of boolean tests.  See if we can
5352    merge it into some range test.  Return the new tree if so.  */
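
/* Editorial illustration (not part of the original source): for the
   classic ch >= '0' && ch <= '9', make_range gives the single range
   + ['0', '9'] on CH, and build_range_check then emits one unsigned
   comparison along the lines of

     (unsigned char) (ch - '0') <= 9

   (a sketch; the exact tree depends on the type of CH).  */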
5353 
5354 static tree
5355 fold_range_test (location_t loc, enum tree_code code, tree type,
5356 		 tree op0, tree op1)
5357 {
5358   int or_op = (code == TRUTH_ORIF_EXPR
5359 	       || code == TRUTH_OR_EXPR);
5360   int in0_p, in1_p, in_p;
5361   tree low0, low1, low, high0, high1, high;
5362   bool strict_overflow_p = false;
5363   tree tem, lhs, rhs;
5364   const char * const warnmsg = G_("assuming signed overflow does not occur "
5365 				  "when simplifying range test");
5366 
5367   if (!INTEGRAL_TYPE_P (type))
5368     return 0;
5369 
5370   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5371   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5372 
5373   /* If this is an OR operation, invert both sides; we will invert
5374      again at the end.  */
5375   if (or_op)
5376     in0_p = ! in0_p, in1_p = ! in1_p;
5377 
5378   /* If both expressions are the same, if we can merge the ranges, and we
5379      can build the range test, return it or its inversion.  If one of the
5380      ranges is always true or always false, consider it to be the same
5381      expression as the other.  */
5382   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5383       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5384 		       in1_p, low1, high1)
5385       && 0 != (tem = (build_range_check (loc, type,
5386 					 lhs != 0 ? lhs
5387 					 : rhs != 0 ? rhs : integer_zero_node,
5388 					 in_p, low, high))))
5389     {
5390       if (strict_overflow_p)
5391 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5392       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5393     }
5394 
5395   /* On machines where the branch cost is expensive, if this is a
5396      short-circuited branch and the underlying object on both sides
5397      is the same, make a non-short-circuit operation.  */
5398   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5399 	   && lhs != 0 && rhs != 0
5400 	   && (code == TRUTH_ANDIF_EXPR
5401 	       || code == TRUTH_ORIF_EXPR)
5402 	   && operand_equal_p (lhs, rhs, 0))
5403     {
5404       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5405 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5406 	 which cases we can't do this.  */
5407       if (simple_operand_p (lhs))
5408 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5409 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5410 			   type, op0, op1);
5411 
5412       else if (!lang_hooks.decls.global_bindings_p ()
5413 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5414 	{
5415 	  tree common = save_expr (lhs);
5416 
5417 	  if (0 != (lhs = build_range_check (loc, type, common,
5418 					     or_op ? ! in0_p : in0_p,
5419 					     low0, high0))
5420 	      && (0 != (rhs = build_range_check (loc, type, common,
5421 						 or_op ? ! in1_p : in1_p,
5422 						 low1, high1))))
5423 	    {
5424 	      if (strict_overflow_p)
5425 		fold_overflow_warning (warnmsg,
5426 				       WARN_STRICT_OVERFLOW_COMPARISON);
5427 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5428 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5429 				 type, lhs, rhs);
5430 	    }
5431 	}
5432     }
5433 
5434   return 0;
5435 }
5436 
5437 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5438    bit value.  Arrange things so the extra bits will be set to zero if and
5439    only if C is sign-extended to its full width.  If MASK is nonzero,
5440    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
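
/* Editorial illustration (not part of the original source): with
   MODESIZE == 8, P == 4 and UNSIGNEDP == 0, the constant C == 0x0a
   (the 4-bit value -6) becomes 0xfa: the sign bit (bit 3) is
   extracted, shifted up to bit 7, arithmetically shifted back down to
   fill bits 4-7, and XORed with C, i.e. C is sign-extended from 4
   bits to the full mode width.  */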
5441 
5442 static tree
5443 unextend (tree c, int p, int unsignedp, tree mask)
5444 {
5445   tree type = TREE_TYPE (c);
5446   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5447   tree temp;
5448 
5449   if (p == modesize || unsignedp)
5450     return c;
5451 
5452   /* We work by getting just the sign bit into the low-order bit, then
5453      into the high-order bit, then sign-extend.  We then XOR that value
5454      with C.  */
5455   temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5456 
5457   /* We must use a signed type in order to get an arithmetic right shift.
5458      However, we must also avoid introducing accidental overflows, so that
5459      a subsequent call to integer_zerop will work.  Hence we must
5460      do the type conversion here.  At this point, the constant is either
5461      zero or one, and the conversion to a signed type can never overflow.
5462      We could get an overflow if this conversion is done anywhere else.  */
5463   if (TYPE_UNSIGNED (type))
5464     temp = fold_convert (signed_type_for (type), temp);
5465 
5466   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5467   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5468   if (mask != 0)
5469     temp = const_binop (BIT_AND_EXPR, temp,
5470 			fold_convert (TREE_TYPE (c), mask));
5471   /* If necessary, convert the type back to match the type of C.  */
5472   if (TYPE_UNSIGNED (type))
5473     temp = fold_convert (type, temp);
5474 
5475   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5476 }
5477 
5478 /* For an expression that has the form
5479      (A && B) || ~B
5480    or
5481      (A || B) && ~B,
5482    we can drop one of the inner expressions and simplify to
5483      A || ~B
5484    or
5485      A && ~B
5486    LOC is the location of the resulting expression.  OP is the inner
5487    logical operation; the left-hand side in the examples above, while CMPOP
5488    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5489    removing a condition that guards another, as in
5490      (A != NULL && A->...) || A == NULL
5491    which we must not transform.  If RHS_ONLY is true, only eliminate the
5492    right-most operand of the inner logical operation.  */
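
/* Editorial illustration (not part of the original source): in

     (a != NULL && b > 2) || b <= 2

   the inner B > 2 is the inverse of the guard B <= 2 and is dropped,
   giving a != NULL || b <= 2; but in
   (a != NULL && a->f > 2) || a == NULL the guard A != NULL is the
   left-most operand, which RHS_ONLY prevents us from removing.  */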
5493 
5494 static tree
5495 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5496 				 bool rhs_only)
5497 {
5498   tree type = TREE_TYPE (cmpop);
5499   enum tree_code code = TREE_CODE (cmpop);
5500   enum tree_code truthop_code = TREE_CODE (op);
5501   tree lhs = TREE_OPERAND (op, 0);
5502   tree rhs = TREE_OPERAND (op, 1);
5503   tree orig_lhs = lhs, orig_rhs = rhs;
5504   enum tree_code rhs_code = TREE_CODE (rhs);
5505   enum tree_code lhs_code = TREE_CODE (lhs);
5506   enum tree_code inv_code;
5507 
5508   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5509     return NULL_TREE;
5510 
5511   if (TREE_CODE_CLASS (code) != tcc_comparison)
5512     return NULL_TREE;
5513 
5514   if (rhs_code == truthop_code)
5515     {
5516       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5517       if (newrhs != NULL_TREE)
5518 	{
5519 	  rhs = newrhs;
5520 	  rhs_code = TREE_CODE (rhs);
5521 	}
5522     }
5523   if (lhs_code == truthop_code && !rhs_only)
5524     {
5525       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5526       if (newlhs != NULL_TREE)
5527 	{
5528 	  lhs = newlhs;
5529 	  lhs_code = TREE_CODE (lhs);
5530 	}
5531     }
5532 
5533   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5534   if (inv_code == rhs_code
5535       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5536       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5537     return lhs;
5538   if (!rhs_only && inv_code == lhs_code
5539       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5540       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5541     return rhs;
5542   if (rhs != orig_rhs || lhs != orig_lhs)
5543     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5544 			    lhs, rhs);
5545   return NULL_TREE;
5546 }
5547 
5548 /* Find ways of folding logical expressions of LHS and RHS:
5549    Try to merge two comparisons to the same innermost item.
5550    Look for range tests like "ch >= '0' && ch <= '9'".
5551    Look for combinations of simple terms on machines with expensive branches
5552    and evaluate the RHS unconditionally.
5553 
5554    For example, if we have p->a == 2 && p->b == 4 and we can make an
5555    object large enough to span both A and B, we can do this with a comparison
5556    against the object ANDed with a mask.
5557 
5558    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5559    operations to do this with one comparison.
5560 
5561    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5562    function and the one above.
5563 
5564    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5565    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5566 
5567    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5568    two operands.
5569 
5570    We return the simplified tree or 0 if no optimization is possible.  */
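
/* Editorial illustration (not part of the original source): given

     struct s { unsigned char a, b; } *p;
     ... p->a == 2 && p->b == 4 ...

   the two byte comparisons may be merged into one 16-bit load
   compared against a combined constant, roughly

     *(unsigned short *) p == ((4 << 8) | 2)

   on a little-endian target (a sketch; the real code builds
   BIT_FIELD_REFs and accounts for masks, endianness and alignment).  */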
5571 
5572 static tree
5573 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5574 		    tree lhs, tree rhs)
5575 {
5576   /* If this is the "or" of two comparisons, we can do something if
5577      the comparisons are NE_EXPR.  If this is the "and", we can do something
5578      if the comparisons are EQ_EXPR.  I.e.,
5579 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5580 
5581      WANTED_CODE is this operation code.  For single bit fields, we can
5582      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5583      comparison for one-bit fields.  */
5584 
5585   enum tree_code wanted_code;
5586   enum tree_code lcode, rcode;
5587   tree ll_arg, lr_arg, rl_arg, rr_arg;
5588   tree ll_inner, lr_inner, rl_inner, rr_inner;
5589   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5590   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5591   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5592   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5593   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5594   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5595   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5596   machine_mode lnmode, rnmode;
5597   tree ll_mask, lr_mask, rl_mask, rr_mask;
5598   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5599   tree l_const, r_const;
5600   tree lntype, rntype, result;
5601   HOST_WIDE_INT first_bit, end_bit;
5602   int volatilep;
5603 
5604   /* Start by getting the comparison codes.  Fail if anything is volatile.
5605      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5606      it were surrounded with a NE_EXPR.  */
5607 
5608   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5609     return 0;
5610 
5611   lcode = TREE_CODE (lhs);
5612   rcode = TREE_CODE (rhs);
5613 
5614   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5615     {
5616       lhs = build2 (NE_EXPR, truth_type, lhs,
5617 		    build_int_cst (TREE_TYPE (lhs), 0));
5618       lcode = NE_EXPR;
5619     }
5620 
5621   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5622     {
5623       rhs = build2 (NE_EXPR, truth_type, rhs,
5624 		    build_int_cst (TREE_TYPE (rhs), 0));
5625       rcode = NE_EXPR;
5626     }
5627 
5628   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5629       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5630     return 0;
5631 
5632   ll_arg = TREE_OPERAND (lhs, 0);
5633   lr_arg = TREE_OPERAND (lhs, 1);
5634   rl_arg = TREE_OPERAND (rhs, 0);
5635   rr_arg = TREE_OPERAND (rhs, 1);
5636 
5637   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5638   if (simple_operand_p (ll_arg)
5639       && simple_operand_p (lr_arg))
5640     {
5641       if (operand_equal_p (ll_arg, rl_arg, 0)
5642           && operand_equal_p (lr_arg, rr_arg, 0))
5643 	{
5644           result = combine_comparisons (loc, code, lcode, rcode,
5645 					truth_type, ll_arg, lr_arg);
5646 	  if (result)
5647 	    return result;
5648 	}
5649       else if (operand_equal_p (ll_arg, rr_arg, 0)
5650                && operand_equal_p (lr_arg, rl_arg, 0))
5651 	{
5652           result = combine_comparisons (loc, code, lcode,
5653 					swap_tree_comparison (rcode),
5654 					truth_type, ll_arg, lr_arg);
5655 	  if (result)
5656 	    return result;
5657 	}
5658     }
5659 
5660   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5661 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5662 
5663   /* If the RHS can be evaluated unconditionally and its operands are
5664      simple, it wins to evaluate the RHS unconditionally on machines
5665      with expensive branches.  In this case, this isn't a comparison
5666      that can be merged.  */
5667 
5668   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5669 		   false) >= 2
5670       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5671       && simple_operand_p (rl_arg)
5672       && simple_operand_p (rr_arg))
5673     {
5674       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5675       if (code == TRUTH_OR_EXPR
5676 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5677 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5678 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5679 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5680 	return build2_loc (loc, NE_EXPR, truth_type,
5681 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5682 				   ll_arg, rl_arg),
5683 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5684 
5685       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5686       if (code == TRUTH_AND_EXPR
5687 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5688 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5689 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5690 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5691 	return build2_loc (loc, EQ_EXPR, truth_type,
5692 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5693 				   ll_arg, rl_arg),
5694 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5695     }
5696 
5697   /* See if the comparisons can be merged.  Then get all the parameters for
5698      each side.  */
5699 
5700   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5701       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5702     return 0;
5703 
5704   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5705   volatilep = 0;
5706   ll_inner = decode_field_reference (loc, &ll_arg,
5707 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5708 				     &ll_unsignedp, &ll_reversep, &volatilep,
5709 				     &ll_mask, &ll_and_mask);
5710   lr_inner = decode_field_reference (loc, &lr_arg,
5711 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5712 				     &lr_unsignedp, &lr_reversep, &volatilep,
5713 				     &lr_mask, &lr_and_mask);
5714   rl_inner = decode_field_reference (loc, &rl_arg,
5715 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5716 				     &rl_unsignedp, &rl_reversep, &volatilep,
5717 				     &rl_mask, &rl_and_mask);
5718   rr_inner = decode_field_reference (loc, &rr_arg,
5719 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5720 				     &rr_unsignedp, &rr_reversep, &volatilep,
5721 				     &rr_mask, &rr_and_mask);
5722 
5723   /* The inner operation on the lhs of each comparison must be the
5724      same if we are to be able to do anything.
5725      Then see if we have constants.  If not, the same must be true for
5726      the rhs's.  */
5727   if (volatilep
5728       || ll_reversep != rl_reversep
5729       || ll_inner == 0 || rl_inner == 0
5730       || ! operand_equal_p (ll_inner, rl_inner, 0))
5731     return 0;
5732 
5733   if (TREE_CODE (lr_arg) == INTEGER_CST
5734       && TREE_CODE (rr_arg) == INTEGER_CST)
5735     {
5736       l_const = lr_arg, r_const = rr_arg;
5737       lr_reversep = ll_reversep;
5738     }
5739   else if (lr_reversep != rr_reversep
5740 	   || lr_inner == 0 || rr_inner == 0
5741 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5742     return 0;
5743   else
5744     l_const = r_const = 0;
5745 
5746   /* If either comparison code is not correct for our logical operation,
5747      fail.  However, we can convert a one-bit comparison against zero into
5748      the opposite comparison against that bit being set in the field.  */
5749 
5750   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5751   if (lcode != wanted_code)
5752     {
5753       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5754 	{
5755 	  /* Make the left operand unsigned, since we are only interested
5756 	     in the value of one bit.  Otherwise we are doing the wrong
5757 	     thing below.  */
5758 	  ll_unsignedp = 1;
5759 	  l_const = ll_mask;
5760 	}
5761       else
5762 	return 0;
5763     }
5764 
5765   /* This is analogous to the code for l_const above.  */
5766   if (rcode != wanted_code)
5767     {
5768       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5769 	{
5770 	  rl_unsignedp = 1;
5771 	  r_const = rl_mask;
5772 	}
5773       else
5774 	return 0;
5775     }
5776 
5777   /* See if we can find a mode that contains both fields being compared on
5778      the left.  If we can't, fail.  Otherwise, update all constants and masks
5779      to be relative to a field of that size.  */
5780   first_bit = MIN (ll_bitpos, rl_bitpos);
5781   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5782   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5783 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5784 			  volatilep);
5785   if (lnmode == VOIDmode)
5786     return 0;
5787 
5788   lnbitsize = GET_MODE_BITSIZE (lnmode);
5789   lnbitpos = first_bit & ~ (lnbitsize - 1);
5790   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5791   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5792 
5793   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5794     {
5795       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5796       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5797     }
5798 
5799   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5800 			 size_int (xll_bitpos));
5801   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5802 			 size_int (xrl_bitpos));
5803 
5804   if (l_const)
5805     {
5806       l_const = fold_convert_loc (loc, lntype, l_const);
5807       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5808       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5809       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5810 					fold_build1_loc (loc, BIT_NOT_EXPR,
5811 						     lntype, ll_mask))))
5812 	{
5813 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5814 
5815 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5816 	}
5817     }
5818   if (r_const)
5819     {
5820       r_const = fold_convert_loc (loc, lntype, r_const);
5821       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5822       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5823       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5824 					fold_build1_loc (loc, BIT_NOT_EXPR,
5825 						     lntype, rl_mask))))
5826 	{
5827 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5828 
5829 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5830 	}
5831     }
5832 
5833   /* If the right sides are not constant, do the same for them.  Also,
5834      disallow this optimization if a size, signedness or storage order
5835      mismatch occurs between the left and right sides.  */
5836   if (l_const == 0)
5837     {
5838       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5839 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5840 	  || ll_reversep != lr_reversep
5841 	  /* Make sure the two fields on the right
5842 	     correspond to the left without being swapped.  */
5843 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5844 	return 0;
5845 
5846       first_bit = MIN (lr_bitpos, rr_bitpos);
5847       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5848       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5849 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5850 			      volatilep);
5851       if (rnmode == VOIDmode)
5852 	return 0;
5853 
5854       rnbitsize = GET_MODE_BITSIZE (rnmode);
5855       rnbitpos = first_bit & ~ (rnbitsize - 1);
5856       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5857       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5858 
5859       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5860 	{
5861 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5862 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5863 	}
5864 
5865       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5866 							    rntype, lr_mask),
5867 			     size_int (xlr_bitpos));
5868       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5869 							    rntype, rr_mask),
5870 			     size_int (xrr_bitpos));
5871 
5872       /* Make a mask that corresponds to both fields being compared.
5873 	 Do this for both items being compared.  If the operands are the
5874 	 same size and the bits being compared are in the same position
5875 	 then we can do this by masking both and comparing the masked
5876 	 results.  */
5877       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5878       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5879       if (lnbitsize == rnbitsize
5880 	  && xll_bitpos == xlr_bitpos
5881 	  && lnbitpos >= 0
5882 	  && rnbitpos >= 0)
5883 	{
5884 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5885 				    lntype, lnbitsize, lnbitpos,
5886 				    ll_unsignedp || rl_unsignedp, ll_reversep);
5887 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5888 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5889 
5890 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5891 				    rntype, rnbitsize, rnbitpos,
5892 				    lr_unsignedp || rr_unsignedp, lr_reversep);
5893 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5894 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5895 
5896 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5897 	}
5898 
5899       /* There is still another way we can do something:  If both pairs of
5900 	 fields being compared are adjacent, we may be able to make a wider
5901 	 field containing them both.
5902 
5903 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5904 	 the mask must be shifted to account for the shift done by
5905 	 make_bit_field_ref.  */
5906       if (((ll_bitsize + ll_bitpos == rl_bitpos
5907 	    && lr_bitsize + lr_bitpos == rr_bitpos)
5908 	   || (ll_bitpos == rl_bitpos + rl_bitsize
5909 	       && lr_bitpos == rr_bitpos + rr_bitsize))
5910 	  && ll_bitpos >= 0
5911 	  && rl_bitpos >= 0
5912 	  && lr_bitpos >= 0
5913 	  && rr_bitpos >= 0)
5914 	{
5915 	  tree type;
5916 
5917 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5918 				    ll_bitsize + rl_bitsize,
5919 				    MIN (ll_bitpos, rl_bitpos),
5920 				    ll_unsignedp, ll_reversep);
5921 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5922 				    lr_bitsize + rr_bitsize,
5923 				    MIN (lr_bitpos, rr_bitpos),
5924 				    lr_unsignedp, lr_reversep);
5925 
5926 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5927 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5928 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5929 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5930 
5931 	  /* Convert to the smaller type before masking out unwanted bits.  */
5932 	  type = lntype;
5933 	  if (lntype != rntype)
5934 	    {
5935 	      if (lnbitsize > rnbitsize)
5936 		{
5937 		  lhs = fold_convert_loc (loc, rntype, lhs);
5938 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5939 		  type = rntype;
5940 		}
5941 	      else if (lnbitsize < rnbitsize)
5942 		{
5943 		  rhs = fold_convert_loc (loc, lntype, rhs);
5944 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5945 		  type = lntype;
5946 		}
5947 	    }
5948 
5949 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5950 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5951 
5952 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5953 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5954 
5955 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5956 	}
5957 
5958       return 0;
5959     }
5960 
5961   /* Handle the case of comparisons with constants.  If there is something in
5962      common between the masks, those bits of the constants must be the same.
5963      If not, the condition is always false.  Test for this to avoid generating
5964      incorrect code below.  */
5965   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5966   if (! integer_zerop (result)
5967       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5968 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5969     {
5970       if (wanted_code == NE_EXPR)
5971 	{
5972 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5973 	  return constant_boolean_node (true, truth_type);
5974 	}
5975       else
5976 	{
5977 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5978 	  return constant_boolean_node (false, truth_type);
5979 	}
5980     }
5981 
5982   if (lnbitpos < 0)
5983     return 0;
5984 
5985   /* Construct the expression we will return.  First get the component
5986      reference we will make.  Unless the mask is all ones the width of
5987      that field, perform the mask operation.  Then compare with the
5988      merged constant.  */
5989   result = make_bit_field_ref (loc, ll_inner, ll_arg,
5990 			       lntype, lnbitsize, lnbitpos,
5991 			       ll_unsignedp || rl_unsignedp, ll_reversep);
5992 
5993   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5994   if (! all_ones_mask_p (ll_mask, lnbitsize))
5995     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5996 
5997   return build2_loc (loc, wanted_code, truth_type, result,
5998 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5999 }
6000 
6001 /* T is an integer expression that is being multiplied by, divided by, or
6002    taken modulo a constant C (CODE says which, and what kind of divide or
6003    modulus).  See if we can eliminate that operation by folding it with
6004    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6005    should be used for the computation if wider than our type.
6006 
6007    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6008    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6009    expression would not overflow or that overflow is undefined for the type
6010    in the language in question.
6011 
6012    If we return a non-null expression, it is an equivalent form of the
6013    original computation, but need not be in the original type.
6014 
6015    We set *STRICT_OVERFLOW_P to true if the return value depends on
6016    signed overflow being undefined.  Otherwise we do not change
6017    *STRICT_OVERFLOW_P.  */
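
/* Editorial illustration (not part of the original source): for a
   signed int X, folding (x * 8) / 4 calls extract_muldiv with C == 4
   and CODE == TRUNC_DIV_EXPR; the MULT_EXPR case cancels the two
   constants and returns x * 2, setting *STRICT_OVERFLOW_P because the
   result is only valid if x * 8 does not overflow (signed overflow
   being undefined).  */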
6018 
6019 static tree
6020 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6021 		bool *strict_overflow_p)
6022 {
6023   /* To avoid exponential search depth, refuse to allow recursion past
6024      three levels.  Beyond that (1) it's highly unlikely that we'll find
6025      something interesting and (2) we've probably processed it before
6026      when we built the inner expression.  */
6027 
6028   static int depth;
6029   tree ret;
6030 
6031   if (depth > 3)
6032     return NULL;
6033 
6034   depth++;
6035   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6036   depth--;
6037 
6038   return ret;
6039 }
6040 
6041 static tree
6042 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6043 		  bool *strict_overflow_p)
6044 {
6045   tree type = TREE_TYPE (t);
6046   enum tree_code tcode = TREE_CODE (t);
6047   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6048 				   > GET_MODE_SIZE (TYPE_MODE (type)))
6049 		? wide_type : type);
6050   tree t1, t2;
6051   int same_p = tcode == code;
6052   tree op0 = NULL_TREE, op1 = NULL_TREE;
6053   bool sub_strict_overflow_p;
6054 
6055   /* Don't deal with constants of zero here; they confuse the code below.  */
6056   if (integer_zerop (c))
6057     return NULL_TREE;
6058 
6059   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6060     op0 = TREE_OPERAND (t, 0);
6061 
6062   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6063     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6064 
6065   /* Note that we need not handle conditional operations here since fold
6066      already handles those cases.  So just do arithmetic here.  */
6067   switch (tcode)
6068     {
6069     case INTEGER_CST:
6070       /* For a constant, we can always simplify if we are a multiply
6071 	 or (for divide and modulus) if it is a multiple of our constant.  */
6072       if (code == MULT_EXPR
6073 	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6074 	{
6075 	  tree tem = const_binop (code, fold_convert (ctype, t),
6076 				  fold_convert (ctype, c));
6077 	  /* If the multiplication overflowed, we lost information on it.
6078 	     See PR68142 and PR69845.  */
6079 	  if (TREE_OVERFLOW (tem))
6080 	    return NULL_TREE;
6081 	  return tem;
6082 	}
6083       break;
6084 
6085     CASE_CONVERT: case NON_LVALUE_EXPR:
6086       /* If op0 is an expression ...  */
6087       if ((COMPARISON_CLASS_P (op0)
6088 	   || UNARY_CLASS_P (op0)
6089 	   || BINARY_CLASS_P (op0)
6090 	   || VL_EXP_CLASS_P (op0)
6091 	   || EXPRESSION_CLASS_P (op0))
6092 	  /* ... and has wrapping overflow, and its type is smaller
6093 	     than ctype, then we cannot pass through as widening.  */
6094 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6095 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6096 	       && (TYPE_PRECISION (ctype)
6097 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6098 	      /* ... or this is a truncation (t is narrower than op0),
6099 		 then we cannot pass through this narrowing.  */
6100 	      || (TYPE_PRECISION (type)
6101 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6102 	      /* ... or signedness changes for division or modulus,
6103 		 then we cannot pass through this conversion.  */
6104 	      || (code != MULT_EXPR
6105 		  && (TYPE_UNSIGNED (ctype)
6106 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6107 	      /* ... or has undefined overflow while the converted to
6108 		 type has not, we cannot do the operation in the inner type
6109 		 as that would introduce undefined overflow.  */
6110 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6111 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6112 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6113 	break;
6114 
6115       /* Pass the constant down and see if we can make a simplification.  If
6116 	 we can, replace this expression with the inner simplification for
6117 	 possible later conversion to our or some other type.  */
6118       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6119 	  && TREE_CODE (t2) == INTEGER_CST
6120 	  && !TREE_OVERFLOW (t2)
6121 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
6122 					 code == MULT_EXPR
6123 					 ? ctype : NULL_TREE,
6124 					 strict_overflow_p))))
6125 	return t1;
6126       break;
6127 
6128     case ABS_EXPR:
6129       /* If widening the type changes it from signed to unsigned, then we
6130          must avoid building ABS_EXPR itself as unsigned.  */
6131       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6132         {
6133           tree cstype = (*signed_type_for) (ctype);
6134           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6135 	      != 0)
6136             {
6137               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6138               return fold_convert (ctype, t1);
6139             }
6140           break;
6141         }
6142       /* If the constant is negative, we cannot simplify this.  */
6143       if (tree_int_cst_sgn (c) == -1)
6144         break;
6145       /* FALLTHROUGH */
6146     case NEGATE_EXPR:
6147       /* For division and modulus, type can't be unsigned, as e.g.
6148 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6149 	 For signed types, even with wrapping overflow, this is fine.  */
6150       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6151 	break;
6152       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6153 	  != 0)
6154 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6155       break;
6156 
6157     case MIN_EXPR:  case MAX_EXPR:
6158       /* If widening the type changes the signedness, then we can't perform
6159 	 this optimization as that changes the result.  */
6160       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6161 	break;
6162 
6163       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6164       sub_strict_overflow_p = false;
6165       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6166 				&sub_strict_overflow_p)) != 0
6167 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6168 				   &sub_strict_overflow_p)) != 0)
6169 	{
6170 	  if (tree_int_cst_sgn (c) < 0)
6171 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6172 	  if (sub_strict_overflow_p)
6173 	    *strict_overflow_p = true;
6174 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6175 			      fold_convert (ctype, t2));
6176 	}
6177       break;
6178 
6179     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6180       /* If the second operand is constant, this is a multiplication
6181 	 or floor division, by a power of two, so we can treat it that
6182 	 way unless the multiplier or divisor overflows.  Signed
6183 	 left-shift overflow is implementation-defined rather than
6184 	 undefined in C90, so do not convert signed left shift into
6185 	 multiplication.  */
6186       if (TREE_CODE (op1) == INTEGER_CST
6187 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6188 	  /* const_binop may not detect overflow correctly,
6189 	     so check for it explicitly here.  */
6190 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6191 	  && 0 != (t1 = fold_convert (ctype,
6192 				      const_binop (LSHIFT_EXPR,
6193 						   size_one_node,
6194 						   op1)))
6195 	  && !TREE_OVERFLOW (t1))
6196 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6197 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6198 				       ctype,
6199 				       fold_convert (ctype, op0),
6200 				       t1),
6201 			       c, code, wide_type, strict_overflow_p);
6202       break;
6203 
6204     case PLUS_EXPR:  case MINUS_EXPR:
6205       /* See if we can eliminate the operation on both sides.  If we can, we
6206 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6207 	 cases where we can do anything are if the second operand is a
6208 	 constant.  */
6209       sub_strict_overflow_p = false;
6210       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6211       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6212       if (t1 != 0 && t2 != 0
6213 	  && TYPE_OVERFLOW_WRAPS (ctype)
6214 	  && (code == MULT_EXPR
6215 	      /* If not multiplication, we can only do this if both operands
6216 		 are divisible by c.  */
6217 	      || (multiple_of_p (ctype, op0, c)
6218 	          && multiple_of_p (ctype, op1, c))))
6219 	{
6220 	  if (sub_strict_overflow_p)
6221 	    *strict_overflow_p = true;
6222 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6223 			      fold_convert (ctype, t2));
6224 	}
6225 
6226       /* If this was a subtraction, negate OP1 and set it to be an addition.
6227 	 This simplifies the logic below.  */
6228       if (tcode == MINUS_EXPR)
6229 	{
6230 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6231 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6232 	  if (TREE_CODE (op0) == INTEGER_CST)
6233 	    {
6234 	      std::swap (op0, op1);
6235 	      std::swap (t1, t2);
6236 	    }
6237 	}
6238 
6239       if (TREE_CODE (op1) != INTEGER_CST)
6240 	break;
6241 
6242       /* If either OP1 or C is negative, this optimization is not safe for
6243 	 some of the division and remainder types while for others we need
6244 	 to change the code.  */
6245       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6246 	{
6247 	  if (code == CEIL_DIV_EXPR)
6248 	    code = FLOOR_DIV_EXPR;
6249 	  else if (code == FLOOR_DIV_EXPR)
6250 	    code = CEIL_DIV_EXPR;
6251 	  else if (code != MULT_EXPR
6252 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6253 	    break;
6254 	}
6255 
6256       /* If it's a multiply or a division/modulus operation of a multiple
6257          of our constant, do the operation and verify it doesn't overflow.  */
6258       if (code == MULT_EXPR
6259 	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6260 	{
6261 	  op1 = const_binop (code, fold_convert (ctype, op1),
6262 			     fold_convert (ctype, c));
6263 	  /* We allow the constant to overflow with wrapping semantics.  */
6264 	  if (op1 == 0
6265 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6266 	    break;
6267 	}
6268       else
6269 	break;
6270 
6271       /* If we have an unsigned type, we cannot widen the operation since it
6272 	 will change the result if the original computation overflowed.  */
6273       if (TYPE_UNSIGNED (ctype) && ctype != type)
6274 	break;
6275 
6276       /* The last case is if we are a multiply.  In that case, we can
6277 	 apply the distributive law to commute the multiply and addition
6278 	 if the multiplication of the constants doesn't overflow
6279 	 and overflow is defined.  With undefined overflow
6280 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6281       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6282 	return fold_build2 (tcode, ctype,
6283 			    fold_build2 (code, ctype,
6284 					 fold_convert (ctype, op0),
6285 					 fold_convert (ctype, c)),
6286 			    op1);
6287 
6288       break;
6289 
6290     case MULT_EXPR:
6291       /* We have a special case here if we are doing something like
6292 	 (C * 8) % 4 since we know that's zero.  */
6293       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6294 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6295 	  /* If the multiplication can overflow we cannot optimize this.  */
6296 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6297 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6298 	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6299 	{
6300 	  *strict_overflow_p = true;
6301 	  return omit_one_operand (type, integer_zero_node, op0);
6302 	}
6303 
6304       /* ... fall through ...  */
6305 
6306     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6307     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6308       /* If we can extract our operation from the LHS, do so and return a
6309 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6310 	 do something only if the second operand is a constant.  */
6311       if (same_p
6312 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6313 				   strict_overflow_p)) != 0)
6314 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6315 			    fold_convert (ctype, op1));
6316       else if (tcode == MULT_EXPR && code == MULT_EXPR
6317 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6318 					strict_overflow_p)) != 0)
6319 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6320 			    fold_convert (ctype, t1));
6321       else if (TREE_CODE (op1) != INTEGER_CST)
6322 	return 0;
6323 
6324       /* If these are the same operation types, we can associate them
6325 	 assuming no overflow.  */
6326       if (tcode == code)
6327 	{
6328 	  bool overflow_p = false;
6329 	  bool overflow_mul_p;
6330 	  signop sign = TYPE_SIGN (ctype);
6331 	  unsigned prec = TYPE_PRECISION (ctype);
6332 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6333 				  wi::to_wide (c, prec),
6334 				  sign, &overflow_mul_p);
6335 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6336 	  if (overflow_mul_p
6337 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6338 	    overflow_p = true;
6339 	  if (!overflow_p)
6340 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6341 				wide_int_to_tree (ctype, mul));
6342 	}
6343 
6344       /* If these operations "cancel" each other, we have the main
6345 	 optimizations of this pass, which occur when either constant is a
6346 	 multiple of the other, in which case we replace this with either an
6347 	 operation of CODE or TCODE.
6348 
6349 	 If we have an unsigned type, we cannot do this since it will change
6350 	 the result if the original computation overflowed.  */
6351       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6352 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6353 	      || (tcode == MULT_EXPR
6354 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6355 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6356 		  && code != MULT_EXPR)))
6357 	{
6358 	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6359 	    {
6360 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6361 		*strict_overflow_p = true;
6362 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6363 				  fold_convert (ctype,
6364 						const_binop (TRUNC_DIV_EXPR,
6365 							     op1, c)));
6366 	    }
6367 	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6368 	    {
6369 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6370 		*strict_overflow_p = true;
6371 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6372 				  fold_convert (ctype,
6373 						const_binop (TRUNC_DIV_EXPR,
6374 							     c, op1)));
6375 	    }
6376 	}
6377       break;
6378 
6379     default:
6380       break;
6381     }
6382 
6383   return 0;
6384 }
6385 
6386 /* Return a node which has the indicated constant VALUE (either 0 or
6387    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6388    and is of the indicated TYPE.  */
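/* For example, constant_boolean_node (true, boolean_type_node) returns
   boolean_true_node, while for a four-element integer vector type it
   returns the all-ones vector { -1, -1, -1, -1 }.  */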
6389 
6390 tree
6391 constant_boolean_node (bool value, tree type)
6392 {
6393   if (type == integer_type_node)
6394     return value ? integer_one_node : integer_zero_node;
6395   else if (type == boolean_type_node)
6396     return value ? boolean_true_node : boolean_false_node;
6397   else if (TREE_CODE (type) == VECTOR_TYPE)
6398     return build_vector_from_val (type,
6399 				  build_int_cst (TREE_TYPE (type),
6400 						 value ? -1 : 0));
6401   else
6402     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6403 }
6404 
6405 
6406 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6407    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6408    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6409    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6410    COND is the first argument to CODE; otherwise (as in the example
6411    given here), it is the second argument.  TYPE is the type of the
6412    original expression.  Return NULL_TREE if no simplification is
6413    possible.  */
6414 
6415 static tree
6416 fold_binary_op_with_conditional_arg (location_t loc,
6417 				     enum tree_code code,
6418 				     tree type, tree op0, tree op1,
6419 				     tree cond, tree arg, int cond_first_p)
6420 {
6421   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6422   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6423   tree test, true_value, false_value;
6424   tree lhs = NULL_TREE;
6425   tree rhs = NULL_TREE;
6426   enum tree_code cond_code = COND_EXPR;
6427 
6428   if (TREE_CODE (cond) == COND_EXPR
6429       || TREE_CODE (cond) == VEC_COND_EXPR)
6430     {
6431       test = TREE_OPERAND (cond, 0);
6432       true_value = TREE_OPERAND (cond, 1);
6433       false_value = TREE_OPERAND (cond, 2);
6434       /* If this operand throws an exception, then it does not make
6435 	 sense to try to perform a logical or arithmetic operation
6436 	 involving it.  */
6437       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6438 	lhs = true_value;
6439       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6440 	rhs = false_value;
6441     }
6442   else if (!(TREE_CODE (type) != VECTOR_TYPE
6443 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6444     {
6445       tree testtype = TREE_TYPE (cond);
6446       test = cond;
6447       true_value = constant_boolean_node (true, testtype);
6448       false_value = constant_boolean_node (false, testtype);
6449     }
6450   else
6451     /* Detect the case of mixing vector and scalar types - bail out.  */
6452     return NULL_TREE;
6453 
6454   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6455     cond_code = VEC_COND_EXPR;
6456 
6457   /* This transformation is only worthwhile if we don't have to wrap ARG
6458      in a SAVE_EXPR and the operation can be simplified without recursing
6459      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6460   if (!TREE_CONSTANT (arg)
6461       && (TREE_SIDE_EFFECTS (arg)
6462 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6463 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6464     return NULL_TREE;
6465 
6466   arg = fold_convert_loc (loc, arg_type, arg);
6467   if (lhs == 0)
6468     {
6469       true_value = fold_convert_loc (loc, cond_type, true_value);
6470       if (cond_first_p)
6471 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6472       else
6473 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6474     }
6475   if (rhs == 0)
6476     {
6477       false_value = fold_convert_loc (loc, cond_type, false_value);
6478       if (cond_first_p)
6479 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6480       else
6481 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6482     }
6483 
6484   /* Check that we have simplified at least one of the branches.  */
6485   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6486     return NULL_TREE;
6487 
6488   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6489 }
6490 
6491 
6492 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6493 
6494    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6495    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6496    ADDEND is the same as X.
6497 
6498    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6499    and finite.  The problematic cases are when X is zero, and its mode
6500    has signed zeros.  In the case of rounding towards -infinity,
6501    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6502    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
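/* For example, X + 0.0 may only be folded to X when signed zeros are
   not honored, since -0.0 + 0.0 is +0.0; X - 0.0 may be folded
   whenever sign-dependent rounding is not in effect.  */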
6503 
6504 bool
6505 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6506 {
6507   if (!real_zerop (addend))
6508     return false;
6509 
6510   /* Don't allow the fold with -fsignaling-nans.  */
6511   if (HONOR_SNANS (element_mode (type)))
6512     return false;
6513 
6514   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6515   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6516     return true;
6517 
6518   /* In a vector or complex, we would need to check the sign of all zeros.  */
6519   if (TREE_CODE (addend) != REAL_CST)
6520     return false;
6521 
6522   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6523   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6524     negate = !negate;
6525 
6526   /* The mode has signed zeros, and we have to honor their sign.
6527      In this situation, there is only one case we can return true for.
6528      X - 0 is the same as X unless rounding towards -infinity is
6529      supported.  */
6530   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6531 }
6532 
6533 /* Subroutine of fold() that optimizes comparisons of a division by
6534    a nonzero integer constant against an integer constant, i.e.
6535    X/C1 op C2.
6536 
6537    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6538    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6539    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6540 
6541    The function returns the constant folded tree if a simplification
6542    can be made, and NULL_TREE otherwise.  */
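/* For example, with unsigned X the comparison X / 4 == 2 is folded
   into the range check 8 <= X && X <= 11, where 8 is C1 * C2 and 11
   is 8 + (C1 - 1); the multiplication and addition are checked for
   overflow so that a wrapping product degrades into a one-sided
   bound or a constant result instead.  */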
6543 
6544 static tree
6545 fold_div_compare (location_t loc,
6546 		  enum tree_code code, tree type, tree arg0, tree arg1)
6547 {
6548   tree prod, tmp, hi, lo;
6549   tree arg00 = TREE_OPERAND (arg0, 0);
6550   tree arg01 = TREE_OPERAND (arg0, 1);
6551   signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6552   bool neg_overflow = false;
6553   bool overflow;
6554 
6555   /* We have to do this the hard way to detect unsigned overflow.
6556      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6557   wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6558   prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6559   neg_overflow = false;
6560 
6561   if (sign == UNSIGNED)
6562     {
6563       tmp = int_const_binop (MINUS_EXPR, arg01,
6564                              build_int_cst (TREE_TYPE (arg01), 1));
6565       lo = prod;
6566 
6567       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6568       val = wi::add (prod, tmp, sign, &overflow);
6569       hi = force_fit_type (TREE_TYPE (arg00), val,
6570 			   -1, overflow | TREE_OVERFLOW (prod));
6571     }
6572   else if (tree_int_cst_sgn (arg01) >= 0)
6573     {
6574       tmp = int_const_binop (MINUS_EXPR, arg01,
6575 			     build_int_cst (TREE_TYPE (arg01), 1));
6576       switch (tree_int_cst_sgn (arg1))
6577 	{
6578 	case -1:
6579 	  neg_overflow = true;
6580 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6581 	  hi = prod;
6582 	  break;
6583 
6584 	case  0:
6585 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6586 	  hi = tmp;
6587 	  break;
6588 
6589 	case  1:
6590           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6591 	  lo = prod;
6592 	  break;
6593 
6594 	default:
6595 	  gcc_unreachable ();
6596 	}
6597     }
6598   else
6599     {
6600       /* A negative divisor reverses the relational operators.  */
6601       code = swap_tree_comparison (code);
6602 
6603       tmp = int_const_binop (PLUS_EXPR, arg01,
6604 			     build_int_cst (TREE_TYPE (arg01), 1));
6605       switch (tree_int_cst_sgn (arg1))
6606 	{
6607 	case -1:
6608 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6609 	  lo = prod;
6610 	  break;
6611 
6612 	case  0:
6613 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6614 	  lo = tmp;
6615 	  break;
6616 
6617 	case  1:
6618 	  neg_overflow = true;
6619 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6620 	  hi = prod;
6621 	  break;
6622 
6623 	default:
6624 	  gcc_unreachable ();
6625 	}
6626     }
6627 
6628   switch (code)
6629     {
6630     case EQ_EXPR:
6631       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6632 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6633       if (TREE_OVERFLOW (hi))
6634 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6635       if (TREE_OVERFLOW (lo))
6636 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6637       return build_range_check (loc, type, arg00, 1, lo, hi);
6638 
6639     case NE_EXPR:
6640       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6641 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6642       if (TREE_OVERFLOW (hi))
6643 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6644       if (TREE_OVERFLOW (lo))
6645 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6646       return build_range_check (loc, type, arg00, 0, lo, hi);
6647 
6648     case LT_EXPR:
6649       if (TREE_OVERFLOW (lo))
6650 	{
6651 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6652 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6653 	}
6654       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6655 
6656     case LE_EXPR:
6657       if (TREE_OVERFLOW (hi))
6658 	{
6659 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6660 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6661 	}
6662       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6663 
6664     case GT_EXPR:
6665       if (TREE_OVERFLOW (hi))
6666 	{
6667 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6668 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6669 	}
6670       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6671 
6672     case GE_EXPR:
6673       if (TREE_OVERFLOW (lo))
6674 	{
6675 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6676 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6677 	}
6678       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6679 
6680     default:
6681       break;
6682     }
6683 
6684   return NULL_TREE;
6685 }
6686 
6687 
6688 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6689    equality/inequality test, then return a simplified form of the test
6690    using a sign test.  Otherwise return NULL.  TYPE is the desired
6691    result type.  */
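/* For example, for a 32-bit A the test (A & 0x80000000) != 0 becomes
   A < 0 and (A & 0x80000000) == 0 becomes A >= 0, casting A to the
   corresponding signed type first if it is unsigned.  */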
6692 
6693 static tree
6694 fold_single_bit_test_into_sign_test (location_t loc,
6695 				     enum tree_code code, tree arg0, tree arg1,
6696 				     tree result_type)
6697 {
6698   /* If this is testing a single bit, we can optimize the test.  */
6699   if ((code == NE_EXPR || code == EQ_EXPR)
6700       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6701       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6702     {
6703       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6704 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6705       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6706 
6707       if (arg00 != NULL_TREE
6708 	  /* This is only a win if casting to a signed type is cheap,
6709 	     i.e. when arg00's type is not a partial mode.  */
6710 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6711 	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6712 	{
6713 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6714 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6715 			      result_type,
6716 			      fold_convert_loc (loc, stype, arg00),
6717 			      build_int_cst (stype, 0));
6718 	}
6719     }
6720 
6721   return NULL_TREE;
6722 }
6723 
6724 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6725    equality/inequality test, then return a simplified form of
6726    the test using shifts and logical operations.  Otherwise return
6727    NULL.  TYPE is the desired result type.  */
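/* For example, (A & 8) != 0 becomes ((A >> 3) & 1) and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1), using a signed or unsigned
   intermediate type depending on the target's load-extension
   behavior.  */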
6728 
6729 tree
6730 fold_single_bit_test (location_t loc, enum tree_code code,
6731 		      tree arg0, tree arg1, tree result_type)
6732 {
6733   /* If this is testing a single bit, we can optimize the test.  */
6734   if ((code == NE_EXPR || code == EQ_EXPR)
6735       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6736       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6737     {
6738       tree inner = TREE_OPERAND (arg0, 0);
6739       tree type = TREE_TYPE (arg0);
6740       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6741       machine_mode operand_mode = TYPE_MODE (type);
6742       int ops_unsigned;
6743       tree signed_type, unsigned_type, intermediate_type;
6744       tree tem, one;
6745 
6746       /* First, see if we can fold the single bit test into a sign-bit
6747 	 test.  */
6748       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6749 						 result_type);
6750       if (tem)
6751 	return tem;
6752 
6753       /* Otherwise we have (A & C) != 0 where C is a single bit,
6754 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6755 	 Similarly for (A & C) == 0.  */
6756 
6757       /* If INNER is a right shift of a constant and it plus BITNUM does
6758 	 not overflow, adjust BITNUM and INNER.  */
6759       if (TREE_CODE (inner) == RSHIFT_EXPR
6760 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6761 	  && bitnum < TYPE_PRECISION (type)
6762 	  && wi::ltu_p (TREE_OPERAND (inner, 1),
6763 			TYPE_PRECISION (type) - bitnum))
6764 	{
6765 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6766 	  inner = TREE_OPERAND (inner, 0);
6767 	}
6768 
6769       /* If we are going to be able to omit the AND below, we must do our
6770 	 operations as unsigned.  If we must use the AND, we have a choice.
6771 	 Normally unsigned is faster, but for some machines signed is.  */
6772       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6773 		      && !flag_syntax_only) ? 0 : 1;
6774 
6775       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6776       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6777       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6778       inner = fold_convert_loc (loc, intermediate_type, inner);
6779 
6780       if (bitnum != 0)
6781 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6782 			inner, size_int (bitnum));
6783 
6784       one = build_int_cst (intermediate_type, 1);
6785 
6786       if (code == EQ_EXPR)
6787 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6788 
6789       /* Put the AND last so it can combine with more things.  */
6790       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6791 
6792       /* Make sure to return the proper type.  */
6793       inner = fold_convert_loc (loc, result_type, inner);
6794 
6795       return inner;
6796     }
6797   return NULL_TREE;
6798 }
6799 
6800 /* Test whether it is preferable to swap two operands, ARG0 and
6801    ARG1, for example because ARG0 is an integer constant and ARG1
6802    isn't.  */
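/* For example, this returns true for (1, x), so callers canonicalize
   1 + x as x + 1; when both operands are SSA_NAMEs the one with the
   smaller version number is put first.  */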
6803 
6804 bool
6805 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6806 {
6807   if (CONSTANT_CLASS_P (arg1))
6808     return 0;
6809   if (CONSTANT_CLASS_P (arg0))
6810     return 1;
6811 
6812   STRIP_NOPS (arg0);
6813   STRIP_NOPS (arg1);
6814 
6815   if (TREE_CONSTANT (arg1))
6816     return 0;
6817   if (TREE_CONSTANT (arg0))
6818     return 1;
6819 
6820   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6821      for commutative and comparison operators.  Ensuring a canonical
6822      form allows the optimizers to find additional redundancies without
6823      having to explicitly check for both orderings.  */
6824   if (TREE_CODE (arg0) == SSA_NAME
6825       && TREE_CODE (arg1) == SSA_NAME
6826       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6827     return 1;
6828 
6829   /* Put SSA_NAMEs last.  */
6830   if (TREE_CODE (arg1) == SSA_NAME)
6831     return 0;
6832   if (TREE_CODE (arg0) == SSA_NAME)
6833     return 1;
6834 
6835   /* Put variables last.  */
6836   if (DECL_P (arg1))
6837     return 0;
6838   if (DECL_P (arg0))
6839     return 1;
6840 
6841   return 0;
6842 }
6843 
6844 
6845 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6846    means A >= Y && A != MAX, but in this case we know that
6847    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
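/* For example, given BOUND a < n and INEQ a + 1 > i, the difference
   (a + 1) - a folds to the constant 1, so the result is a >= i; if
   the difference does not fold to exactly 1, no transformation is
   done.  */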
6848 
6849 static tree
6850 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6851 {
6852   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6853 
6854   if (TREE_CODE (bound) == LT_EXPR)
6855     a = TREE_OPERAND (bound, 0);
6856   else if (TREE_CODE (bound) == GT_EXPR)
6857     a = TREE_OPERAND (bound, 1);
6858   else
6859     return NULL_TREE;
6860 
6861   typea = TREE_TYPE (a);
6862   if (!INTEGRAL_TYPE_P (typea)
6863       && !POINTER_TYPE_P (typea))
6864     return NULL_TREE;
6865 
6866   if (TREE_CODE (ineq) == LT_EXPR)
6867     {
6868       a1 = TREE_OPERAND (ineq, 1);
6869       y = TREE_OPERAND (ineq, 0);
6870     }
6871   else if (TREE_CODE (ineq) == GT_EXPR)
6872     {
6873       a1 = TREE_OPERAND (ineq, 0);
6874       y = TREE_OPERAND (ineq, 1);
6875     }
6876   else
6877     return NULL_TREE;
6878 
6879   if (TREE_TYPE (a1) != typea)
6880     return NULL_TREE;
6881 
6882   if (POINTER_TYPE_P (typea))
6883     {
6884       /* Convert the pointers to integers before taking the difference.  */
6885       tree ta = fold_convert_loc (loc, ssizetype, a);
6886       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6887       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6888     }
6889   else
6890     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6891 
6892   if (!diff || !integer_onep (diff))
6893    return NULL_TREE;
6894 
6895   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6896 }
6897 
6898 /* Fold a sum or difference where at least one operand is a multiplication.
6899    Returns the folded tree or NULL if no simplification could be made.  */
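/* For example, i * 12 + j * 4 has no identical multiplicand, but 4
   is a power of two dividing 12, so the sum is refolded as
   (i * 3 + j) * 4, saving one multiplication.  */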
6900 
6901 static tree
6902 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6903 			  tree arg0, tree arg1)
6904 {
6905   tree arg00, arg01, arg10, arg11;
6906   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6907 
6908   /* (A * C) +- (B * C) -> (A+-B) * C.
6909      (A * C) +- A -> A * (C+-1).
6910      We are most concerned about the case where C is a constant,
6911      but other combinations show up during loop reduction.  Since
6912      it is not difficult, try all four possibilities.  */
6913 
6914   if (TREE_CODE (arg0) == MULT_EXPR)
6915     {
6916       arg00 = TREE_OPERAND (arg0, 0);
6917       arg01 = TREE_OPERAND (arg0, 1);
6918     }
6919   else if (TREE_CODE (arg0) == INTEGER_CST)
6920     {
6921       arg00 = build_one_cst (type);
6922       arg01 = arg0;
6923     }
6924   else
6925     {
6926       /* We cannot generate constant 1 for fract.  */
6927       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6928 	return NULL_TREE;
6929       arg00 = arg0;
6930       arg01 = build_one_cst (type);
6931     }
6932   if (TREE_CODE (arg1) == MULT_EXPR)
6933     {
6934       arg10 = TREE_OPERAND (arg1, 0);
6935       arg11 = TREE_OPERAND (arg1, 1);
6936     }
6937   else if (TREE_CODE (arg1) == INTEGER_CST)
6938     {
6939       arg10 = build_one_cst (type);
6940       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6941 	 the purpose of this canonicalization.  */
6942       if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6943 	  && negate_expr_p (arg1)
6944 	  && code == PLUS_EXPR)
6945 	{
6946 	  arg11 = negate_expr (arg1);
6947 	  code = MINUS_EXPR;
6948 	}
6949       else
6950 	arg11 = arg1;
6951     }
6952   else
6953     {
6954       /* We cannot generate constant 1 for fract.  */
6955       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6956 	return NULL_TREE;
6957       arg10 = arg1;
6958       arg11 = build_one_cst (type);
6959     }
6960   same = NULL_TREE;
6961 
6962   if (operand_equal_p (arg01, arg11, 0))
6963     same = arg01, alt0 = arg00, alt1 = arg10;
6964   else if (operand_equal_p (arg00, arg10, 0))
6965     same = arg00, alt0 = arg01, alt1 = arg11;
6966   else if (operand_equal_p (arg00, arg11, 0))
6967     same = arg00, alt0 = arg01, alt1 = arg10;
6968   else if (operand_equal_p (arg01, arg10, 0))
6969     same = arg01, alt0 = arg00, alt1 = arg11;
6970 
6971   /* No identical multiplicands; see if we can find a common
6972      power-of-two factor in non-power-of-two multiplies.  This
6973      can help in multi-dimensional array access.  */
6974   else if (tree_fits_shwi_p (arg01)
6975 	   && tree_fits_shwi_p (arg11))
6976     {
6977       HOST_WIDE_INT int01, int11, tmp;
6978       bool swap = false;
6979       tree maybe_same;
6980       int01 = tree_to_shwi (arg01);
6981       int11 = tree_to_shwi (arg11);
6982 
6983       /* Move min of absolute values to int11.  */
6984       if (absu_hwi (int01) < absu_hwi (int11))
6985         {
6986 	  tmp = int01, int01 = int11, int11 = tmp;
6987 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
6988 	  maybe_same = arg01;
6989 	  swap = true;
6990 	}
6991       else
6992 	maybe_same = arg11;
6993 
6994       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6995 	  /* The remainder should not be a constant, otherwise we
6996 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6997 	     increase the number of multiplications necessary.  */
6998 	  && TREE_CODE (arg10) != INTEGER_CST)
6999         {
7000 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7001 			      build_int_cst (TREE_TYPE (arg00),
7002 					     int01 / int11));
7003 	  alt1 = arg10;
7004 	  same = maybe_same;
7005 	  if (swap)
7006 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7007 	}
7008     }
7009 
7010   if (same)
7011     return fold_build2_loc (loc, MULT_EXPR, type,
7012 			fold_build2_loc (loc, code, type,
7013 				     fold_convert_loc (loc, type, alt0),
7014 				     fold_convert_loc (loc, type, alt1)),
7015 			fold_convert_loc (loc, type, same));
7016 
7017   return NULL_TREE;
7018 }
7019 
7020 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7021    specified by EXPR into the buffer PTR of length LEN bytes.
7022    Return the number of bytes placed in the buffer, or zero
7023    upon failure.  */
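/* For example, encoding the 32-bit constant 0x01020304 on a
   little-endian target stores the bytes 04 03 02 01; BYTES_BIG_ENDIAN
   and WORDS_BIG_ENDIAN determine the layout for other targets.  */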
7024 
7025 static int
7026 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7027 {
7028   tree type = TREE_TYPE (expr);
7029   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7030   int byte, offset, word, words;
7031   unsigned char value;
7032 
7033   if ((off == -1 && total_bytes > len)
7034       || off >= total_bytes)
7035     return 0;
7036   if (off == -1)
7037     off = 0;
7038   words = total_bytes / UNITS_PER_WORD;
7039 
7040   for (byte = 0; byte < total_bytes; byte++)
7041     {
7042       int bitpos = byte * BITS_PER_UNIT;
7043       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7044 	 number of bytes.  */
7045       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7046 
7047       if (total_bytes > UNITS_PER_WORD)
7048 	{
7049 	  word = byte / UNITS_PER_WORD;
7050 	  if (WORDS_BIG_ENDIAN)
7051 	    word = (words - 1) - word;
7052 	  offset = word * UNITS_PER_WORD;
7053 	  if (BYTES_BIG_ENDIAN)
7054 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7055 	  else
7056 	    offset += byte % UNITS_PER_WORD;
7057 	}
7058       else
7059 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7060       if (offset >= off
7061 	  && offset - off < len)
7062 	ptr[offset - off] = value;
7063     }
7064   return MIN (len, total_bytes - off);
7065 }
7066 
7067 
7068 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7069    specified by EXPR into the buffer PTR of length LEN bytes.
7070    Return the number of bytes placed in the buffer, or zero
7071    upon failure.  */
7072 
7073 static int
7074 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7075 {
7076   tree type = TREE_TYPE (expr);
7077   machine_mode mode = TYPE_MODE (type);
7078   int total_bytes = GET_MODE_SIZE (mode);
7079   FIXED_VALUE_TYPE value;
7080   tree i_value, i_type;
7081 
7082   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7083     return 0;
7084 
7085   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7086 
7087   if (NULL_TREE == i_type
7088       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7089     return 0;
7090 
7091   value = TREE_FIXED_CST (expr);
7092   i_value = double_int_to_tree (i_type, value.data);
7093 
7094   return native_encode_int (i_value, ptr, len, off);
7095 }
7096 
7097 
7098 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7099    specified by EXPR into the buffer PTR of length LEN bytes.
7100    Return the number of bytes placed in the buffer, or zero
7101    upon failure.  */
7102 
7103 static int
7104 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7105 {
7106   tree type = TREE_TYPE (expr);
7107   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7108   int byte, offset, word, words, bitpos;
7109   unsigned char value;
7110 
7111   /* There are always 32 bits in each long, no matter the size of
7112      the host's long.  We handle floating point representations with
7113      up to 192 bits.  */
7114   long tmp[6];
7115 
7116   if ((off == -1 && total_bytes > len)
7117       || off >= total_bytes)
7118     return 0;
7119   if (off == -1)
7120     off = 0;
7121   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7122 
7123   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7124 
7125   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7126        bitpos += BITS_PER_UNIT)
7127     {
7128       byte = (bitpos / BITS_PER_UNIT) & 3;
7129       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7130 
7131       if (UNITS_PER_WORD < 4)
7132 	{
7133 	  word = byte / UNITS_PER_WORD;
7134 	  if (WORDS_BIG_ENDIAN)
7135 	    word = (words - 1) - word;
7136 	  offset = word * UNITS_PER_WORD;
7137 	  if (BYTES_BIG_ENDIAN)
7138 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7139 	  else
7140 	    offset += byte % UNITS_PER_WORD;
7141 	}
7142       else
7143 	{
7144 	  offset = byte;
7145 	  if (BYTES_BIG_ENDIAN)
7146 	    {
7147 	      /* Reverse bytes within each long, or within the entire float
7148 		 if it's smaller than a long (for HFmode).  */
7149 	      offset = MIN (3, total_bytes - 1) - offset;
7150 	      gcc_assert (offset >= 0);
7151 	    }
7152 	}
7153       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7154       if (offset >= off
7155 	  && offset - off < len)
7156 	ptr[offset - off] = value;
7157     }
7158   return MIN (len, total_bytes - off);
7159 }
7160 
7161 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7162    specified by EXPR into the buffer PTR of length LEN bytes.
7163    Return the number of bytes placed in the buffer, or zero
7164    upon failure.  */
7165 
7166 static int
7167 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7168 {
7169   int rsize, isize;
7170   tree part;
7171 
7172   part = TREE_REALPART (expr);
7173   rsize = native_encode_expr (part, ptr, len, off);
7174   if (off == -1
7175       && rsize == 0)
7176     return 0;
7177   part = TREE_IMAGPART (expr);
7178   if (off != -1)
7179     off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7180   isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7181   if (off == -1
7182       && isize != rsize)
7183     return 0;
7184   return rsize + isize;
7185 }
7186 
7187 
7188 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7189    specified by EXPR into the buffer PTR of length LEN bytes.
7190    Return the number of bytes placed in the buffer, or zero
7191    upon failure.  */
7192 
7193 static int
7194 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7195 {
7196   unsigned i, count;
7197   int size, offset;
7198   tree itype, elem;
7199 
7200   offset = 0;
7201   count = VECTOR_CST_NELTS (expr);
7202   itype = TREE_TYPE (TREE_TYPE (expr));
7203   size = GET_MODE_SIZE (TYPE_MODE (itype));
7204   for (i = 0; i < count; i++)
7205     {
7206       if (off >= size)
7207 	{
7208 	  off -= size;
7209 	  continue;
7210 	}
7211       elem = VECTOR_CST_ELT (expr, i);
7212       int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7213       if ((off == -1 && res != size)
7214 	  || res == 0)
7215 	return 0;
7216       offset += res;
7217       if (offset >= len)
7218 	return (off == -1 && i < count - 1) ? 0 : offset;
7219       if (off != -1)
7220 	off = 0;
7221     }
7222   return offset;
7223 }
7224 
7225 
7226 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7227    specified by EXPR into the buffer PTR of length LEN bytes.
7228    Return the number of bytes placed in the buffer, or zero
7229    upon failure.  */
7230 
7231 static int
7232 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7233 {
7234   if (! can_native_encode_string_p (expr))
7235     return 0;
7236 
7237   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7238   if ((off == -1 && total_bytes > len)
7239       || off >= total_bytes)
7240     return 0;
7241   if (off == -1)
7242     off = 0;
7243   if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7244     {
7245       int written = 0;
7246       if (off < TREE_STRING_LENGTH (expr))
7247 	{
7248 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7249 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7250 	}
7251       memset (ptr + written, 0,
7252 	      MIN (total_bytes - written, len - written));
7253     }
7254   else
7255     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7256   return MIN (total_bytes - off, len);
7257 }
7258 
7259 
7260 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7261    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7262    buffer PTR of length LEN bytes.  If OFF is not -1 then start
7263    the encoding at byte offset OFF and encode at most LEN bytes.
7264    Return the number of bytes placed in the buffer, or zero upon failure.  */
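/* For example, on a target with 32-bit int,
   native_encode_expr (build_int_cst (integer_type_node, 7), buf, 4, -1)
   fills BUF with the 4-byte target representation of 7 and returns 4.  */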
7265 
7266 int
7267 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7268 {
7269   /* We don't support starting at a negative offset, and -1 is special.  */
7270   if (off < -1)
7271     return 0;
7272 
7273   switch (TREE_CODE (expr))
7274     {
7275     case INTEGER_CST:
7276       return native_encode_int (expr, ptr, len, off);
7277 
7278     case REAL_CST:
7279       return native_encode_real (expr, ptr, len, off);
7280 
7281     case FIXED_CST:
7282       return native_encode_fixed (expr, ptr, len, off);
7283 
7284     case COMPLEX_CST:
7285       return native_encode_complex (expr, ptr, len, off);
7286 
7287     case VECTOR_CST:
7288       return native_encode_vector (expr, ptr, len, off);
7289 
7290     case STRING_CST:
7291       return native_encode_string (expr, ptr, len, off);
7292 
7293     default:
7294       return 0;
7295     }
7296 }
7297 
7298 
7299 /* Subroutine of native_interpret_expr.  Interpret the contents of
7300    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7301    If the buffer cannot be interpreted, return NULL_TREE.  */
7302 
7303 static tree
7304 native_interpret_int (tree type, const unsigned char *ptr, int len)
7305 {
7306   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7307 
7308   if (total_bytes > len
7309       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7310     return NULL_TREE;
7311 
7312   wide_int result = wi::from_buffer (ptr, total_bytes);
7313 
7314   return wide_int_to_tree (type, result);
7315 }
7316 
7317 
7318 /* Subroutine of native_interpret_expr.  Interpret the contents of
7319    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7320    If the buffer cannot be interpreted, return NULL_TREE.  */
7321 
7322 static tree
7323 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7324 {
7325   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7326   double_int result;
7327   FIXED_VALUE_TYPE fixed_value;
7328 
7329   if (total_bytes > len
7330       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7331     return NULL_TREE;
7332 
7333   result = double_int::from_buffer (ptr, total_bytes);
7334   fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7335 
7336   return build_fixed (type, fixed_value);
7337 }
7338 
7339 
7340 /* Subroutine of native_interpret_expr.  Interpret the contents of
7341    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7342    If the buffer cannot be interpreted, return NULL_TREE.  */
7343 
7344 static tree
7345 native_interpret_real (tree type, const unsigned char *ptr, int len)
7346 {
7347   machine_mode mode = TYPE_MODE (type);
7348   int total_bytes = GET_MODE_SIZE (mode);
7349   unsigned char value;
7350   /* There are always 32 bits in each long, no matter the size of
7351      the host's long.  We handle floating point representations with
7352      up to 192 bits.  */
7353   REAL_VALUE_TYPE r;
7354   long tmp[6];
7355 
7356   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7357   if (total_bytes > len || total_bytes > 24)
7358     return NULL_TREE;
7359   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7360 
7361   memset (tmp, 0, sizeof (tmp));
7362   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7363        bitpos += BITS_PER_UNIT)
7364     {
7365       /* Both OFFSET and BYTE index within a long;
7366 	 bitpos indexes the whole float.  */
7367       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7368       if (UNITS_PER_WORD < 4)
7369 	{
7370 	  int word = byte / UNITS_PER_WORD;
7371 	  if (WORDS_BIG_ENDIAN)
7372 	    word = (words - 1) - word;
7373 	  offset = word * UNITS_PER_WORD;
7374 	  if (BYTES_BIG_ENDIAN)
7375 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7376 	  else
7377 	    offset += byte % UNITS_PER_WORD;
7378 	}
7379       else
7380 	{
7381 	  offset = byte;
7382 	  if (BYTES_BIG_ENDIAN)
7383 	    {
7384 	      /* Reverse bytes within each long, or within the entire float
7385 		 if it's smaller than a long (for HFmode).  */
7386 	      offset = MIN (3, total_bytes - 1) - offset;
7387 	      gcc_assert (offset >= 0);
7388 	    }
7389 	}
7390       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7391 
7392       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7393     }
7394 
7395   real_from_target (&r, tmp, mode);
7396   return build_real (type, r);
7397 }
7398 
7399 
7400 /* Subroutine of native_interpret_expr.  Interpret the contents of
7401    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7402    If the buffer cannot be interpreted, return NULL_TREE.  */
7403 
7404 static tree
7405 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7406 {
7407   tree etype, rpart, ipart;
7408   int size;
7409 
7410   etype = TREE_TYPE (type);
7411   size = GET_MODE_SIZE (TYPE_MODE (etype));
7412   if (size * 2 > len)
7413     return NULL_TREE;
7414   rpart = native_interpret_expr (etype, ptr, size);
7415   if (!rpart)
7416     return NULL_TREE;
7417   ipart = native_interpret_expr (etype, ptr+size, size);
7418   if (!ipart)
7419     return NULL_TREE;
7420   return build_complex (type, rpart, ipart);
7421 }
7422 
7423 
7424 /* Subroutine of native_interpret_expr.  Interpret the contents of
7425    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7426    If the buffer cannot be interpreted, return NULL_TREE.  */
7427 
7428 static tree
7429 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7430 {
7431   tree etype, elem;
7432   int i, size, count;
7433   tree *elements;
7434 
7435   etype = TREE_TYPE (type);
7436   size = GET_MODE_SIZE (TYPE_MODE (etype));
7437   count = TYPE_VECTOR_SUBPARTS (type);
7438   if (size * count > len)
7439     return NULL_TREE;
7440 
7441   elements = XALLOCAVEC (tree, count);
7442   for (i = count - 1; i >= 0; i--)
7443     {
7444       elem = native_interpret_expr (etype, ptr+(i*size), size);
7445       if (!elem)
7446 	return NULL_TREE;
7447       elements[i] = elem;
7448     }
7449   return build_vector (type, elements);
7450 }
7451 
7452 
7453 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7454    the buffer PTR of length LEN as a constant of type TYPE.  For
7455    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7456    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7457    return NULL_TREE.  */
7458 
7459 tree
7460 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7461 {
7462   switch (TREE_CODE (type))
7463     {
7464     case INTEGER_TYPE:
7465     case ENUMERAL_TYPE:
7466     case BOOLEAN_TYPE:
7467     case POINTER_TYPE:
7468     case REFERENCE_TYPE:
7469       return native_interpret_int (type, ptr, len);
7470 
7471     case REAL_TYPE:
7472       return native_interpret_real (type, ptr, len);
7473 
7474     case FIXED_POINT_TYPE:
7475       return native_interpret_fixed (type, ptr, len);
7476 
7477     case COMPLEX_TYPE:
7478       return native_interpret_complex (type, ptr, len);
7479 
7480     case VECTOR_TYPE:
7481       return native_interpret_vector (type, ptr, len);
7482 
7483     default:
7484       return NULL_TREE;
7485     }
7486 }
7487 
7488 /* Returns true if we can interpret the contents of a native encoding
7489    as TYPE.  */
7490 
7491 static bool
7492 can_native_interpret_type_p (tree type)
7493 {
7494   switch (TREE_CODE (type))
7495     {
7496     case INTEGER_TYPE:
7497     case ENUMERAL_TYPE:
7498     case BOOLEAN_TYPE:
7499     case POINTER_TYPE:
7500     case REFERENCE_TYPE:
7501     case FIXED_POINT_TYPE:
7502     case REAL_TYPE:
7503     case COMPLEX_TYPE:
7504     case VECTOR_TYPE:
7505       return true;
7506     default:
7507       return false;
7508     }
7509 }
7510 
7511 /* Return true iff a constant of type TYPE is accepted by
7512    native_encode_expr.  */
7513 
7514 bool
7515 can_native_encode_type_p (tree type)
7516 {
7517   switch (TREE_CODE (type))
7518     {
7519     case INTEGER_TYPE:
7520     case REAL_TYPE:
7521     case FIXED_POINT_TYPE:
7522     case COMPLEX_TYPE:
7523     case VECTOR_TYPE:
7524     case POINTER_TYPE:
7525       return true;
7526     default:
7527       return false;
7528     }
7529 }
7530 
7531 /* Return true iff the STRING_CST EXPR is accepted by
7532    native_encode_expr.  */
7533 
7534 bool
7535 can_native_encode_string_p (const_tree expr)
7536 {
7537   tree type = TREE_TYPE (expr);
7538 
7539   if (TREE_CODE (type) != ARRAY_TYPE
7540       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7541       || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT)
7542       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7543     return false;
7544   return true;
7545 }
7546 
7547 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7548    TYPE at compile-time.  If we're unable to perform the conversion
7549    return NULL_TREE.  */
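/* For example, a VIEW_CONVERT_EXPR from the float constant 1.0f to a
   32-bit integer type encodes the REAL_CST into its target bytes and
   reinterprets them, yielding 0x3f800000 on an IEEE single-precision
   target.  */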
7550 
7551 static tree
7552 fold_view_convert_expr (tree type, tree expr)
7553 {
7554   /* We support up to 512-bit values (for V8DFmode).  */
7555   unsigned char buffer[64];
7556   int len;
7557 
7558   /* Check that the host and target are sane.  */
7559   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7560     return NULL_TREE;
7561 
7562   len = native_encode_expr (expr, buffer, sizeof (buffer));
7563   if (len == 0)
7564     return NULL_TREE;
7565 
7566   return native_interpret_expr (type, buffer, len);
7567 }
7568 
7569 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7570    to avoid confusing the gimplify process.  */
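/* For example, &*p folds to p (with a conversion if the pointer types
   differ), and the address of a MEM_REF with a zero offset folds back
   to its base pointer.  */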
7571 
7572 tree
7573 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7574 {
7575   /* The size of the object is not relevant when talking about its address.  */
7576   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7577     t = TREE_OPERAND (t, 0);
7578 
7579   if (TREE_CODE (t) == INDIRECT_REF)
7580     {
7581       t = TREE_OPERAND (t, 0);
7582 
7583       if (TREE_TYPE (t) != ptrtype)
7584 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7585     }
7586   else if (TREE_CODE (t) == MEM_REF
7587 	   && integer_zerop (TREE_OPERAND (t, 1)))
7588     return TREE_OPERAND (t, 0);
7589   else if (TREE_CODE (t) == MEM_REF
7590 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7591     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7592 			TREE_OPERAND (t, 0),
7593 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7594   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7595     {
7596       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7597 
7598       if (TREE_TYPE (t) != ptrtype)
7599 	t = fold_convert_loc (loc, ptrtype, t);
7600     }
7601   else
7602     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7603 
7604   return t;
7605 }
7606 
7607 /* Build an expression for the address of T.  */
7608 
7609 tree
7610 build_fold_addr_expr_loc (location_t loc, tree t)
7611 {
7612   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7613 
7614   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7615 }
7616 
7617 /* Fold a unary expression of code CODE and type TYPE with operand
7618    OP0.  Return the folded expression if folding is successful.
7619    Otherwise, return NULL_TREE.  */
7620 
7621 tree
7622 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7623 {
7624   tree tem;
7625   tree arg0;
7626   enum tree_code_class kind = TREE_CODE_CLASS (code);
7627 
7628   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7629 	      && TREE_CODE_LENGTH (code) == 1);
7630 
7631   arg0 = op0;
7632   if (arg0)
7633     {
7634       if (CONVERT_EXPR_CODE_P (code)
7635 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7636 	{
7637 	  /* Don't use STRIP_NOPS, because signedness of argument type
7638 	     matters.  */
7639 	  STRIP_SIGN_NOPS (arg0);
7640 	}
7641       else
7642 	{
7643 	  /* Strip any conversions that don't change the mode.  This
7644 	     is safe for every expression, except for a comparison
7645 	     expression because its signedness is derived from its
7646 	     operands.
7647 
7648 	     Note that this is done as an internal manipulation within
7649 	     the constant folder, in order to find the simplest
7650 	     representation of the arguments so that their form can be
7651 	     studied.  In any case, the appropriate type conversions
7652 	     should be put back in the tree that will get out of the
7653 	     constant folder.  */
7654 	  STRIP_NOPS (arg0);
7655 	}
7656 
7657       if (CONSTANT_CLASS_P (arg0))
7658 	{
7659 	  tree tem = const_unop (code, type, arg0);
7660 	  if (tem)
7661 	    {
7662 	      if (TREE_TYPE (tem) != type)
7663 		tem = fold_convert_loc (loc, type, tem);
7664 	      return tem;
7665 	    }
7666 	}
7667     }
7668 
7669   tem = generic_simplify (loc, code, type, op0);
7670   if (tem)
7671     return tem;
7672 
7673   if (TREE_CODE_CLASS (code) == tcc_unary)
7674     {
7675       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7676 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7677 		       fold_build1_loc (loc, code, type,
7678 				    fold_convert_loc (loc, TREE_TYPE (op0),
7679 						      TREE_OPERAND (arg0, 1))));
7680       else if (TREE_CODE (arg0) == COND_EXPR)
7681 	{
7682 	  tree arg01 = TREE_OPERAND (arg0, 1);
7683 	  tree arg02 = TREE_OPERAND (arg0, 2);
7684 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7685 	    arg01 = fold_build1_loc (loc, code, type,
7686 				 fold_convert_loc (loc,
7687 						   TREE_TYPE (op0), arg01));
7688 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7689 	    arg02 = fold_build1_loc (loc, code, type,
7690 				 fold_convert_loc (loc,
7691 						   TREE_TYPE (op0), arg02));
7692 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7693 			     arg01, arg02);
7694 
7695 	  /* If this was a conversion, and all we did was to move it
7696 	     inside the COND_EXPR, bring it back out.  But leave it if
7697 	     it is a conversion from integer to integer and the
7698 	     result precision is no wider than a word since such a
7699 	     conversion is cheap and may be optimized away by combine,
7700 	     while it couldn't if it were outside the COND_EXPR.  Then return
7701 	     so we don't get into an infinite recursion loop taking the
7702 	     conversion out and then back in.  */
7703 
7704 	  if ((CONVERT_EXPR_CODE_P (code)
7705 	       || code == NON_LVALUE_EXPR)
7706 	      && TREE_CODE (tem) == COND_EXPR
7707 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7708 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7709 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7710 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7711 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7712 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7713 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7714 		     && (INTEGRAL_TYPE_P
7715 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7716 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7717 		  || flag_syntax_only))
7718 	    tem = build1_loc (loc, code, type,
7719 			      build3 (COND_EXPR,
7720 				      TREE_TYPE (TREE_OPERAND
7721 						 (TREE_OPERAND (tem, 1), 0)),
7722 				      TREE_OPERAND (tem, 0),
7723 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7724 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7725 						    0)));
7726 	  return tem;
7727 	}
7728    }
7729 
7730   switch (code)
7731     {
7732     case NON_LVALUE_EXPR:
7733       if (!maybe_lvalue_p (op0))
7734 	return fold_convert_loc (loc, type, op0);
7735       return NULL_TREE;
7736 
7737     CASE_CONVERT:
7738     case FLOAT_EXPR:
7739     case FIX_TRUNC_EXPR:
7740       if (COMPARISON_CLASS_P (op0))
7741 	{
7742 	  /* If we have (type) (a CMP b) and type is an integral type, return
7743 	     new expression involving the new type.  Canonicalize
7744 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7745 	     non-integral type.
7746 	     Do not fold the result as that would not simplify further; also,
7747 	     folding again results in infinite recursion.  */
7748 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7749 	    return build2_loc (loc, TREE_CODE (op0), type,
7750 			       TREE_OPERAND (op0, 0),
7751 			       TREE_OPERAND (op0, 1));
7752 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7753 		   && TREE_CODE (type) != VECTOR_TYPE)
7754 	    return build3_loc (loc, COND_EXPR, type, op0,
7755 			       constant_boolean_node (true, type),
7756 			       constant_boolean_node (false, type));
7757 	}
7758 
7759       /* Handle (T *)&A.B.C for A being of type T and B and C
7760 	 living at offset zero.  This occurs frequently in
7761 	 C++ upcasting and then accessing the base.  */
7762       if (TREE_CODE (op0) == ADDR_EXPR
7763 	  && POINTER_TYPE_P (type)
7764 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7765         {
7766 	  HOST_WIDE_INT bitsize, bitpos;
7767 	  tree offset;
7768 	  machine_mode mode;
7769 	  int unsignedp, reversep, volatilep;
7770 	  tree base
7771 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7772 				   &offset, &mode, &unsignedp, &reversep,
7773 				   &volatilep);
7774 	  /* If the reference was to a (constant) zero offset, we can use
7775 	     the address of the base if it has the same base type
7776 	     as the result type and the pointer type is unqualified.  */
7777 	  if (! offset && bitpos == 0
7778 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7779 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7780 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7781 	    return fold_convert_loc (loc, type,
7782 				     build_fold_addr_expr_loc (loc, base));
7783         }
7784 
7785       if (TREE_CODE (op0) == MODIFY_EXPR
7786 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7787 	  /* Detect assigning a bitfield.  */
7788 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7789 	       && DECL_BIT_FIELD
7790 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7791 	{
7792 	  /* Don't leave an assignment inside a conversion
7793 	     unless assigning a bitfield.  */
7794 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7795 	  /* First do the assignment, then return converted constant.  */
7796 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7797 	  TREE_NO_WARNING (tem) = 1;
7798 	  TREE_USED (tem) = 1;
7799 	  return tem;
7800 	}
7801 
7802       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7803 	 constant (if x has signed type, the sign bit cannot be set
7804 	 in c).  This folds extension into the BIT_AND_EXPR.
7805 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7806 	 very likely don't have maximal range for their precision and this
7807 	 transformation effectively doesn't preserve non-maximal ranges.  */
7808       if (TREE_CODE (type) == INTEGER_TYPE
7809 	  && TREE_CODE (op0) == BIT_AND_EXPR
7810 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7811 	{
7812 	  tree and_expr = op0;
7813 	  tree and0 = TREE_OPERAND (and_expr, 0);
7814 	  tree and1 = TREE_OPERAND (and_expr, 1);
7815 	  int change = 0;
7816 
7817 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7818 	      || (TYPE_PRECISION (type)
7819 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7820 	    change = 1;
7821 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7822 		   <= HOST_BITS_PER_WIDE_INT
7823 		   && tree_fits_uhwi_p (and1))
7824 	    {
7825 	      unsigned HOST_WIDE_INT cst;
7826 
7827 	      cst = tree_to_uhwi (and1);
7828 	      cst &= HOST_WIDE_INT_M1U
7829 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7830 	      change = (cst == 0);
7831 	      if (change
7832 		  && !flag_syntax_only
7833 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7834 		      == ZERO_EXTEND))
7835 		{
7836 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7837 		  and0 = fold_convert_loc (loc, uns, and0);
7838 		  and1 = fold_convert_loc (loc, uns, and1);
7839 		}
7840 	    }
7841 	  if (change)
7842 	    {
7843 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7844 				    TREE_OVERFLOW (and1));
7845 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7846 				      fold_convert_loc (loc, type, and0), tem);
7847 	    }
7848 	}
7849 
7850       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7851 	 cast (T1)X will fold away.  We assume that this happens when X itself
7852 	 is a cast.  */
7853       if (POINTER_TYPE_P (type)
7854 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7855 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7856 	{
7857 	  tree arg00 = TREE_OPERAND (arg0, 0);
7858 	  tree arg01 = TREE_OPERAND (arg0, 1);
7859 
7860 	  return fold_build_pointer_plus_loc
7861 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7862 	}
7863 
7864       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7865 	 of the same precision, and X is an integer type not narrower than
7866 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7867       if (INTEGRAL_TYPE_P (type)
7868 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7869 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7870 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7871 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7872 	{
7873 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7874 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7875 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7876 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7877 				fold_convert_loc (loc, type, tem));
7878 	}
7879 
7880       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7881 	 type of X and Y (integer types only).  */
7882       if (INTEGRAL_TYPE_P (type)
7883 	  && TREE_CODE (op0) == MULT_EXPR
7884 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7885 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7886 	{
7887 	  /* Be careful not to introduce new overflows.  */
7888 	  tree mult_type;
7889           if (TYPE_OVERFLOW_WRAPS (type))
7890 	    mult_type = type;
7891 	  else
7892 	    mult_type = unsigned_type_for (type);
7893 
7894 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7895 	    {
7896 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7897 				 fold_convert_loc (loc, mult_type,
7898 						   TREE_OPERAND (op0, 0)),
7899 				 fold_convert_loc (loc, mult_type,
7900 						   TREE_OPERAND (op0, 1)));
7901 	      return fold_convert_loc (loc, type, tem);
7902 	    }
7903 	}
7904 
7905       return NULL_TREE;
7906 
7907     case VIEW_CONVERT_EXPR:
7908       if (TREE_CODE (op0) == MEM_REF)
7909         {
7910 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7911 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7912 	  tem = fold_build2_loc (loc, MEM_REF, type,
7913 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7914 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7915 	  return tem;
7916 	}
7917 
7918       return NULL_TREE;
7919 
7920     case NEGATE_EXPR:
7921       tem = fold_negate_expr (loc, arg0);
7922       if (tem)
7923 	return fold_convert_loc (loc, type, tem);
7924       return NULL_TREE;
7925 
7926     case ABS_EXPR:
7927       /* Convert fabs((double)float) into (double)fabsf(float).  */
7928       if (TREE_CODE (arg0) == NOP_EXPR
7929 	  && TREE_CODE (type) == REAL_TYPE)
7930 	{
7931 	  tree targ0 = strip_float_extensions (arg0);
7932 	  if (targ0 != arg0)
7933 	    return fold_convert_loc (loc, type,
7934 				     fold_build1_loc (loc, ABS_EXPR,
7935 						  TREE_TYPE (targ0),
7936 						  targ0));
7937 	}
7938       return NULL_TREE;
7939 
7940     case BIT_NOT_EXPR:
7941       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
7942       if (TREE_CODE (arg0) == BIT_XOR_EXPR
7943 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7944 				    fold_convert_loc (loc, type,
7945 						      TREE_OPERAND (arg0, 0)))))
7946 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7947 				fold_convert_loc (loc, type,
7948 						  TREE_OPERAND (arg0, 1)));
7949       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7950 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7951 			       	     fold_convert_loc (loc, type,
7952 						       TREE_OPERAND (arg0, 1)))))
7953 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7954 			    fold_convert_loc (loc, type,
7955 					      TREE_OPERAND (arg0, 0)), tem);
7956 
7957       return NULL_TREE;
7958 
7959     case TRUTH_NOT_EXPR:
7960       /* Note that the operand of this must be an int
7961 	 and its values must be 0 or 1.
7962 	 ("true" is a fixed value perhaps depending on the language,
7963 	 but we don't handle values other than 1 correctly yet.)  */
7964       tem = fold_truth_not_expr (loc, arg0);
7965       if (!tem)
7966 	return NULL_TREE;
7967       return fold_convert_loc (loc, type, tem);
7968 
7969     case INDIRECT_REF:
7970       /* Fold *&X to X if X is an lvalue.  */
7971       if (TREE_CODE (op0) == ADDR_EXPR)
7972 	{
7973 	  tree op00 = TREE_OPERAND (op0, 0);
7974 	  if ((VAR_P (op00)
7975 	       || TREE_CODE (op00) == PARM_DECL
7976 	       || TREE_CODE (op00) == RESULT_DECL)
7977 	      && !TREE_READONLY (op00))
7978 	    return op00;
7979 	}
7980       return NULL_TREE;
7981 
7982     default:
7983       return NULL_TREE;
7984     } /* switch (code) */
7985 }
7986 
7987 
7988 /* If the operation was a conversion do _not_ mark a resulting constant
7989    with TREE_OVERFLOW if the original constant was not.  These conversions
7990    have implementation defined behavior and retaining the TREE_OVERFLOW
7991    flag here would confuse later passes such as VRP.  */
7992 tree
7993 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7994 				tree type, tree op0)
7995 {
7996   tree res = fold_unary_loc (loc, code, type, op0);
7997   if (res
7998       && TREE_CODE (res) == INTEGER_CST
7999       && TREE_CODE (op0) == INTEGER_CST
8000       && CONVERT_EXPR_CODE_P (code))
8001     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8002 
8003   return res;
8004 }
8005 
8006 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8007    operands OP0 and OP1.  LOC is the location of the resulting expression.
8008    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8009    Return the folded expression if folding is successful.  Otherwise,
8010    return NULL_TREE.  */
8011 static tree
8012 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8013 		  tree arg0, tree arg1, tree op0, tree op1)
8014 {
8015   tree tem;
8016 
8017   /* We only do these simplifications if we are optimizing.  */
8018   if (!optimize)
8019     return NULL_TREE;
8020 
8021   /* Check for things like (A || B) && (A || C).  We can convert this
8022      to A || (B && C).  Note that either operator can be any of the four
8023      truth and/or operations and the transformation will still be
8024      valid.   Also note that we only care about order for the
8025      ANDIF and ORIF operators.  If B contains side effects, this
8026      might change the truth-value of A.  */
8027   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8028       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8029 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8030 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8031 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8032       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8033     {
8034       tree a00 = TREE_OPERAND (arg0, 0);
8035       tree a01 = TREE_OPERAND (arg0, 1);
8036       tree a10 = TREE_OPERAND (arg1, 0);
8037       tree a11 = TREE_OPERAND (arg1, 1);
8038       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8039 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8040 			 && (code == TRUTH_AND_EXPR
8041 			     || code == TRUTH_OR_EXPR));
8042 
8043       if (operand_equal_p (a00, a10, 0))
8044 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8045 			    fold_build2_loc (loc, code, type, a01, a11));
8046       else if (commutative && operand_equal_p (a00, a11, 0))
8047 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8048 			    fold_build2_loc (loc, code, type, a01, a10));
8049       else if (commutative && operand_equal_p (a01, a10, 0))
8050 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8051 			    fold_build2_loc (loc, code, type, a00, a11));
8052 
8053       /* This case is tricky because we must either have commutative
8054 	 operators or else A10 must not have side-effects.  */
8055 
8056       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8057 	       && operand_equal_p (a01, a11, 0))
8058 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8059 			    fold_build2_loc (loc, code, type, a00, a10),
8060 			    a01);
8061     }
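  /* Illustrative sketch: (a || b) && (a || c) becomes a || (b && c)
     via the A00 == A10 match above, which is valid only because the
     inner right-hand operand b has no side effects; for the commutative
     TRUTH_AND/TRUTH_OR codes, (b OR a) AND (a OR c) likewise becomes
     a OR (b AND c).  */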
8062 
8063   /* See if we can build a range comparison.  */
8064   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8065     return tem;
8066 
8067   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8068       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8069     {
8070       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8071       if (tem)
8072 	return fold_build2_loc (loc, code, type, tem, arg1);
8073     }
8074 
8075   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8076       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8077     {
8078       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8079       if (tem)
8080 	return fold_build2_loc (loc, code, type, arg0, tem);
8081     }
8082 
8083   /* Check for the possibility of merging component references.  If our
8084      lhs is another similar operation, try to merge its rhs with our
8085      rhs.  Then try to merge our lhs and rhs.  */
8086   if (TREE_CODE (arg0) == code
8087       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8088 					 TREE_OPERAND (arg0, 1), arg1)))
8089     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8090 
8091   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8092     return tem;
8093 
8094   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8095       && (code == TRUTH_AND_EXPR
8096           || code == TRUTH_ANDIF_EXPR
8097           || code == TRUTH_OR_EXPR
8098           || code == TRUTH_ORIF_EXPR))
8099     {
8100       enum tree_code ncode, icode;
8101 
8102       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8103 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8104       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8105 
8106       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8107 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8108 	 We don't want to pack more than two leaves into a non-IF AND/OR
8109 	 expression.
8110 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8111 	 and isn't equal to IF-CODE, then we don't want to add the
8112 	 right-hand operand.  If the inner right-hand side of the
8113 	 left-hand operand has side effects, or isn't simple, then we
8114 	 can't add to it, as otherwise we might destroy the if-sequence.  */
8115       if (TREE_CODE (arg0) == icode
8116 	  && simple_operand_p_2 (arg1)
8117 	  /* Needed for sequence points, to handle traps and
8118 	     side effects.  */
8119 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8120 	{
8121 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8122 				 arg1);
8123 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8124 				  tem);
8125 	}
8126 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8127 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8128       else if (TREE_CODE (arg1) == icode
8129 	  && simple_operand_p_2 (arg0)
8130 	  /* Needed for sequence points, to handle traps and
8131 	     side effects.  */
8132 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8133 	{
8134 	  tem = fold_build2_loc (loc, ncode, type,
8135 				 arg0, TREE_OPERAND (arg1, 0));
8136 	  return fold_build2_loc (loc, icode, type, tem,
8137 				  TREE_OPERAND (arg1, 1));
8138 	}
8139       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8140 	 into (A OR B).
8141 	 For sequence point consistency, we need to check for traps
8142 	 and side effects.  */
8143       else if (code == icode && simple_operand_p_2 (arg0)
8144                && simple_operand_p_2 (arg1))
8145 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8146     }
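      /* Illustrative sketch: when branch costs make non-short-circuit
	 evaluation profitable (LOGICAL_OP_NON_SHORT_CIRCUIT), a chain
	 like ((a && b) && c) with simple, non-trapping b and c is
	 rebuilt as a && (b AND c): one conditional jump for A and a
	 plain TRUTH_AND_EXPR for the rest.  */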
8147 
8148   return NULL_TREE;
8149 }
8150 
8151 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8152    by changing CODE to reduce the magnitude of constants involved in
8153    ARG0 of the comparison.
8154    Returns a canonicalized comparison tree if a simplification was
8155    possible, otherwise returns NULL_TREE.
8156    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8157    valid if signed overflow is undefined.  */
8158 
8159 static tree
8160 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8161 				 tree arg0, tree arg1,
8162 				 bool *strict_overflow_p)
8163 {
8164   enum tree_code code0 = TREE_CODE (arg0);
8165   tree t, cst0 = NULL_TREE;
8166   int sgn0;
8167 
8168   /* Match A +- CST code arg1.  We can change this only if overflow
8169      is undefined.  */
8170   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8171 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8172 	/* In principle pointers also have undefined overflow behavior,
8173 	   but that causes problems elsewhere.  */
8174 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8175 	&& (code0 == MINUS_EXPR
8176 	    || code0 == PLUS_EXPR)
8177 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8178     return NULL_TREE;
8179 
8180   /* Identify the constant in arg0 and its sign.  */
8181   cst0 = TREE_OPERAND (arg0, 1);
8182   sgn0 = tree_int_cst_sgn (cst0);
8183 
8184   /* Overflowed constants and zero will cause problems.  */
8185   if (integer_zerop (cst0)
8186       || TREE_OVERFLOW (cst0))
8187     return NULL_TREE;
8188 
8189   /* See if we can reduce the magnitude of the constant in
8190      arg0 by changing the comparison code.  */
8191   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8192   if (code == LT_EXPR
8193       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8194     code = LE_EXPR;
8195   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8196   else if (code == GT_EXPR
8197 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8198     code = GE_EXPR;
8199   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8200   else if (code == LE_EXPR
8201 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8202     code = LT_EXPR;
8203   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8204   else if (code == GE_EXPR
8205 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8206     code = GT_EXPR;
8207   else
8208     return NULL_TREE;
8209   *strict_overflow_p = true;
8210 
8211   /* Now build the constant reduced in magnitude.  But not if that
8212      would produce one outside of its type's range.  */
8213   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8214       && ((sgn0 == 1
8215 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8216 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8217 	  || (sgn0 == -1
8218 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8219 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8220     return NULL_TREE;
8221 
8222   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8223 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8224   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8225   t = fold_convert (TREE_TYPE (arg1), t);
8226 
8227   return fold_build2_loc (loc, code, type, t, arg1);
8228 }
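/* Illustrative sketch: for signed X with undefined overflow, X - 3 < Y
   is canonicalized to X - 2 <= Y, trading a strict comparison for a
   non-strict one while reducing the constant's magnitude by one; the
   range check above keeps CST - 1 (or CST + 1) inside the constant's
   type.  */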
8229 
8230 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8231    overflow further.  Try to decrease the magnitude of constants involved
8232    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8233    and put sole constants at the second argument position.
8234    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8235 
8236 static tree
8237 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8238 			       tree arg0, tree arg1)
8239 {
8240   tree t;
8241   bool strict_overflow_p;
8242   const char * const warnmsg = G_("assuming signed overflow does not occur "
8243 				  "when reducing constant in comparison");
8244 
8245   /* Try canonicalization by simplifying arg0.  */
8246   strict_overflow_p = false;
8247   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8248 				       &strict_overflow_p);
8249   if (t)
8250     {
8251       if (strict_overflow_p)
8252 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8253       return t;
8254     }
8255 
8256   /* Try canonicalization by simplifying arg1 using the swapped
8257      comparison.  */
8258   code = swap_tree_comparison (code);
8259   strict_overflow_p = false;
8260   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8261 				       &strict_overflow_p);
8262   if (t && strict_overflow_p)
8263     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8264   return t;
8265 }
8266 
8267 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8268    space.  This is used to avoid issuing overflow warnings for
8269    expressions like &p->x which cannot wrap.  */
8270 
8271 static bool
8272 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8273 {
8274   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8275     return true;
8276 
8277   if (bitpos < 0)
8278     return true;
8279 
8280   wide_int wi_offset;
8281   int precision = TYPE_PRECISION (TREE_TYPE (base));
8282   if (offset == NULL_TREE)
8283     wi_offset = wi::zero (precision);
8284   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8285     return true;
8286   else
8287     wi_offset = offset;
8288 
8289   bool overflow;
8290   wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8291   wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8292   if (overflow)
8293     return true;
8294 
8295   if (!wi::fits_uhwi_p (total))
8296     return true;
8297 
8298   HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8299   if (size <= 0)
8300     return true;
8301 
8302   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8303      array.  */
8304   if (TREE_CODE (base) == ADDR_EXPR)
8305     {
8306       HOST_WIDE_INT base_size;
8307 
8308       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8309       if (base_size > 0 && size < base_size)
8310 	size = base_size;
8311     }
8312 
8313   return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8314 }
8315 
8316 /* Return a positive integer when the symbol DECL is known to have
8317    a nonzero address, zero when it's known not to (e.g., it's a weak
8318    symbol), and a negative integer when the symbol is not yet in the
8319    symbol table and so whether or not its address is zero is unknown.
8320    For function-local objects, always return a positive integer.  */
8321 static int
8322 maybe_nonzero_address (tree decl)
8323 {
8324   if (DECL_P (decl) && decl_in_symtab_p (decl))
8325     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8326       return symbol->nonzero_address ();
8327 
8328   /* Function local objects are never NULL.  */
8329   if (DECL_P (decl)
8330       && (DECL_CONTEXT (decl)
8331       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8332       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8333     return 1;
8334 
8335   return -1;
8336 }
8337 
8338 /* Subroutine of fold_binary.  This routine performs all of the
8339    transformations that are common to the equality/inequality
8340    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8341    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8342    fold_binary should call fold_binary, not this function directly.
8343    Fold a comparison with tree code CODE and type TYPE with operands
8344    OP0 and OP1.  Return the folded comparison or NULL_TREE.  */
8345 
8346 static tree
8347 fold_comparison (location_t loc, enum tree_code code, tree type,
8348 		 tree op0, tree op1)
8349 {
8350   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8351   tree arg0, arg1, tem;
8352 
8353   arg0 = op0;
8354   arg1 = op1;
8355 
8356   STRIP_SIGN_NOPS (arg0);
8357   STRIP_SIGN_NOPS (arg1);
8358 
8359   /* For comparisons of pointers we can decompose them into a compile-time
8360      comparison of the base objects and the offsets into the object.
8361      This requires at least one operand being an ADDR_EXPR or a
8362      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8363   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8364       && (TREE_CODE (arg0) == ADDR_EXPR
8365 	  || TREE_CODE (arg1) == ADDR_EXPR
8366 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8367 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8368     {
8369       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8370       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8371       machine_mode mode;
8372       int volatilep, reversep, unsignedp;
8373       bool indirect_base0 = false, indirect_base1 = false;
8374 
8375       /* Get base and offset for the access.  Strip ADDR_EXPR for
8376 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8377 	 off the base object if possible.  indirect_baseN will be true
8378 	 if baseN is not an address but refers to the object itself.  */
8379       base0 = arg0;
8380       if (TREE_CODE (arg0) == ADDR_EXPR)
8381 	{
8382 	  base0
8383 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8384 				   &bitsize, &bitpos0, &offset0, &mode,
8385 				   &unsignedp, &reversep, &volatilep);
8386 	  if (TREE_CODE (base0) == INDIRECT_REF)
8387 	    base0 = TREE_OPERAND (base0, 0);
8388 	  else
8389 	    indirect_base0 = true;
8390 	}
8391       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8392 	{
8393 	  base0 = TREE_OPERAND (arg0, 0);
8394 	  STRIP_SIGN_NOPS (base0);
8395 	  if (TREE_CODE (base0) == ADDR_EXPR)
8396 	    {
8397 	      base0
8398 		= get_inner_reference (TREE_OPERAND (base0, 0),
8399 				       &bitsize, &bitpos0, &offset0, &mode,
8400 				       &unsignedp, &reversep, &volatilep);
8401 	      if (TREE_CODE (base0) == INDIRECT_REF)
8402 		base0 = TREE_OPERAND (base0, 0);
8403 	      else
8404 		indirect_base0 = true;
8405 	    }
8406 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8407 	    offset0 = TREE_OPERAND (arg0, 1);
8408 	  else
8409 	    offset0 = size_binop (PLUS_EXPR, offset0,
8410 				  TREE_OPERAND (arg0, 1));
8411 	  if (TREE_CODE (offset0) == INTEGER_CST)
8412 	    {
8413 	      offset_int tem = wi::sext (wi::to_offset (offset0),
8414 					 TYPE_PRECISION (sizetype));
8415 	      tem <<= LOG2_BITS_PER_UNIT;
8416 	      tem += bitpos0;
8417 	      if (wi::fits_shwi_p (tem))
8418 		{
8419 		  bitpos0 = tem.to_shwi ();
8420 		  offset0 = NULL_TREE;
8421 		}
8422 	    }
8423 	}
8424 
8425       base1 = arg1;
8426       if (TREE_CODE (arg1) == ADDR_EXPR)
8427 	{
8428 	  base1
8429 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8430 				   &bitsize, &bitpos1, &offset1, &mode,
8431 				   &unsignedp, &reversep, &volatilep);
8432 	  if (TREE_CODE (base1) == INDIRECT_REF)
8433 	    base1 = TREE_OPERAND (base1, 0);
8434 	  else
8435 	    indirect_base1 = true;
8436 	}
8437       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8438 	{
8439 	  base1 = TREE_OPERAND (arg1, 0);
8440 	  STRIP_SIGN_NOPS (base1);
8441 	  if (TREE_CODE (base1) == ADDR_EXPR)
8442 	    {
8443 	      base1
8444 		= get_inner_reference (TREE_OPERAND (base1, 0),
8445 				       &bitsize, &bitpos1, &offset1, &mode,
8446 				       &unsignedp, &reversep, &volatilep);
8447 	      if (TREE_CODE (base1) == INDIRECT_REF)
8448 		base1 = TREE_OPERAND (base1, 0);
8449 	      else
8450 		indirect_base1 = true;
8451 	    }
8452 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8453 	    offset1 = TREE_OPERAND (arg1, 1);
8454 	  else
8455 	    offset1 = size_binop (PLUS_EXPR, offset1,
8456 				  TREE_OPERAND (arg1, 1));
8457 	  if (TREE_CODE (offset1) == INTEGER_CST)
8458 	    {
8459 	      offset_int tem = wi::sext (wi::to_offset (offset1),
8460 					 TYPE_PRECISION (sizetype));
8461 	      tem <<= LOG2_BITS_PER_UNIT;
8462 	      tem += bitpos1;
8463 	      if (wi::fits_shwi_p (tem))
8464 		{
8465 		  bitpos1 = tem.to_shwi ();
8466 		  offset1 = NULL_TREE;
8467 		}
8468 	    }
8469 	}
8470 
8471       /* If we have equivalent bases we might be able to simplify.  */
8472       if (indirect_base0 == indirect_base1
8473 	  && operand_equal_p (base0, base1,
8474 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8475 	{
8476 	  /* We can fold this expression to a constant if the non-constant
8477 	     offset parts are equal.  */
8478 	  if ((offset0 == offset1
8479 	       || (offset0 && offset1
8480 		   && operand_equal_p (offset0, offset1, 0)))
8481 	      && (equality_code
8482 		  || (indirect_base0
8483 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8484 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
8485 
8486 	    {
8487 	      if (!equality_code
8488 		  && bitpos0 != bitpos1
8489 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8490 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8491 		fold_overflow_warning (("assuming pointer wraparound does not "
8492 					"occur when comparing P +- C1 with "
8493 					"P +- C2"),
8494 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8495 
8496 	      switch (code)
8497 		{
8498 		case EQ_EXPR:
8499 		  return constant_boolean_node (bitpos0 == bitpos1, type);
8500 		case NE_EXPR:
8501 		  return constant_boolean_node (bitpos0 != bitpos1, type);
8502 		case LT_EXPR:
8503 		  return constant_boolean_node (bitpos0 < bitpos1, type);
8504 		case LE_EXPR:
8505 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
8506 		case GE_EXPR:
8507 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
8508 		case GT_EXPR:
8509 		  return constant_boolean_node (bitpos0 > bitpos1, type);
8510 		default:;
8511 		}
8512 	    }
8513 	  /* We can simplify the comparison to a comparison of the variable
8514 	     offset parts if the constant offset parts are equal.
8515 	     Be careful to use signed sizetype here because otherwise we
8516 	     mess with array offsets in the wrong way.  This is possible
8517 	     because pointer arithmetic is restricted to remain within an
8518 	     object and overflow on pointer differences is undefined as of
8519 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8520 	  else if (bitpos0 == bitpos1
8521 		   && (equality_code
8522 		       || (indirect_base0
8523 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8524 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
8525 	    {
8526 	      /* By converting to signed sizetype we cover middle-end pointer
8527 	         arithmetic which operates on unsigned pointer types of size
8528 	         type size and ARRAY_REF offsets which are properly sign or
8529 	         zero extended from their type in case it is narrower than
8530 	         sizetype.  */
8531 	      if (offset0 == NULL_TREE)
8532 		offset0 = build_int_cst (ssizetype, 0);
8533 	      else
8534 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8535 	      if (offset1 == NULL_TREE)
8536 		offset1 = build_int_cst (ssizetype, 0);
8537 	      else
8538 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8539 
8540 	      if (!equality_code
8541 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8542 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8543 		fold_overflow_warning (("assuming pointer wraparound does not "
8544 					"occur when comparing P +- C1 with "
8545 					"P +- C2"),
8546 				       WARN_STRICT_OVERFLOW_COMPARISON);
8547 
8548 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8549 	    }
8550 	}
8551       /* For equal offsets we can simplify to a comparison of the
8552 	 base addresses.  */
8553       else if (bitpos0 == bitpos1
8554 	       && (indirect_base0
8555 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8556 	       && (indirect_base1
8557 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8558 	       && ((offset0 == offset1)
8559 		   || (offset0 && offset1
8560 		       && operand_equal_p (offset0, offset1, 0))))
8561 	{
8562 	  if (indirect_base0)
8563 	    base0 = build_fold_addr_expr_loc (loc, base0);
8564 	  if (indirect_base1)
8565 	    base1 = build_fold_addr_expr_loc (loc, base1);
8566 	  return fold_build2_loc (loc, code, type, base0, base1);
8567 	}
8568       /* Comparison between an ordinary (non-weak) symbol and a null
8569 	 pointer can be eliminated since such symbols must have a
8570 	 non-null address.  In C, relational expressions between pointers
8571 	 to objects and null pointers are undefined.  The results
8572 	 below follow the C++ rules with the additional property that
8573 	 every object pointer compares greater than a null pointer.
8574       */
8575       else if (((DECL_P (base0)
8576 		 && maybe_nonzero_address (base0) > 0
8577 		 /* Avoid folding references to struct members at offset 0 to
8578 		    prevent tests like '&ptr->firstmember == 0' from getting
8579 		    eliminated.  When ptr is null, although the -> expression
8580 		    is strictly speaking invalid, GCC retains it as a matter
8581 		    of QoI.  See PR c/44555. */
8582 		 && (offset0 == NULL_TREE && bitpos0 != 0))
8583 		|| CONSTANT_CLASS_P (base0))
8584 	       && indirect_base0
8585 	       /* The caller guarantees that when one of the arguments is
8586 		  constant (i.e., null in this case) it is second.  */
8587 	       && integer_zerop (arg1))
8588 	{
8589 	  switch (code)
8590 	    {
8591 	    case EQ_EXPR:
8592 	    case LE_EXPR:
8593 	    case LT_EXPR:
8594 	      return constant_boolean_node (false, type);
8595 	    case GE_EXPR:
8596 	    case GT_EXPR:
8597 	    case NE_EXPR:
8598 	      return constant_boolean_node (true, type);
8599 	    default:
8600 	      gcc_unreachable ();
8601 	    }
8602 	}
8603     }
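  /* Illustrative sketch (assuming 32-bit int): for int a[10], the test
     &a[1] < &a[3] decomposes to the common base A with bit positions 32
     and 96 and folds to true.  For a non-weak global
     struct s { int x, y; } v, the test &v.y == 0 folds to false, while
     &v.x == 0 (offset zero) is deliberately left alone, as described
     above (PR c/44555).  */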
8604 
8605   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8606      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8607      the resulting offset is smaller in absolute value than the
8608      original one and has the same sign.  */
8609   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8610       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8611       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8612       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8613 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8614       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8615       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8616 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8617     {
8618       tree const1 = TREE_OPERAND (arg0, 1);
8619       tree const2 = TREE_OPERAND (arg1, 1);
8620       tree variable1 = TREE_OPERAND (arg0, 0);
8621       tree variable2 = TREE_OPERAND (arg1, 0);
8622       tree cst;
8623       const char * const warnmsg = G_("assuming signed overflow does not "
8624 				      "occur when combining constants around "
8625 				      "a comparison");
8626 
8627       /* Put the constant on the side where it doesn't overflow and is
8628 	 of lower absolute value and of the same sign as before.  */
8629       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8630 			     ? MINUS_EXPR : PLUS_EXPR,
8631 			     const2, const1);
8632       if (!TREE_OVERFLOW (cst)
8633 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8634 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8635 	{
8636 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8637 	  return fold_build2_loc (loc, code, type,
8638 				  variable1,
8639 				  fold_build2_loc (loc, TREE_CODE (arg1),
8640 						   TREE_TYPE (arg1),
8641 						   variable2, cst));
8642 	}
8643 
8644       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8645 			     ? MINUS_EXPR : PLUS_EXPR,
8646 			     const1, const2);
8647       if (!TREE_OVERFLOW (cst)
8648 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8649 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8650 	{
8651 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8652 	  return fold_build2_loc (loc, code, type,
8653 				  fold_build2_loc (loc, TREE_CODE (arg0),
8654 						   TREE_TYPE (arg0),
8655 						   variable1, cst),
8656 				  variable2);
8657 	}
8658     }
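  /* Illustrative sketch: for signed x and y with undefined overflow,
     x + 5 < y + 2 is rewritten as x + 3 < y; the combined constant 3 is
     smaller in magnitude than 5 and has the same sign, so the rewrite
     cannot introduce an overflow that was not already present.  */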
8659 
8660   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8661   if (tem)
8662     return tem;
8663 
8664   /* If we are comparing an expression that just has comparisons
8665      of two integer values, arithmetic expressions of those comparisons,
8666      and constants, we can simplify it.  There are only three cases
8667      to check: the two values can either be equal, the first can be
8668      greater, or the second can be greater.  Fold the expression for
8669      those three values.  Since each value must be 0 or 1, we have
8670      eight possibilities, each of which corresponds to the constant 0
8671      or 1 or one of the six possible comparisons.
8672 
8673      This handles common cases like (a > b) == 0 but also handles
8674      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8675      occur in macroized code.  */
8676 
8677   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8678     {
8679       tree cval1 = 0, cval2 = 0;
8680       int save_p = 0;
8681 
8682       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8683 	  /* Don't handle degenerate cases here; they should already
8684 	     have been handled anyway.  */
8685 	  && cval1 != 0 && cval2 != 0
8686 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8687 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8688 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8689 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8690 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8691 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8692 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8693 	{
8694 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8695 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8696 
8697 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8698 	     was the same as ARG1.  */
8699 
8700 	  tree high_result
8701 		= fold_build2_loc (loc, code, type,
8702 			       eval_subst (loc, arg0, cval1, maxval,
8703 					   cval2, minval),
8704 			       arg1);
8705 	  tree equal_result
8706 		= fold_build2_loc (loc, code, type,
8707 			       eval_subst (loc, arg0, cval1, maxval,
8708 					   cval2, maxval),
8709 			       arg1);
8710 	  tree low_result
8711 		= fold_build2_loc (loc, code, type,
8712 			       eval_subst (loc, arg0, cval1, minval,
8713 					   cval2, maxval),
8714 			       arg1);
8715 
8716 	  /* All three of these results should be 0 or 1.  Confirm they are.
8717 	     Then use those values to select the proper code to use.  */
8718 
8719 	  if (TREE_CODE (high_result) == INTEGER_CST
8720 	      && TREE_CODE (equal_result) == INTEGER_CST
8721 	      && TREE_CODE (low_result) == INTEGER_CST)
8722 	    {
8723 	      /* Make a 3-bit mask with the high-order bit being the
8724 		 value for `>', the next for '=', and the low for '<'.  */
8725 	      switch ((integer_onep (high_result) * 4)
8726 		      + (integer_onep (equal_result) * 2)
8727 		      + integer_onep (low_result))
8728 		{
8729 		case 0:
8730 		  /* Always false.  */
8731 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8732 		case 1:
8733 		  code = LT_EXPR;
8734 		  break;
8735 		case 2:
8736 		  code = EQ_EXPR;
8737 		  break;
8738 		case 3:
8739 		  code = LE_EXPR;
8740 		  break;
8741 		case 4:
8742 		  code = GT_EXPR;
8743 		  break;
8744 		case 5:
8745 		  code = NE_EXPR;
8746 		  break;
8747 		case 6:
8748 		  code = GE_EXPR;
8749 		  break;
8750 		case 7:
8751 		  /* Always true.  */
8752 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8753 		}
8754 
8755 	      if (save_p)
8756 		{
8757 		  tem = save_expr (build2 (code, type, cval1, cval2));
8758 		  protected_set_expr_location (tem, loc);
8759 		  return tem;
8760 		}
8761 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8762 	    }
8763 	}
8764     }
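  /* Illustrative sketch: for ((x > y) - (y > x)) > 0, substituting the
     three orderings of x and y yields 0, 0 and 1, i.e. mask 4 (binary
     100), so the whole expression folds to x > y.  */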
8765 
8766   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8767      into a single range test.  */
8768   if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8769       && TREE_CODE (arg1) == INTEGER_CST
8770       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8771       && !integer_zerop (TREE_OPERAND (arg0, 1))
8772       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8773       && !TREE_OVERFLOW (arg1))
8774     {
8775       tem = fold_div_compare (loc, code, type, arg0, arg1);
8776       if (tem != NULL_TREE)
8777 	return tem;
8778     }
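  /* Illustrative sketch: for unsigned x, x / 4 == 2 holds exactly when
     x is in [8, 11], so fold_div_compare can replace the division with
     a single range test along the lines of x - 8 <= 3 in unsigned
     arithmetic.  */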
8779 
8780   return NULL_TREE;
8781 }
8782 
8783 
8784 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8785    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8786    argument EXPR represents the expression "z" of type TYPE.  */
8787 
8788 static tree
8789 fold_mult_zconjz (location_t loc, tree type, tree expr)
8790 {
8791   tree itype = TREE_TYPE (type);
8792   tree rpart, ipart, tem;
8793 
8794   if (TREE_CODE (expr) == COMPLEX_EXPR)
8795     {
8796       rpart = TREE_OPERAND (expr, 0);
8797       ipart = TREE_OPERAND (expr, 1);
8798     }
8799   else if (TREE_CODE (expr) == COMPLEX_CST)
8800     {
8801       rpart = TREE_REALPART (expr);
8802       ipart = TREE_IMAGPART (expr);
8803     }
8804   else
8805     {
8806       expr = save_expr (expr);
8807       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8808       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8809     }
8810 
8811   rpart = save_expr (rpart);
8812   ipart = save_expr (ipart);
8813   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8814 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8815 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8816   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8817 			  build_zero_cst (itype));
8818 }
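/* Illustrative sketch: for z = a + b*i, z * conj(z) is
   (a*a + b*b) + 0*i, which is exactly the COMPLEX_EXPR built above; the
   save_exprs ensure the real and imaginary parts are each evaluated
   only once.  */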
8819 
8820 
8821 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8822    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
8823 
8824 static bool
8825 vec_cst_ctor_to_array (tree arg, tree *elts)
8826 {
8827   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8828 
8829   if (TREE_CODE (arg) == VECTOR_CST)
8830     {
8831       for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8832 	elts[i] = VECTOR_CST_ELT (arg, i);
8833     }
8834   else if (TREE_CODE (arg) == CONSTRUCTOR)
8835     {
8836       constructor_elt *elt;
8837 
8838       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8839 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8840 	  return false;
8841 	else
8842 	  elts[i] = elt->value;
8843     }
8844   else
8845     return false;
8846   for (; i < nelts; i++)
8847     elts[i]
8848       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8849   return true;
8850 }
8851 
8852 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8853    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8854    NULL_TREE otherwise.  */
8855 
8856 static tree
8857 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8858 {
8859   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8860   tree *elts;
8861   bool need_ctor = false;
8862 
8863   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8864 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8865   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8866       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8867     return NULL_TREE;
8868 
8869   elts = XALLOCAVEC (tree, nelts * 3);
8870   if (!vec_cst_ctor_to_array (arg0, elts)
8871       || !vec_cst_ctor_to_array (arg1, elts + nelts))
8872     return NULL_TREE;
8873 
8874   for (i = 0; i < nelts; i++)
8875     {
8876       if (!CONSTANT_CLASS_P (elts[sel[i]]))
8877 	need_ctor = true;
8878       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8879     }
8880 
8881   if (need_ctor)
8882     {
8883       vec<constructor_elt, va_gc> *v;
8884       vec_alloc (v, nelts);
8885       for (i = 0; i < nelts; i++)
8886 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8887       return build_constructor (type, v);
8888     }
8889   else
8890     return build_vector (type, &elts[2 * nelts]);
8891 }
8892 
8893 /* Try to fold a pointer difference of type TYPE between two address
8894    expressions of array references AREF0 and AREF1 using location LOC.
8895    Return a simplified expression for the difference or NULL_TREE.  */
8896 
8897 static tree
8898 fold_addr_of_array_ref_difference (location_t loc, tree type,
8899 				   tree aref0, tree aref1)
8900 {
8901   tree base0 = TREE_OPERAND (aref0, 0);
8902   tree base1 = TREE_OPERAND (aref1, 0);
8903   tree base_offset = build_int_cst (type, 0);
8904 
8905   /* If the bases are array references as well, recurse.  If the bases
8906      are pointer indirections compute the difference of the pointers.
8907      If the bases are equal, we are set.  */
8908   if ((TREE_CODE (base0) == ARRAY_REF
8909        && TREE_CODE (base1) == ARRAY_REF
8910        && (base_offset
8911 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8912       || (INDIRECT_REF_P (base0)
8913 	  && INDIRECT_REF_P (base1)
8914 	  && (base_offset
8915 	        = fold_binary_loc (loc, MINUS_EXPR, type,
8916 				   fold_convert (type, TREE_OPERAND (base0, 0)),
8917 				   fold_convert (type,
8918 						 TREE_OPERAND (base1, 0)))))
8919       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8920     {
8921       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8922       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8923       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8924       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8925       return fold_build2_loc (loc, PLUS_EXPR, type,
8926 			      base_offset,
8927 			      fold_build2_loc (loc, MULT_EXPR, type,
8928 					       diff, esz));
8929     }
8930   return NULL_TREE;
8931 }
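/* Illustrative sketch (assuming 4-byte int): for int a[10], the
   difference &a[i] - &a[j] folds to (i - j) * 4; for nested ARRAY_REFs
   the bases are handled by the recursive call and their difference is
   accumulated in BASE_OFFSET.  */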
8932 
8933 /* If the real or vector real constant CST of type TYPE has an exact
8934    inverse, return it, else return NULL.  */
8935 
8936 tree
8937 exact_inverse (tree type, tree cst)
8938 {
8939   REAL_VALUE_TYPE r;
8940   tree unit_type, *elts;
8941   machine_mode mode;
8942   unsigned vec_nelts, i;
8943 
8944   switch (TREE_CODE (cst))
8945     {
8946     case REAL_CST:
8947       r = TREE_REAL_CST (cst);
8948 
8949       if (exact_real_inverse (TYPE_MODE (type), &r))
8950 	return build_real (type, r);
8951 
8952       return NULL_TREE;
8953 
8954     case VECTOR_CST:
8955       vec_nelts = VECTOR_CST_NELTS (cst);
8956       elts = XALLOCAVEC (tree, vec_nelts);
8957       unit_type = TREE_TYPE (type);
8958       mode = TYPE_MODE (unit_type);
8959 
8960       for (i = 0; i < vec_nelts; i++)
8961 	{
8962 	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8963 	  if (!exact_real_inverse (mode, &r))
8964 	    return NULL_TREE;
8965 	  elts[i] = build_real (unit_type, r);
8966 	}
8967 
8968       return build_vector (type, elts);
8969 
8970     default:
8971       return NULL_TREE;
8972     }
8973 }
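/* Illustrative sketch: 4.0 has the exact binary inverse 0.25, so a
   caller can turn x / 4.0 into x * 0.25; 10.0 has none, since 0.1 is
   not representable in binary floating point, and NULL_TREE tells the
   caller to keep the division.  */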
8974 
8975 /*  Mask out the tz least significant bits of X of type TYPE where
8976     tz is the number of trailing zeroes in Y.  */
8977 static wide_int
8978 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8979 {
8980   int tz = wi::ctz (y);
8981   if (tz > 0)
8982     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8983   return x;
8984 }
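/* Illustrative sketch: for Y = 24 (binary 11000, three trailing
   zeros), the result is X & ~7, i.e. the low three bits of X are
   cleared, since only bits of X at or above the lowest set bit of Y
   can survive a later AND with Y.  */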
8985 
8986 /* Return true when T is an address and is known to be nonzero.
8987    For floating point we further ensure that T is not denormal.
8988    Similar logic is present in nonzero_address in rtlanal.h.
8989 
8990    If the return value is based on the assumption that signed overflow
8991    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8992    change *STRICT_OVERFLOW_P.  */
8993 
8994 static bool
8995 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8996 {
8997   tree type = TREE_TYPE (t);
8998   enum tree_code code;
8999 
9000   /* Doing something useful for floating point would need more work.  */
9001   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9002     return false;
9003 
9004   code = TREE_CODE (t);
9005   switch (TREE_CODE_CLASS (code))
9006     {
9007     case tcc_unary:
9008       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9009 					      strict_overflow_p);
9010     case tcc_binary:
9011     case tcc_comparison:
9012       return tree_binary_nonzero_warnv_p (code, type,
9013 					       TREE_OPERAND (t, 0),
9014 					       TREE_OPERAND (t, 1),
9015 					       strict_overflow_p);
9016     case tcc_constant:
9017     case tcc_declaration:
9018     case tcc_reference:
9019       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9020 
9021     default:
9022       break;
9023     }
9024 
9025   switch (code)
9026     {
9027     case TRUTH_NOT_EXPR:
9028       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9029 					      strict_overflow_p);
9030 
9031     case TRUTH_AND_EXPR:
9032     case TRUTH_OR_EXPR:
9033     case TRUTH_XOR_EXPR:
9034       return tree_binary_nonzero_warnv_p (code, type,
9035 					       TREE_OPERAND (t, 0),
9036 					       TREE_OPERAND (t, 1),
9037 					       strict_overflow_p);
9038 
9039     case COND_EXPR:
9040     case CONSTRUCTOR:
9041     case OBJ_TYPE_REF:
9042     case ASSERT_EXPR:
9043     case ADDR_EXPR:
9044     case WITH_SIZE_EXPR:
9045     case SSA_NAME:
9046       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9047 
9048     case COMPOUND_EXPR:
9049     case MODIFY_EXPR:
9050     case BIND_EXPR:
9051       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9052 					strict_overflow_p);
9053 
9054     case SAVE_EXPR:
9055       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9056 					strict_overflow_p);
9057 
9058     case CALL_EXPR:
9059       {
9060 	tree fndecl = get_callee_fndecl (t);
9061 	if (!fndecl) return false;
9062 	if (flag_delete_null_pointer_checks && !flag_check_new
9063 	    && DECL_IS_OPERATOR_NEW (fndecl)
9064 	    && !TREE_NOTHROW (fndecl))
9065 	  return true;
9066 	if (flag_delete_null_pointer_checks
9067 	    && lookup_attribute ("returns_nonnull",
9068 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9069 	  return true;
9070 	return alloca_call_p (t);
9071       }
9072 
9073     default:
9074       break;
9075     }
9076   return false;
9077 }
9078 
9079 /* Return true when T is an address and is known to be nonzero.
9080    Handle warnings about undefined signed overflow.  */
9081 
9082 bool
9083 tree_expr_nonzero_p (tree t)
9084 {
9085   bool ret, strict_overflow_p;
9086 
9087   strict_overflow_p = false;
9088   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9089   if (strict_overflow_p)
9090     fold_overflow_warning (("assuming signed overflow does not occur when "
9091 			    "determining that expression is always "
9092 			    "non-zero"),
9093 			   WARN_STRICT_OVERFLOW_MISC);
9094   return ret;
9095 }
9096 
9097 /* Return true if T is known not to be equal to an integer W.  */
9098 
9099 bool
9100 expr_not_equal_to (tree t, const wide_int &w)
9101 {
9102   wide_int min, max, nz;
9103   value_range_type rtype;
9104   switch (TREE_CODE (t))
9105     {
9106     case INTEGER_CST:
9107       return wi::ne_p (t, w);
9108 
9109     case SSA_NAME:
9110       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9111 	return false;
9112       rtype = get_range_info (t, &min, &max);
9113       if (rtype == VR_RANGE)
9114 	{
9115 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9116 	    return true;
9117 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9118 	    return true;
9119 	}
9120       else if (rtype == VR_ANTI_RANGE
9121 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9122 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9123 	return true;
9124       /* If T has some known zero bits and W has any of those bits set,
9125 	 then T is known not to be equal to W.  */
9126       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9127 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9128 	return true;
9129       return false;
9130 
9131     default:
9132       return false;
9133     }
9134 }
9135 
9136 /* Fold a binary expression of code CODE and type TYPE with operands
9137    OP0 and OP1.  LOC is the location of the resulting expression.
9138    Return the folded expression if folding is successful.  Otherwise,
9139    return NULL_TREE.  */
9140 
9141 tree
9142 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9143 		 tree op0, tree op1)
9144 {
9145   enum tree_code_class kind = TREE_CODE_CLASS (code);
9146   tree arg0, arg1, tem;
9147   tree t1 = NULL_TREE;
9148   bool strict_overflow_p;
9149   unsigned int prec;
9150 
9151   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9152 	      && TREE_CODE_LENGTH (code) == 2
9153 	      && op0 != NULL_TREE
9154 	      && op1 != NULL_TREE);
9155 
9156   arg0 = op0;
9157   arg1 = op1;
9158 
9159   /* Strip any conversions that don't change the mode.  This is
9160      safe for every expression, except for a comparison expression
9161      because its signedness is derived from its operands.  So, in
9162      the latter case, only strip conversions that don't change the
9163      signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9164      arguments preserved.
9165 
9166      Note that this is done as an internal manipulation within the
9167      constant folder, in order to find the simplest representation
9168      of the arguments so that their form can be studied.  In any
9169      case, the appropriate type conversions should be put back in
9170      the tree that will get out of the constant folder.  */
9171 
9172   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9173     {
9174       STRIP_SIGN_NOPS (arg0);
9175       STRIP_SIGN_NOPS (arg1);
9176     }
9177   else
9178     {
9179       STRIP_NOPS (arg0);
9180       STRIP_NOPS (arg1);
9181     }
9182 
9183   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9184      constant but we can't do arithmetic on them.  */
9185   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9186     {
9187       tem = const_binop (code, type, arg0, arg1);
9188       if (tem != NULL_TREE)
9189 	{
9190 	  if (TREE_TYPE (tem) != type)
9191 	    tem = fold_convert_loc (loc, type, tem);
9192 	  return tem;
9193 	}
9194     }
9195 
9196   /* If this is a commutative operation, and ARG0 is a constant, move it
9197      to ARG1 to reduce the number of tests below.  */
9198   if (commutative_tree_code (code)
9199       && tree_swap_operands_p (arg0, arg1))
9200     return fold_build2_loc (loc, code, type, op1, op0);
9201 
9202   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9203      to ARG1 to reduce the number of tests below.  */
9204   if (kind == tcc_comparison
9205       && tree_swap_operands_p (arg0, arg1))
9206     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9207 
9208   tem = generic_simplify (loc, code, type, op0, op1);
9209   if (tem)
9210     return tem;
9211 
9212   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9213 
9214      First check for cases where an arithmetic operation is applied to a
9215      compound, conditional, or comparison operation.  Push the arithmetic
9216      operation inside the compound or conditional to see if any folding
9217      can then be done.  Convert comparison to conditional for this purpose.
9218      This also optimizes non-constant cases that used to be done in
9219      expand_expr.
9220 
9221      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9222      one of the operands is a comparison and the other is a comparison, a
9223      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9224      code below would make the expression more complex.  Change it to a
9225      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9226      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9227 
9228   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9229        || code == EQ_EXPR || code == NE_EXPR)
9230       && TREE_CODE (type) != VECTOR_TYPE
9231       && ((truth_value_p (TREE_CODE (arg0))
9232 	   && (truth_value_p (TREE_CODE (arg1))
9233 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9234 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9235 	  || (truth_value_p (TREE_CODE (arg1))
9236 	      && (truth_value_p (TREE_CODE (arg0))
9237 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9238 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9239     {
9240       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9241 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9242 			 : TRUTH_XOR_EXPR,
9243 			 boolean_type_node,
9244 			 fold_convert_loc (loc, boolean_type_node, arg0),
9245 			 fold_convert_loc (loc, boolean_type_node, arg1));
9246 
9247       if (code == EQ_EXPR)
9248 	tem = invert_truthvalue_loc (loc, tem);
9249 
9250       return fold_convert_loc (loc, type, tem);
9251     }
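  /* Illustrative sketch: for boolean-valued A and B, A & B becomes
     TRUTH_AND_EXPR <A, B>, A != B becomes TRUTH_XOR_EXPR <A, B>, and
     A == B becomes the inverted TRUTH_XOR_EXPR, all computed in
     boolean_type_node and then converted back to TYPE.  */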
9252 
9253   if (TREE_CODE_CLASS (code) == tcc_binary
9254       || TREE_CODE_CLASS (code) == tcc_comparison)
9255     {
9256       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9257 	{
9258 	  tem = fold_build2_loc (loc, code, type,
9259 			     fold_convert_loc (loc, TREE_TYPE (op0),
9260 					       TREE_OPERAND (arg0, 1)), op1);
9261 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9262 			     tem);
9263 	}
9264       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9265 	{
9266 	  tem = fold_build2_loc (loc, code, type, op0,
9267 			     fold_convert_loc (loc, TREE_TYPE (op1),
9268 					       TREE_OPERAND (arg1, 1)));
9269 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9270 			     tem);
9271 	}
9272 
9273       if (TREE_CODE (arg0) == COND_EXPR
9274 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9275 	  || COMPARISON_CLASS_P (arg0))
9276 	{
9277 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9278 						     arg0, arg1,
9279 						     /*cond_first_p=*/1);
9280 	  if (tem != NULL_TREE)
9281 	    return tem;
9282 	}
9283 
9284       if (TREE_CODE (arg1) == COND_EXPR
9285 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9286 	  || COMPARISON_CLASS_P (arg1))
9287 	{
9288 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9289 						     arg1, arg0,
9290 					             /*cond_first_p=*/0);
9291 	  if (tem != NULL_TREE)
9292 	    return tem;
9293 	}
9294     }
9295 
9296   switch (code)
9297     {
9298     case MEM_REF:
9299       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9300       if (TREE_CODE (arg0) == ADDR_EXPR
9301 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9302 	{
9303 	  tree iref = TREE_OPERAND (arg0, 0);
9304 	  return fold_build2 (MEM_REF, type,
9305 			      TREE_OPERAND (iref, 0),
9306 			      int_const_binop (PLUS_EXPR, arg1,
9307 					       TREE_OPERAND (iref, 1)));
9308 	}
9309 
9310       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9311       if (TREE_CODE (arg0) == ADDR_EXPR
9312 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9313 	{
9314 	  tree base;
9315 	  HOST_WIDE_INT coffset;
9316 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9317 						&coffset);
9318 	  if (!base)
9319 	    return NULL_TREE;
9320 	  return fold_build2 (MEM_REF, type,
9321 			      build_fold_addr_expr (base),
9322 			      int_const_binop (PLUS_EXPR, arg1,
9323 					       size_int (coffset)));
9324 	}
9325 
9326       return NULL_TREE;
9327 
9328     case POINTER_PLUS_EXPR:
9329       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9330       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9331 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9332         return fold_convert_loc (loc, type,
9333 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9334 					      fold_convert_loc (loc, sizetype,
9335 								arg1),
9336 					      fold_convert_loc (loc, sizetype,
9337 								arg0)));
9338 
9339       return NULL_TREE;
9340 
9341     case PLUS_EXPR:
9342       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9343 	{
9344 	  /* X + (X / CST) * -CST is X % CST.  */
9345 	  if (TREE_CODE (arg1) == MULT_EXPR
9346 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9347 	      && operand_equal_p (arg0,
9348 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9349 	    {
9350 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9351 	      tree cst1 = TREE_OPERAND (arg1, 1);
9352 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9353 				      cst1, cst0);
9354 	      if (sum && integer_zerop (sum))
9355 		return fold_convert_loc (loc, type,
9356 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9357 						      TREE_TYPE (arg0), arg0,
9358 						      cst0));
9359 	    }
9360 	}
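      /* Illustrative sketch: x + (x / 16) * -16 equals
	 x - (x / 16) * 16, which is the definition of x % 16 under
	 truncating division, so the sum above folds to x % 16
	 (TRUNC_MOD_EXPR).  */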
9361 
9362       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9363 	 one.  Make sure the type is not saturating and has the signedness of
9364 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9365 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9366       if ((TREE_CODE (arg0) == MULT_EXPR
9367 	   || TREE_CODE (arg1) == MULT_EXPR)
9368 	  && !TYPE_SATURATING (type)
9369 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9370 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9371 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9372         {
9373 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9374 	  if (tem)
9375 	    return tem;
9376 	}
9377 
9378       if (! FLOAT_TYPE_P (type))
9379 	{
9380 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9381 	     (plus (plus (mult) (mult)) (foo)) so that we can
9382 	     take advantage of the factoring cases below.  */
9383 	  if (ANY_INTEGRAL_TYPE_P (type)
9384 	      && TYPE_OVERFLOW_WRAPS (type)
9385 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9386 		    || TREE_CODE (arg0) == MINUS_EXPR)
9387 		   && TREE_CODE (arg1) == MULT_EXPR)
9388 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9389 		       || TREE_CODE (arg1) == MINUS_EXPR)
9390 		      && TREE_CODE (arg0) == MULT_EXPR)))
9391 	    {
9392 	      tree parg0, parg1, parg, marg;
9393 	      enum tree_code pcode;
9394 
9395 	      if (TREE_CODE (arg1) == MULT_EXPR)
9396 		parg = arg0, marg = arg1;
9397 	      else
9398 		parg = arg1, marg = arg0;
9399 	      pcode = TREE_CODE (parg);
9400 	      parg0 = TREE_OPERAND (parg, 0);
9401 	      parg1 = TREE_OPERAND (parg, 1);
9402 	      STRIP_NOPS (parg0);
9403 	      STRIP_NOPS (parg1);
9404 
9405 	      if (TREE_CODE (parg0) == MULT_EXPR
9406 		  && TREE_CODE (parg1) != MULT_EXPR)
9407 		return fold_build2_loc (loc, pcode, type,
9408 				    fold_build2_loc (loc, PLUS_EXPR, type,
9409 						 fold_convert_loc (loc, type,
9410 								   parg0),
9411 						 fold_convert_loc (loc, type,
9412 								   marg)),
9413 				    fold_convert_loc (loc, type, parg1));
9414 	      if (TREE_CODE (parg0) != MULT_EXPR
9415 		  && TREE_CODE (parg1) == MULT_EXPR)
9416 		return
9417 		  fold_build2_loc (loc, PLUS_EXPR, type,
9418 			       fold_convert_loc (loc, type, parg0),
9419 			       fold_build2_loc (loc, pcode, type,
9420 					    fold_convert_loc (loc, type, marg),
9421 					    fold_convert_loc (loc, type,
9422 							      parg1)));
9423 	    }
9424 	}
9425       else
9426 	{
9427 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9428 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9429 	     if signed zeros are involved.  */
9430 	  if (!HONOR_SNANS (element_mode (arg0))
9431               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9432 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9433 	    {
9434 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9435 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9436 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9437 	      bool arg0rz = false, arg0iz = false;
9438 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9439 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9440 		{
9441 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9442 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9443 		  if (arg0rz && arg1i && real_zerop (arg1i))
9444 		    {
9445 		      tree rp = arg1r ? arg1r
9446 				  : build1 (REALPART_EXPR, rtype, arg1);
9447 		      tree ip = arg0i ? arg0i
9448 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9449 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9450 		    }
9451 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9452 		    {
9453 		      tree rp = arg0r ? arg0r
9454 				  : build1 (REALPART_EXPR, rtype, arg0);
9455 		      tree ip = arg1i ? arg1i
9456 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9457 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9458 		    }
9459 		}
9460 	    }
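	  /* Illustration (editor's note; assumes neither SNaNs nor signed
	     zeros are honored): __complex__ (x, 0) + __complex__ (0, y)
	     is recombined by the REALPART/IMAGPART probes above into
	     __complex__ (x, y).  */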
9461 
9462 	  if (flag_unsafe_math_optimizations
9463 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9464 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9465 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9466 	    return tem;
9467 
9468           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9469              We associate floats only if the user has specified
9470              -fassociative-math.  */
9471           if (flag_associative_math
9472               && TREE_CODE (arg1) == PLUS_EXPR
9473               && TREE_CODE (arg0) != MULT_EXPR)
9474             {
9475               tree tree10 = TREE_OPERAND (arg1, 0);
9476               tree tree11 = TREE_OPERAND (arg1, 1);
9477               if (TREE_CODE (tree11) == MULT_EXPR
9478 		  && TREE_CODE (tree10) == MULT_EXPR)
9479                 {
9480                   tree tree0;
9481                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9482                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9483                 }
9484             }
9485           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9486              We associate floats only if the user has specified
9487              -fassociative-math.  */
9488           if (flag_associative_math
9489               && TREE_CODE (arg0) == PLUS_EXPR
9490               && TREE_CODE (arg1) != MULT_EXPR)
9491             {
9492               tree tree00 = TREE_OPERAND (arg0, 0);
9493               tree tree01 = TREE_OPERAND (arg0, 1);
9494               if (TREE_CODE (tree01) == MULT_EXPR
9495 		  && TREE_CODE (tree00) == MULT_EXPR)
9496                 {
9497                   tree tree0;
9498                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9499                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9500                 }
9501             }
9502 	}
9503 
9504      bit_rotate:
9505       /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the width
9506 	 of A, is a rotate of A by C1 bits.  */
9507       /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the width
9508 	 of A, is a rotate of A by B bits.  */
9509       {
9510 	enum tree_code code0, code1;
9511 	tree rtype;
9512 	code0 = TREE_CODE (arg0);
9513 	code1 = TREE_CODE (arg1);
9514 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9515 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9516 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9517 			        TREE_OPERAND (arg1, 0), 0)
9518 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9519 	        TYPE_UNSIGNED (rtype))
9520 	    /* Only create rotates in complete modes.  Other cases are not
9521 	       expanded properly.  */
9522 	    && (element_precision (rtype)
9523 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9524 	  {
9525 	    tree tree01, tree11;
9526 	    enum tree_code code01, code11;
9527 
9528 	    tree01 = TREE_OPERAND (arg0, 1);
9529 	    tree11 = TREE_OPERAND (arg1, 1);
9530 	    STRIP_NOPS (tree01);
9531 	    STRIP_NOPS (tree11);
9532 	    code01 = TREE_CODE (tree01);
9533 	    code11 = TREE_CODE (tree11);
9534 	    if (code01 == INTEGER_CST
9535 		&& code11 == INTEGER_CST
9536 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9537 		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9538 	      {
9539 		tem = build2_loc (loc, LROTATE_EXPR,
9540 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
9541 				  TREE_OPERAND (arg0, 0),
9542 				  code0 == LSHIFT_EXPR
9543 				  ? TREE_OPERAND (arg0, 1)
9544 				  : TREE_OPERAND (arg1, 1));
9545 		return fold_convert_loc (loc, type, tem);
9546 	      }
9547 	    else if (code11 == MINUS_EXPR)
9548 	      {
9549 		tree tree110, tree111;
9550 		tree110 = TREE_OPERAND (tree11, 0);
9551 		tree111 = TREE_OPERAND (tree11, 1);
9552 		STRIP_NOPS (tree110);
9553 		STRIP_NOPS (tree111);
9554 		if (TREE_CODE (tree110) == INTEGER_CST
9555 		    && 0 == compare_tree_int (tree110,
9556 					      element_precision
9557 					      (TREE_TYPE (TREE_OPERAND
9558 							  (arg0, 0))))
9559 		    && operand_equal_p (tree01, tree111, 0))
9560 		  return
9561 		    fold_convert_loc (loc, type,
9562 				      build2 ((code0 == LSHIFT_EXPR
9563 					       ? LROTATE_EXPR
9564 					       : RROTATE_EXPR),
9565 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
9566 					      TREE_OPERAND (arg0, 0),
9567 					      TREE_OPERAND (arg0, 1)));
9568 	      }
9569 	    else if (code01 == MINUS_EXPR)
9570 	      {
9571 		tree tree010, tree011;
9572 		tree010 = TREE_OPERAND (tree01, 0);
9573 		tree011 = TREE_OPERAND (tree01, 1);
9574 		STRIP_NOPS (tree010);
9575 		STRIP_NOPS (tree011);
9576 		if (TREE_CODE (tree010) == INTEGER_CST
9577 		    && 0 == compare_tree_int (tree010,
9578 					      element_precision
9579 					      (TREE_TYPE (TREE_OPERAND
9580 							  (arg0, 0))))
9581 		    && operand_equal_p (tree11, tree011, 0))
9582 		    return fold_convert_loc
9583 		      (loc, type,
9584 		       build2 ((code0 != LSHIFT_EXPR
9585 				? LROTATE_EXPR
9586 				: RROTATE_EXPR),
9587 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
9588 			       TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9589 	      }
9590 	  }
9591       }
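      /* Illustration (editor's note): for a 32-bit unsigned x,
	   (x << 3) + (x >> 29)
	 is recognized above (3 + 29 == 32 == the precision) and folded to a
	 single LROTATE_EXPR of x by 3.  */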
9592 
9593     associate:
9594       /* In most languages, we can't associate operations on floats across
9595 	 parentheses.  Rather than remember where the parentheses were, we
9596 	 don't associate floats at all, unless the user has specified
9597 	 -fassociative-math.
9598 	 We also need to make sure the type is not saturating.  */
9599 
9600       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9601 	  && !TYPE_SATURATING (type))
9602 	{
9603 	  tree var0, con0, lit0, minus_lit0;
9604 	  tree var1, con1, lit1, minus_lit1;
9605 	  tree atype = type;
9606 	  bool ok = true;
9607 
9608 	  /* Split both trees into variables, constants, and literals.  Then
9609 	     associate each group together, the constants with literals,
9610 	     then the result with variables.  This increases the chances of
9611 	     literals being recombined later and of generating relocatable
9612 	     expressions for the sum of a constant and literal.  */
9613 	  var0 = split_tree (loc, arg0, type, code,
9614 			     &con0, &lit0, &minus_lit0, 0);
9615 	  var1 = split_tree (loc, arg1, type, code,
9616 			     &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9617 
9618 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9619 	  if (code == MINUS_EXPR)
9620 	    code = PLUS_EXPR;
9621 
9622 	  /* With undefined overflow prefer doing association in a type
9623 	     which wraps on overflow, if that is one of the operand types.  */
9624 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9625 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9626 	    {
9627 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9628 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9629 		atype = TREE_TYPE (arg0);
9630 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9631 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9632 		atype = TREE_TYPE (arg1);
9633 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9634 	    }
9635 
9636 	  /* With undefined overflow we can only associate constants with one
9637 	     variable, and constants whose association doesn't overflow.  */
9638 	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9639 	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9640 	    {
9641 	      if (var0 && var1)
9642 		{
9643 		  tree tmp0 = var0;
9644 		  tree tmp1 = var1;
9645 		  bool one_neg = false;
9646 
9647 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9648 		    {
9649 		      tmp0 = TREE_OPERAND (tmp0, 0);
9650 		      one_neg = !one_neg;
9651 		    }
9652 		  if (CONVERT_EXPR_P (tmp0)
9653 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9654 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9655 			  <= TYPE_PRECISION (atype)))
9656 		    tmp0 = TREE_OPERAND (tmp0, 0);
9657 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9658 		    {
9659 		      tmp1 = TREE_OPERAND (tmp1, 0);
9660 		      one_neg = !one_neg;
9661 		    }
9662 		  if (CONVERT_EXPR_P (tmp1)
9663 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9664 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9665 			  <= TYPE_PRECISION (atype)))
9666 		    tmp1 = TREE_OPERAND (tmp1, 0);
9667 		  /* The only case we can still associate with two variables
9668 		     is if they cancel out.  */
9669 		  if (!one_neg
9670 		      || !operand_equal_p (tmp0, tmp1, 0))
9671 		    ok = false;
9672 		}
9673 	    }
9674 
9675 	  /* Only do something if we found more than two objects.  Otherwise,
9676 	     nothing has changed and we risk infinite recursion.  */
9677 	  if (ok
9678 	      && (2 < ((var0 != 0) + (var1 != 0)
9679 		       + (con0 != 0) + (con1 != 0)
9680 		       + (lit0 != 0) + (lit1 != 0)
9681 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
9682 	    {
9683 	      var0 = associate_trees (loc, var0, var1, code, atype);
9684 	      con0 = associate_trees (loc, con0, con1, code, atype);
9685 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9686 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9687 					    code, atype);
9688 
9689 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9690 		 greater than the positive part.  Otherwise, the multiplicative
9691 		 folding code (i.e. extract_muldiv) may be fooled when unsigned
9692 		 constants are subtracted, as in the following
9693 		 example: ((X*2 + 4) - 8U)/2.  */
9694 	      if (minus_lit0 && lit0)
9695 		{
9696 		  if (TREE_CODE (lit0) == INTEGER_CST
9697 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9698 		      && tree_int_cst_lt (lit0, minus_lit0))
9699 		    {
9700 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9701 						    MINUS_EXPR, atype);
9702 		      lit0 = 0;
9703 		    }
9704 		  else
9705 		    {
9706 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9707 					      MINUS_EXPR, atype);
9708 		      minus_lit0 = 0;
9709 		    }
9710 		}
9711 
9712 	      /* Don't introduce overflows through reassociation.  */
9713 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9714 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9715 		return NULL_TREE;
9716 
9717 	      if (minus_lit0)
9718 		{
9719 		  if (con0 == 0)
9720 		    return
9721 		      fold_convert_loc (loc, type,
9722 					associate_trees (loc, var0, minus_lit0,
9723 							 MINUS_EXPR, atype));
9724 		  else
9725 		    {
9726 		      con0 = associate_trees (loc, con0, minus_lit0,
9727 					      MINUS_EXPR, atype);
9728 		      return
9729 			fold_convert_loc (loc, type,
9730 					  associate_trees (loc, var0, con0,
9731 							   PLUS_EXPR, atype));
9732 		    }
9733 		}
9734 
9735 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9736 	      return
9737 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9738 							      code, atype));
9739 	    }
9740 	}
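      /* Illustration (editor's note): with a wrapping or unsigned type,
	 (x + 4) + (y + 6) splits into variables {x, y} and literals {4, 6}
	 and is recombined above as (x + y) + 10; with undefined overflow the
	 two-variable check above would reject this unless the variables
	 cancel.  */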
9741 
9742       return NULL_TREE;
9743 
9744     case MINUS_EXPR:
9745       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
9746       if (TREE_CODE (arg0) == NEGATE_EXPR
9747 	  && negate_expr_p (op1)
9748 	  /* If arg0 is e.g. unsigned int and type is int, then this could
9749 	     introduce UB, because if A is INT_MIN at runtime, the original
9750 	     expression can be well defined while the latter is not.
9751 	     See PR83269.  */
9752 	  && !(ANY_INTEGRAL_TYPE_P (type)
9753 	       && TYPE_OVERFLOW_UNDEFINED (type)
9754 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9755 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9756 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9757 			        fold_convert_loc (loc, type,
9758 						  TREE_OPERAND (arg0, 0)));
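      /* Illustration (editor's note): (-a) - 5 becomes -5 - a here, since
	 the constant 5 is easily negated and the swap needs no extra
	 negation of a.  */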
9759 
9760       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9761 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
9762 	 signed zeros are involved.  */
9763       if (!HONOR_SNANS (element_mode (arg0))
9764 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9765 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9766         {
9767 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9768 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9769 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9770 	  bool arg0rz = false, arg0iz = false;
9771 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
9772 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
9773 	    {
9774 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9775 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9776 	      if (arg0rz && arg1i && real_zerop (arg1i))
9777 	        {
9778 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9779 					 arg1r ? arg1r
9780 					 : build1 (REALPART_EXPR, rtype, arg1));
9781 		  tree ip = arg0i ? arg0i
9782 		    : build1 (IMAGPART_EXPR, rtype, arg0);
9783 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9784 		}
9785 	      else if (arg0iz && arg1r && real_zerop (arg1r))
9786 	        {
9787 		  tree rp = arg0r ? arg0r
9788 		    : build1 (REALPART_EXPR, rtype, arg0);
9789 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9790 					 arg1i ? arg1i
9791 					 : build1 (IMAGPART_EXPR, rtype, arg1));
9792 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9793 		}
9794 	    }
9795 	}
9796 
9797       /* A - B -> A + (-B) if B is easily negatable.  */
9798       if (negate_expr_p (op1)
9799 	  && ! TYPE_OVERFLOW_SANITIZED (type)
9800 	  && ((FLOAT_TYPE_P (type)
9801                /* Avoid this transformation if B is a positive REAL_CST.  */
9802 	       && (TREE_CODE (op1) != REAL_CST
9803 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9804 	      || INTEGRAL_TYPE_P (type)))
9805 	return fold_build2_loc (loc, PLUS_EXPR, type,
9806 				fold_convert_loc (loc, type, arg0),
9807 				negate_expr (op1));
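      /* Illustration (editor's note): for integral types, a - 5 is rewritten
	 as a + (-5) here; for floats the REAL_CST guard above leaves e.g.
	 a - 5.0 alone and only rewrites a - (-5.0).  */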
9808 
9809       /* Fold &a[i] - &a[j] to i-j.  */
9810       if (TREE_CODE (arg0) == ADDR_EXPR
9811 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9812 	  && TREE_CODE (arg1) == ADDR_EXPR
9813 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9814         {
9815 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9816 							TREE_OPERAND (arg0, 0),
9817 							TREE_OPERAND (arg1, 0));
9818 	  if (tem)
9819 	    return tem;
9820 	}
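      /* Illustration (editor's note): for int a[10], &a[i + 2] - &a[i]
	 folds via fold_addr_of_array_ref_difference to the constant index
	 difference scaled by the element size.  */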
9821 
9822       if (FLOAT_TYPE_P (type)
9823 	  && flag_unsafe_math_optimizations
9824 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9825 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9826 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9827 	return tem;
9828 
9829       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
9830 	 or one of them being 1.  Make sure the type is not saturating and has
9831 	 the signedness of the stripped operands, as fold_plusminus_mult_expr
9832 	 will re-associate.  ??? The latter should use TYPE_OVERFLOW_* flags.  */
9833       if ((TREE_CODE (arg0) == MULT_EXPR
9834 	   || TREE_CODE (arg1) == MULT_EXPR)
9835 	  && !TYPE_SATURATING (type)
9836 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9837 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9838 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9839         {
9840 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9841 	  if (tem)
9842 	    return tem;
9843 	}
9844 
9845       goto associate;
9846 
9847     case MULT_EXPR:
9848       if (! FLOAT_TYPE_P (type))
9849 	{
9850 	  /* Transform x * -C into -x * C if x is easily negatable.  */
9851 	  if (TREE_CODE (op1) == INTEGER_CST
9852 	      && tree_int_cst_sgn (op1) == -1
9853 	      && negate_expr_p (op0)
9854 	      && negate_expr_p (op1)
9855 	      && (tem = negate_expr (op1)) != op1
9856 	      && ! TREE_OVERFLOW (tem))
9857 	    return fold_build2_loc (loc, MULT_EXPR, type,
9858 				    fold_convert_loc (loc, type,
9859 						      negate_expr (op0)), tem);
9860 
9861 	  strict_overflow_p = false;
9862 	  if (TREE_CODE (arg1) == INTEGER_CST
9863 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9864 					     &strict_overflow_p)))
9865 	    {
9866 	      if (strict_overflow_p)
9867 		fold_overflow_warning (("assuming signed overflow does not "
9868 					"occur when simplifying "
9869 					"multiplication"),
9870 				       WARN_STRICT_OVERFLOW_MISC);
9871 	      return fold_convert_loc (loc, type, tem);
9872 	    }
9873 
9874 	  /* Optimize z * conj(z) for integer complex numbers.  */
9875 	  if (TREE_CODE (arg0) == CONJ_EXPR
9876 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9877 	    return fold_mult_zconjz (loc, type, arg1);
9878 	  if (TREE_CODE (arg1) == CONJ_EXPR
9879 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9880 	    return fold_mult_zconjz (loc, type, arg0);
9881 	}
9882       else
9883 	{
9884 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9885 	     This is not the same for NaNs or if signed zeros are
9886 	     involved.  */
9887 	  if (!HONOR_NANS (arg0)
9888               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9889 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9890 	      && TREE_CODE (arg1) == COMPLEX_CST
9891 	      && real_zerop (TREE_REALPART (arg1)))
9892 	    {
9893 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9894 	      if (real_onep (TREE_IMAGPART (arg1)))
9895 		return
9896 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
9897 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9898 							     rtype, arg0)),
9899 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9900 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
9901 		return
9902 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
9903 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9904 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9905 							     rtype, arg0)));
9906 	    }
9907 
9908 	  /* Optimize z * conj(z) for floating point complex numbers.
9909 	     Guarded by flag_unsafe_math_optimizations as non-finite
9910 	     imaginary components don't produce scalar results.  */
9911 	  if (flag_unsafe_math_optimizations
9912 	      && TREE_CODE (arg0) == CONJ_EXPR
9913 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9914 	    return fold_mult_zconjz (loc, type, arg1);
9915 	  if (flag_unsafe_math_optimizations
9916 	      && TREE_CODE (arg1) == CONJ_EXPR
9917 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9918 	    return fold_mult_zconjz (loc, type, arg0);
9919 	}
9920       goto associate;
9921 
9922     case BIT_IOR_EXPR:
9923       /* Canonicalize (X & C1) | C2.  */
9924       if (TREE_CODE (arg0) == BIT_AND_EXPR
9925 	  && TREE_CODE (arg1) == INTEGER_CST
9926 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9927 	{
9928 	  int width = TYPE_PRECISION (type), w;
9929 	  wide_int c1 = TREE_OPERAND (arg0, 1);
9930 	  wide_int c2 = arg1;
9931 
9932 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
9933 	  if ((c1 & c2) == c1)
9934 	    return omit_one_operand_loc (loc, type, arg1,
9935 					 TREE_OPERAND (arg0, 0));
9936 
9937 	  wide_int msk = wi::mask (width, false,
9938 				   TYPE_PRECISION (TREE_TYPE (arg1)));
9939 
9940 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
9941 	  if (msk.and_not (c1 | c2) == 0)
9942 	    {
9943 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9944 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9945 	    }
9946 
9947 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9948 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9949 	     mode which allows further optimizations.  */
9950 	  c1 &= msk;
9951 	  c2 &= msk;
9952 	  wide_int c3 = c1.and_not (c2);
9953 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9954 	    {
9955 	      wide_int mask = wi::mask (w, false,
9956 					TYPE_PRECISION (type));
9957 	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9958 		{
9959 		  c3 = mask;
9960 		  break;
9961 		}
9962 	    }
9963 
9964 	  if (c3 != c1)
9965 	    {
9966 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9967 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9968 				     wide_int_to_tree (type, c3));
9969 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9970 	    }
9971 	}
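      /* Illustration (editor's note): (x & 0x0F) | 0xFF becomes just 0xFF
	 (C1 & C2 == C1), while (x & 0x3F) | 0x0C has C1 minimized to 0x33,
	 yielding (x & 0x33) | 0x0C.  */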
9972 
9973       /* See if this can be simplified into a rotate first.  If that
9974 	 is unsuccessful continue in the association code.  */
9975       goto bit_rotate;
9976 
9977     case BIT_XOR_EXPR:
9978       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
9979       if (TREE_CODE (arg0) == BIT_AND_EXPR
9980 	  && INTEGRAL_TYPE_P (type)
9981 	  && integer_onep (TREE_OPERAND (arg0, 1))
9982 	  && integer_onep (arg1))
9983 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9984 				build_zero_cst (TREE_TYPE (arg0)));
9985 
9986       /* See if this can be simplified into a rotate first.  If that
9987 	 is unsuccessful continue in the association code.  */
9988       goto bit_rotate;
9989 
9990     case BIT_AND_EXPR:
9991       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
9992       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9993 	  && INTEGRAL_TYPE_P (type)
9994 	  && integer_onep (TREE_OPERAND (arg0, 1))
9995 	  && integer_onep (arg1))
9996 	{
9997 	  tree tem2;
9998 	  tem = TREE_OPERAND (arg0, 0);
9999 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10000 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10001 				  tem, tem2);
10002 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10003 				  build_zero_cst (TREE_TYPE (tem)));
10004 	}
10005       /* Fold ~X & 1 as (X & 1) == 0.  */
10006       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10007 	  && INTEGRAL_TYPE_P (type)
10008 	  && integer_onep (arg1))
10009 	{
10010 	  tree tem2;
10011 	  tem = TREE_OPERAND (arg0, 0);
10012 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10013 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10014 				  tem, tem2);
10015 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10016 				  build_zero_cst (TREE_TYPE (tem)));
10017 	}
10018       /* Fold !X & 1 as X == 0.  */
10019       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10020 	  && integer_onep (arg1))
10021 	{
10022 	  tem = TREE_OPERAND (arg0, 0);
10023 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10024 				  build_zero_cst (TREE_TYPE (tem)));
10025 	}
10026 
10027       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10028          multiple of 1 << CST.  */
10029       if (TREE_CODE (arg1) == INTEGER_CST)
10030 	{
10031 	  wide_int cst1 = arg1;
10032 	  wide_int ncst1 = -cst1;
10033 	  if ((cst1 & ncst1) == ncst1
10034 	      && multiple_of_p (type, arg0,
10035 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10036 	    return fold_convert_loc (loc, type, arg0);
10037 	}
10038 
10039       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10040          bits from CST2.  */
10041       if (TREE_CODE (arg1) == INTEGER_CST
10042 	  && TREE_CODE (arg0) == MULT_EXPR
10043 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10044 	{
10045 	  wide_int warg1 = arg1;
10046 	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10047 
10048 	  if (masked == 0)
10049 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10050 	                                  arg0, arg1);
10051 	  else if (masked != warg1)
10052 	    {
10053 	      /* Avoid the transform if arg1 is a mask of some
10054 	         mode which allows further optimizations.  */
10055 	      int pop = wi::popcount (warg1);
10056 	      if (!(pop >= BITS_PER_UNIT
10057 		    && pow2p_hwi (pop)
10058 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10059 		return fold_build2_loc (loc, code, type, op0,
10060 					wide_int_to_tree (type, masked));
10061 	    }
10062 	}
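      /* Illustration (editor's note): (x * 8) & 7 folds to 0 above, and
	 (x * 4) & 7 drops the always-zero low bits of the product, giving
	 (x * 4) & 4.  */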
10063 
10064       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10065 	 ((A & N) + B) & M -> (A + B) & M
10066 	 Similarly if (N & M) == 0,
10067 	 ((A | N) + B) & M -> (A + B) & M
10068 	 and for - instead of + (or unary - instead of +)
10069 	 and/or ^ instead of |.
10070 	 If B is constant and (B & M) == 0, fold into A & M.  */
10071       if (TREE_CODE (arg1) == INTEGER_CST)
10072 	{
10073 	  wide_int cst1 = arg1;
10074 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10075 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10076 	      && (TREE_CODE (arg0) == PLUS_EXPR
10077 		  || TREE_CODE (arg0) == MINUS_EXPR
10078 		  || TREE_CODE (arg0) == NEGATE_EXPR)
10079 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10080 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10081 	    {
10082 	      tree pmop[2];
10083 	      int which = 0;
10084 	      wide_int cst0;
10085 
10086 	      /* Now we know that arg0 is (C + D) or (C - D) or
10087 		 -C and arg1 (M) is (1LL << cst) - 1.
10088 		 Store C into PMOP[0] and D into PMOP[1].  */
10089 	      pmop[0] = TREE_OPERAND (arg0, 0);
10090 	      pmop[1] = NULL;
10091 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
10092 		{
10093 		  pmop[1] = TREE_OPERAND (arg0, 1);
10094 		  which = 1;
10095 		}
10096 
10097 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10098 		which = -1;
10099 
10100 	      for (; which >= 0; which--)
10101 		switch (TREE_CODE (pmop[which]))
10102 		  {
10103 		  case BIT_AND_EXPR:
10104 		  case BIT_IOR_EXPR:
10105 		  case BIT_XOR_EXPR:
10106 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10107 			!= INTEGER_CST)
10108 		      break;
10109 		    cst0 = TREE_OPERAND (pmop[which], 1);
10110 		    cst0 &= cst1;
10111 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10112 		      {
10113 			if (cst0 != cst1)
10114 			  break;
10115 		      }
10116 		    else if (cst0 != 0)
10117 		      break;
10118 		    /* If C or D is of the form (A & N) where
10119 		       (N & M) == M, or of the form (A | N) or
10120 		       (A ^ N) where (N & M) == 0, replace it with A.  */
10121 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
10122 		    break;
10123 		  case INTEGER_CST:
10124 		    /* If C or D is a constant N where (N & M) == 0, it can
10125 		       be omitted (assumed 0).  */
10126 		    if ((TREE_CODE (arg0) == PLUS_EXPR
10127 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10128 			&& (cst1 & pmop[which]) == 0)
10129 		      pmop[which] = NULL;
10130 		    break;
10131 		  default:
10132 		    break;
10133 		  }
10134 
10135 	      /* Only build anything new if we optimized one or both arguments
10136 		 above.  */
10137 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
10138 		  || (TREE_CODE (arg0) != NEGATE_EXPR
10139 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
10140 		{
10141 		  tree utype = TREE_TYPE (arg0);
10142 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10143 		    {
10144 		      /* Perform the operations in a type that has defined
10145 			 overflow behavior.  */
10146 		      utype = unsigned_type_for (TREE_TYPE (arg0));
10147 		      if (pmop[0] != NULL)
10148 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10149 		      if (pmop[1] != NULL)
10150 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10151 		    }
10152 
10153 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
10154 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10155 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
10156 		    {
10157 		      if (pmop[0] != NULL && pmop[1] != NULL)
10158 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10159 					       pmop[0], pmop[1]);
10160 		      else if (pmop[0] != NULL)
10161 			tem = pmop[0];
10162 		      else if (pmop[1] != NULL)
10163 			tem = pmop[1];
10164 		      else
10165 			return build_int_cst (type, 0);
10166 		    }
10167 		  else if (pmop[0] == NULL)
10168 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10169 		  else
10170 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10171 					   pmop[0], pmop[1]);
10172 		  /* TEM is now the new binary +, - or unary - replacement.  */
10173 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10174 					 fold_convert_loc (loc, utype, arg1));
10175 		  return fold_convert_loc (loc, type, tem);
10176 		}
10177 	    }
10178 	}
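      /* Illustration (editor's note): with M == 7 (so M == (1 << 3) - 1)
	 and N == 15 (N & M == M), ((a & 15) + b) & 7 is rewritten above as
	 (a + b) & 7; the inner mask cannot affect the low three bits of the
	 sum.  */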
10179 
10180       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10181       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10182 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10183 	{
10184 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10185 
10186 	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10187 	  if (mask == -1)
10188 	    return
10189 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10190 	}
10191 
10192       goto associate;
10193 
10194     case RDIV_EXPR:
10195       /* Don't touch a floating-point divide by zero unless the mode
10196 	 of the constant can represent infinity.  */
10197       if (TREE_CODE (arg1) == REAL_CST
10198 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10199 	  && real_zerop (arg1))
10200 	return NULL_TREE;
10201 
10202       /* (-A) / (-B) -> A / B  */
10203       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10204 	return fold_build2_loc (loc, RDIV_EXPR, type,
10205 			    TREE_OPERAND (arg0, 0),
10206 			    negate_expr (arg1));
10207       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10208 	return fold_build2_loc (loc, RDIV_EXPR, type,
10209 			    negate_expr (arg0),
10210 			    TREE_OPERAND (arg1, 0));
10211       return NULL_TREE;
10212 
10213     case TRUNC_DIV_EXPR:
10214       /* Fall through */
10215 
10216     case FLOOR_DIV_EXPR:
10217       /* Simplify A / (B << N) where A and B are positive and B is
10218 	 a power of 2, to A >> (N + log2(B)).  */
10219       strict_overflow_p = false;
10220       if (TREE_CODE (arg1) == LSHIFT_EXPR
10221 	  && (TYPE_UNSIGNED (type)
10222 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10223 	{
10224 	  tree sval = TREE_OPERAND (arg1, 0);
10225 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10226 	    {
10227 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10228 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10229 					 wi::exact_log2 (sval));
10230 
10231 	      if (strict_overflow_p)
10232 		fold_overflow_warning (("assuming signed overflow does not "
10233 					"occur when simplifying A / (B << N)"),
10234 				       WARN_STRICT_OVERFLOW_MISC);
10235 
10236 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10237 					sh_cnt, pow2);
10238 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10239 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10240 	    }
10241 	}
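      /* Illustration (editor's note): for unsigned a, a / (2 << n) is
	 rewritten above as a >> (n + 1), folding log2 of the power-of-two
	 base into the shift count.  */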
10242 
10243       /* Fall through */
10244 
10245     case ROUND_DIV_EXPR:
10246     case CEIL_DIV_EXPR:
10247     case EXACT_DIV_EXPR:
10248       if (integer_zerop (arg1))
10249 	return NULL_TREE;
10250 
10251       /* Convert -A / -B to A / B when the type is signed and overflow is
10252 	 undefined.  */
10253       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10254 	  && TREE_CODE (op0) == NEGATE_EXPR
10255 	  && negate_expr_p (op1))
10256 	{
10257 	  if (INTEGRAL_TYPE_P (type))
10258 	    fold_overflow_warning (("assuming signed overflow does not occur "
10259 				    "when distributing negation across "
10260 				    "division"),
10261 				   WARN_STRICT_OVERFLOW_MISC);
10262 	  return fold_build2_loc (loc, code, type,
10263 				  fold_convert_loc (loc, type,
10264 						    TREE_OPERAND (arg0, 0)),
10265 				  negate_expr (op1));
10266 	}
10267       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10268 	  && TREE_CODE (arg1) == NEGATE_EXPR
10269 	  && negate_expr_p (op0))
10270 	{
10271 	  if (INTEGRAL_TYPE_P (type))
10272 	    fold_overflow_warning (("assuming signed overflow does not occur "
10273 				    "when distributing negation across "
10274 				    "division"),
10275 				   WARN_STRICT_OVERFLOW_MISC);
10276 	  return fold_build2_loc (loc, code, type,
10277 				  negate_expr (op0),
10278 				  fold_convert_loc (loc, type,
10279 						    TREE_OPERAND (arg1, 0)));
10280 	}
10281 
10282       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10283 	 operation, EXACT_DIV_EXPR.
10284 
10285 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10286 	 At one time others generated faster code; it's not clear if they still
10287 	 do after the last round of changes to the DIV code in expmed.c.  */
10288       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10289 	  && multiple_of_p (type, arg0, arg1))
10290 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10291 				fold_convert (type, arg0),
10292 				fold_convert (type, arg1));
10293 
10294       strict_overflow_p = false;
10295       if (TREE_CODE (arg1) == INTEGER_CST
10296 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10297 					 &strict_overflow_p)))
10298 	{
10299 	  if (strict_overflow_p)
10300 	    fold_overflow_warning (("assuming signed overflow does not occur "
10301 				    "when simplifying division"),
10302 				   WARN_STRICT_OVERFLOW_MISC);
10303 	  return fold_convert_loc (loc, type, tem);
10304 	}
10305 
10306       return NULL_TREE;
10307 
10308     case CEIL_MOD_EXPR:
10309     case FLOOR_MOD_EXPR:
10310     case ROUND_MOD_EXPR:
10311     case TRUNC_MOD_EXPR:
10312       strict_overflow_p = false;
10313       if (TREE_CODE (arg1) == INTEGER_CST
10314 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10315 					 &strict_overflow_p)))
10316 	{
10317 	  if (strict_overflow_p)
10318 	    fold_overflow_warning (("assuming signed overflow does not occur "
10319 				    "when simplifying modulus"),
10320 				   WARN_STRICT_OVERFLOW_MISC);
10321 	  return fold_convert_loc (loc, type, tem);
10322 	}
10323 
10324       return NULL_TREE;
10325 
10326     case LROTATE_EXPR:
10327     case RROTATE_EXPR:
10328     case RSHIFT_EXPR:
10329     case LSHIFT_EXPR:
10330       /* Since a negative shift count is not well-defined,
10331 	 don't try to compute it in the compiler.  */
10332       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10333 	return NULL_TREE;
10334 
10335       prec = element_precision (type);
10336 
10337       /* If we have a rotate of a bit operation with the rotate count and
10338 	 the second operand of the bit operation both constant,
10339 	 permute the two operations.  */
10340       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10341 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10342 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10343 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10344 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10345 	{
10346 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10347 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10348 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10349 				  fold_build2_loc (loc, code, type,
10350 						   arg00, arg1),
10351 				  fold_build2_loc (loc, code, type,
10352 						   arg01, arg1));
10353 	}
10354 
10355       /* Two consecutive rotates adding up to some integer
10356 	 multiple of the precision of the type can be ignored.  */
10357       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10358 	  && TREE_CODE (arg0) == RROTATE_EXPR
10359 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10360 	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10361 			     prec) == 0)
10362 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
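      /* Illustration (editor's note): on a 32-bit type, rotating right by
	 13 and then by 19 adds up to 32, a whole multiple of the precision,
	 so the pair above folds away entirely.  */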
10363 
10364       return NULL_TREE;
10365 
10366     case MIN_EXPR:
10367     case MAX_EXPR:
10368       goto associate;
10369 
10370     case TRUTH_ANDIF_EXPR:
10371       /* Note that the operands of this must be ints
10372 	 and their values must be 0 or 1.
10373 	 ("true" is a fixed value perhaps depending on the language.)  */
10374       /* If first arg is constant zero, return it.  */
10375       if (integer_zerop (arg0))
10376 	return fold_convert_loc (loc, type, arg0);
10377       /* FALLTHRU */
10378     case TRUTH_AND_EXPR:
10379       /* If either arg is constant true, drop it.  */
10380       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10381 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10382       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10383 	  /* Preserve sequence points.  */
10384 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10385 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10386       /* If second arg is constant zero, result is zero, but first arg
10387 	 must be evaluated.  */
10388       if (integer_zerop (arg1))
10389 	return omit_one_operand_loc (loc, type, arg1, arg0);
10390       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10391 	 case will be handled here.  */
10392       if (integer_zerop (arg0))
10393 	return omit_one_operand_loc (loc, type, arg0, arg1);
10394 
10395       /* !X && X is always false.  */
10396       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10397 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10398 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10399       /* X && !X is always false.  */
10400       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10401 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10402 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10403 
10404       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10405 	 means A >= Y && A != MAX, but in this case we know that
10406 	 A < X <= MAX.  */
10407 
10408       if (!TREE_SIDE_EFFECTS (arg0)
10409 	  && !TREE_SIDE_EFFECTS (arg1))
10410 	{
10411 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10412 	  if (tem && !operand_equal_p (tem, arg0, 0))
10413 	    return fold_build2_loc (loc, code, type, tem, arg1);
10414 
10415 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10416 	  if (tem && !operand_equal_p (tem, arg1, 0))
10417 	    return fold_build2_loc (loc, code, type, arg0, tem);
10418 	}
10419 
10420       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10421           != NULL_TREE)
10422         return tem;
10423 
10424       return NULL_TREE;
10425 
10426     case TRUTH_ORIF_EXPR:
10427       /* Note that the operands of this must be ints
10428 	 and their values must be 0 or true.
10429 	 ("true" is a fixed value perhaps depending on the language.)  */
10430       /* If first arg is constant true, return it.  */
10431       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10432 	return fold_convert_loc (loc, type, arg0);
10433       /* FALLTHRU */
10434     case TRUTH_OR_EXPR:
10435       /* If either arg is constant zero, drop it.  */
10436       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10437 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10438       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10439 	  /* Preserve sequence points.  */
10440 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10441 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10442       /* If second arg is constant true, result is true, but we must
10443 	 evaluate first arg.  */
10444       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10445 	return omit_one_operand_loc (loc, type, arg1, arg0);
10446       /* Likewise for first arg, but note this only occurs here for
10447 	 TRUTH_OR_EXPR.  */
10448       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10449 	return omit_one_operand_loc (loc, type, arg0, arg1);
10450 
10451       /* !X || X is always true.  */
10452       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10453 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10454 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10455       /* X || !X is always true.  */
10456       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10457 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10458 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10459 
10460       /* (X && !Y) || (!X && Y) is X ^ Y */
10461       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10462 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10463         {
10464 	  tree a0, a1, l0, l1, n0, n1;
10465 
10466 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10467 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10468 
10469 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10470 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10471 
10472 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10473 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10474 
10475 	  if ((operand_equal_p (n0, a0, 0)
10476 	       && operand_equal_p (n1, a1, 0))
10477 	      || (operand_equal_p (n0, a1, 0)
10478 		  && operand_equal_p (n1, a0, 0)))
10479 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10480 	}
10481 
10482       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10483           != NULL_TREE)
10484         return tem;
10485 
10486       return NULL_TREE;
10487 
10488     case TRUTH_XOR_EXPR:
10489       /* If the second arg is constant zero, drop it.  */
10490       if (integer_zerop (arg1))
10491 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10492       /* If the second arg is constant true, this is a logical inversion.  */
10493       if (integer_onep (arg1))
10494 	{
10495 	  tem = invert_truthvalue_loc (loc, arg0);
10496 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10497 	}
10498       /* Identical arguments cancel to zero.  */
10499       if (operand_equal_p (arg0, arg1, 0))
10500 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10501 
10502       /* !X ^ X is always true.  */
10503       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10504 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10505 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10506 
10507       /* X ^ !X is always true.  */
10508       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10509 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10510 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10511 
10512       return NULL_TREE;
10513 
10514     case EQ_EXPR:
10515     case NE_EXPR:
10516       STRIP_NOPS (arg0);
10517       STRIP_NOPS (arg1);
10518 
10519       tem = fold_comparison (loc, code, type, op0, op1);
10520       if (tem != NULL_TREE)
10521 	return tem;
10522 
10523       /* bool_var != 1 becomes !bool_var. */
10524       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10525           && code == NE_EXPR)
10526         return fold_convert_loc (loc, type,
10527 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10528 						  TREE_TYPE (arg0), arg0));
10529 
10530       /* bool_var == 0 becomes !bool_var. */
10531       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10532           && code == EQ_EXPR)
10533         return fold_convert_loc (loc, type,
10534 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10535 						  TREE_TYPE (arg0), arg0));
10536 
10537       /* !exp != 0 becomes !exp */
10538       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10539 	  && code == NE_EXPR)
10540         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10541 
10542       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
10543       if ((TREE_CODE (arg0) == PLUS_EXPR
10544 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10545 	   || TREE_CODE (arg0) == MINUS_EXPR)
10546 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10547 									0)),
10548 			      arg1, 0)
10549 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10550 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
10551 	{
10552 	  tree val = TREE_OPERAND (arg0, 1);
10553 	  val = fold_build2_loc (loc, code, type, val,
10554 				 build_int_cst (TREE_TYPE (val), 0));
10555 	  return omit_two_operands_loc (loc, type, val,
10556 					TREE_OPERAND (arg0, 0), arg1);
10557 	}
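      /* Illustration (editor's note): for integral x and y, (x + y) == x
	 becomes y == 0 above, with x still evaluated for its side effects
	 via omit_two_operands_loc.  */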
10558 
10559       /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
10560       if ((TREE_CODE (arg1) == PLUS_EXPR
10561 	   || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10562 	   || TREE_CODE (arg1) == MINUS_EXPR)
10563 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10564 									0)),
10565 			      arg0, 0)
10566 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10567 	      || POINTER_TYPE_P (TREE_TYPE (arg1))))
10568 	{
10569 	  tree val = TREE_OPERAND (arg1, 1);
10570 	  val = fold_build2_loc (loc, code, type, val,
10571 				 build_int_cst (TREE_TYPE (val), 0));
10572 	  return omit_two_operands_loc (loc, type, val,
10573 					TREE_OPERAND (arg1, 0), arg0);
10574 	}
10575 
10576       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
10577       if (TREE_CODE (arg0) == MINUS_EXPR
10578 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10579 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10580 									1)),
10581 			      arg1, 0)
10582 	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10583 	return omit_two_operands_loc (loc, type,
10584 				      code == NE_EXPR
10585 				      ? boolean_true_node : boolean_false_node,
10586 				      TREE_OPERAND (arg0, 1), arg1);
10587 
10588       /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
10589       if (TREE_CODE (arg1) == MINUS_EXPR
10590 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10591 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10592 									1)),
10593 			      arg0, 0)
10594 	  && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10595 	return omit_two_operands_loc (loc, type,
10596 				      code == NE_EXPR
10597 				      ? boolean_true_node : boolean_false_node,
10598 				      TREE_OPERAND (arg1, 1), arg0);
10599 
10600       /* If this is an EQ or NE comparison with zero and ARG0 is
10601 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10602 	 two operations, but the latter can be done in one less insn
10603 	 on machines that have only two-operand insns or on which a
10604 	 constant cannot be the first operand.  */
10605       if (TREE_CODE (arg0) == BIT_AND_EXPR
10606 	  && integer_zerop (arg1))
10607 	{
10608 	  tree arg00 = TREE_OPERAND (arg0, 0);
10609 	  tree arg01 = TREE_OPERAND (arg0, 1);
10610 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10611 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10612 	    {
10613 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10614 				      arg01, TREE_OPERAND (arg00, 1));
10615 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10616 				 build_int_cst (TREE_TYPE (arg0), 1));
10617 	      return fold_build2_loc (loc, code, type,
10618 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10619 				  arg1);
10620 	    }
10621 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10622 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10623 	    {
10624 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10625 				      arg00, TREE_OPERAND (arg01, 1));
10626 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10627 				 build_int_cst (TREE_TYPE (arg0), 1));
10628 	      return fold_build2_loc (loc, code, type,
10629 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10630 				  arg1);
10631 	    }
10632 	}
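      /* Illustration (editor's note): ((1 << n) & b) != 0 is rewritten
	 above as ((b >> n) & 1) != 0, saving an instruction on targets
	 where a constant cannot be the first shift operand.  */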
10633 
10634       /* If this is an NE or EQ comparison of zero against the result of a
10635 	 signed MOD operation whose second operand is a power of 2, make
10636 	 the MOD operation unsigned since it is simpler and equivalent.  */
10637       if (integer_zerop (arg1)
10638 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10639 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10640 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10641 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10642 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10643 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10644 	{
10645 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10646 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10647 				     fold_convert_loc (loc, newtype,
10648 						       TREE_OPERAND (arg0, 0)),
10649 				     fold_convert_loc (loc, newtype,
10650 						       TREE_OPERAND (arg0, 1)));
10651 
10652 	  return fold_build2_loc (loc, code, type, newmod,
10653 			      fold_convert_loc (loc, newtype, arg1));
10654 	}
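      /* Illustration (editor's note): for signed x, (x % 8) == 0 becomes
	 ((unsigned) x % 8) == 0 above; the low bits that decide the test
	 are the same either way, and the unsigned MOD is simpler.  */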
10655 
10656       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10657 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10658 	 a single bit.  */
10659       if (TREE_CODE (arg0) == BIT_AND_EXPR
10660 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10661 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10662 	     == INTEGER_CST
10663 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10664 	  && integer_zerop (arg1))
10665 	{
10666 	  tree itype = TREE_TYPE (arg0);
10667 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10668 	  prec = TYPE_PRECISION (itype);
10669 
10670 	  /* Check for a valid shift count.  */
10671 	  if (wi::ltu_p (arg001, prec))
10672 	    {
10673 	      tree arg01 = TREE_OPERAND (arg0, 1);
10674 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10675 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10676 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10677 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10678 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10679 		{
10680 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10681 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10682 		  return fold_build2_loc (loc, code, type, tem,
10683 					  fold_convert_loc (loc, itype, arg1));
10684 		}
10685 	      /* Otherwise, for signed (arithmetic) shifts,
10686 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10687 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10688 	      else if (!TYPE_UNSIGNED (itype))
10689 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10690 				    arg000, build_int_cst (itype, 0));
10691 	      /* Otherwise, for unsigned (logical) shifts,
10692 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10693 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10694 	      else
10695 		return omit_one_operand_loc (loc, type,
10696 					 code == EQ_EXPR ? integer_one_node
10697 							 : integer_zero_node,
10698 					 arg000);
10699 	    }
10700 	}
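      /* Illustration (editor's note): for signed 32-bit x,
	 ((x >> 31) & 2) != 0 cannot test a real bit of x (the shifted mask
	 would overflow), so it is rewritten above as x < 0; with a smaller
	 shift, ((x >> 4) & 2) != 0 becomes (x & 32) != 0 instead.  */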
10701 
10702       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10703 	 Similarly for NE_EXPR.  */
10704       if (TREE_CODE (arg0) == BIT_AND_EXPR
10705 	  && TREE_CODE (arg1) == INTEGER_CST
10706 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10707 	{
10708 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10709 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
10710 				   TREE_OPERAND (arg0, 1));
10711 	  tree dandnotc
10712 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10713 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10714 			       notc);
10715 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10716 	  if (integer_nonzerop (dandnotc))
10717 	    return omit_one_operand_loc (loc, type, rslt, arg0);
10718 	}
10719 
10720       /* If this is a comparison of a field, we may be able to simplify it.  */
10721       if ((TREE_CODE (arg0) == COMPONENT_REF
10722 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10723 	  /* Handle the constant case even without -O
10724 	     to make sure the warnings are given.  */
10725 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10726 	{
10727 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10728 	  if (t1)
10729 	    return t1;
10730 	}
10731 
10732       /* Optimize comparisons of strlen vs zero to a compare of the
10733 	 first character of the string vs zero.  To wit,
10734 		strlen(ptr) == 0   =>  *ptr == 0
10735 		strlen(ptr) != 0   =>  *ptr != 0
10736 	 Other cases should reduce to one of these two (or a constant)
10737 	 due to the return value of strlen being unsigned.  */
10738       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
10739 	{
10740 	  tree fndecl = get_callee_fndecl (arg0);
10741 
10742 	  if (fndecl
10743 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10744 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10745 	      && call_expr_nargs (arg0) == 1
10746 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
10747 		  == POINTER_TYPE))
10748 	    {
10749 	      tree ptrtype
10750 		= build_pointer_type (build_qualified_type (char_type_node,
10751 							    TYPE_QUAL_CONST));
10752 	      tree ptr = fold_convert_loc (loc, ptrtype,
10753 					   CALL_EXPR_ARG (arg0, 0));
10754 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
10755 	      return fold_build2_loc (loc, code, type, iref,
10756 				      build_int_cst (TREE_TYPE (iref), 0));
10757 	    }
10758 	}
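      /* Illustration (editor's note): strlen (s) == 0 is folded above to
	 *(const char *) s == 0, avoiding the library call when only
	 emptiness is tested.  */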
10759 
10760       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10761 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10762       if (TREE_CODE (arg0) == RSHIFT_EXPR
10763 	  && integer_zerop (arg1)
10764 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10765 	{
10766 	  tree arg00 = TREE_OPERAND (arg0, 0);
10767 	  tree arg01 = TREE_OPERAND (arg0, 1);
10768 	  tree itype = TREE_TYPE (arg00);
10769 	  if (wi::eq_p (arg01, element_precision (itype) - 1))
10770 	    {
10771 	      if (TYPE_UNSIGNED (itype))
10772 		{
10773 		  itype = signed_type_for (itype);
10774 		  arg00 = fold_convert_loc (loc, itype, arg00);
10775 		}
10776 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10777 				  type, arg00, build_zero_cst (itype));
10778 	    }
10779 	}
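      /* Illustration (editor's note): for 32-bit x, (x >> 31) != 0 becomes
	 x < 0 above (converting to the signed type first if x is
	 unsigned).  */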
10780 
10781       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10782 	 (X & C) == 0 when C is a single bit.  */
10783       if (TREE_CODE (arg0) == BIT_AND_EXPR
10784 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10785 	  && integer_zerop (arg1)
10786 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10787 	{
10788 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10789 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10790 				 TREE_OPERAND (arg0, 1));
10791 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10792 				  type, tem,
10793 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10794 						    arg1));
10795 	}
10796 
10797       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10798 	 constant C is a power of two, i.e. a single bit.  */
10799       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10800 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10801 	  && integer_zerop (arg1)
10802 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10803 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10804 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10805 	{
10806 	  tree arg00 = TREE_OPERAND (arg0, 0);
10807 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10808 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10809 	}
10810 
10811       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10812 	 when C is a power of two, i.e. a single bit.  */
10813       if (TREE_CODE (arg0) == BIT_AND_EXPR
10814 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10815 	  && integer_zerop (arg1)
10816 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10817 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10818 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10819 	{
10820 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10821 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10822 			     arg000, TREE_OPERAND (arg0, 1));
10823 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10824 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10825 	}
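      /* Together the last two cases canonicalize e.g. "((x & 4) ^ 4) == 0"
	 and "((x ^ 4) & 4) == 0" into "(x & 4) != 0" whenever the constant
	 is a single bit.  */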
10826 
10827       if (integer_zerop (arg1)
10828 	  && tree_expr_nonzero_p (arg0))
10829         {
10830 	  tree res = constant_boolean_node (code == NE_EXPR, type);
10831 	  return omit_one_operand_loc (loc, type, res, arg0);
10832 	}
10833 
10834       /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
10835       if (TREE_CODE (arg0) == BIT_AND_EXPR
10836 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10837 	{
10838 	  tree arg00 = TREE_OPERAND (arg0, 0);
10839 	  tree arg01 = TREE_OPERAND (arg0, 1);
10840 	  tree arg10 = TREE_OPERAND (arg1, 0);
10841 	  tree arg11 = TREE_OPERAND (arg1, 1);
10842 	  tree itype = TREE_TYPE (arg0);
10843 
10844 	  if (operand_equal_p (arg01, arg11, 0))
10845 	    return fold_build2_loc (loc, code, type,
10846 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10847 					     fold_build2_loc (loc,
10848 							  BIT_XOR_EXPR, itype,
10849 							  arg00, arg10),
10850 					     arg01),
10851 				build_zero_cst (itype));
10852 
10853 	  if (operand_equal_p (arg01, arg10, 0))
10854 	    return fold_build2_loc (loc, code, type,
10855 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10856 					     fold_build2_loc (loc,
10857 							  BIT_XOR_EXPR, itype,
10858 							  arg00, arg11),
10859 					     arg01),
10860 				build_zero_cst (itype));
10861 
10862 	  if (operand_equal_p (arg00, arg11, 0))
10863 	    return fold_build2_loc (loc, code, type,
10864 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10865 					     fold_build2_loc (loc,
10866 							  BIT_XOR_EXPR, itype,
10867 							  arg01, arg10),
10868 					     arg00),
10869 				build_zero_cst (itype));
10870 
10871 	  if (operand_equal_p (arg00, arg10, 0))
10872 	    return fold_build2_loc (loc, code, type,
10873 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
10874 					     fold_build2_loc (loc,
10875 							  BIT_XOR_EXPR, itype,
10876 							  arg01, arg11),
10877 					     arg00),
10878 				build_zero_cst (itype));
10879 	}
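      /* E.g. "(x & m) == (y & m)" becomes "((x ^ y) & m) == 0", a
	 canonical form that exposes further simplifications of the XOR.  */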
10880 
10881       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10882 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10883 	{
10884 	  tree arg00 = TREE_OPERAND (arg0, 0);
10885 	  tree arg01 = TREE_OPERAND (arg0, 1);
10886 	  tree arg10 = TREE_OPERAND (arg1, 0);
10887 	  tree arg11 = TREE_OPERAND (arg1, 1);
10888 	  tree itype = TREE_TYPE (arg0);
10889 
10890 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10891 	     operand_equal_p guarantees no side-effects so we don't need
10892 	     to use omit_one_operand on Z.  */
10893 	  if (operand_equal_p (arg01, arg11, 0))
10894 	    return fold_build2_loc (loc, code, type, arg00,
10895 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10896 						      arg10));
10897 	  if (operand_equal_p (arg01, arg10, 0))
10898 	    return fold_build2_loc (loc, code, type, arg00,
10899 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10900 						      arg11));
10901 	  if (operand_equal_p (arg00, arg11, 0))
10902 	    return fold_build2_loc (loc, code, type, arg01,
10903 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10904 						      arg10));
10905 	  if (operand_equal_p (arg00, arg10, 0))
10906 	    return fold_build2_loc (loc, code, type, arg01,
10907 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10908 						      arg11));
10909 
10910 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10911 	  if (TREE_CODE (arg01) == INTEGER_CST
10912 	      && TREE_CODE (arg11) == INTEGER_CST)
10913 	    {
10914 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10915 				     fold_convert_loc (loc, itype, arg11));
10916 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10917 	      return fold_build2_loc (loc, code, type, tem,
10918 				      fold_convert_loc (loc, itype, arg10));
10919 	    }
10920 	}
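      /* E.g. "(x ^ 5) == (y ^ 3)" becomes "(x ^ (5 ^ 3)) == y", i.e.
	 "(x ^ 6) == y", merging the two constants.  */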
10921 
10922       /* Attempt to simplify equality/inequality comparisons of complex
10923 	 values.  Only lower the comparison if the result is known or
10924 	 can be simplified to a single scalar comparison.  */
10925       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10926 	   || TREE_CODE (arg0) == COMPLEX_CST)
10927 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10928 	      || TREE_CODE (arg1) == COMPLEX_CST))
10929 	{
10930 	  tree real0, imag0, real1, imag1;
10931 	  tree rcond, icond;
10932 
10933 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
10934 	    {
10935 	      real0 = TREE_OPERAND (arg0, 0);
10936 	      imag0 = TREE_OPERAND (arg0, 1);
10937 	    }
10938 	  else
10939 	    {
10940 	      real0 = TREE_REALPART (arg0);
10941 	      imag0 = TREE_IMAGPART (arg0);
10942 	    }
10943 
10944 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
10945 	    {
10946 	      real1 = TREE_OPERAND (arg1, 0);
10947 	      imag1 = TREE_OPERAND (arg1, 1);
10948 	    }
10949 	  else
10950 	    {
10951 	      real1 = TREE_REALPART (arg1);
10952 	      imag1 = TREE_IMAGPART (arg1);
10953 	    }
10954 
10955 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
10956 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10957 	    {
10958 	      if (integer_zerop (rcond))
10959 		{
10960 		  if (code == EQ_EXPR)
10961 		    return omit_two_operands_loc (loc, type, boolean_false_node,
10962 					      imag0, imag1);
10963 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10964 		}
10965 	      else
10966 		{
10967 		  if (code == NE_EXPR)
10968 		    return omit_two_operands_loc (loc, type, boolean_true_node,
10969 					      imag0, imag1);
10970 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10971 		}
10972 	    }
10973 
10974 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
10975 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
10976 	    {
10977 	      if (integer_zerop (icond))
10978 		{
10979 		  if (code == EQ_EXPR)
10980 		    return omit_two_operands_loc (loc, type, boolean_false_node,
10981 					      real0, real1);
10982 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10983 		}
10984 	      else
10985 		{
10986 		  if (code == NE_EXPR)
10987 		    return omit_two_operands_loc (loc, type, boolean_true_node,
10988 					      real0, real1);
10989 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10990 		}
10991 	    }
10992 	}
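      /* E.g. if the real parts are known to differ, "a == b" on complex
	 operands folds to false outright; if they are known equal, only
	 the comparison of the imaginary parts remains.  */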
10993 
10994       return NULL_TREE;
10995 
10996     case LT_EXPR:
10997     case GT_EXPR:
10998     case LE_EXPR:
10999     case GE_EXPR:
11000       tem = fold_comparison (loc, code, type, op0, op1);
11001       if (tem != NULL_TREE)
11002 	return tem;
11003 
11004       /* Transform comparisons of the form X +- C CMP X.  */
11005       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11006 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11007 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11008 	       && !HONOR_SNANS (arg0))
11009 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11010 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11011 	{
11012 	  tree arg01 = TREE_OPERAND (arg0, 1);
11013 	  enum tree_code code0 = TREE_CODE (arg0);
11014 	  int is_positive;
11015 
11016 	  if (TREE_CODE (arg01) == REAL_CST)
11017 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11018 	  else
11019 	    is_positive = tree_int_cst_sgn (arg01);
11020 
11021 	  /* (X - c) > X becomes false.  */
11022 	  if (code == GT_EXPR
11023 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11024 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11025 	    {
11026 	      if (TREE_CODE (arg01) == INTEGER_CST
11027 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11028 		fold_overflow_warning (("assuming signed overflow does not "
11029 					"occur when assuming that (X - c) > X "
11030 					"is always false"),
11031 				       WARN_STRICT_OVERFLOW_ALL);
11032 	      return constant_boolean_node (0, type);
11033 	    }
11034 
11035 	  /* Likewise (X + c) < X becomes false.  */
11036 	  if (code == LT_EXPR
11037 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11038 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11039 	    {
11040 	      if (TREE_CODE (arg01) == INTEGER_CST
11041 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11042 		fold_overflow_warning (("assuming signed overflow does not "
11043 					"occur when assuming that "
11044 					"(X + c) < X is always false"),
11045 				       WARN_STRICT_OVERFLOW_ALL);
11046 	      return constant_boolean_node (0, type);
11047 	    }
11048 
11049 	  /* Convert (X - c) <= X to true.  */
11050 	  if (!HONOR_NANS (arg1)
11051 	      && code == LE_EXPR
11052 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11053 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11054 	    {
11055 	      if (TREE_CODE (arg01) == INTEGER_CST
11056 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11057 		fold_overflow_warning (("assuming signed overflow does not "
11058 					"occur when assuming that "
11059 					"(X - c) <= X is always true"),
11060 				       WARN_STRICT_OVERFLOW_ALL);
11061 	      return constant_boolean_node (1, type);
11062 	    }
11063 
11064 	  /* Convert (X + c) >= X to true.  */
11065 	  if (!HONOR_NANS (arg1)
11066 	      && code == GE_EXPR
11067 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11068 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11069 	    {
11070 	      if (TREE_CODE (arg01) == INTEGER_CST
11071 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11072 		fold_overflow_warning (("assuming signed overflow does not "
11073 					"occur when assuming that "
11074 					"(X + c) >= X is always true"),
11075 				       WARN_STRICT_OVERFLOW_ALL);
11076 	      return constant_boolean_node (1, type);
11077 	    }
11078 
11079 	  if (TREE_CODE (arg01) == INTEGER_CST)
11080 	    {
11081 	      /* Convert X + c > X and X - c < X to true for integers.  */
11082 	      if (code == GT_EXPR
11083 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11084 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11085 		{
11086 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11087 		    fold_overflow_warning (("assuming signed overflow does "
11088 					    "not occur when assuming that "
11089 					    "(X + c) > X is always true"),
11090 					   WARN_STRICT_OVERFLOW_ALL);
11091 		  return constant_boolean_node (1, type);
11092 		}
11093 
11094 	      if (code == LT_EXPR
11095 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11096 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11097 		{
11098 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11099 		    fold_overflow_warning (("assuming signed overflow does "
11100 					    "not occur when assuming that "
11101 					    "(X - c) < X is always true"),
11102 					   WARN_STRICT_OVERFLOW_ALL);
11103 		  return constant_boolean_node (1, type);
11104 		}
11105 
11106 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
11107 	      if (code == LE_EXPR
11108 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11109 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11110 		{
11111 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11112 		    fold_overflow_warning (("assuming signed overflow does "
11113 					    "not occur when assuming that "
11114 					    "(X + c) <= X is always false"),
11115 					   WARN_STRICT_OVERFLOW_ALL);
11116 		  return constant_boolean_node (0, type);
11117 		}
11118 
11119 	      if (code == GE_EXPR
11120 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11121 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11122 		{
11123 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11124 		    fold_overflow_warning (("assuming signed overflow does "
11125 					    "not occur when assuming that "
11126 					    "(X - c) >= X is always false"),
11127 					   WARN_STRICT_OVERFLOW_ALL);
11128 		  return constant_boolean_node (0, type);
11129 		}
11130 	    }
11131 	}
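      /* When signed overflow is undefined, the rules above fold e.g.
	 "x + 1 > x" to true and "x - 1 >= x" to false for signed x,
	 emitting a -Wstrict-overflow warning each time.  */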
11132 
11133       /* If we are comparing an ABS_EXPR with a constant, we can
11134 	 convert all the cases into explicit comparisons, but they may
11135 	 well not be faster than doing the ABS and one comparison.
11136 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11137 	 and a comparison, and is probably faster.  */
11138       if (code == LE_EXPR
11139 	  && TREE_CODE (arg1) == INTEGER_CST
11140 	  && TREE_CODE (arg0) == ABS_EXPR
11141 	  && ! TREE_SIDE_EFFECTS (arg0)
11142 	  && (0 != (tem = negate_expr (arg1)))
11143 	  && TREE_CODE (tem) == INTEGER_CST
11144 	  && !TREE_OVERFLOW (tem))
11145 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11146 			    build2 (GE_EXPR, type,
11147 				    TREE_OPERAND (arg0, 0), tem),
11148 			    build2 (LE_EXPR, type,
11149 				    TREE_OPERAND (arg0, 0), arg1));
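      /* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5", which later
	 folding turns into a subtraction and a single comparison.  */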
11150 
11151       /* Convert ABS_EXPR<x> >= 0 to true.  */
11152       strict_overflow_p = false;
11153       if (code == GE_EXPR
11154 	  && (integer_zerop (arg1)
11155 	      || (! HONOR_NANS (arg0)
11156 		  && real_zerop (arg1)))
11157 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11158 	{
11159 	  if (strict_overflow_p)
11160 	    fold_overflow_warning (("assuming signed overflow does not occur "
11161 				    "when simplifying comparison of "
11162 				    "absolute value and zero"),
11163 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11164 	  return omit_one_operand_loc (loc, type,
11165 				       constant_boolean_node (true, type),
11166 				       arg0);
11167 	}
11168 
11169       /* Convert ABS_EXPR<x> < 0 to false.  */
11170       strict_overflow_p = false;
11171       if (code == LT_EXPR
11172 	  && (integer_zerop (arg1) || real_zerop (arg1))
11173 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11174 	{
11175 	  if (strict_overflow_p)
11176 	    fold_overflow_warning (("assuming signed overflow does not occur "
11177 				    "when simplifying comparison of "
11178 				    "absolute value and zero"),
11179 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11180 	  return omit_one_operand_loc (loc, type,
11181 				       constant_boolean_node (false, type),
11182 				       arg0);
11183 	}
11184 
11185       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11186 	 and similarly for >= into !=.  */
11187       if ((code == LT_EXPR || code == GE_EXPR)
11188 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11189 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11190 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11191 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11192 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11193 				   TREE_OPERAND (arg1, 1)),
11194 			   build_zero_cst (TREE_TYPE (arg0)));
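      /* E.g. for unsigned x, "x < (1 << y)" becomes "(x >> y) == 0"
	 and "x >= (1 << y)" becomes "(x >> y) != 0".  */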
11195 
11196       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
11197 	 otherwise Y might be >= # of bits in X's type and thus e.g.
11198 	 (unsigned char) (1 << Y) for Y 15 might be 0.
11199 	 If the cast is widening, then 1 << Y should have unsigned type,
11200 	 otherwise if Y is number of bits in the signed shift type minus 1,
11201 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
11202 	 31 might be 0xffffffff80000000.  */
11203       if ((code == LT_EXPR || code == GE_EXPR)
11204 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11205 	  && CONVERT_EXPR_P (arg1)
11206 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11207 	  && (element_precision (TREE_TYPE (arg1))
11208 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11209 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11210 	      || (element_precision (TREE_TYPE (arg1))
11211 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11212 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11213 	{
11214 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11215 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11216 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11217 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11218 			     build_zero_cst (TREE_TYPE (arg0)));
11219 	}
11220 
11221       return NULL_TREE;
11222 
11223     case UNORDERED_EXPR:
11224     case ORDERED_EXPR:
11225     case UNLT_EXPR:
11226     case UNLE_EXPR:
11227     case UNGT_EXPR:
11228     case UNGE_EXPR:
11229     case UNEQ_EXPR:
11230     case LTGT_EXPR:
11231       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11232       {
11233 	tree targ0 = strip_float_extensions (arg0);
11234 	tree targ1 = strip_float_extensions (arg1);
11235 	tree newtype = TREE_TYPE (targ0);
11236 
11237 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11238 	  newtype = TREE_TYPE (targ1);
11239 
11240 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11241 	  return fold_build2_loc (loc, code, type,
11242 			      fold_convert_loc (loc, newtype, targ0),
11243 			      fold_convert_loc (loc, newtype, targ1));
11244       }
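      /* E.g. "(double) f1 UNLT (double) f2" with float operands is
	 compared directly in float; the widening conversion is exact,
	 so the result of the comparison cannot change.  */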
11245 
11246       return NULL_TREE;
11247 
11248     case COMPOUND_EXPR:
11249       /* When pedantic, a compound expression can be neither an lvalue
11250 	 nor an integer constant expression.  */
11251       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11252 	return NULL_TREE;
11253       /* Don't let (0, 0) be a null pointer constant.  */
11254       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11255 				 : fold_convert_loc (loc, type, arg1);
11256       return pedantic_non_lvalue_loc (loc, tem);
11257 
11258     case ASSERT_EXPR:
11259       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11260       gcc_unreachable ();
11261 
11262     default:
11263       return NULL_TREE;
11264     } /* switch (code) */
11265 }
11266 
11267 /* Used by contains_label_p and contains_label_1.  */
11268 
11269 struct contains_label_data
11270 {
11271   hash_set<tree> *pset;
11272   bool inside_switch_p;
11273 };
11274 
11275 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11276    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11277    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11278 
11279 static tree
11280 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11281 {
11282   contains_label_data *d = (contains_label_data *) data;
11283   switch (TREE_CODE (*tp))
11284     {
11285     case LABEL_EXPR:
11286       return *tp;
11287 
11288     case CASE_LABEL_EXPR:
11289       if (!d->inside_switch_p)
11290 	return *tp;
11291       return NULL_TREE;
11292 
11293     case SWITCH_EXPR:
11294       if (!d->inside_switch_p)
11295 	{
11296 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11297 	    return *tp;
11298 	  d->inside_switch_p = true;
11299 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11300 	    return *tp;
11301 	  d->inside_switch_p = false;
11302 	  *walk_subtrees = 0;
11303 	}
11304       return NULL_TREE;
11305 
11306     case GOTO_EXPR:
11307       *walk_subtrees = 0;
11308       return NULL_TREE;
11309 
11310     default:
11311       return NULL_TREE;
11312     }
11313 }
11314 
11315 /* Return whether the sub-tree ST contains a label which is accessible from
11316    outside the sub-tree.  */
11317 
11318 static bool
11319 contains_label_p (tree st)
11320 {
11321   hash_set<tree> pset;
11322   contains_label_data data = { &pset, false };
11323   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11324 }
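/* For example, the COND_EXPR folder below may not discard a dead arm such
   as "({ l: y; })" if a goto outside the arm can still reach the label;
   contains_label_p is how it detects that situation.  */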
11325 
11326 /* Fold a ternary expression of code CODE and type TYPE with operands
11327    OP0, OP1, and OP2.  Return the folded expression if folding is
11328    successful.  Otherwise, return NULL_TREE.  */
11329 
11330 tree
11331 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11332 		  tree op0, tree op1, tree op2)
11333 {
11334   tree tem;
11335   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11336   enum tree_code_class kind = TREE_CODE_CLASS (code);
11337 
11338   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11339 	      && TREE_CODE_LENGTH (code) == 3);
11340 
11341   /* If this is a commutative operation, and OP0 is a constant, move it
11342      to OP1 to reduce the number of tests below.  */
11343   if (commutative_ternary_tree_code (code)
11344       && tree_swap_operands_p (op0, op1))
11345     return fold_build3_loc (loc, code, type, op1, op0, op2);
11346 
11347   tem = generic_simplify (loc, code, type, op0, op1, op2);
11348   if (tem)
11349     return tem;
11350 
11351   /* Strip any conversions that don't change the mode.  This is safe
11352      for every expression, except for a comparison expression because
11353      its signedness is derived from its operands.  So, in the latter
11354      case, only strip conversions that don't change the signedness.
11355 
11356      Note that this is done as an internal manipulation within the
11357      constant folder, in order to find the simplest representation of
11358      the arguments so that their form can be studied.  In any cases,
11359      the appropriate type conversions should be put back in the tree
11360      that will get out of the constant folder.  */
11361   if (op0)
11362     {
11363       arg0 = op0;
11364       STRIP_NOPS (arg0);
11365     }
11366 
11367   if (op1)
11368     {
11369       arg1 = op1;
11370       STRIP_NOPS (arg1);
11371     }
11372 
11373   if (op2)
11374     {
11375       arg2 = op2;
11376       STRIP_NOPS (arg2);
11377     }
11378 
11379   switch (code)
11380     {
11381     case COMPONENT_REF:
11382       if (TREE_CODE (arg0) == CONSTRUCTOR
11383 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11384 	{
11385 	  unsigned HOST_WIDE_INT idx;
11386 	  tree field, value;
11387 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11388 	    if (field == arg1)
11389 	      return value;
11390 	}
11391       return NULL_TREE;
11392 
11393     case COND_EXPR:
11394     case VEC_COND_EXPR:
11395       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11396 	 so all simple results must be passed through pedantic_non_lvalue.  */
11397       if (TREE_CODE (arg0) == INTEGER_CST)
11398 	{
11399 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11400 	  tem = integer_zerop (arg0) ? op2 : op1;
11401 	  /* Only optimize constant conditions when the selected branch
11402 	     has the same type as the COND_EXPR.  This avoids optimizing
11403              away "c ? x : throw", where the throw has a void type.
11404              Also avoid throwing away an operand that contains a label.  */
11405           if ((!TREE_SIDE_EFFECTS (unused_op)
11406                || !contains_label_p (unused_op))
11407               && (! VOID_TYPE_P (TREE_TYPE (tem))
11408                   || VOID_TYPE_P (type)))
11409 	    return pedantic_non_lvalue_loc (loc, tem);
11410 	  return NULL_TREE;
11411 	}
11412       else if (TREE_CODE (arg0) == VECTOR_CST)
11413 	{
11414 	  if ((TREE_CODE (arg1) == VECTOR_CST
11415 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11416 	      && (TREE_CODE (arg2) == VECTOR_CST
11417 		  || TREE_CODE (arg2) == CONSTRUCTOR))
11418 	    {
11419 	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11420 	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11421 	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11422 	      for (i = 0; i < nelts; i++)
11423 		{
11424 		  tree val = VECTOR_CST_ELT (arg0, i);
11425 		  if (integer_all_onesp (val))
11426 		    sel[i] = i;
11427 		  else if (integer_zerop (val))
11428 		    sel[i] = nelts + i;
11429 		  else /* Currently unreachable.  */
11430 		    return NULL_TREE;
11431 		}
11432 	      tree t = fold_vec_perm (type, arg1, arg2, sel);
11433 	      if (t != NULL_TREE)
11434 		return t;
11435 	    }
11436 	}
11437 
11438       /* If we have A op B ? A : C, we may be able to convert this to a
11439 	 simpler expression, depending on the operation and the values
11440 	 of B and C.  Signed zeros prevent all of these transformations,
11441 	 for reasons given above each one.
11442 
11443          Also try swapping the arguments and inverting the conditional.  */
11444       if (COMPARISON_CLASS_P (arg0)
11445 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11446 					     arg1, TREE_OPERAND (arg0, 1))
11447 	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11448 	{
11449 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11450 	  if (tem)
11451 	    return tem;
11452 	}
11453 
11454       if (COMPARISON_CLASS_P (arg0)
11455 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11456 					     op2,
11457 					     TREE_OPERAND (arg0, 1))
11458 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11459 	{
11460 	  location_t loc0 = expr_location_or (arg0, loc);
11461 	  tem = fold_invert_truthvalue (loc0, arg0);
11462 	  if (tem && COMPARISON_CLASS_P (tem))
11463 	    {
11464 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11465 	      if (tem)
11466 		return tem;
11467 	    }
11468 	}
11469 
11470       /* If the second operand is simpler than the third, swap them
11471 	 since that produces better jump optimization results.  */
11472       if (truth_value_p (TREE_CODE (arg0))
11473 	  && tree_swap_operands_p (op1, op2))
11474 	{
11475 	  location_t loc0 = expr_location_or (arg0, loc);
11476 	  /* See if this can be inverted.  If it can't, possibly because
11477 	     it was a floating-point inequality comparison, don't do
11478 	     anything.  */
11479 	  tem = fold_invert_truthvalue (loc0, arg0);
11480 	  if (tem)
11481 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11482 	}
11483 
11484       /* Convert A ? 1 : 0 to simply A.  */
11485       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11486 				 : (integer_onep (op1)
11487 				    && !VECTOR_TYPE_P (type)))
11488 	  && integer_zerop (op2)
11489 	  /* If we try to convert OP0 to our type, the
11490 	     call to fold will try to move the conversion inside
11491 	     a COND, which will recurse.  In that case, the COND_EXPR
11492 	     is probably the best choice, so leave it alone.  */
11493 	  && type == TREE_TYPE (arg0))
11494 	return pedantic_non_lvalue_loc (loc, arg0);
11495 
11496       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11497 	 over COND_EXPR in cases such as floating point comparisons.  */
11498       if (integer_zerop (op1)
11499 	  && code == COND_EXPR
11500 	  && integer_onep (op2)
11501 	  && !VECTOR_TYPE_P (type)
11502 	  && truth_value_p (TREE_CODE (arg0)))
11503 	return pedantic_non_lvalue_loc (loc,
11504 				    fold_convert_loc (loc, type,
11505 					      invert_truthvalue_loc (loc,
11506 								     arg0)));
11507 
11508       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11509       if (TREE_CODE (arg0) == LT_EXPR
11510 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11511 	  && integer_zerop (op2)
11512 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11513 	{
11514 	  /* sign_bit_p looks through both zero and sign extensions,
11515 	     but for this optimization only sign extensions are
11516 	     usable.  */
11517 	  tree tem2 = TREE_OPERAND (arg0, 0);
11518 	  while (tem != tem2)
11519 	    {
11520 	      if (TREE_CODE (tem2) != NOP_EXPR
11521 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11522 		{
11523 		  tem = NULL_TREE;
11524 		  break;
11525 		}
11526 	      tem2 = TREE_OPERAND (tem2, 0);
11527 	    }
11528 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11529 	     If <sign bit of A> has wider type than A, bits outside
11530 	     of A's precision in <sign bit of A> need to be checked.
11531 	     If they are all 0, this optimization needs to be done
11532 	     in unsigned A's type, if they are all 1 in signed A's type,
11533 	     otherwise this can't be done.  */
11534 	  if (tem
11535 	      && TYPE_PRECISION (TREE_TYPE (tem))
11536 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11537 	      && TYPE_PRECISION (TREE_TYPE (tem))
11538 		 < TYPE_PRECISION (type))
11539 	    {
11540 	      int inner_width, outer_width;
11541 	      tree tem_type;
11542 
11543 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11544 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11545 	      if (outer_width > TYPE_PRECISION (type))
11546 		outer_width = TYPE_PRECISION (type);
11547 
11548 	      wide_int mask = wi::shifted_mask
11549 		(inner_width, outer_width - inner_width, false,
11550 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11551 
11552 	      wide_int common = mask & arg1;
11553 	      if (common == mask)
11554 		{
11555 		  tem_type = signed_type_for (TREE_TYPE (tem));
11556 		  tem = fold_convert_loc (loc, tem_type, tem);
11557 		}
11558 	      else if (common == 0)
11559 		{
11560 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11561 		  tem = fold_convert_loc (loc, tem_type, tem);
11562 		}
11563 	      else
11564 		tem = NULL;
11565 	    }
11566 
11567 	  if (tem)
11568 	    return
11569 	      fold_convert_loc (loc, type,
11570 				fold_build2_loc (loc, BIT_AND_EXPR,
11571 					     TREE_TYPE (tem), tem,
11572 					     fold_convert_loc (loc,
11573 							       TREE_TYPE (tem),
11574 							       arg1)));
11575 	}
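      /* E.g. "a < 0 ? INT_MIN : 0" for a 32-bit int a folds to
	 "a & INT_MIN", a single mask of the sign bit.  */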
11576 
11577       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11578 	 already handled above.  */
11579       if (TREE_CODE (arg0) == BIT_AND_EXPR
11580 	  && integer_onep (TREE_OPERAND (arg0, 1))
11581 	  && integer_zerop (op2)
11582 	  && integer_pow2p (arg1))
11583 	{
11584 	  tree tem = TREE_OPERAND (arg0, 0);
11585 	  STRIP_NOPS (tem);
11586 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11587 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11588               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11589 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11590 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11591 				    fold_convert_loc (loc, type,
11592 						      TREE_OPERAND (tem, 0)),
11593 				    op1);
11594 	}
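      /* E.g. "((a >> 3) & 1) ? 8 : 0" folds to "a & 8", since
	 8 == 1 << 3 and the shift count matches the bit tested.  */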
11595 
11596       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11597 	 is probably obsolete because the first operand should be a
11598 	 truth value (that's why we have the two cases above), but let's
11599 	 leave it in until we can confirm this for all front-ends.  */
11600       if (integer_zerop (op2)
11601 	  && TREE_CODE (arg0) == NE_EXPR
11602 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11603 	  && integer_pow2p (arg1)
11604 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11605 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11606 			      arg1, OEP_ONLY_CONST)
11607 	  /* operand_equal_p compares just the value, not the precision, so
11608 	     e.g. arg1 could be an 8-bit -128 and be a power of two, while the
11609 	     BIT_AND_EXPR's second operand is a 32-bit -128, which is not a
11610 	     power of two (or vice versa).  */
11611 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11612 	return pedantic_non_lvalue_loc (loc,
11613 					fold_convert_loc (loc, type,
11614 							  TREE_OPERAND (arg0,
11615 									0)));
11616 
11617       /* Disable the transformations below for vectors, since
11618 	 fold_binary_op_with_conditional_arg may undo them immediately,
11619 	 yielding an infinite loop.  */
11620       if (code == VEC_COND_EXPR)
11621 	return NULL_TREE;
11622 
11623       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11624       if (integer_zerop (op2)
11625 	  && truth_value_p (TREE_CODE (arg0))
11626 	  && truth_value_p (TREE_CODE (arg1))
11627 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11628 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11629 							   : TRUTH_ANDIF_EXPR,
11630 				type, fold_convert_loc (loc, type, arg0), op1);
11631 
11632       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11633       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11634 	  && truth_value_p (TREE_CODE (arg0))
11635 	  && truth_value_p (TREE_CODE (arg1))
11636 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11637 	{
11638 	  location_t loc0 = expr_location_or (arg0, loc);
11639 	  /* Only perform transformation if ARG0 is easily inverted.  */
11640 	  tem = fold_invert_truthvalue (loc0, arg0);
11641 	  if (tem)
11642 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11643 					 ? BIT_IOR_EXPR
11644 					 : TRUTH_ORIF_EXPR,
11645 				    type, fold_convert_loc (loc, type, tem),
11646 				    op1);
11647 	}
11648 
11649       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11650       if (integer_zerop (arg1)
11651 	  && truth_value_p (TREE_CODE (arg0))
11652 	  && truth_value_p (TREE_CODE (op2))
11653 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11654 	{
11655 	  location_t loc0 = expr_location_or (arg0, loc);
11656 	  /* Only perform transformation if ARG0 is easily inverted.  */
11657 	  tem = fold_invert_truthvalue (loc0, arg0);
11658 	  if (tem)
11659 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11660 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11661 				    type, fold_convert_loc (loc, type, tem),
11662 				    op2);
11663 	}
11664 
11665       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11666       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11667 	  && truth_value_p (TREE_CODE (arg0))
11668 	  && truth_value_p (TREE_CODE (op2))
11669 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11670 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11671 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11672 				type, fold_convert_loc (loc, type, arg0), op2);
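      /* The four conversions above turn e.g. "a ? b : 0" into "a && b"
	 and "a ? 1 : b" into "a || b" (their bitwise counterparts for
	 VEC_COND_EXPR), provided both operands are truth values.  */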
11673 
11674       return NULL_TREE;
11675 
11676     case CALL_EXPR:
11677       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11678 	 of fold_ternary on them.  */
11679       gcc_unreachable ();
11680 
11681     case BIT_FIELD_REF:
11682       if (TREE_CODE (arg0) == VECTOR_CST
11683 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11684 	      || (TREE_CODE (type) == VECTOR_TYPE
11685 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11686 	{
11687 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11688 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11689 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11690 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11691 
11692 	  if (n != 0
11693 	      && (idx % width) == 0
11694 	      && (n % width) == 0
11695 	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11696 	    {
11697 	      idx = idx / width;
11698 	      n = n / width;
11699 
11700 	      if (TREE_CODE (arg0) == VECTOR_CST)
11701 		{
11702 		  if (n == 1)
11703 		    return VECTOR_CST_ELT (arg0, idx);
11704 
11705 		  tree *vals = XALLOCAVEC (tree, n);
11706 		  for (unsigned i = 0; i < n; ++i)
11707 		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11708 		  return build_vector (type, vals);
11709 		}
11710 	    }
11711 	}
11712 
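      /* E.g. BIT_FIELD_REF <v, 32, 64> on a V4SI constant extracts
	 element 2, and a 64-bit reference at a 64-bit offset yields a
	 two-element sub-vector.  */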
11713       /* On constants we can use native encode/interpret to constant
11714          fold (nearly) all BIT_FIELD_REFs.  */
11715       if (CONSTANT_CLASS_P (arg0)
11716 	  && can_native_interpret_type_p (type)
11717 	  && BITS_PER_UNIT == 8)
11718 	{
11719 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11720 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11721 	  /* Limit us to a reasonable amount of work.  To relax the
11722 	     other limitations we need bit-shifting of the buffer
11723 	     and rounding up the size.  */
11724 	  if (bitpos % BITS_PER_UNIT == 0
11725 	      && bitsize % BITS_PER_UNIT == 0
11726 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11727 	    {
11728 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11729 	      unsigned HOST_WIDE_INT len
11730 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11731 				      bitpos / BITS_PER_UNIT);
11732 	      if (len > 0
11733 		  && len * BITS_PER_UNIT >= bitsize)
11734 		{
11735 		  tree v = native_interpret_expr (type, b,
11736 						  bitsize / BITS_PER_UNIT);
11737 		  if (v)
11738 		    return v;
11739 		}
11740 	    }
11741 	}
11742 
11743       return NULL_TREE;
11744 
11745     case FMA_EXPR:
11746       /* For integers we can decompose the FMA if possible.  */
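      /* FMA_EXPR computes arg0 * arg1 + arg2; with two constant
	 multiplicands the product folds away and only the addition
	 remains, while a zero addend leaves just the multiplication.  */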
11747       if (TREE_CODE (arg0) == INTEGER_CST
11748 	  && TREE_CODE (arg1) == INTEGER_CST)
11749 	return fold_build2_loc (loc, PLUS_EXPR, type,
11750 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11751       if (integer_zerop (arg2))
11752 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11753 
11754       return fold_fma (loc, type, arg0, arg1, arg2);
11755 
11756     case VEC_PERM_EXPR:
11757       if (TREE_CODE (arg2) == VECTOR_CST)
11758 	{
11759 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11760 	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11761 	  unsigned char *sel2 = sel + nelts;
11762 	  bool need_mask_canon = false;
11763 	  bool need_mask_canon2 = false;
11764 	  bool all_in_vec0 = true;
11765 	  bool all_in_vec1 = true;
11766 	  bool maybe_identity = true;
11767 	  bool single_arg = (op0 == op1);
11768 	  bool changed = false;
11769 
11770 	  mask2 = 2 * nelts - 1;
11771 	  mask = single_arg ? (nelts - 1) : mask2;
11772 	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11773 	  for (i = 0; i < nelts; i++)
11774 	    {
11775 	      tree val = VECTOR_CST_ELT (arg2, i);
11776 	      if (TREE_CODE (val) != INTEGER_CST)
11777 		return NULL_TREE;
11778 
11779 	      /* Make sure that the perm value is in an acceptable
11780 		 range.  */
11781 	      wide_int t = val;
11782 	      need_mask_canon |= wi::gtu_p (t, mask);
11783 	      need_mask_canon2 |= wi::gtu_p (t, mask2);
11784 	      sel[i] = t.to_uhwi () & mask;
11785 	      sel2[i] = t.to_uhwi () & mask2;
11786 
11787 	      if (sel[i] < nelts)
11788 		all_in_vec1 = false;
11789 	      else
11790 		all_in_vec0 = false;
11791 
11792 	      if ((sel[i] & (nelts-1)) != i)
11793 		maybe_identity = false;
11794 	    }
11795 
11796 	  if (maybe_identity)
11797 	    {
11798 	      if (all_in_vec0)
11799 		return op0;
11800 	      if (all_in_vec1)
11801 		return op1;
11802 	    }
11803 
11804 	  if (all_in_vec0)
11805 	    op1 = op0;
11806 	  else if (all_in_vec1)
11807 	    {
11808 	      op0 = op1;
11809 	      for (i = 0; i < nelts; i++)
11810 		sel[i] -= nelts;
11811 	      need_mask_canon = true;
11812 	    }
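	  /* E.g. on four-element vectors the selector {4, 5, 6, 7} is an
	     identity on the second operand and folds to op1 above, while
	     {5, 4, 7, 6} reads only from the second operand and is
	     rewritten here as {1, 0, 3, 2} applied to op1 alone.  */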
11813 
11814 	  if ((TREE_CODE (op0) == VECTOR_CST
11815 	       || TREE_CODE (op0) == CONSTRUCTOR)
11816 	      && (TREE_CODE (op1) == VECTOR_CST
11817 		  || TREE_CODE (op1) == CONSTRUCTOR))
11818 	    {
11819 	      tree t = fold_vec_perm (type, op0, op1, sel);
11820 	      if (t != NULL_TREE)
11821 		return t;
11822 	    }
11823 
11824 	  if (op0 == op1 && !single_arg)
11825 	    changed = true;
11826 
11827 	  /* Some targets are deficient and fail to expand a single
11828 	     argument permutation while still allowing an equivalent
11829 	     2-argument version.  */
11830 	  if (need_mask_canon && arg2 == op2
11831 	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11832 	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11833 	    {
11834 	      need_mask_canon = need_mask_canon2;
11835 	      sel = sel2;
11836 	    }
11837 
11838 	  if (need_mask_canon && arg2 == op2)
11839 	    {
11840 	      tree *tsel = XALLOCAVEC (tree, nelts);
11841 	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11842 	      for (i = 0; i < nelts; i++)
11843 		tsel[i] = build_int_cst (eltype, sel[i]);
11844 	      op2 = build_vector (TREE_TYPE (arg2), tsel);
11845 	      changed = true;
11846 	    }
11847 
11848 	  if (changed)
11849 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11850 	}
11851       return NULL_TREE;
11852 
11853     case BIT_INSERT_EXPR:
11854       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
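      /* E.g. inserting the 8-bit value 0xab at bit position 8 of the
	 32-bit constant 0x12345678 masks out the old byte and ORs in the
	 new one, yielding 0x1234ab78 (bit positions counting from the
	 least significant bit, as the shift below does).  */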
11855       if (TREE_CODE (arg0) == INTEGER_CST
11856 	  && TREE_CODE (arg1) == INTEGER_CST)
11857 	{
11858 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11859 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11860 	  wide_int tem = wi::bit_and (arg0,
11861 				      wi::shifted_mask (bitpos, bitsize, true,
11862 							TYPE_PRECISION (type)));
11863 	  wide_int tem2
11864 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11865 				    bitsize), bitpos);
11866 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11867 	}
11868       else if (TREE_CODE (arg0) == VECTOR_CST
11869 	       && CONSTANT_CLASS_P (arg1)
11870 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11871 				      TREE_TYPE (arg1)))
11872 	{
11873 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11874 	  unsigned HOST_WIDE_INT elsize
11875 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11876 	  if (bitpos % elsize == 0)
11877 	    {
11878 	      unsigned k = bitpos / elsize;
11879 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11880 		return arg0;
11881 	      else
11882 		{
11883 		  tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11884 		  memcpy (elts, VECTOR_CST_ELTS (arg0),
11885 			  sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11886 		  elts[k] = arg1;
11887 		  return build_vector (type, elts);
11888 		}
11889 	    }
11890 	}
11891       return NULL_TREE;
11892 
11893     default:
11894       return NULL_TREE;
11895     } /* switch (code) */
11896 }
11897 
11898 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11899    of an array (or vector).  */
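/* For example, given the initializer "{ [2] = 5, 7 }", ACCESS_INDEX 2
   returns 5 and ACCESS_INDEX 3 returns 7, because an element without an
   explicit index takes the next index after the previous one.  */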
11900 
11901 tree
11902 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11903 {
11904   tree index_type = NULL_TREE;
11905   offset_int low_bound = 0;
11906 
11907   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11908     {
11909       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11910       if (domain_type && TYPE_MIN_VALUE (domain_type))
11911 	{
11912 	  /* Static constructors for variably sized objects make no sense.  */
11913 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11914 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11915 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11916 	}
11917     }
11918 
11919   if (index_type)
11920     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11921 			    TYPE_SIGN (index_type));
11922 
11923   offset_int index = low_bound - 1;
11924   if (index_type)
11925     index = wi::ext (index, TYPE_PRECISION (index_type),
11926 		     TYPE_SIGN (index_type));
11927 
11928   offset_int max_index;
11929   unsigned HOST_WIDE_INT cnt;
11930   tree cfield, cval;
11931 
11932   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11933     {
11934       /* An array constructor might explicitly set the index, or specify a
11935 	 range, or leave the index NULL, meaning that it is the next index
11936 	 after the previous one.  */
11937       if (cfield)
11938 	{
11939 	  if (TREE_CODE (cfield) == INTEGER_CST)
11940 	    max_index = index = wi::to_offset (cfield);
11941 	  else
11942 	    {
11943 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11944 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11945 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11946 	    }
11947 	}
11948       else
11949 	{
11950 	  index += 1;
11951 	  if (index_type)
11952 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11953 			     TYPE_SIGN (index_type));
11954 	  max_index = index;
11955 	}
11956 
11957     /* Do we have a match?  */
11958     if (wi::cmpu (access_index, index) >= 0
11959 	&& wi::cmpu (access_index, max_index) <= 0)
11960       return cval;
11961   }
11962   return NULL_TREE;
11963 }
11964 
11965 /* Perform constant folding and related simplification of EXPR.
11966    The related simplifications include x*1 => x, x*0 => 0, etc.,
11967    and application of the associative law.
11968    NOP_EXPR conversions may be removed freely (as long as we
11969    are careful not to change the type of the overall expression).
11970    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11971    but we can constant-fold them if they have constant operands.  */
11972 
11973 #ifdef ENABLE_FOLD_CHECKING
11974 # define fold(x) fold_1 (x)
11975 static tree fold_1 (tree);
11976 static
11977 #endif
11978 tree
11979 fold (tree expr)
11980 {
11981   const tree t = expr;
11982   enum tree_code code = TREE_CODE (t);
11983   enum tree_code_class kind = TREE_CODE_CLASS (code);
11984   tree tem;
11985   location_t loc = EXPR_LOCATION (expr);
11986 
11987   /* Return right away if a constant.  */
11988   if (kind == tcc_constant)
11989     return t;
11990 
11991   /* CALL_EXPR-like objects with variable numbers of operands are
11992      treated specially.  */
11993   if (kind == tcc_vl_exp)
11994     {
11995       if (code == CALL_EXPR)
11996 	{
11997 	  tem = fold_call_expr (loc, expr, false);
11998 	  return tem ? tem : expr;
11999 	}
12000       return expr;
12001     }
12002 
12003   if (IS_EXPR_CODE_CLASS (kind))
12004     {
12005       tree type = TREE_TYPE (t);
12006       tree op0, op1, op2;
12007 
12008       switch (TREE_CODE_LENGTH (code))
12009 	{
12010 	case 1:
12011 	  op0 = TREE_OPERAND (t, 0);
12012 	  tem = fold_unary_loc (loc, code, type, op0);
12013 	  return tem ? tem : expr;
12014 	case 2:
12015 	  op0 = TREE_OPERAND (t, 0);
12016 	  op1 = TREE_OPERAND (t, 1);
12017 	  tem = fold_binary_loc (loc, code, type, op0, op1);
12018 	  return tem ? tem : expr;
12019 	case 3:
12020 	  op0 = TREE_OPERAND (t, 0);
12021 	  op1 = TREE_OPERAND (t, 1);
12022 	  op2 = TREE_OPERAND (t, 2);
12023 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12024 	  return tem ? tem : expr;
12025 	default:
12026 	  break;
12027 	}
12028     }
12029 
12030   switch (code)
12031     {
12032     case ARRAY_REF:
12033       {
12034 	tree op0 = TREE_OPERAND (t, 0);
12035 	tree op1 = TREE_OPERAND (t, 1);
12036 
12037 	if (TREE_CODE (op1) == INTEGER_CST
12038 	    && TREE_CODE (op0) == CONSTRUCTOR
12039 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12040 	  {
12041 	    tree val = get_array_ctor_element_at_index (op0,
12042 							wi::to_offset (op1));
12043 	    if (val)
12044 	      return val;
12045 	  }
12046 
12047 	return t;
12048       }
12049 
12050       /* Return a VECTOR_CST if possible.  */
12051     case CONSTRUCTOR:
12052       {
12053 	tree type = TREE_TYPE (t);
12054 	if (TREE_CODE (type) != VECTOR_TYPE)
12055 	  return t;
12056 
12057 	unsigned i;
12058 	tree val;
12059 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12060 	  if (! CONSTANT_CLASS_P (val))
12061 	    return t;
12062 
12063 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12064       }
12065 
12066     case CONST_DECL:
12067       return fold (DECL_INITIAL (t));
12068 
12069     default:
12070       return t;
12071     } /* switch (code) */
12072 }
12073 
12074 #ifdef ENABLE_FOLD_CHECKING
12075 #undef fold
12076 
12077 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12078 				hash_table<nofree_ptr_hash<const tree_node> > *);
12079 static void fold_check_failed (const_tree, const_tree);
12080 void print_fold_checksum (const_tree);
12081 
12082 /* When --enable-checking=fold, compute a digest of expr before
12083    and after the actual fold call to verify that fold did not
12084    accidentally change the original expr.  */
12085 
12086 tree
12087 fold (tree expr)
12088 {
12089   tree ret;
12090   struct md5_ctx ctx;
12091   unsigned char checksum_before[16], checksum_after[16];
12092   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12093 
12094   md5_init_ctx (&ctx);
12095   fold_checksum_tree (expr, &ctx, &ht);
12096   md5_finish_ctx (&ctx, checksum_before);
12097   ht.empty ();
12098 
12099   ret = fold_1 (expr);
12100 
12101   md5_init_ctx (&ctx);
12102   fold_checksum_tree (expr, &ctx, &ht);
12103   md5_finish_ctx (&ctx, checksum_after);
12104 
12105   if (memcmp (checksum_before, checksum_after, 16))
12106     fold_check_failed (expr, ret);
12107 
12108   return ret;
12109 }
12110 
12111 void
12112 print_fold_checksum (const_tree expr)
12113 {
12114   struct md5_ctx ctx;
12115   unsigned char checksum[16], cnt;
12116   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12117 
12118   md5_init_ctx (&ctx);
12119   fold_checksum_tree (expr, &ctx, &ht);
12120   md5_finish_ctx (&ctx, checksum);
12121   for (cnt = 0; cnt < 16; ++cnt)
12122     fprintf (stderr, "%02x", checksum[cnt]);
12123   putc ('\n', stderr);
12124 }
12125 
12126 static void
12127 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12128 {
12129   internal_error ("fold check: original tree changed by fold");
12130 }
12131 
12132 static void
12133 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12134 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12135 {
12136   const tree_node **slot;
12137   enum tree_code code;
12138   union tree_node buf;
12139   int i, len;
12140 
12141  recursive_label:
12142   if (expr == NULL)
12143     return;
12144   slot = ht->find_slot (expr, INSERT);
12145   if (*slot != NULL)
12146     return;
12147   *slot = expr;
12148   code = TREE_CODE (expr);
12149   if (TREE_CODE_CLASS (code) == tcc_declaration
12150       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12151     {
12152       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12153       memcpy ((char *) &buf, expr, tree_size (expr));
12154       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12155       buf.decl_with_vis.symtab_node = NULL;
12156       expr = (tree) &buf;
12157     }
12158   else if (TREE_CODE_CLASS (code) == tcc_type
12159 	   && (TYPE_POINTER_TO (expr)
12160 	       || TYPE_REFERENCE_TO (expr)
12161 	       || TYPE_CACHED_VALUES_P (expr)
12162 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12163 	       || TYPE_NEXT_VARIANT (expr)
12164 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12165     {
12166       /* Allow these fields to be modified.  */
12167       tree tmp;
12168       memcpy ((char *) &buf, expr, tree_size (expr));
12169       expr = tmp = (tree) &buf;
12170       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12171       TYPE_POINTER_TO (tmp) = NULL;
12172       TYPE_REFERENCE_TO (tmp) = NULL;
12173       TYPE_NEXT_VARIANT (tmp) = NULL;
12174       TYPE_ALIAS_SET (tmp) = -1;
12175       if (TYPE_CACHED_VALUES_P (tmp))
12176 	{
12177 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12178 	  TYPE_CACHED_VALUES (tmp) = NULL;
12179 	}
12180     }
12181   md5_process_bytes (expr, tree_size (expr), ctx);
12182   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12183     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12184   if (TREE_CODE_CLASS (code) != tcc_type
12185       && TREE_CODE_CLASS (code) != tcc_declaration
12186       && code != TREE_LIST
12187       && code != SSA_NAME
12188       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12189     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12190   switch (TREE_CODE_CLASS (code))
12191     {
12192     case tcc_constant:
12193       switch (code)
12194 	{
12195 	case STRING_CST:
12196 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12197 			     TREE_STRING_LENGTH (expr), ctx);
12198 	  break;
12199 	case COMPLEX_CST:
12200 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12201 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12202 	  break;
12203 	case VECTOR_CST:
12204 	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12205 	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12206 	  break;
12207 	default:
12208 	  break;
12209 	}
12210       break;
12211     case tcc_exceptional:
12212       switch (code)
12213 	{
12214 	case TREE_LIST:
12215 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12216 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12217 	  expr = TREE_CHAIN (expr);
12218 	  goto recursive_label;
12219 	  break;
12220 	case TREE_VEC:
12221 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12222 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12223 	  break;
12224 	default:
12225 	  break;
12226 	}
12227       break;
12228     case tcc_expression:
12229     case tcc_reference:
12230     case tcc_comparison:
12231     case tcc_unary:
12232     case tcc_binary:
12233     case tcc_statement:
12234     case tcc_vl_exp:
12235       len = TREE_OPERAND_LENGTH (expr);
12236       for (i = 0; i < len; ++i)
12237 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12238       break;
12239     case tcc_declaration:
12240       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12241       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12242       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12243 	{
12244 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12245 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12246 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12247 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12248 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12249 	}
12250 
12251       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12252 	{
12253 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12254 	    {
12255 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12256 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12257 	    }
12258 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12259 	}
12260       break;
12261     case tcc_type:
12262       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12263         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12264       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12265       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12266       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12267       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12268       if (INTEGRAL_TYPE_P (expr)
12269           || SCALAR_FLOAT_TYPE_P (expr))
12270 	{
12271 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12272 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12273 	}
12274       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12275       if (TREE_CODE (expr) == RECORD_TYPE
12276 	  || TREE_CODE (expr) == UNION_TYPE
12277 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12278 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12279       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12280       break;
12281     default:
12282       break;
12283     }
12284 }
12285 
12286 /* Helper function for outputting the checksum of a tree T.  When
12287    debugging with gdb, you can "define mynext" to be "next" followed
12288    by "call debug_fold_checksum (op0)", then just trace down till the
12289    outputs differ.  */
12290 
12291 DEBUG_FUNCTION void
12292 debug_fold_checksum (const_tree t)
12293 {
12294   int i;
12295   unsigned char checksum[16];
12296   struct md5_ctx ctx;
12297   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12298 
12299   md5_init_ctx (&ctx);
12300   fold_checksum_tree (t, &ctx, &ht);
12301   md5_finish_ctx (&ctx, checksum);
12302   ht.empty ();
12303 
12304   for (i = 0; i < 16; i++)
12305     fprintf (stderr, "%d ", checksum[i]);
12306 
12307   fprintf (stderr, "\n");
12308 }
12309 
12310 #endif
12311 
12312 /* Fold a unary tree expression with code CODE of type TYPE with an
12313    operand OP0.  LOC is the location of the resulting expression.
12314    Return a folded expression if successful.  Otherwise, return a tree
12315    expression with code CODE of type TYPE with an operand OP0.  */
12316 
12317 tree
12318 fold_build1_stat_loc (location_t loc,
12319 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12320 {
12321   tree tem;
12322 #ifdef ENABLE_FOLD_CHECKING
12323   unsigned char checksum_before[16], checksum_after[16];
12324   struct md5_ctx ctx;
12325   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12326 
12327   md5_init_ctx (&ctx);
12328   fold_checksum_tree (op0, &ctx, &ht);
12329   md5_finish_ctx (&ctx, checksum_before);
12330   ht.empty ();
12331 #endif
12332 
12333   tem = fold_unary_loc (loc, code, type, op0);
12334   if (!tem)
12335     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12336 
12337 #ifdef ENABLE_FOLD_CHECKING
12338   md5_init_ctx (&ctx);
12339   fold_checksum_tree (op0, &ctx, &ht);
12340   md5_finish_ctx (&ctx, checksum_after);
12341 
12342   if (memcmp (checksum_before, checksum_after, 16))
12343     fold_check_failed (op0, tem);
12344 #endif
12345   return tem;
12346 }
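
/* A minimal usage sketch (illustrative; LOC is assumed to be some
   location_t in scope):

     tree cst = build_int_cst (integer_type_node, 5);
     tree neg = fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, cst);

   Here fold_unary_loc succeeds and NEG is the INTEGER_CST -5; for an
   operand such as a VAR_DECL it would fail and a NEGATE_EXPR node
   would be built instead.  */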
12347 
12348 /* Fold a binary tree expression with code CODE of type TYPE with
12349    operands OP0 and OP1.  LOC is the location of the resulting
12350    expression.  Return a folded expression if successful.  Otherwise,
12351    return a tree expression with code CODE of type TYPE with operands
12352    OP0 and OP1.  */
12353 
12354 tree
12355 fold_build2_stat_loc (location_t loc,
12356 		      enum tree_code code, tree type, tree op0, tree op1
12357 		      MEM_STAT_DECL)
12358 {
12359   tree tem;
12360 #ifdef ENABLE_FOLD_CHECKING
12361   unsigned char checksum_before_op0[16],
12362                 checksum_before_op1[16],
12363 		checksum_after_op0[16],
12364 		checksum_after_op1[16];
12365   struct md5_ctx ctx;
12366   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12367 
12368   md5_init_ctx (&ctx);
12369   fold_checksum_tree (op0, &ctx, &ht);
12370   md5_finish_ctx (&ctx, checksum_before_op0);
12371   ht.empty ();
12372 
12373   md5_init_ctx (&ctx);
12374   fold_checksum_tree (op1, &ctx, &ht);
12375   md5_finish_ctx (&ctx, checksum_before_op1);
12376   ht.empty ();
12377 #endif
12378 
12379   tem = fold_binary_loc (loc, code, type, op0, op1);
12380   if (!tem)
12381     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12382 
12383 #ifdef ENABLE_FOLD_CHECKING
12384   md5_init_ctx (&ctx);
12385   fold_checksum_tree (op0, &ctx, &ht);
12386   md5_finish_ctx (&ctx, checksum_after_op0);
12387   ht.empty ();
12388 
12389   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12390     fold_check_failed (op0, tem);
12391 
12392   md5_init_ctx (&ctx);
12393   fold_checksum_tree (op1, &ctx, &ht);
12394   md5_finish_ctx (&ctx, checksum_after_op1);
12395 
12396   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12397     fold_check_failed (op1, tem);
12398 #endif
12399   return tem;
12400 }
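
/* Note the ENABLE_FOLD_CHECKING protocol used here and in the other
   fold_buildN routines: each operand is checksummed before and after
   folding, and any difference aborts via fold_check_failed, since the
   folders must treat their operands as read-only.  A sketch:

     tree sum = fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
				 build_int_cst (integer_type_node, 1),
				 build_int_cst (integer_type_node, 2));

   SUM is the INTEGER_CST 3; no PLUS_EXPR node is created.  */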
12401 
12402 /* Fold a ternary tree expression with code CODE of type TYPE with
12403    operands OP0, OP1, and OP2.  Return a folded expression if
12404    successful.  Otherwise, return a tree expression with code CODE of
12405    type TYPE with operands OP0, OP1, and OP2.  */
12406 
12407 tree
12408 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12409 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12410 {
12411   tree tem;
12412 #ifdef ENABLE_FOLD_CHECKING
12413   unsigned char checksum_before_op0[16],
12414                 checksum_before_op1[16],
12415                 checksum_before_op2[16],
12416 		checksum_after_op0[16],
12417 		checksum_after_op1[16],
12418 		checksum_after_op2[16];
12419   struct md5_ctx ctx;
12420   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12421 
12422   md5_init_ctx (&ctx);
12423   fold_checksum_tree (op0, &ctx, &ht);
12424   md5_finish_ctx (&ctx, checksum_before_op0);
12425   ht.empty ();
12426 
12427   md5_init_ctx (&ctx);
12428   fold_checksum_tree (op1, &ctx, &ht);
12429   md5_finish_ctx (&ctx, checksum_before_op1);
12430   ht.empty ();
12431 
12432   md5_init_ctx (&ctx);
12433   fold_checksum_tree (op2, &ctx, &ht);
12434   md5_finish_ctx (&ctx, checksum_before_op2);
12435   ht.empty ();
12436 #endif
12437 
12438   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12439   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12440   if (!tem)
12441     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12442 
12443 #ifdef ENABLE_FOLD_CHECKING
12444   md5_init_ctx (&ctx);
12445   fold_checksum_tree (op0, &ctx, &ht);
12446   md5_finish_ctx (&ctx, checksum_after_op0);
12447   ht.empty ();
12448 
12449   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12450     fold_check_failed (op0, tem);
12451 
12452   md5_init_ctx (&ctx);
12453   fold_checksum_tree (op1, &ctx, &ht);
12454   md5_finish_ctx (&ctx, checksum_after_op1);
12455   ht.empty ();
12456 
12457   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12458     fold_check_failed (op1, tem);
12459 
12460   md5_init_ctx (&ctx);
12461   fold_checksum_tree (op2, &ctx, &ht);
12462   md5_finish_ctx (&ctx, checksum_after_op2);
12463 
12464   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12465     fold_check_failed (op2, tem);
12466 #endif
12467   return tem;
12468 }
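
/* A sketch for the ternary case: with a constant condition the
   COND_EXPR folds to the selected arm (OP_A and OP_B being arbitrary
   trees of the right type, assumed in scope):

     tree t = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       boolean_true_node, op_a, op_b);

   T is OP_A (possibly wrapped) rather than a COND_EXPR node.  */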
12469 
12470 /* Fold a CALL_EXPR of type TYPE that calls function FN with the NARGS
12471    arguments in ARGARRAY, and a null static chain.
12472    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12473    of type TYPE from the given operands as constructed by build_call_array.  */
12474 
12475 tree
12476 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12477 			   int nargs, tree *argarray)
12478 {
12479   tree tem;
12480 #ifdef ENABLE_FOLD_CHECKING
12481   unsigned char checksum_before_fn[16],
12482                 checksum_before_arglist[16],
12483 		checksum_after_fn[16],
12484 		checksum_after_arglist[16];
12485   struct md5_ctx ctx;
12486   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12487   int i;
12488 
12489   md5_init_ctx (&ctx);
12490   fold_checksum_tree (fn, &ctx, &ht);
12491   md5_finish_ctx (&ctx, checksum_before_fn);
12492   ht.empty ();
12493 
12494   md5_init_ctx (&ctx);
12495   for (i = 0; i < nargs; i++)
12496     fold_checksum_tree (argarray[i], &ctx, &ht);
12497   md5_finish_ctx (&ctx, checksum_before_arglist);
12498   ht.empty ();
12499 #endif
12500 
12501   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12502   if (!tem)
12503     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12504 
12505 #ifdef ENABLE_FOLD_CHECKING
12506   md5_init_ctx (&ctx);
12507   fold_checksum_tree (fn, &ctx, &ht);
12508   md5_finish_ctx (&ctx, checksum_after_fn);
12509   ht.empty ();
12510 
12511   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12512     fold_check_failed (fn, tem);
12513 
12514   md5_init_ctx (&ctx);
12515   for (i = 0; i < nargs; i++)
12516     fold_checksum_tree (argarray[i], &ctx, &ht);
12517   md5_finish_ctx (&ctx, checksum_after_arglist);
12518 
12519   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12520     fold_check_failed (NULL_TREE, tem);
12521 #endif
12522   return tem;
12523 }
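
/* A usage sketch (illustrative): calling a foldable built-in with a
   constant argument.

     tree fndecl = builtin_decl_explicit (BUILT_IN_SQRT);
     tree arg = build_real (double_type_node, dconst1);
     tree call = fold_build_call_array_loc (loc, double_type_node,
					    build_fold_addr_expr (fndecl),
					    1, &arg);

   fold_builtin_call_array folds sqrt (1.0), so CALL is the REAL_CST
   1.0 rather than a CALL_EXPR.  */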
12524 
12525 /* Perform constant folding and related simplification of initializer
12526    expression EXPR.  These behave identically to "fold_buildN" but ignore
12527    potential run-time traps and exceptions that fold must preserve.  */
12528 
12529 #define START_FOLD_INIT \
12530   int saved_signaling_nans = flag_signaling_nans;\
12531   int saved_trapping_math = flag_trapping_math;\
12532   int saved_rounding_math = flag_rounding_math;\
12533   int saved_trapv = flag_trapv;\
12534   int saved_folding_initializer = folding_initializer;\
12535   flag_signaling_nans = 0;\
12536   flag_trapping_math = 0;\
12537   flag_rounding_math = 0;\
12538   flag_trapv = 0;\
12539   folding_initializer = 1;
12540 
12541 #define END_FOLD_INIT \
12542   flag_signaling_nans = saved_signaling_nans;\
12543   flag_trapping_math = saved_trapping_math;\
12544   flag_rounding_math = saved_rounding_math;\
12545   flag_trapv = saved_trapv;\
12546   folding_initializer = saved_folding_initializer;
12547 
12548 tree
12549 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12550 			     tree type, tree op)
12551 {
12552   tree result;
12553   START_FOLD_INIT;
12554 
12555   result = fold_build1_loc (loc, code, type, op);
12556 
12557   END_FOLD_INIT;
12558   return result;
12559 }
12560 
12561 tree
12562 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12563 			     tree type, tree op0, tree op1)
12564 {
12565   tree result;
12566   START_FOLD_INIT;
12567 
12568   result = fold_build2_loc (loc, code, type, op0, op1);
12569 
12570   END_FOLD_INIT;
12571   return result;
12572 }
12573 
12574 tree
12575 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12576 				       int nargs, tree *argarray)
12577 {
12578   tree result;
12579   START_FOLD_INIT;
12580 
12581   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12582 
12583   END_FOLD_INIT;
12584   return result;
12585 }
12586 
12587 #undef START_FOLD_INIT
12588 #undef END_FOLD_INIT
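
/* For example (a sketch, with ONE and THREE being REAL_CST trees for
   1.0 and 3.0): a static initializer such as "double d = 1.0 / 3.0;"
   must become a constant even under -frounding-math, where the plain
   folder declines to fold an inexact division:

     tree q = fold_build2_initializer_loc (loc, RDIV_EXPR,
					   double_type_node, one, three);

   Q is a REAL_CST because flag_rounding_math is temporarily cleared.  */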
12589 
12590 /* Determine whether the first argument is a multiple of the second.  Return
12591    0 if it is not, or if we cannot easily determine that it is.
12592 
12593    An example of the sort of thing we care about (at this point; this routine
12594    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12595    fold cases do now) is discovering that
12596 
12597      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12598 
12599    is a multiple of
12600 
12601      SAVE_EXPR (J * 8)
12602 
12603    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12604 
12605    This code also handles discovering that
12606 
12607      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12608 
12609    is a multiple of 8 so we don't have to worry about dealing with a
12610    possible remainder.
12611 
12612    Note that we *look* inside a SAVE_EXPR only to determine how it was
12613    calculated; it is not safe for fold to do much of anything else with the
12614    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12615    at run time.  For example, the latter example above *cannot* be implemented
12616    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12617    evaluation time of the original SAVE_EXPR is not necessarily the same as at
12618    the time the new expression is evaluated.  The only optimization of this
12619    sort that would be valid is changing
12620 
12621      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12622 
12623    divided by 8 to
12624 
12625      SAVE_EXPR (I) * SAVE_EXPR (J)
12626 
12627    (where the same SAVE_EXPR (J) is used in the original and the
12628    transformed version).  */
12629 
12630 int
12631 multiple_of_p (tree type, const_tree top, const_tree bottom)
12632 {
12633   gimple *stmt;
12634   tree t1, op1, op2;
12635 
12636   if (operand_equal_p (top, bottom, 0))
12637     return 1;
12638 
12639   if (TREE_CODE (type) != INTEGER_TYPE)
12640     return 0;
12641 
12642   switch (TREE_CODE (top))
12643     {
12644     case BIT_AND_EXPR:
12645       /* Bitwise and provides a power of two multiple.  If the mask is
12646 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12647       if (!integer_pow2p (bottom))
12648 	return 0;
12649       /* FALLTHRU */
12650 
12651     case MULT_EXPR:
12652       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12653 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12654 
12655     case MINUS_EXPR:
12656       /* In general we cannot prove whether op0 - op1 is a multiple of
12657 	 bottom, so be conservative and check that both op0 and op1 are
12658 	 multiples of bottom.  Note we check the second operand first
12659 	 since it's usually simpler.  */
12660       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12661 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12662 
12663     case PLUS_EXPR:
12664       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12665 	 as op0 - 3 if the expression has unsigned type.  For example,
12666 	 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12667       op1 = TREE_OPERAND (top, 1);
12668       if (TYPE_UNSIGNED (type)
12669 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12670 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12671       return (multiple_of_p (type, op1, bottom)
12672 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12673 
12674     case LSHIFT_EXPR:
12675       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12676 	{
12677 	  op1 = TREE_OPERAND (top, 1);
12678 	  /* const_binop may not detect overflow correctly,
12679 	     so check for it explicitly here.  */
12680 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12681 	      && 0 != (t1 = fold_convert (type,
12682 					  const_binop (LSHIFT_EXPR,
12683 						       size_one_node,
12684 						       op1)))
12685 	      && !TREE_OVERFLOW (t1))
12686 	    return multiple_of_p (type, t1, bottom);
12687 	}
12688       return 0;
12689 
12690     case NOP_EXPR:
12691       /* Can't handle conversions from non-integral or wider integral type.  */
12692       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12693 	  || (TYPE_PRECISION (type)
12694 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12695 	return 0;
12696 
12697       /* fall through */
12698 
12699     case SAVE_EXPR:
12700       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12701 
12702     case COND_EXPR:
12703       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12704 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12705 
12706     case INTEGER_CST:
12707       if (TREE_CODE (bottom) != INTEGER_CST
12708 	  || integer_zerop (bottom)
12709 	  || (TYPE_UNSIGNED (type)
12710 	      && (tree_int_cst_sgn (top) < 0
12711 		  || tree_int_cst_sgn (bottom) < 0)))
12712 	return 0;
12713       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12714 				SIGNED);
12715 
12716     case SSA_NAME:
12717       if (TREE_CODE (bottom) == INTEGER_CST
12718 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12719 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12720 	{
12721 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12722 
12723 	  /* Check for special cases to see if top is defined as multiple
12724 	     of bottom:
12725 
12726 	       top = (X & ~(bottom - 1)) ; bottom is a power of 2
12727 
12728 	     or
12729 
12730 	       Y = X % bottom
12731 	       top = X - Y.  */
12732 	  if (code == BIT_AND_EXPR
12733 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12734 	      && TREE_CODE (op2) == INTEGER_CST
12735 	      && integer_pow2p (bottom)
12736 	      && wi::multiple_of_p (wi::to_widest (op2),
12737 				    wi::to_widest (bottom), UNSIGNED))
12738 	    return 1;
12739 
12740 	  op1 = gimple_assign_rhs1 (stmt);
12741 	  if (code == MINUS_EXPR
12742 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12743 	      && TREE_CODE (op2) == SSA_NAME
12744 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12745 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12746 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12747 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12748 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12749 	    return 1;
12750 	}
12751 
12752       /* fall through */
12753 
12754     default:
12755       return 0;
12756     }
12757 }
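
/* Typical queries (a sketch, with I an arbitrary sizetype tree):

     multiple_of_p (sizetype,
		    size_binop (MULT_EXPR, i, size_int (8)), size_int (4))

   returns 1 via the MULT_EXPR case, since 8 is a multiple of 4, whereas

     multiple_of_p (sizetype,
		    size_binop (PLUS_EXPR, i, size_int (4)), size_int (8))

   returns 0, conservatively, since nothing is known about I.  */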
12758 
12759 #define tree_expr_nonnegative_warnv_p(X, Y) \
12760   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12761 
12762 #define RECURSE(X) \
12763   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
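
/* Thus RECURSE (op0) expands to

     (tree_expr_nonnegative_warnv_p) (op0, strict_overflow_p, depth + 1)

   where the parenthesized name bypasses the error-raising macro above.
   Direct recursive calls are thereby forced through RECURSE, which
   always increments DEPTH.  */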
12764 
12765 /* Return true if CODE or TYPE is known to be non-negative. */
12766 
12767 static bool
12768 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12769 {
12770   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12771       && truth_value_p (code))
12772     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12773        have a signed:1 type (where the value is -1 and 0).  */
12774        have a signed:1 type (where the values are 0 and -1).  */
12775   return false;
12776 }
12777 
12778 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12779    value is based on the assumption that signed overflow is undefined,
12780    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12781    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12782 
12783 bool
12784 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12785 				bool *strict_overflow_p, int depth)
12786 {
12787   if (TYPE_UNSIGNED (type))
12788     return true;
12789 
12790   switch (code)
12791     {
12792     case ABS_EXPR:
12793       /* We can't return 1 if flag_wrapv is set because
12794 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12795       if (!ANY_INTEGRAL_TYPE_P (type))
12796 	return true;
12797       if (TYPE_OVERFLOW_UNDEFINED (type))
12798 	{
12799 	  *strict_overflow_p = true;
12800 	  return true;
12801 	}
12802       break;
12803 
12804     case NON_LVALUE_EXPR:
12805     case FLOAT_EXPR:
12806     case FIX_TRUNC_EXPR:
12807       return RECURSE (op0);
12808 
12809     CASE_CONVERT:
12810       {
12811 	tree inner_type = TREE_TYPE (op0);
12812 	tree outer_type = type;
12813 
12814 	if (TREE_CODE (outer_type) == REAL_TYPE)
12815 	  {
12816 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12817 	      return RECURSE (op0);
12818 	    if (INTEGRAL_TYPE_P (inner_type))
12819 	      {
12820 		if (TYPE_UNSIGNED (inner_type))
12821 		  return true;
12822 		return RECURSE (op0);
12823 	      }
12824 	  }
12825 	else if (INTEGRAL_TYPE_P (outer_type))
12826 	  {
12827 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12828 	      return RECURSE (op0);
12829 	    if (INTEGRAL_TYPE_P (inner_type))
12830 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12831 		      && TYPE_UNSIGNED (inner_type);
12832 	  }
12833       }
12834       break;
12835 
12836     default:
12837       return tree_simple_nonnegative_warnv_p (code, type);
12838     }
12839 
12840   /* We don't know sign of `t', so be conservative and return false.  */
12841   return false;
12842 }
12843 
12844 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12845    value is based on the assumption that signed overflow is undefined,
12846    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12847    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12848 
12849 bool
12850 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12851 				 tree op1, bool *strict_overflow_p,
12852 				 int depth)
12853 {
12854   if (TYPE_UNSIGNED (type))
12855     return true;
12856 
12857   switch (code)
12858     {
12859     case POINTER_PLUS_EXPR:
12860     case PLUS_EXPR:
12861       if (FLOAT_TYPE_P (type))
12862 	return RECURSE (op0) && RECURSE (op1);
12863 
12864       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12865 	 both unsigned and at least 2 bits shorter than the result.  */
12866       if (TREE_CODE (type) == INTEGER_TYPE
12867 	  && TREE_CODE (op0) == NOP_EXPR
12868 	  && TREE_CODE (op1) == NOP_EXPR)
12869 	{
12870 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12871 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12872 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12873 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12874 	    {
12875 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12876 				       TYPE_PRECISION (inner2)) + 1;
12877 	      return prec < TYPE_PRECISION (type);
12878 	    }
12879 	}
12880       break;
12881 
12882     case MULT_EXPR:
12883       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12884 	{
12885 	  /* x * x is always non-negative for floating point x, and for
12886 	     integers when overflow is undefined.  */
12887 	  if (operand_equal_p (op0, op1, 0)
12888 	      || (RECURSE (op0) && RECURSE (op1)))
12889 	    {
12890 	      if (ANY_INTEGRAL_TYPE_P (type)
12891 		  && TYPE_OVERFLOW_UNDEFINED (type))
12892 		*strict_overflow_p = true;
12893 	      return true;
12894 	    }
12895 	}
12896 
12897       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12898 	 both unsigned and their combined precision is less than the result's.  */
12899       if (TREE_CODE (type) == INTEGER_TYPE
12900 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12901 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12902 	{
12903 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12904 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12905 	    : TREE_TYPE (op0);
12906 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12907 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12908 	    : TREE_TYPE (op1);
12909 
12910 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12911 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12912 
12913 	  if (TREE_CODE (op0) == INTEGER_CST)
12914 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12915 
12916 	  if (TREE_CODE (op1) == INTEGER_CST)
12917 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12918 
12919 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12920 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12921 	    {
12922 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12923 		? tree_int_cst_min_precision (op0, UNSIGNED)
12924 		: TYPE_PRECISION (inner0);
12925 
12926 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12927 		? tree_int_cst_min_precision (op1, UNSIGNED)
12928 		: TYPE_PRECISION (inner1);
12929 
12930 	      return precision0 + precision1 < TYPE_PRECISION (type);
12931 	    }
12932 	}
12933       return false;
12934 
12935     case BIT_AND_EXPR:
12936     case MAX_EXPR:
12937       return RECURSE (op0) || RECURSE (op1);
12938 
12939     case BIT_IOR_EXPR:
12940     case BIT_XOR_EXPR:
12941     case MIN_EXPR:
12942     case RDIV_EXPR:
12943     case TRUNC_DIV_EXPR:
12944     case CEIL_DIV_EXPR:
12945     case FLOOR_DIV_EXPR:
12946     case ROUND_DIV_EXPR:
12947       return RECURSE (op0) && RECURSE (op1);
12948 
12949     case TRUNC_MOD_EXPR:
12950       return RECURSE (op0);
12951 
12952     case FLOOR_MOD_EXPR:
12953       return RECURSE (op1);
12954 
12955     case CEIL_MOD_EXPR:
12956     case ROUND_MOD_EXPR:
12957     default:
12958       return tree_simple_nonnegative_warnv_p (code, type);
12959     }
12960 
12961   /* We don't know sign of `t', so be conservative and return false.  */
12962   return false;
12963 }
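
/* A worked instance of the PLUS_EXPR case above: with 32-bit int and
   X, Y of type unsigned char,

     (int) x + (int) y

   has inner precisions 8 and 8, so prec = MAX (8, 8) + 1 = 9 < 32 and
   the sum is known non-negative (it is at most 255 + 255 = 510).  */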
12964 
12965 /* Return true if T is known to be non-negative.  If the return
12966    value is based on the assumption that signed overflow is undefined,
12967    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12968    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12969 
12970 bool
12971 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12972 {
12973   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12974     return true;
12975 
12976   switch (TREE_CODE (t))
12977     {
12978     case INTEGER_CST:
12979       return tree_int_cst_sgn (t) >= 0;
12980 
12981     case REAL_CST:
12982       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12983 
12984     case FIXED_CST:
12985       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12986 
12987     case COND_EXPR:
12988       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12989 
12990     case SSA_NAME:
12991       /* Limit the depth of recursion to avoid quadratic behavior.
12992 	 This is expected to catch almost all occurrences in practice.
12993 	 If this code misses important cases that unbounded recursion
12994 	 would not, passes that need this information could be revised
12995 	 to provide it through dataflow propagation.  */
12996       return (!name_registered_for_update_p (t)
12997 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12998 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12999 						  strict_overflow_p, depth));
13000 
13001     default:
13002       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13003     }
13004 }
13005 
13006 /* Return true if T is known to be non-negative.  If the return
13007    value is based on the assumption that signed overflow is undefined,
13008    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13009    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13010 
13011 bool
13012 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13013 			       bool *strict_overflow_p, int depth)
13014 {
13015   switch (fn)
13016     {
13017     CASE_CFN_ACOS:
13018     CASE_CFN_ACOSH:
13019     CASE_CFN_CABS:
13020     CASE_CFN_COSH:
13021     CASE_CFN_ERFC:
13022     CASE_CFN_EXP:
13023     CASE_CFN_EXP10:
13024     CASE_CFN_EXP2:
13025     CASE_CFN_FABS:
13026     CASE_CFN_FDIM:
13027     CASE_CFN_HYPOT:
13028     CASE_CFN_POW10:
13029     CASE_CFN_FFS:
13030     CASE_CFN_PARITY:
13031     CASE_CFN_POPCOUNT:
13032     CASE_CFN_CLZ:
13033     CASE_CFN_CLRSB:
13034     case CFN_BUILT_IN_BSWAP32:
13035     case CFN_BUILT_IN_BSWAP64:
13036       /* Always true.  */
13037       return true;
13038 
13039     CASE_CFN_SQRT:
13040       /* sqrt(-0.0) is -0.0.  */
13041       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13042 	return true;
13043       return RECURSE (arg0);
13044 
13045     CASE_CFN_ASINH:
13046     CASE_CFN_ATAN:
13047     CASE_CFN_ATANH:
13048     CASE_CFN_CBRT:
13049     CASE_CFN_CEIL:
13050     CASE_CFN_ERF:
13051     CASE_CFN_EXPM1:
13052     CASE_CFN_FLOOR:
13053     CASE_CFN_FMOD:
13054     CASE_CFN_FREXP:
13055     CASE_CFN_ICEIL:
13056     CASE_CFN_IFLOOR:
13057     CASE_CFN_IRINT:
13058     CASE_CFN_IROUND:
13059     CASE_CFN_LCEIL:
13060     CASE_CFN_LDEXP:
13061     CASE_CFN_LFLOOR:
13062     CASE_CFN_LLCEIL:
13063     CASE_CFN_LLFLOOR:
13064     CASE_CFN_LLRINT:
13065     CASE_CFN_LLROUND:
13066     CASE_CFN_LRINT:
13067     CASE_CFN_LROUND:
13068     CASE_CFN_MODF:
13069     CASE_CFN_NEARBYINT:
13070     CASE_CFN_RINT:
13071     CASE_CFN_ROUND:
13072     CASE_CFN_SCALB:
13073     CASE_CFN_SCALBLN:
13074     CASE_CFN_SCALBN:
13075     CASE_CFN_SIGNBIT:
13076     CASE_CFN_SIGNIFICAND:
13077     CASE_CFN_SINH:
13078     CASE_CFN_TANH:
13079     CASE_CFN_TRUNC:
13080       /* True if the 1st argument is nonnegative.  */
13081       return RECURSE (arg0);
13082 
13083     CASE_CFN_FMAX:
13084       /* True if the 1st OR 2nd arguments are nonnegative.  */
13085       return RECURSE (arg0) || RECURSE (arg1);
13086 
13087     CASE_CFN_FMIN:
13088       /* True if the 1st AND 2nd arguments are nonnegative.  */
13089       return RECURSE (arg0) && RECURSE (arg1);
13090 
13091     CASE_CFN_COPYSIGN:
13092       /* True if the 2nd argument is nonnegative.  */
13093       return RECURSE (arg1);
13094 
13095     CASE_CFN_POWI:
13096       /* True if the 1st argument is nonnegative or the second
13097 	 argument is an even integer.  */
13098       if (TREE_CODE (arg1) == INTEGER_CST
13099 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13100 	return true;
13101       return RECURSE (arg0);
13102 
13103     CASE_CFN_POW:
13104       /* True if the 1st argument is nonnegative or the second
13105 	 argument is an even integer valued real.  */
13106       if (TREE_CODE (arg1) == REAL_CST)
13107 	{
13108 	  REAL_VALUE_TYPE c;
13109 	  HOST_WIDE_INT n;
13110 
13111 	  c = TREE_REAL_CST (arg1);
13112 	  n = real_to_integer (&c);
13113 	  if ((n & 1) == 0)
13114 	    {
13115 	      REAL_VALUE_TYPE cint;
13116 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13117 	      if (real_identical (&c, &cint))
13118 		return true;
13119 	    }
13120 	}
13121       return RECURSE (arg0);
13122 
13123     default:
13124       break;
13125     }
13126   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13127 }
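
/* A worked instance of the CASE_CFN_POW case above: for pow (x, 2.0),
   ARG1 is the REAL_CST 2.0, real_to_integer yields N = 2, (N & 1) == 0,
   and 2.0 compares identical to the real built back from 2, so the
   result is known non-negative whatever the sign of X.  */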
13128 
13129 /* Return true if T is known to be non-negative.  If the return
13130    value is based on the assumption that signed overflow is undefined,
13131    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13132    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13133 
13134 static bool
13135 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13136 {
13137   enum tree_code code = TREE_CODE (t);
13138   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13139     return true;
13140 
13141   switch (code)
13142     {
13143     case TARGET_EXPR:
13144       {
13145 	tree temp = TARGET_EXPR_SLOT (t);
13146 	t = TARGET_EXPR_INITIAL (t);
13147 
13148 	/* If the initializer is non-void, then it's a normal expression
13149 	   that will be assigned to the slot.  */
13150 	if (!VOID_TYPE_P (t))
13151 	  return RECURSE (t);
13152 
13153 	/* Otherwise, the initializer sets the slot in some way.  One common
13154 	   way is an assignment statement at the end of the initializer.  */
13155 	while (1)
13156 	  {
13157 	    if (TREE_CODE (t) == BIND_EXPR)
13158 	      t = expr_last (BIND_EXPR_BODY (t));
13159 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13160 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13161 	      t = expr_last (TREE_OPERAND (t, 0));
13162 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13163 	      t = expr_last (t);
13164 	    else
13165 	      break;
13166 	  }
13167 	if (TREE_CODE (t) == MODIFY_EXPR
13168 	    && TREE_OPERAND (t, 0) == temp)
13169 	  return RECURSE (TREE_OPERAND (t, 1));
13170 
13171 	return false;
13172       }
13173 
13174     case CALL_EXPR:
13175       {
13176 	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13177 	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13178 
13179 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13180 					      get_call_combined_fn (t),
13181 					      arg0,
13182 					      arg1,
13183 					      strict_overflow_p, depth);
13184       }
13185     case COMPOUND_EXPR:
13186     case MODIFY_EXPR:
13187       return RECURSE (TREE_OPERAND (t, 1));
13188 
13189     case BIND_EXPR:
13190       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13191 
13192     case SAVE_EXPR:
13193       return RECURSE (TREE_OPERAND (t, 0));
13194 
13195     default:
13196       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13197     }
13198 }
13199 
13200 #undef RECURSE
13201 #undef tree_expr_nonnegative_warnv_p
13202 
13203 /* Return true if T is known to be non-negative.  If the return
13204    value is based on the assumption that signed overflow is undefined,
13205    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13206    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13207 
13208 bool
13209 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13210 {
13211   enum tree_code code;
13212   if (t == error_mark_node)
13213     return false;
13214 
13215   code = TREE_CODE (t);
13216   switch (TREE_CODE_CLASS (code))
13217     {
13218     case tcc_binary:
13219     case tcc_comparison:
13220       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13221 					      TREE_TYPE (t),
13222 					      TREE_OPERAND (t, 0),
13223 					      TREE_OPERAND (t, 1),
13224 					      strict_overflow_p, depth);
13225 
13226     case tcc_unary:
13227       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13228 					     TREE_TYPE (t),
13229 					     TREE_OPERAND (t, 0),
13230 					     strict_overflow_p, depth);
13231 
13232     case tcc_constant:
13233     case tcc_declaration:
13234     case tcc_reference:
13235       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13236 
13237     default:
13238       break;
13239     }
13240 
13241   switch (code)
13242     {
13243     case TRUTH_AND_EXPR:
13244     case TRUTH_OR_EXPR:
13245     case TRUTH_XOR_EXPR:
13246       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13247 					      TREE_TYPE (t),
13248 					      TREE_OPERAND (t, 0),
13249 					      TREE_OPERAND (t, 1),
13250 					      strict_overflow_p, depth);
13251     case TRUTH_NOT_EXPR:
13252       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13253 					     TREE_TYPE (t),
13254 					     TREE_OPERAND (t, 0),
13255 					     strict_overflow_p, depth);
13256 
13257     case COND_EXPR:
13258     case CONSTRUCTOR:
13259     case OBJ_TYPE_REF:
13260     case ASSERT_EXPR:
13261     case ADDR_EXPR:
13262     case WITH_SIZE_EXPR:
13263     case SSA_NAME:
13264       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13265 
13266     default:
13267       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13268     }
13269 }
13270 
13271 /* Return true if `t' is known to be non-negative.  Handle warnings
13272    about undefined signed overflow.  */
13273 
13274 bool
13275 tree_expr_nonnegative_p (tree t)
13276 {
13277   bool ret, strict_overflow_p;
13278 
13279   strict_overflow_p = false;
13280   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13281   if (strict_overflow_p)
13282     fold_overflow_warning (("assuming signed overflow does not occur when "
13283 			    "determining that expression is always "
13284 			    "non-negative"),
13285 			   WARN_STRICT_OVERFLOW_MISC);
13286   return ret;
13287 }
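
/* A usage sketch (illustrative): a simplification that drops an
   ABS_EXPR when its operand ARG is provably non-negative.

     if (tree_expr_nonnegative_p (arg))
       return arg;

   The overflow warning, if any, is emitted by the wrapper above.  */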
13288 
13289 
13290 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13291    For floating point we further ensure that T is not denormal.
13292    Similar logic is present in nonzero_address_p in rtlanal.c.
13293 
13294    If the return value is based on the assumption that signed overflow
13295    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13296    change *STRICT_OVERFLOW_P.  */
13297 
13298 bool
13299 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13300 				 bool *strict_overflow_p)
13301 {
13302   switch (code)
13303     {
13304     case ABS_EXPR:
13305       return tree_expr_nonzero_warnv_p (op0,
13306 					strict_overflow_p);
13307 
13308     case NOP_EXPR:
13309       {
13310 	tree inner_type = TREE_TYPE (op0);
13311 	tree outer_type = type;
13312 
13313 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13314 		&& tree_expr_nonzero_warnv_p (op0,
13315 					      strict_overflow_p));
13316       }
13317       break;
13318 
13319     case NON_LVALUE_EXPR:
13320       return tree_expr_nonzero_warnv_p (op0,
13321 					strict_overflow_p);
13322 
13323     default:
13324       break;
13325   }
13326 
13327   return false;
13328 }
13329 
13330 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13331    For floating point we further ensure that T is not denormal.
13332    Similar logic is present in nonzero_address_p in rtlanal.c.
13333 
13334    If the return value is based on the assumption that signed overflow
13335    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13336    change *STRICT_OVERFLOW_P.  */
13337 
13338 bool
13339 tree_binary_nonzero_warnv_p (enum tree_code code,
13340 			     tree type,
13341 			     tree op0,
13342 			     tree op1, bool *strict_overflow_p)
13343 {
13344   bool sub_strict_overflow_p;
13345   switch (code)
13346     {
13347     case POINTER_PLUS_EXPR:
13348     case PLUS_EXPR:
13349       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13350 	{
13351 	  /* In the presence of negative values it is hard
13352 	     to say anything definite.  */
13353 	  sub_strict_overflow_p = false;
13354 	  if (!tree_expr_nonnegative_warnv_p (op0,
13355 					      &sub_strict_overflow_p)
13356 	      || !tree_expr_nonnegative_warnv_p (op1,
13357 						 &sub_strict_overflow_p))
13358 	    return false;
13359 	  /* One of the operands must be positive and the other non-negative.  */
13360 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13361 	     overflows, on a twos-complement machine the sum of two
13362 	     nonnegative numbers can never be zero.  */
13363 	  return (tree_expr_nonzero_warnv_p (op0,
13364 					     strict_overflow_p)
13365 		  || tree_expr_nonzero_warnv_p (op1,
13366 						strict_overflow_p));
13367 	}
13368       break;
13369 
13370     case MULT_EXPR:
13371       if (TYPE_OVERFLOW_UNDEFINED (type))
13372 	{
13373 	  if (tree_expr_nonzero_warnv_p (op0,
13374 					 strict_overflow_p)
13375 	      && tree_expr_nonzero_warnv_p (op1,
13376 					    strict_overflow_p))
13377 	    {
13378 	      *strict_overflow_p = true;
13379 	      return true;
13380 	    }
13381 	}
13382       break;
13383 
13384     case MIN_EXPR:
13385       sub_strict_overflow_p = false;
13386       if (tree_expr_nonzero_warnv_p (op0,
13387 				     &sub_strict_overflow_p)
13388 	  && tree_expr_nonzero_warnv_p (op1,
13389 					&sub_strict_overflow_p))
13390 	{
13391 	  if (sub_strict_overflow_p)
13392 	    *strict_overflow_p = true;
13393 	}
13394       break;
13395 
13396     case MAX_EXPR:
13397       sub_strict_overflow_p = false;
13398       if (tree_expr_nonzero_warnv_p (op0,
13399 				     &sub_strict_overflow_p))
13400 	{
13401 	  if (sub_strict_overflow_p)
13402 	    *strict_overflow_p = true;
13403 
13404 	  /* When both operands are nonzero, then MAX must be too.  */
13405 	  if (tree_expr_nonzero_warnv_p (op1,
13406 					 strict_overflow_p))
13407 	    return true;
13408 
13409 	  /* MAX where operand 0 is positive is positive.  */
13410 	  return tree_expr_nonnegative_warnv_p (op0,
13411 						strict_overflow_p);
13412 	}
13413       /* MAX where operand 1 is positive is positive.  */
13414       else if (tree_expr_nonzero_warnv_p (op1,
13415 					  &sub_strict_overflow_p)
13416 	       && tree_expr_nonnegative_warnv_p (op1,
13417 						 &sub_strict_overflow_p))
13418 	{
13419 	  if (sub_strict_overflow_p)
13420 	    *strict_overflow_p = true;
13421 	  return true;
13422 	}
13423       break;
13424 
13425     case BIT_IOR_EXPR:
13426       return (tree_expr_nonzero_warnv_p (op1,
13427 					 strict_overflow_p)
13428 	      || tree_expr_nonzero_warnv_p (op0,
13429 					    strict_overflow_p));
13430 
13431     default:
13432       break;
13433   }
13434 
13435   return false;
13436 }
13437 
13438 /* Return true when T is an address and is known to be nonzero.
13439    For floating point we further ensure that T is not denormal.
13440    Similar logic is present in nonzero_address_p in rtlanal.c.
13441 
13442    If the return value is based on the assumption that signed overflow
13443    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13444    change *STRICT_OVERFLOW_P.  */
13445 
13446 bool
13447 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13448 {
13449   bool sub_strict_overflow_p;
13450   switch (TREE_CODE (t))
13451     {
13452     case INTEGER_CST:
13453       return !integer_zerop (t);
13454 
13455     case ADDR_EXPR:
13456       {
13457 	tree base = TREE_OPERAND (t, 0);
13458 
13459 	if (!DECL_P (base))
13460 	  base = get_base_address (base);
13461 
13462 	if (base && TREE_CODE (base) == TARGET_EXPR)
13463 	  base = TARGET_EXPR_SLOT (base);
13464 
13465 	if (!base)
13466 	  return false;
13467 
13468 	/* For objects in symbol table check if we know they are non-zero.
13469 	/* For objects in the symbol table, check if we know they are non-zero.
13470 	   it is quite possible that they will be declared weak later.  */
13471 	int nonzero_addr = maybe_nonzero_address (base);
13472 	if (nonzero_addr >= 0)
13473 	  return nonzero_addr;
13474 
13475 	/* Constants are never weak.  */
13476 	if (CONSTANT_CLASS_P (base))
13477 	  return true;
13478 
13479 	return false;
13480       }
13481 
13482     case COND_EXPR:
13483       sub_strict_overflow_p = false;
13484       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13485 				     &sub_strict_overflow_p)
13486 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13487 					&sub_strict_overflow_p))
13488 	{
13489 	  if (sub_strict_overflow_p)
13490 	    *strict_overflow_p = true;
13491 	  return true;
13492 	}
13493       break;
13494 
13495     default:
13496       break;
13497     }
13498   return false;
13499 }
13500 
13501 #define integer_valued_real_p(X) \
13502   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13503 
13504 #define RECURSE(X) \
13505   ((integer_valued_real_p) (X, depth + 1))
13506 
13507 /* Return true if the floating point result of (CODE OP0) has an
13508    integer value.  We also allow +Inf, -Inf and NaN to be considered
13509    integer values. Return false for signaling NaN.
13510 
13511    DEPTH is the current nesting depth of the query.  */
13512 
13513 bool
13514 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13515 {
13516   switch (code)
13517     {
13518     case FLOAT_EXPR:
13519       return true;
13520 
13521     case ABS_EXPR:
13522       return RECURSE (op0);
13523 
13524     CASE_CONVERT:
13525       {
13526 	tree type = TREE_TYPE (op0);
13527 	if (TREE_CODE (type) == INTEGER_TYPE)
13528 	  return true;
13529 	if (TREE_CODE (type) == REAL_TYPE)
13530 	  return RECURSE (op0);
13531 	break;
13532       }
13533 
13534     default:
13535       break;
13536     }
13537   return false;
13538 }
13539 
13540 /* Return true if the floating point result of (CODE OP0 OP1) has an
13541    integer value.  We also allow +Inf, -Inf and NaN to be considered
13542    integer values. Return false for signaling NaN.
13543 
13544    DEPTH is the current nesting depth of the query.  */
13545 
13546 bool
13547 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13548 {
13549   switch (code)
13550     {
13551     case PLUS_EXPR:
13552     case MINUS_EXPR:
13553     case MULT_EXPR:
13554     case MIN_EXPR:
13555     case MAX_EXPR:
13556       return RECURSE (op0) && RECURSE (op1);
13557 
13558     default:
13559       break;
13560     }
13561   return false;
13562 }
13563 
13564 /* Return true if the floating point result of calling FN with arguments
13565    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13566    considered integer values.  Return false for signaling NaN.  If FN
13567    takes fewer than 2 arguments, the remaining ARGn are null.
13568 
13569    DEPTH is the current nesting depth of the query.  */
13570 
13571 bool
13572 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13573 {
13574   switch (fn)
13575     {
13576     CASE_CFN_CEIL:
13577     CASE_CFN_FLOOR:
13578     CASE_CFN_NEARBYINT:
13579     CASE_CFN_RINT:
13580     CASE_CFN_ROUND:
13581     CASE_CFN_TRUNC:
13582       return true;
13583 
13584     CASE_CFN_FMIN:
13585     CASE_CFN_FMAX:
13586       return RECURSE (arg0) && RECURSE (arg1);
13587 
13588     default:
13589       break;
13590     }
13591   return false;
13592 }
13593 
13594 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13595    has an integer value.  We also allow +Inf, -Inf and NaN to be
13596    considered integer values. Return false for signaling NaN.
13597 
13598    DEPTH is the current nesting depth of the query.  */
13599 
13600 bool
13601 integer_valued_real_single_p (tree t, int depth)
13602 {
13603   switch (TREE_CODE (t))
13604     {
13605     case REAL_CST:
13606       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13607 
13608     case COND_EXPR:
13609       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13610 
13611     case SSA_NAME:
13612       /* Limit the depth of recursion to avoid quadratic behavior.
13613 	 This is expected to catch almost all occurrences in practice.
13614 	 If this code misses important cases that unbounded recursion
13615 	 would not, passes that need this information could be revised
13616 	 to provide it through dataflow propagation.  */
13617       return (!name_registered_for_update_p (t)
13618 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13619 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13620 						    depth));
13621 
13622     default:
13623       break;
13624     }
13625   return false;
13626 }
13627 
13628 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13629    has an integer value.  We also allow +Inf, -Inf and NaN to be
13630    considered integer values. Return false for signaling NaN.
13631 
13632    DEPTH is the current nesting depth of the query.  */
13633 
13634 static bool
13635 integer_valued_real_invalid_p (tree t, int depth)
13636 {
13637   switch (TREE_CODE (t))
13638     {
13639     case COMPOUND_EXPR:
13640     case MODIFY_EXPR:
13641     case BIND_EXPR:
13642       return RECURSE (TREE_OPERAND (t, 1));
13643 
13644     case SAVE_EXPR:
13645       return RECURSE (TREE_OPERAND (t, 0));
13646 
13647     default:
13648       break;
13649     }
13650   return false;
13651 }
13652 
13653 #undef RECURSE
13654 #undef integer_valued_real_p
13655 
13656 /* Return true if the floating point expression T has an integer value.
13657    We also allow +Inf, -Inf and NaN to be considered integer values.
13658    Return false for signaling NaN.
13659 
13660    DEPTH is the current nesting depth of the query.  */
13661 
13662 bool
13663 integer_valued_real_p (tree t, int depth)
13664 {
13665   if (t == error_mark_node)
13666     return false;
13667 
13668   tree_code code = TREE_CODE (t);
13669   switch (TREE_CODE_CLASS (code))
13670     {
13671     case tcc_binary:
13672     case tcc_comparison:
13673       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13674 					   TREE_OPERAND (t, 1), depth);
13675 
13676     case tcc_unary:
13677       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13678 
13679     case tcc_constant:
13680     case tcc_declaration:
13681     case tcc_reference:
13682       return integer_valued_real_single_p (t, depth);
13683 
13684     default:
13685       break;
13686     }
13687 
13688   switch (code)
13689     {
13690     case COND_EXPR:
13691     case SSA_NAME:
13692       return integer_valued_real_single_p (t, depth);
13693 
13694     case CALL_EXPR:
13695       {
13696 	tree arg0 = (call_expr_nargs (t) > 0
13697 		     ? CALL_EXPR_ARG (t, 0)
13698 		     : NULL_TREE);
13699 	tree arg1 = (call_expr_nargs (t) > 1
13700 		     ? CALL_EXPR_ARG (t, 1)
13701 		     : NULL_TREE);
13702 	return integer_valued_real_call_p (get_call_combined_fn (t),
13703 					   arg0, arg1, depth);
13704       }
13705 
13706     default:
13707       return integer_valued_real_invalid_p (t, depth);
13708     }
13709 }
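
/* Examples (a sketch, with D a double and I an int):

     (double) i          -- true, via the FLOAT_EXPR case;
     floor (d) + 1.0     -- true: a call case combined with a REAL_CST
			    that real_isinteger accepts;
     d                   -- false in general.  */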
13710 
13711 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13712    attempt to fold the expression to a constant without modifying TYPE,
13713    OP0 or OP1.
13714 
13715    If the expression could be simplified to a constant, then return
13716    the constant.  If the expression cannot be simplified to a
13717    constant, then return NULL_TREE.  */
13718 
13719 tree
13720 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13721 {
13722   tree tem = fold_binary (code, type, op0, op1);
13723   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13724 }
13725 
13726 /* Given the components of a unary expression CODE, TYPE and OP0,
13727    attempt to fold the expression to a constant without modifying
13728    TYPE or OP0.
13729 
13730    If the expression could be simplified to a constant, then return
13731    the constant.  If the expression cannot be simplified to a
13732    constant, then return NULL_TREE.  */
13733 
13734 tree
13735 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13736 {
13737   tree tem = fold_unary (code, type, op0);
13738   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13739 }
13740 
13741 /* If EXP represents referencing an element in a constant string
13742    (either via pointer arithmetic or array indexing), return the
13743    tree representing the value accessed, otherwise return NULL.  */
13744 
13745 tree
13746 fold_read_from_constant_string (tree exp)
13747 {
13748   if ((TREE_CODE (exp) == INDIRECT_REF
13749        || TREE_CODE (exp) == ARRAY_REF)
13750       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13751     {
13752       tree exp1 = TREE_OPERAND (exp, 0);
13753       tree index;
13754       tree string;
13755       location_t loc = EXPR_LOCATION (exp);
13756 
13757       if (TREE_CODE (exp) == INDIRECT_REF)
13758 	string = string_constant (exp1, &index);
13759       else
13760 	{
13761 	  tree low_bound = array_ref_low_bound (exp);
13762 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13763 
13764 	  /* Optimize the special-case of a zero lower bound.
13765 	  /* Optimize the special case of a zero lower bound.
13766 	     We convert the low_bound to sizetype to avoid some problems
13767 	     with constant folding.  (E.g. suppose the lower bound is 1,
13768 	     and its mode is QI.  Without the conversion, (ARRAY
13769 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13770 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13771 	  if (! integer_zerop (low_bound))
13772 	    index = size_diffop_loc (loc, index,
13773 				 fold_convert_loc (loc, sizetype, low_bound));
13774 
13775 	  string = exp1;
13776 	}
13777 
13778       if (string
13779 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13780 	  && TREE_CODE (string) == STRING_CST
13781 	  && TREE_CODE (index) == INTEGER_CST
13782 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13783 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13784 	      == MODE_INT)
13785 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13786 	return build_int_cst_type (TREE_TYPE (exp),
13787 				   (TREE_STRING_POINTER (string)
13788 				    [TREE_INT_CST_LOW (index)]));
13789     }
13790   return NULL;
13791 }
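
/* For example (a sketch): for the ARRAY_REF "abc"[1], STRING is the
   STRING_CST "abc" and INDEX is the INTEGER_CST 1, so the result is
   build_int_cst_type (char_type_node, 'b').  */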
13792 
13793 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13794    an integer constant, real, or fixed-point constant.
13795 
13796    TYPE is the type of the result.  */
13797 
13798 static tree
13799 fold_negate_const (tree arg0, tree type)
13800 {
13801   tree t = NULL_TREE;
13802 
13803   switch (TREE_CODE (arg0))
13804     {
13805     case INTEGER_CST:
13806       {
13807 	bool overflow;
13808 	wide_int val = wi::neg (arg0, &overflow);
13809 	t = force_fit_type (type, val, 1,
13810 			    (overflow && ! TYPE_UNSIGNED (type))
13811 			    || TREE_OVERFLOW (arg0));
13812 	break;
13813       }
13814 
13815     case REAL_CST:
13816       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13817       break;
13818 
13819     case FIXED_CST:
13820       {
13821         FIXED_VALUE_TYPE f;
13822         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13823 					    &(TREE_FIXED_CST (arg0)), NULL,
13824 					    TYPE_SATURATING (type));
13825 	t = build_fixed (type, f);
13826 	/* Propagate overflow flags.  */
13827 	if (overflow_p | TREE_OVERFLOW (arg0))
13828 	  TREE_OVERFLOW (t) = 1;
13829 	break;
13830       }
13831 
13832     default:
13833       gcc_unreachable ();
13834     }
13835 
13836   return t;
13837 }
13838 
13839 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13840    an integer constant or real constant.
13841 
13842    TYPE is the type of the result.  */
13843 
13844 tree
13845 fold_abs_const (tree arg0, tree type)
13846 {
13847   tree t = NULL_TREE;
13848 
13849   switch (TREE_CODE (arg0))
13850     {
13851     case INTEGER_CST:
13852       {
13853         /* If the value is unsigned or non-negative, then the absolute value
13854 	   is the same as the ordinary value.  */
13855 	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13856 	  t = arg0;
13857 
13858 	/* If the value is negative, then the absolute value is
13859 	   its negation.  */
13860 	else
13861 	  {
13862 	    bool overflow;
13863 	    wide_int val = wi::neg (arg0, &overflow);
13864 	    t = force_fit_type (type, val, -1,
13865 				overflow | TREE_OVERFLOW (arg0));
13866 	  }
13867       }
13868       break;
13869 
13870     case REAL_CST:
13871       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13872 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13873       else
13874 	t = arg0;
13875       break;
13876 
13877     default:
13878       gcc_unreachable ();
13879     }
13880 
13881   return t;
13882 }
13883 
13884 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13885    constant.  TYPE is the type of the result.  */
13886 
13887 static tree
13888 fold_not_const (const_tree arg0, tree type)
13889 {
13890   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13891 
13892   return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13893 }
13894 
13895 /* Given CODE, a relational operator, the target type, TYPE and two
13896    constant operands OP0 and OP1, return the result of the
13897    relational operation.  If the result is not a compile time
13898    constant, then return NULL_TREE.  */
13899 
13900 static tree
13901 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13902 {
13903   int result, invert;
13904 
13905   /* From here on, the only cases we handle are when the result is
13906      known to be a constant.  */
13907 
13908   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13909     {
13910       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13911       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13912 
13913       /* Handle the cases where either operand is a NaN.  */
13914       if (real_isnan (c0) || real_isnan (c1))
13915 	{
13916 	  switch (code)
13917 	    {
13918 	    case EQ_EXPR:
13919 	    case ORDERED_EXPR:
13920 	      result = 0;
13921 	      break;
13922 
13923 	    case NE_EXPR:
13924 	    case UNORDERED_EXPR:
13925 	    case UNLT_EXPR:
13926 	    case UNLE_EXPR:
13927 	    case UNGT_EXPR:
13928 	    case UNGE_EXPR:
13929 	    case UNEQ_EXPR:
13930 	      result = 1;
13931 	      break;
13932 
13933 	    case LT_EXPR:
13934 	    case LE_EXPR:
13935 	    case GT_EXPR:
13936 	    case GE_EXPR:
13937 	    case LTGT_EXPR:
13938 	      if (flag_trapping_math)
13939 		return NULL_TREE;
13940 	      result = 0;
13941 	      break;
13942 
13943 	    default:
13944 	      gcc_unreachable ();
13945 	    }
13946 
13947 	  return constant_boolean_node (result, type);
13948 	}
13949 
13950       return constant_boolean_node (real_compare (code, c0, c1), type);
13951     }
13952 
13953   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13954     {
13955       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13956       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13957       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13958     }
13959 
13960   /* Handle equality/inequality of complex constants.  */
13961   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13962     {
13963       tree rcond = fold_relational_const (code, type,
13964 					  TREE_REALPART (op0),
13965 					  TREE_REALPART (op1));
13966       tree icond = fold_relational_const (code, type,
13967 					  TREE_IMAGPART (op0),
13968 					  TREE_IMAGPART (op1));
13969       if (code == EQ_EXPR)
13970 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13971       else if (code == NE_EXPR)
13972 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13973       else
13974 	return NULL_TREE;
13975     }
13976 
13977   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13978     {
13979       if (!VECTOR_TYPE_P (type))
13980 	{
13981 	  /* Have vector comparison with scalar boolean result.  */
13982 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13983 		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13984 	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13985 	    {
13986 	      tree elem0 = VECTOR_CST_ELT (op0, i);
13987 	      tree elem1 = VECTOR_CST_ELT (op1, i);
13988 	      tree tmp = fold_relational_const (code, type, elem0, elem1);
13989 	      if (tmp == NULL_TREE)
13990 		return NULL_TREE;
13991 	      if (integer_zerop (tmp))
13992 		return constant_boolean_node (false, type);
13993 	    }
13994 	  return constant_boolean_node (true, type);
13995 	}
13996       unsigned count = VECTOR_CST_NELTS (op0);
13997       tree *elts = XALLOCAVEC (tree, count);
13998       gcc_assert (VECTOR_CST_NELTS (op1) == count
13999 		  && TYPE_VECTOR_SUBPARTS (type) == count);
14000 
14001       for (unsigned i = 0; i < count; i++)
14002 	{
14003 	  tree elem_type = TREE_TYPE (type);
14004 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14005 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14006 
14007 	  tree tem = fold_relational_const (code, elem_type,
14008 					    elem0, elem1);
14009 
14010 	  if (tem == NULL_TREE)
14011 	    return NULL_TREE;
14012 
14013 	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14014 	}
14015 
14016       return build_vector (type, elts);
14017     }
14018 
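  /* For example, folding {1, 2} < {2, 2} elementwise with the code
     above yields the mask vector {-1, 0}: each element is -1 where
     the comparison holds and 0 where it does not.  */
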
14019   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14020 
14021      To compute GT, swap the arguments and do LT.
14022      To compute GE, do LT and invert the result.
14023      To compute LE, swap the arguments, do LT and invert the result.
14024      To compute NE, do EQ and invert the result.
14025 
14026      Therefore, the code below must handle only EQ and LT.  */
14027 
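  /* For example, 5 > 3 is computed as 3 < 5 (true), and 5 >= 3 as the
     inversion of 5 < 3 (false, inverted to true).  */
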
14028   if (code == LE_EXPR || code == GT_EXPR)
14029     {
14030       std::swap (op0, op1);
14031       code = swap_tree_comparison (code);
14032     }
14033 
14034   /* Note that it is safe to invert for real values here because we
14035      have already handled the one case where it matters.  */
14036 
14037   invert = 0;
14038   if (code == NE_EXPR || code == GE_EXPR)
14039     {
14040       invert = 1;
14041       code = invert_tree_comparison (code, false);
14042     }
14043 
14044   /* Compute a result for LT or EQ if the arguments permit;
14045      otherwise return NULL_TREE.  */
14046   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14047     {
14048       if (code == EQ_EXPR)
14049 	result = tree_int_cst_equal (op0, op1);
14050       else
14051 	result = tree_int_cst_lt (op0, op1);
14052     }
14053   else
14054     return NULL_TREE;
14055 
14056   if (invert)
14057     result ^= 1;
14058   return constant_boolean_node (result, type);
14059 }
14060 
14061 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14062    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14063    itself.  */
14064 
14065 tree
14066 fold_build_cleanup_point_expr (tree type, tree expr)
14067 {
14068   /* If the expression does not have side effects then we don't have to wrap
14069      it with a cleanup point expression.  */
14070   if (!TREE_SIDE_EFFECTS (expr))
14071     return expr;
14072 
14073   /* If the expression is a return, check whether the expression inside
14074      the return, or the right-hand side of the MODIFY_EXPR inside the
14075      return, is free of side effects.  If either is, we don't need to wrap
14076      the expression in a cleanup point expression.  Note we don't check the
14077      left-hand side of the modify because it should always be a return decl.  */
14078   if (TREE_CODE (expr) == RETURN_EXPR)
14079     {
14080       tree op = TREE_OPERAND (expr, 0);
14081       if (!op || !TREE_SIDE_EFFECTS (op))
14082         return expr;
14083       op = TREE_OPERAND (op, 1);
14084       if (!TREE_SIDE_EFFECTS (op))
14085         return expr;
14086     }
14087 
14088   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14089 }
14090 
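/* For example (illustrative, C++ front-end usage): a full expression such
   as a call returning a class with a destructor has side effects and is
   wrapped in a CLEANUP_POINT_EXPR so its temporaries are destroyed at the
   end of the full expression, whereas a plain constant or variable
   reference is returned unchanged.  */
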
14091 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14092    of an indirection through OP0, or NULL_TREE if no simplification is
14093    possible.  */
14094 
14095 tree
14096 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14097 {
14098   tree sub = op0;
14099   tree subtype;
14100 
14101   STRIP_NOPS (sub);
14102   subtype = TREE_TYPE (sub);
14103   if (!POINTER_TYPE_P (subtype)
14104       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14105     return NULL_TREE;
14106 
14107   if (TREE_CODE (sub) == ADDR_EXPR)
14108     {
14109       tree op = TREE_OPERAND (sub, 0);
14110       tree optype = TREE_TYPE (op);
14111 
14112       /* *&CONST_DECL => the value of the const decl.  */
14113       if (TREE_CODE (op) == CONST_DECL)
14114 	return DECL_INITIAL (op);
14115       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14116       if (type == optype)
14117 	{
14118 	  tree fop = fold_read_from_constant_string (op);
14119 	  if (fop)
14120 	    return fop;
14121 	  else
14122 	    return op;
14123 	}
14124       /* *(foo *)&fooarray => fooarray[0] */
14125       else if (TREE_CODE (optype) == ARRAY_TYPE
14126 	       && type == TREE_TYPE (optype)
14127 	       && (!in_gimple_form
14128 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14129 	{
14130 	  tree type_domain = TYPE_DOMAIN (optype);
14131 	  tree min_val = size_zero_node;
14132 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14133 	    min_val = TYPE_MIN_VALUE (type_domain);
14134 	  if (in_gimple_form
14135 	      && TREE_CODE (min_val) != INTEGER_CST)
14136 	    return NULL_TREE;
14137 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14138 			     NULL_TREE, NULL_TREE);
14139 	}
14140       /* *(foo *)&complexfoo => __real__ complexfoo */
14141       else if (TREE_CODE (optype) == COMPLEX_TYPE
14142 	       && type == TREE_TYPE (optype))
14143 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14144       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14145       else if (VECTOR_TYPE_P (optype)
14146 	       && type == TREE_TYPE (optype))
14147 	{
14148 	  tree part_width = TYPE_SIZE (type);
14149 	  tree index = bitsize_int (0);
14150 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14151 				  index);
14152 	}
14153     }
14154 
14155   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14156       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14157     {
14158       tree op00 = TREE_OPERAND (sub, 0);
14159       tree op01 = TREE_OPERAND (sub, 1);
14160 
14161       STRIP_NOPS (op00);
14162       if (TREE_CODE (op00) == ADDR_EXPR)
14163 	{
14164 	  tree op00type;
14165 	  op00 = TREE_OPERAND (op00, 0);
14166 	  op00type = TREE_TYPE (op00);
14167 
14168 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14169 	  if (VECTOR_TYPE_P (op00type)
14170 	      && type == TREE_TYPE (op00type)
14171 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14172 		 but we want to treat offsets with MSB set as negative.
14173 		 For the code below negative offsets are invalid and
14174 		 TYPE_SIZE of the element is something unsigned, so
14175 		 check whether op01 fits into HOST_WIDE_INT, which
14176 		 implies it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14177 		 then just use unsigned HOST_WIDE_INT because we want to treat
14178 		 the value as unsigned.  */
14179 	      && tree_fits_shwi_p (op01))
14180 	    {
14181 	      tree part_width = TYPE_SIZE (type);
14182 	      unsigned HOST_WIDE_INT max_offset
14183 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14184 		   * TYPE_VECTOR_SUBPARTS (op00type));
14185 	      if (tree_int_cst_sign_bit (op01) == 0
14186 		  && compare_tree_int (op01, max_offset) == -1)
14187 		{
14188 		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14189 		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14190 		  tree index = bitsize_int (indexi);
14191 		  return fold_build3_loc (loc,
14192 					  BIT_FIELD_REF, type, op00,
14193 					  part_width, index);
14194 		}
14195 	    }
14196 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14197 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14198 		   && type == TREE_TYPE (op00type))
14199 	    {
14200 	      tree size = TYPE_SIZE_UNIT (type);
14201 	      if (tree_int_cst_equal (size, op01))
14202 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14203 	    }
14204 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14205 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14206 		   && type == TREE_TYPE (op00type))
14207 	    {
14208 	      tree type_domain = TYPE_DOMAIN (op00type);
14209 	      tree min_val = size_zero_node;
14210 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14211 		min_val = TYPE_MIN_VALUE (type_domain);
14212 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14213 				     TYPE_SIZE_UNIT (type));
14214 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14215 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
14216 				 NULL_TREE, NULL_TREE);
14217 	    }
14218 	}
14219     }
14220 
14221   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14222   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14223       && type == TREE_TYPE (TREE_TYPE (subtype))
14224       && (!in_gimple_form
14225 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14226     {
14227       tree type_domain;
14228       tree min_val = size_zero_node;
14229       sub = build_fold_indirect_ref_loc (loc, sub);
14230       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14231       if (type_domain && TYPE_MIN_VALUE (type_domain))
14232 	min_val = TYPE_MIN_VALUE (type_domain);
14233       if (in_gimple_form
14234 	  && TREE_CODE (min_val) != INTEGER_CST)
14235 	return NULL_TREE;
14236       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14237 			 NULL_TREE);
14238     }
14239 
14240   return NULL_TREE;
14241 }
14242 
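/* For example (assuming 4-byte ints): for int a[4],
   *(int *)((char *)&a + sizeof (int)) simplifies to a[1], and for a
   vector v of four ints, *(int *)((char *)&v + 8) simplifies to
   BIT_FIELD_REF <v, 32, 64>.  */
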
14243 /* Builds an expression for an indirection through T, simplifying some
14244    cases.  */
14245 
14246 tree
14247 build_fold_indirect_ref_loc (location_t loc, tree t)
14248 {
14249   tree type = TREE_TYPE (TREE_TYPE (t));
14250   tree sub = fold_indirect_ref_1 (loc, type, t);
14251 
14252   if (sub)
14253     return sub;
14254 
14255   return build1_loc (loc, INDIRECT_REF, type, t);
14256 }
14257 
14258 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14259 
14260 tree
14261 fold_indirect_ref_loc (location_t loc, tree t)
14262 {
14263   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14264 
14265   if (sub)
14266     return sub;
14267   else
14268     return t;
14269 }
14270 
14271 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14272    whose result is ignored.  The type of the returned tree need not be
14273    the same as the original expression.  */
14274 
14275 tree
14276 fold_ignored_result (tree t)
14277 {
14278   if (!TREE_SIDE_EFFECTS (t))
14279     return integer_zero_node;
14280 
14281   for (;;)
14282     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14283       {
14284       case tcc_unary:
14285 	t = TREE_OPERAND (t, 0);
14286 	break;
14287 
14288       case tcc_binary:
14289       case tcc_comparison:
14290 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14291 	  t = TREE_OPERAND (t, 0);
14292 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14293 	  t = TREE_OPERAND (t, 1);
14294 	else
14295 	  return t;
14296 	break;
14297 
14298       case tcc_expression:
14299 	switch (TREE_CODE (t))
14300 	  {
14301 	  case COMPOUND_EXPR:
14302 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14303 	      return t;
14304 	    t = TREE_OPERAND (t, 0);
14305 	    break;
14306 
14307 	  case COND_EXPR:
14308 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14309 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14310 	      return t;
14311 	    t = TREE_OPERAND (t, 0);
14312 	    break;
14313 
14314 	  default:
14315 	    return t;
14316 	  }
14317 	break;
14318 
14319       default:
14320 	return t;
14321       }
14322 }
14323 
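/* For example, for (x = 5, x + 1) with the result unused,
   fold_ignored_result returns just x = 5, and for an expression with
   no side effects at all it returns integer_zero_node.  */
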
14324 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */
14325 
14326 tree
14327 round_up_loc (location_t loc, tree value, unsigned int divisor)
14328 {
14329   tree div = NULL_TREE;
14330 
14331   if (divisor == 1)
14332     return value;
14333 
14334   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14335      have to do anything.  Only do this check when VALUE is not a
14336      constant, because for a constant the check is more expensive
14337      than simply doing the rounding.  */
14338   if (TREE_CODE (value) != INTEGER_CST)
14339     {
14340       div = build_int_cst (TREE_TYPE (value), divisor);
14341 
14342       if (multiple_of_p (TREE_TYPE (value), value, div))
14343 	return value;
14344     }
14345 
14346   /* If divisor is a power of two, simplify this to bit manipulation.  */
14347   if (pow2_or_zerop (divisor))
14348     {
14349       if (TREE_CODE (value) == INTEGER_CST)
14350 	{
14351 	  wide_int val = value;
14352 	  bool overflow_p;
14353 
14354 	  if ((val & (divisor - 1)) == 0)
14355 	    return value;
14356 
14357 	  overflow_p = TREE_OVERFLOW (value);
14358 	  val += divisor - 1;
14359 	  val &= (int) -divisor;
14360 	  if (val == 0)
14361 	    overflow_p = true;
14362 
14363 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14364 	}
14365       else
14366 	{
14367 	  tree t;
14368 
14369 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14370 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14371 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14372 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14373 	}
14374     }
14375   else
14376     {
14377       if (!div)
14378 	div = build_int_cst (TREE_TYPE (value), divisor);
14379       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14380       value = size_binop_loc (loc, MULT_EXPR, value, div);
14381     }
14382 
14383   return value;
14384 }
14385 
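/* For example, rounding 10 up to a multiple of 8 uses the
   power-of-two path: (10 + 7) & -8 == 16.  Rounding 10 up to a
   multiple of 6 uses division: CEIL (10 / 6) * 6 == 12.  */
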
14386 /* Likewise, but round down.  */
14387 
14388 tree
14389 round_down_loc (location_t loc, tree value, int divisor)
14390 {
14391   tree div = NULL_TREE;
14392 
14393   gcc_assert (divisor > 0);
14394   if (divisor == 1)
14395     return value;
14396 
14397   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14398      have to do anything.  Only do this check when VALUE is not a
14399      constant, because for a constant the check is more expensive
14400      than simply doing the rounding.  */
14401   if (TREE_CODE (value) != INTEGER_CST)
14402     {
14403       div = build_int_cst (TREE_TYPE (value), divisor);
14404 
14405       if (multiple_of_p (TREE_TYPE (value), value, div))
14406 	return value;
14407     }
14408 
14409   /* If divisor is a power of two, simplify this to bit manipulation.  */
14410   if (pow2_or_zerop (divisor))
14411     {
14412       tree t;
14413 
14414       t = build_int_cst (TREE_TYPE (value), -divisor);
14415       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14416     }
14417   else
14418     {
14419       if (!div)
14420 	div = build_int_cst (TREE_TYPE (value), divisor);
14421       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14422       value = size_binop_loc (loc, MULT_EXPR, value, div);
14423     }
14424 
14425   return value;
14426 }
14427 
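/* Likewise, rounding 10 down to a multiple of 8 computes
   10 & -8 == 8, and rounding 10 down to a multiple of 6 computes
   FLOOR (10 / 6) * 6 == 6.  */
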
14428 /* Return a pointer to the base of the object addressed by EXP, and
14429    extract information about the offset of the access, storing it in
14430    *PBITPOS and *POFFSET.  */
14431 
14432 static tree
14433 split_address_to_core_and_offset (tree exp,
14434 				  HOST_WIDE_INT *pbitpos, tree *poffset)
14435 {
14436   tree core;
14437   machine_mode mode;
14438   int unsignedp, reversep, volatilep;
14439   HOST_WIDE_INT bitsize;
14440   location_t loc = EXPR_LOCATION (exp);
14441 
14442   if (TREE_CODE (exp) == ADDR_EXPR)
14443     {
14444       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14445 				  poffset, &mode, &unsignedp, &reversep,
14446 				  &volatilep);
14447       core = build_fold_addr_expr_loc (loc, core);
14448     }
14449   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14450     {
14451       core = TREE_OPERAND (exp, 0);
14452       STRIP_NOPS (core);
14453       *pbitpos = 0;
14454       *poffset = TREE_OPERAND (exp, 1);
14455       if (TREE_CODE (*poffset) == INTEGER_CST)
14456 	{
14457 	  offset_int tem = wi::sext (wi::to_offset (*poffset),
14458 				     TYPE_PRECISION (TREE_TYPE (*poffset)));
14459 	  tem <<= LOG2_BITS_PER_UNIT;
14460 	  if (wi::fits_shwi_p (tem))
14461 	    {
14462 	      *pbitpos = tem.to_shwi ();
14463 	      *poffset = NULL_TREE;
14464 	    }
14465 	}
14466     }
14467   else
14468     {
14469       core = exp;
14470       *pbitpos = 0;
14471       *poffset = NULL_TREE;
14472     }
14473 
14474   return core;
14475 }
14476 
14477 /* Returns true if addresses of E1 and E2 differ by a constant, false
14478    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14479 
14480 bool
14481 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14482 {
14483   tree core1, core2;
14484   HOST_WIDE_INT bitpos1, bitpos2;
14485   tree toffset1, toffset2, tdiff, type;
14486 
14487   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14488   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14489 
14490   if (bitpos1 % BITS_PER_UNIT != 0
14491       || bitpos2 % BITS_PER_UNIT != 0
14492       || !operand_equal_p (core1, core2, 0))
14493     return false;
14494 
14495   if (toffset1 && toffset2)
14496     {
14497       type = TREE_TYPE (toffset1);
14498       if (type != TREE_TYPE (toffset2))
14499 	toffset2 = fold_convert (type, toffset2);
14500 
14501       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14502       if (!cst_and_fits_in_hwi (tdiff))
14503 	return false;
14504 
14505       *diff = int_cst_value (tdiff);
14506     }
14507   else if (toffset1 || toffset2)
14508     {
14509       /* If only one of the offsets is non-constant, the difference cannot
14510 	 be a constant.  */
14511       return false;
14512     }
14513   else
14514     *diff = 0;
14515 
14516   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14517   return true;
14518 }
14519 
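/* For example (on a target with 4-byte int): for int a[8], the
   addresses &a[5] and &a[2] share the core &a and differ by the
   constant offset 3 * sizeof (int), so *DIFF is set to 12.  */
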
14520 /* Return OFF converted to a pointer offset type suitable as offset for
14521    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14522 tree
14523 convert_to_ptrofftype_loc (location_t loc, tree off)
14524 {
14525   return fold_convert_loc (loc, sizetype, off);
14526 }
14527 
14528 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14529 tree
14530 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14531 {
14532   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14533 			  ptr, convert_to_ptrofftype_loc (loc, off));
14534 }
14535 
14536 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14537 tree
14538 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14539 {
14540   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14541 			  ptr, size_int (off));
14542 }
14543 
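/* For example, fold_build_pointer_plus_hwi_loc (loc, ptr, 4) builds
   ptr p+ 4 with the offset expressed in sizetype, the type required
   for the second operand of POINTER_PLUS_EXPR.  */
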
14544 /* Return a char pointer for a C string if it is a string constant
14545    or a sum of a string constant and an integer constant.  We only
14546    support string constants properly terminated with a '\0' character.
14547    If STRLEN is a valid pointer, the length of the returned string
14548    (including the terminating character) is stored in *STRLEN.  */
14549 
14550 const char *
14551 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14552 {
14553   tree offset_node;
14554 
14555   if (strlen)
14556     *strlen = 0;
14557 
14558   src = string_constant (src, &offset_node);
14559   if (src == 0)
14560     return NULL;
14561 
14562   unsigned HOST_WIDE_INT offset = 0;
14563   if (offset_node != NULL_TREE)
14564     {
14565       if (!tree_fits_uhwi_p (offset_node))
14566 	return NULL;
14567       else
14568 	offset = tree_to_uhwi (offset_node);
14569     }
14570 
14571   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14572   const char *string = TREE_STRING_POINTER (src);
14573 
14574   /* Support only properly null-terminated strings.  */
14575   if (string_length == 0
14576       || string[string_length - 1] != '\0'
14577       || offset >= string_length)
14578     return NULL;
14579 
14580   if (strlen)
14581     *strlen = string_length - offset;
14582   return string + offset;
14583 }
14584 
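/* For example, c_getstr on the address of "hello" plus offset 1
   returns a pointer to "ello" and stores 5 in *STRLEN (the remaining
   length, including the terminating '\0').  */
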
14585 #if CHECKING_P
14586 
14587 namespace selftest {
14588 
14589 /* Helper functions for writing tests of folding trees.  */
14590 
14591 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14592 
14593 static void
14594 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14595 			     tree constant)
14596 {
14597   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14598 }
14599 
14600 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14601    wrapping WRAPPED_EXPR.  */
14602 
14603 static void
14604 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14605 				 tree wrapped_expr)
14606 {
14607   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14608   ASSERT_NE (wrapped_expr, result);
14609   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14610   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14611 }
14612 
14613 /* Verify that various arithmetic binary operations are folded
14614    correctly.  */
14615 
14616 static void
14617 test_arithmetic_folding ()
14618 {
14619   tree type = integer_type_node;
14620   tree x = create_tmp_var_raw (type, "x");
14621   tree zero = build_zero_cst (type);
14622   tree one = build_int_cst (type, 1);
14623 
14624   /* Addition.  */
14625   /* 1 <-- (0 + 1) */
14626   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14627 			       one);
14628   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14629 			       one);
14630 
14631   /* (nonlvalue)x <-- (x + 0) */
14632   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14633 				   x);
14634 
14635   /* Subtraction.  */
14636   /* 0 <-- (x - x) */
14637   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14638 			       zero);
14639   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14640 				   x);
14641 
14642   /* Multiplication.  */
14643   /* 0 <-- (x * 0) */
14644   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14645 			       zero);
14646 
14647   /* (nonlvalue)x <-- (x * 1) */
14648   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14649 				   x);
14650 }
14651 
14652 /* Verify that various binary operations on vectors are folded
14653    correctly.  */
14654 
14655 static void
14656 test_vector_folding ()
14657 {
14658   tree inner_type = integer_type_node;
14659   tree type = build_vector_type (inner_type, 4);
14660   tree zero = build_zero_cst (type);
14661   tree one = build_one_cst (type);
14662 
14663   /* Verify equality tests that return a scalar boolean result.  */
14664   tree res_type = boolean_type_node;
14665   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14666   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14667   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14668   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14669 }
14670 
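/* A minimal sketch (not part of the original test suite) of a further
   selftest for the rounding helpers above; it is invoked from
   fold_const_c_tests below.  The function name and the expected values
   are illustrative, not mandated by the sources.  */

static void
test_rounding_folding ()
{
  tree type = sizetype;
  tree ten = build_int_cst (type, 10);

  /* Power-of-two path: (10 + 7) & -8 == 16.  */
  ASSERT_TRUE (tree_int_cst_equal (round_up_loc (UNKNOWN_LOCATION, ten, 8),
				   build_int_cst (type, 16)));
  /* Generic path: FLOOR (10 / 6) * 6 == 6.  */
  ASSERT_TRUE (tree_int_cst_equal (round_down_loc (UNKNOWN_LOCATION, ten, 6),
				   build_int_cst (type, 6)));
}
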
14671 /* Run all of the selftests within this file.  */
14672 
14673 void
14674 fold_const_c_tests ()
14675 {
14676   test_arithmetic_folding ();
14677   test_vector_folding ();
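  test_rounding_folding ();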
14678 }
14679 
14680 } // namespace selftest
14681 
14682 #endif /* CHECKING_P */
14683