/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
#include "internal-fn.h"

/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding in a C++ manifestly-constant-evaluated
   context; zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
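
/* Illustration (not exhaustive): with the bit assignment LT = 1,
   EQ = 2, GT = 4 and UNORD = 8, compound codes are bitwise ORs of
   their components, e.g.
     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ          (1 | 2 == 3)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   so ANDing or ORing two comparisons of the same operands reduces to
   ANDing/ORing their codes, e.g. LE & GE == EQ (3 & 6 == 2).  */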

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
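
/* For example (a behavioural sketch): with ARG1 and ARG2 the
   INTEGER_CSTs 12 and 4 this returns the INTEGER_CST 3, while with
   13 and 4 it returns NULL_TREE because the division is inexact.
   Callers use it when only an exact quotient may be substituted.  */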

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
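
/* A typical caller pairs the two entry points like this (a sketch,
   not lifted from a specific caller):

     fold_defer_overflow_warnings ();
     tree val = fold (expr);
     bool used = val && TREE_CODE (val) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   so the deferred warning is only emitted if the caller actually
   used a result that relied on undefined signed overflow.  */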

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
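
/* Example: sin is odd, so -sin(x) may be folded to sin(-x) when the
   argument is cheap to negate; see the CALL_EXPR cases in
   negate_expr_p and fold_negate_expr_1 below.  The rint family is
   only treated as odd when -frounding-math is off, since under a
   dynamic rounding mode (e.g. round toward +infinity) rint(-x) need
   not equal -rint(x).  */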

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
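
/* For a 32-bit signed int this is false only for INT_MIN (the value
   with just the sign bit set), since -INT_MIN is not representable;
   every other value can be negated safely.  */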

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand of it
         does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1, we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1, we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
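
/* Behavioural sketch of negate_expr:
     a - b   ->  b - a            (if signed zeros are not honored)
     ~a      ->  a + 1            (for wrapping integral types)
     x       ->  NEGATE_EXPR <x>  (fallback, no simplification found)
   Unlike fold_negate_expr it never returns NULL_TREE for a non-null
   argument.  */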

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
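
/* For example (a sketch): with CODE == PLUS_EXPR, splitting
   IN = x + 4 sets *LITP to 4 and returns x, while IN = x - 4 sets
   *MINUS_LITP to 4 and returns x.  The parts are typically
   recombined with associate_trees below.  */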

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
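
/* The division variants above differ only in rounding; e.g. for
   ARG1 == 8 and ARG2 == 3 (exactly 2.67):
     TRUNC_DIV_EXPR -> 2 (toward zero)    FLOOR_DIV_EXPR -> 2
     CEIL_DIV_EXPR  -> 3                  ROUND_DIV_EXPR -> 3
   and each *_MOD_EXPR is the matching remainder, so that
   arg1 == arg2 * div + mod holds for every pair.  */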

/* Combine two poly_ints ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
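
/* A minimal usage sketch (hypothetical values, 32-bit int):

     tree a = build_int_cst (integer_type_node, INT_MAX);
     tree b = build_int_cst (integer_type_node, 1);
     tree sum = int_const_binop (PLUS_EXPR, a, b);

   SUM wraps to INT_MIN, and because the addition overflowed in a
   signed type, force_fit_type marks it with TREE_OVERFLOW so that
   callers can decline to use it.  */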

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
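
/* For instance, (a + b) << c == (a << c) + (b << c) modulo wrapping,
   so LSHIFT_EXPR distributes over addition in operand 1; but
   c << (a + b) != (c << a) + (c << b), so it does not in operand 2.
   This property lets const_binop below operate directly on the
   stepped encoding of a VECTOR_CST.  */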

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d1.signalling = 0;
	t = build_real (type, d1);
	return t;
      }
      else if (REAL_VALUE_ISNAN (d2))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d2.signalling = 0;
	t = build_real (type, d2);
	return t;
      }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.cc:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.cc:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
1607 
1608 /* Overload that adds a TYPE parameter to be able to dispatch
1609    to fold_relational_const.  */
1610 
1611 tree
const_binop(enum tree_code code,tree type,tree arg1,tree arg2)1612 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1613 {
1614   if (TREE_CODE_CLASS (code) == tcc_comparison)
1615     return fold_relational_const (code, type, arg1, arg2);
1616 
1617   /* ???  Until we make the const_binop worker take the type of the
1618      result as argument put those cases that need it here.  */
1619   switch (code)
1620     {
1621     case VEC_SERIES_EXPR:
1622       if (CONSTANT_CLASS_P (arg1)
1623 	  && CONSTANT_CLASS_P (arg2))
1624 	return build_vec_series (type, arg1, arg2);
1625       return NULL_TREE;
1626 
1627     case COMPLEX_EXPR:
1628       if ((TREE_CODE (arg1) == REAL_CST
1629 	   && TREE_CODE (arg2) == REAL_CST)
1630 	  || (TREE_CODE (arg1) == INTEGER_CST
1631 	      && TREE_CODE (arg2) == INTEGER_CST))
1632 	return build_complex (type, arg1, arg2);
1633       return NULL_TREE;
1634 
1635     case POINTER_DIFF_EXPR:
1636       if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1637 	{
1638 	  poly_offset_int res = (wi::to_poly_offset (arg1)
1639 				 - wi::to_poly_offset (arg2));
1640 	  return force_fit_type (type, res, 1,
1641 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1642 	}
1643       return NULL_TREE;
1644 
1645     case VEC_PACK_TRUNC_EXPR:
1646     case VEC_PACK_FIX_TRUNC_EXPR:
1647     case VEC_PACK_FLOAT_EXPR:
1648       {
1649 	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1650 
1651 	if (TREE_CODE (arg1) != VECTOR_CST
1652 	    || TREE_CODE (arg2) != VECTOR_CST)
1653 	  return NULL_TREE;
1654 
1655 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1656 	  return NULL_TREE;
1657 
1658 	out_nelts = in_nelts * 2;
1659 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1660 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1661 
1662 	tree_vector_builder elts (type, out_nelts, 1);
1663 	for (i = 0; i < out_nelts; i++)
1664 	  {
1665 	    tree elt = (i < in_nelts
1666 			? VECTOR_CST_ELT (arg1, i)
1667 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1668 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1669 				      ? NOP_EXPR
1670 				      : code == VEC_PACK_FLOAT_EXPR
1671 				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1672 				      TREE_TYPE (type), elt);
1673 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1674 	      return NULL_TREE;
1675 	    elts.quick_push (elt);
1676 	  }
1677 
1678 	return elts.build ();
1679       }
1680 
1681     case VEC_WIDEN_MULT_LO_EXPR:
1682     case VEC_WIDEN_MULT_HI_EXPR:
1683     case VEC_WIDEN_MULT_EVEN_EXPR:
1684     case VEC_WIDEN_MULT_ODD_EXPR:
1685       {
1686 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1687 
1688 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1689 	  return NULL_TREE;
1690 
1691 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1692 	  return NULL_TREE;
1693 	out_nelts = in_nelts / 2;
1694 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1695 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1696 
1697 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1698 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1699 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1700 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1701 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1702 	  scale = 1, ofs = 0;
1703 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1704 	  scale = 1, ofs = 1;
1705 
1706 	tree_vector_builder elts (type, out_nelts, 1);
1707 	for (out = 0; out < out_nelts; out++)
1708 	  {
1709 	    unsigned int in = (out << scale) + ofs;
1710 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1711 					  VECTOR_CST_ELT (arg1, in));
1712 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1713 					  VECTOR_CST_ELT (arg2, in));
1714 
1715 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1716 	      return NULL_TREE;
1717 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1718 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1719 	      return NULL_TREE;
1720 	    elts.quick_push (elt);
1721 	  }
1722 
1723 	return elts.build ();
1724       }
1725 
1726     default:;
1727     }
1728 
1729   if (TREE_CODE_CLASS (code) != tcc_binary)
1730     return NULL_TREE;
1731 
1732   /* Make sure type and arg0 have the same saturating flag.  */
1733   gcc_checking_assert (TYPE_SATURATING (type)
1734 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1735 
1736   return const_binop (code, arg1, arg2);
1737 }
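
/* Usage sketch (illustrative, not part of the upstream source; assumes
   the internal APIs declared in tree.h and fold-const.h):

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 2);
     tree cmp = const_binop (LT_EXPR, boolean_type_node, a, b);

   Comparison codes dispatch to fold_relational_const, so CMP should be
   the boolean constant true; non-comparison codes fall through to the
   type-less const_binop worker above.  */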
1738 
1739 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1740    Return NULL_TREE if computing the constant is not possible.  */
1741 
1742 tree
1743 const_unop (enum tree_code code, tree type, tree arg0)
1744 {
1745   /* Don't perform the operation, other than NEGATE and ABS, if
1746      flag_signaling_nans is on and the operand is a signaling NaN.  */
1747   if (TREE_CODE (arg0) == REAL_CST
1748       && HONOR_SNANS (arg0)
1749       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1750       && code != NEGATE_EXPR
1751       && code != ABS_EXPR
1752       && code != ABSU_EXPR)
1753     return NULL_TREE;
1754 
1755   switch (code)
1756     {
1757     CASE_CONVERT:
1758     case FLOAT_EXPR:
1759     case FIX_TRUNC_EXPR:
1760     case FIXED_CONVERT_EXPR:
1761       return fold_convert_const (code, type, arg0);
1762 
1763     case ADDR_SPACE_CONVERT_EXPR:
1764       /* If the source address is 0, and the source address space
1765 	 cannot have a valid object at 0, fold to dest type null.  */
1766       if (integer_zerop (arg0)
1767 	  && !(targetm.addr_space.zero_address_valid
1768 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1769 	return fold_convert_const (code, type, arg0);
1770       break;
1771 
1772     case VIEW_CONVERT_EXPR:
1773       return fold_view_convert_expr (type, arg0);
1774 
1775     case NEGATE_EXPR:
1776       {
1777 	/* Can't call fold_negate_const directly here as that doesn't
1778 	   handle all cases and we might not be able to negate some
1779 	   constants.  */
1780 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1781 	if (tem && CONSTANT_CLASS_P (tem))
1782 	  return tem;
1783 	break;
1784       }
1785 
1786     case ABS_EXPR:
1787     case ABSU_EXPR:
1788       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1789 	return fold_abs_const (arg0, type);
1790       break;
1791 
1792     case CONJ_EXPR:
1793       if (TREE_CODE (arg0) == COMPLEX_CST)
1794 	{
1795 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1796 					  TREE_TYPE (type));
1797 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1798 	}
1799       break;
1800 
1801     case BIT_NOT_EXPR:
1802       if (TREE_CODE (arg0) == INTEGER_CST)
1803 	return fold_not_const (arg0, type);
1804       else if (POLY_INT_CST_P (arg0))
1805 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1806       /* Perform BIT_NOT_EXPR on each element individually.  */
1807       else if (TREE_CODE (arg0) == VECTOR_CST)
1808 	{
1809 	  tree elem;
1810 
1811 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1812 	  tree_vector_builder elements;
1813 	  elements.new_unary_operation (type, arg0, true);
1814 	  unsigned int i, count = elements.encoded_nelts ();
1815 	  for (i = 0; i < count; ++i)
1816 	    {
1817 	      elem = VECTOR_CST_ELT (arg0, i);
1818 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1819 	      if (elem == NULL_TREE)
1820 		break;
1821 	      elements.quick_push (elem);
1822 	    }
1823 	  if (i == count)
1824 	    return elements.build ();
1825 	}
1826       break;
1827 
1828     case TRUTH_NOT_EXPR:
1829       if (TREE_CODE (arg0) == INTEGER_CST)
1830 	return constant_boolean_node (integer_zerop (arg0), type);
1831       break;
1832 
1833     case REALPART_EXPR:
1834       if (TREE_CODE (arg0) == COMPLEX_CST)
1835 	return fold_convert (type, TREE_REALPART (arg0));
1836       break;
1837 
1838     case IMAGPART_EXPR:
1839       if (TREE_CODE (arg0) == COMPLEX_CST)
1840 	return fold_convert (type, TREE_IMAGPART (arg0));
1841       break;
1842 
1843     case VEC_UNPACK_LO_EXPR:
1844     case VEC_UNPACK_HI_EXPR:
1845     case VEC_UNPACK_FLOAT_LO_EXPR:
1846     case VEC_UNPACK_FLOAT_HI_EXPR:
1847     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1848     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1849       {
1850 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1851 	enum tree_code subcode;
1852 
1853 	if (TREE_CODE (arg0) != VECTOR_CST)
1854 	  return NULL_TREE;
1855 
1856 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1857 	  return NULL_TREE;
1858 	out_nelts = in_nelts / 2;
1859 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1860 
1861 	unsigned int offset = 0;
1862 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1863 				   || code == VEC_UNPACK_FLOAT_LO_EXPR
1864 				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1865 	  offset = out_nelts;
1866 
1867 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1868 	  subcode = NOP_EXPR;
1869 	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1870 		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1871 	  subcode = FLOAT_EXPR;
1872 	else
1873 	  subcode = FIX_TRUNC_EXPR;
1874 
1875 	tree_vector_builder elts (type, out_nelts, 1);
1876 	for (i = 0; i < out_nelts; i++)
1877 	  {
1878 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1879 					   VECTOR_CST_ELT (arg0, i + offset));
1880 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1881 	      return NULL_TREE;
1882 	    elts.quick_push (elt);
1883 	  }
1884 
1885 	return elts.build ();
1886       }
1887 
1888     case VEC_DUPLICATE_EXPR:
1889       if (CONSTANT_CLASS_P (arg0))
1890 	return build_vector_from_val (type, arg0);
1891       return NULL_TREE;
1892 
1893     default:
1894       break;
1895     }
1896 
1897   return NULL_TREE;
1898 }
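
/* Usage sketch (illustrative, not upstream; assumes the internal APIs
   visible in this file):

     tree z = build_int_cst (integer_type_node, 0);
     tree t = const_unop (BIT_NOT_EXPR, integer_type_node, z);

   BIT_NOT_EXPR of an INTEGER_CST goes through fold_not_const, so T
   should be the INTEGER_CST -1; codes the function cannot fold simply
   yield NULL_TREE.  */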
1899 
1900 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1901    indicates which particular sizetype to create.  */
1902 
1903 tree
1904 size_int_kind (poly_int64 number, enum size_type_kind kind)
1905 {
1906   return build_int_cst (sizetype_tab[(int) kind], number);
1907 }
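
/* Usage note (illustrative, not upstream): the size_int family of
   macros in tree.h is expected to expand to this function, e.g.
   size_int (16) as size_int_kind (16, stk_sizetype), yielding a
   sizetype INTEGER_CST of value 16; ssize_int, bitsize_int and
   sbitsize_int select the other entries of sizetype_tab.  */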
1908 
1909 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1910    is a tree code.  The type of the result is taken from the operands.
1911    Both must be equivalent integer types, as in int_binop_types_match_p.
1912    If the operands are constant, so is the result.  */
1913 
1914 tree
1915 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1916 {
1917   tree type = TREE_TYPE (arg0);
1918 
1919   if (arg0 == error_mark_node || arg1 == error_mark_node)
1920     return error_mark_node;
1921 
1922   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1923                                        TREE_TYPE (arg1)));
1924 
1925   /* Handle the special case of two poly_int constants faster.  */
1926   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1927     {
1928       /* And some specific cases even faster than that.  */
1929       if (code == PLUS_EXPR)
1930 	{
1931 	  if (integer_zerop (arg0)
1932 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1933 	    return arg1;
1934 	  if (integer_zerop (arg1)
1935 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1936 	    return arg0;
1937 	}
1938       else if (code == MINUS_EXPR)
1939 	{
1940 	  if (integer_zerop (arg1)
1941 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1942 	    return arg0;
1943 	}
1944       else if (code == MULT_EXPR)
1945 	{
1946 	  if (integer_onep (arg0)
1947 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1948 	    return arg1;
1949 	}
1950 
1951       /* Handle general case of two integer constants.  For sizetype
1952          constant calculations we always want to know about overflow,
1953 	 even in the unsigned case.  */
1954       tree res = int_const_binop (code, arg0, arg1, -1);
1955       if (res != NULL_TREE)
1956 	return res;
1957     }
1958 
1959   return fold_build2_loc (loc, code, type, arg0, arg1);
1960 }
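
/* Usage sketch (illustrative, not upstream):

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   With two constant operands this folds directly (here to the sizetype
   constant 12, via int_const_binop with overflow tracking); the
   identity fast paths mean size_binop (PLUS_EXPR, size_zero_node, x)
   returns X itself, and non-constant operands fall back to building a
   PLUS_EXPR through fold_build2_loc.  */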
1961 
1962 /* Given two values, either both of sizetype or both of bitsizetype,
1963    compute the difference between the two values.  Return the value
1964    in signed type corresponding to the type of the operands.  */
1965 
1966 tree
1967 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1968 {
1969   tree type = TREE_TYPE (arg0);
1970   tree ctype;
1971 
1972   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1973 				       TREE_TYPE (arg1)));
1974 
1975   /* If the type is already signed, just do the simple thing.  */
1976   if (!TYPE_UNSIGNED (type))
1977     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1978 
1979   if (type == sizetype)
1980     ctype = ssizetype;
1981   else if (type == bitsizetype)
1982     ctype = sbitsizetype;
1983   else
1984     ctype = signed_type_for (type);
1985 
1986   /* If either operand is not a constant, do the conversions to the signed
1987      type and subtract.  The hardware will do the right thing with any
1988      overflow in the subtraction.  */
1989   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1990     return size_binop_loc (loc, MINUS_EXPR,
1991 			   fold_convert_loc (loc, ctype, arg0),
1992 			   fold_convert_loc (loc, ctype, arg1));
1993 
1994   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1995      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1996      overflow) and negate (which can't either).  Special-case a result
1997      of zero while we're here.  */
1998   if (tree_int_cst_equal (arg0, arg1))
1999     return build_int_cst (ctype, 0);
2000   else if (tree_int_cst_lt (arg1, arg0))
2001     return fold_convert_loc (loc, ctype,
2002 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2003   else
2004     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2005 			   fold_convert_loc (loc, ctype,
2006 					     size_binop_loc (loc,
2007 							     MINUS_EXPR,
2008 							     arg1, arg0)));
2009 }
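
/* Worked example (illustrative, not upstream): because the result type
   is the signed counterpart of the operands' type, the difference may
   legitimately be negative, e.g.

     tree d = size_diffop_loc (loc, size_int (4), size_int (8));

   should yield the ssizetype constant -4: ARG0 < ARG1, so the function
   computes 8 - 4 in sizetype (which cannot overflow) and negates the
   converted result in ssizetype.  */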
2010 
2011 /* A subroutine of fold_convert_const handling conversions of an
2012    INTEGER_CST to another integer type.  */
2013 
2014 static tree
2015 fold_convert_const_int_from_int (tree type, const_tree arg1)
2016 {
2017   /* Given an integer constant, make new constant with new type,
2018      appropriately sign-extended or truncated.  Use widest_int
2019      so that any extension is done according to ARG1's type.  */
2020   return force_fit_type (type, wi::to_widest (arg1),
2021 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2022 			 TREE_OVERFLOW (arg1));
2023 }
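
/* Worked example (illustrative, not upstream): converting the
   INTEGER_CST 200 to signed char extends 200 in widest_int, and
   force_fit_type then truncates it to 8 bits, so the result is the
   signed-char constant -56, with any overflow recorded on the node by
   force_fit_type.  */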
2024 
2025 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2026    to an integer type.  */
2027 
2028 static tree
2029 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2030 {
2031   bool overflow = false;
2032   tree t;
2033 
2034   /* The following code implements the floating point to integer
2035      conversion rules required by the Java Language Specification,
2036      that IEEE NaNs are mapped to zero and values that overflow
2037      the target precision saturate, i.e. values greater than
2038      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2039      are mapped to INT_MIN.  These semantics are allowed by the
2040      C and C++ standards that simply state that the behavior of
2041      FP-to-integer conversion is unspecified upon overflow.  */
2042 
2043   wide_int val;
2044   REAL_VALUE_TYPE r;
2045   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2046 
2047   switch (code)
2048     {
2049     case FIX_TRUNC_EXPR:
2050       real_trunc (&r, VOIDmode, &x);
2051       break;
2052 
2053     default:
2054       gcc_unreachable ();
2055     }
2056 
2057   /* If R is NaN, return zero and show we have an overflow.  */
2058   if (REAL_VALUE_ISNAN (r))
2059     {
2060       overflow = true;
2061       val = wi::zero (TYPE_PRECISION (type));
2062     }
2063 
2064   /* See if R is less than the lower bound or greater than the
2065      upper bound.  */
2066 
2067   if (! overflow)
2068     {
2069       tree lt = TYPE_MIN_VALUE (type);
2070       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2071       if (real_less (&r, &l))
2072 	{
2073 	  overflow = true;
2074 	  val = wi::to_wide (lt);
2075 	}
2076     }
2077 
2078   if (! overflow)
2079     {
2080       tree ut = TYPE_MAX_VALUE (type);
2081       if (ut)
2082 	{
2083 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2084 	  if (real_less (&u, &r))
2085 	    {
2086 	      overflow = true;
2087 	      val = wi::to_wide (ut);
2088 	    }
2089 	}
2090     }
2091 
2092   if (! overflow)
2093     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2094 
2095   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2096   return t;
2097 }
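
/* Worked example (illustrative, not upstream): with a 32-bit signed
   TYPE, converting the REAL_CST 1.0e30 saturates to TYPE_MAX_VALUE
   (INT_MAX) and sets TREE_OVERFLOW on the result; a NaN input maps to
   0, again with TREE_OVERFLOW set, matching the saturating semantics
   described above.  */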
2098 
2099 /* A subroutine of fold_convert_const handling conversions of a
2100    FIXED_CST to an integer type.  */
2101 
2102 static tree
2103 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2104 {
2105   tree t;
2106   double_int temp, temp_trunc;
2107   scalar_mode mode;
2108 
2109   /* Right shift FIXED_CST to temp by fbit.  */
2110   temp = TREE_FIXED_CST (arg1).data;
2111   mode = TREE_FIXED_CST (arg1).mode;
2112   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2113     {
2114       temp = temp.rshift (GET_MODE_FBIT (mode),
2115 			  HOST_BITS_PER_DOUBLE_INT,
2116 			  SIGNED_FIXED_POINT_MODE_P (mode));
2117 
2118       /* Left shift temp to temp_trunc by fbit.  */
2119       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2120 				HOST_BITS_PER_DOUBLE_INT,
2121 				SIGNED_FIXED_POINT_MODE_P (mode));
2122     }
2123   else
2124     {
2125       temp = double_int_zero;
2126       temp_trunc = double_int_zero;
2127     }
2128 
2129   /* If FIXED_CST is negative, we need to round the value toward 0:
2130      if the fractional bits are not zero, add 1 to temp.  */
2131   if (SIGNED_FIXED_POINT_MODE_P (mode)
2132       && temp_trunc.is_negative ()
2133       && TREE_FIXED_CST (arg1).data != temp_trunc)
2134     temp += double_int_one;
2135 
2136   /* Given a fixed-point constant, make new constant with new type,
2137      appropriately sign-extended or truncated.  */
2138   t = force_fit_type (type, temp, -1,
2139 		      (temp.is_negative ()
2140 		       && (TYPE_UNSIGNED (type)
2141 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2142 		      | TREE_OVERFLOW (arg1));
2143 
2144   return t;
2145 }
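
/* Worked example (illustrative, not upstream): for a signed fixed-point
   value of -2.5, the arithmetic right shift by fbit yields -3 (it
   rounds toward negative infinity), and temp_trunc is then -3.0.
   Since temp_trunc is negative and differs from the original -2.5, the
   fractional bits were nonzero, so 1 is added, giving the integer -2,
   i.e. rounding toward zero as the comment above requires.  */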
2146 
2147 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2148    to another floating point type.  */
2149 
2150 static tree
2151 fold_convert_const_real_from_real (tree type, const_tree arg1)
2152 {
2153   REAL_VALUE_TYPE value;
2154   tree t;
2155 
2156   /* Don't perform the operation if flag_signaling_nans is on
2157      and the operand is a signaling NaN.  */
2158   if (HONOR_SNANS (arg1)
2159       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2160     return NULL_TREE;
2161 
2162   /* With flag_rounding_math we should respect the current rounding mode
2163      unless the conversion is exact.  */
2164   if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2165       && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2166     return NULL_TREE;
2167 
2168   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2169   t = build_real (type, value);
2170 
2171   /* If converting an infinity or NAN to a representation that doesn't
2172      have one, set the overflow bit so that we can produce some kind of
2173      error message at the appropriate point if necessary.  It's not the
2174      most user-friendly message, but it's better than nothing.  */
2175   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2176       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2177     TREE_OVERFLOW (t) = 1;
2178   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2179 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2180     TREE_OVERFLOW (t) = 1;
2181   /* Regular overflow, conversion produced an infinity in a mode that
2182      can't represent them.  */
2183   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2184 	   && REAL_VALUE_ISINF (value)
2185 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2186     TREE_OVERFLOW (t) = 1;
2187   else
2188     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2189   return t;
2190 }
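
/* Usage note (illustrative, not upstream): under -frounding-math,
   narrowing the double REAL_CST 0.1 to float is inexact, so
   exact_real_truncate fails and the function returns NULL_TREE,
   leaving the conversion to be performed at run time in the
   user-selected rounding mode; exactly representable values such as
   0.5 still fold.  */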
2191 
2192 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2193    to a floating point type.  */
2194 
2195 static tree
2196 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2197 {
2198   REAL_VALUE_TYPE value;
2199   tree t;
2200 
2201   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2202 			   &TREE_FIXED_CST (arg1));
2203   t = build_real (type, value);
2204 
2205   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2206   return t;
2207 }
2208 
2209 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2210    to another fixed-point type.  */
2211 
2212 static tree
2213 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2214 {
2215   FIXED_VALUE_TYPE value;
2216   tree t;
2217   bool overflow_p;
2218 
2219   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2220 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2221   t = build_fixed (type, value);
2222 
2223   /* Propagate overflow flags.  */
2224   if (overflow_p | TREE_OVERFLOW (arg1))
2225     TREE_OVERFLOW (t) = 1;
2226   return t;
2227 }
2228 
2229 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2230    to a fixed-point type.  */
2231 
2232 static tree
2233 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2234 {
2235   FIXED_VALUE_TYPE value;
2236   tree t;
2237   bool overflow_p;
2238   double_int di;
2239 
2240   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2241 
2242   di.low = TREE_INT_CST_ELT (arg1, 0);
2243   if (TREE_INT_CST_NUNITS (arg1) == 1)
2244     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2245   else
2246     di.high = TREE_INT_CST_ELT (arg1, 1);
2247 
2248   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2249 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2250 				       TYPE_SATURATING (type));
2251   t = build_fixed (type, value);
2252 
2253   /* Propagate overflow flags.  */
2254   if (overflow_p | TREE_OVERFLOW (arg1))
2255     TREE_OVERFLOW (t) = 1;
2256   return t;
2257 }
2258 
2259 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2260    to a fixed-point type.  */
2261 
2262 static tree
2263 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2264 {
2265   FIXED_VALUE_TYPE value;
2266   tree t;
2267   bool overflow_p;
2268 
2269   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2270 					&TREE_REAL_CST (arg1),
2271 					TYPE_SATURATING (type));
2272   t = build_fixed (type, value);
2273 
2274   /* Propagate overflow flags.  */
2275   if (overflow_p | TREE_OVERFLOW (arg1))
2276     TREE_OVERFLOW (t) = 1;
2277   return t;
2278 }
2279 
2280 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2281    type TYPE.  If no simplification can be done return NULL_TREE.  */
2282 
2283 static tree
2284 fold_convert_const (enum tree_code code, tree type, tree arg1)
2285 {
2286   tree arg_type = TREE_TYPE (arg1);
2287   if (arg_type == type)
2288     return arg1;
2289 
2290   /* We can't widen types, since the runtime value could overflow the
2291      original type before being extended to the new type.  */
2292   if (POLY_INT_CST_P (arg1)
2293       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2294       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2295     return build_poly_int_cst (type,
2296 			       poly_wide_int::from (poly_int_cst_value (arg1),
2297 						    TYPE_PRECISION (type),
2298 						    TYPE_SIGN (arg_type)));
2299 
2300   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2301       || TREE_CODE (type) == OFFSET_TYPE)
2302     {
2303       if (TREE_CODE (arg1) == INTEGER_CST)
2304 	return fold_convert_const_int_from_int (type, arg1);
2305       else if (TREE_CODE (arg1) == REAL_CST)
2306 	return fold_convert_const_int_from_real (code, type, arg1);
2307       else if (TREE_CODE (arg1) == FIXED_CST)
2308 	return fold_convert_const_int_from_fixed (type, arg1);
2309     }
2310   else if (TREE_CODE (type) == REAL_TYPE)
2311     {
2312       if (TREE_CODE (arg1) == INTEGER_CST)
2313 	{
2314 	  tree res = build_real_from_int_cst (type, arg1);
2315 	  /* Avoid the folding if flag_rounding_math is on and the
2316 	     conversion is not exact.  */
2317 	  if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2318 	    {
2319 	      bool fail = false;
2320 	      wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2321 					    TYPE_PRECISION (TREE_TYPE (arg1)));
2322 	      if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2323 		return NULL_TREE;
2324 	    }
2325 	  return res;
2326 	}
2327       else if (TREE_CODE (arg1) == REAL_CST)
2328 	return fold_convert_const_real_from_real (type, arg1);
2329       else if (TREE_CODE (arg1) == FIXED_CST)
2330 	return fold_convert_const_real_from_fixed (type, arg1);
2331     }
2332   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2333     {
2334       if (TREE_CODE (arg1) == FIXED_CST)
2335 	return fold_convert_const_fixed_from_fixed (type, arg1);
2336       else if (TREE_CODE (arg1) == INTEGER_CST)
2337 	return fold_convert_const_fixed_from_int (type, arg1);
2338       else if (TREE_CODE (arg1) == REAL_CST)
2339 	return fold_convert_const_fixed_from_real (type, arg1);
2340     }
2341   else if (TREE_CODE (type) == VECTOR_TYPE)
2342     {
2343       if (TREE_CODE (arg1) == VECTOR_CST
2344 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2345 	{
2346 	  tree elttype = TREE_TYPE (type);
2347 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2348 	  /* We can't handle steps directly when extending, since the
2349 	     values need to wrap at the original precision first.  */
2350 	  bool step_ok_p
2351 	    = (INTEGRAL_TYPE_P (elttype)
2352 	       && INTEGRAL_TYPE_P (arg1_elttype)
2353 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2354 	  tree_vector_builder v;
2355 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2356 	    return NULL_TREE;
2357 	  unsigned int len = v.encoded_nelts ();
2358 	  for (unsigned int i = 0; i < len; ++i)
2359 	    {
2360 	      tree elt = VECTOR_CST_ELT (arg1, i);
2361 	      tree cvt = fold_convert_const (code, elttype, elt);
2362 	      if (cvt == NULL_TREE)
2363 		return NULL_TREE;
2364 	      v.quick_push (cvt);
2365 	    }
2366 	  return v.build ();
2367 	}
2368     }
2369   return NULL_TREE;
2370 }
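
/* Usage sketch (illustrative, not upstream; real_from_string is the
   real.h helper assumed here):

     REAL_VALUE_TYPE r;
     real_from_string (&r, "3.7");
     tree x = build_real (double_type_node, r);
     tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, x);

   This dispatches to fold_convert_const_int_from_real, so I should be
   the integer constant 3.  */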
2371 
2372 /* Construct a vector of zero elements of vector type TYPE.  */
2373 
2374 static tree
2375 build_zero_vector (tree type)
2376 {
2377   tree t;
2378 
2379   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2380   return build_vector_from_val (type, t);
2381 }
2382 
2383 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
2384 
2385 bool
2386 fold_convertible_p (const_tree type, const_tree arg)
2387 {
2388   const_tree orig = TREE_TYPE (arg);
2389 
2390   if (type == orig)
2391     return true;
2392 
2393   if (TREE_CODE (arg) == ERROR_MARK
2394       || TREE_CODE (type) == ERROR_MARK
2395       || TREE_CODE (orig) == ERROR_MARK)
2396     return false;
2397 
2398   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2399     return true;
2400 
2401   switch (TREE_CODE (type))
2402     {
2403     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2404     case POINTER_TYPE: case REFERENCE_TYPE:
2405     case OFFSET_TYPE:
2406       return (INTEGRAL_TYPE_P (orig)
2407 	      || (POINTER_TYPE_P (orig)
2408 		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2409 	      || TREE_CODE (orig) == OFFSET_TYPE);
2410 
2411     case REAL_TYPE:
2412     case FIXED_POINT_TYPE:
2413     case VOID_TYPE:
2414       return TREE_CODE (type) == TREE_CODE (orig);
2415 
2416     case VECTOR_TYPE:
2417       return (VECTOR_TYPE_P (orig)
2418 	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
2419 			   TYPE_VECTOR_SUBPARTS (orig))
2420 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2421 
2422     default:
2423       return false;
2424     }
2425 }
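
/* Usage note (illustrative, not upstream): an int argument is
   NOP-convertible to long (both are integral), but not to double: the
   REAL_TYPE case only accepts an operand whose type has the same
   TREE_CODE, since an int-to-float conversion needs FLOAT_EXPR rather
   than NOP_EXPR.  */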
2426 
2427 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2428    simple conversions in preference to calling the front-end's convert.  */
2429 
2430 tree
2431 fold_convert_loc (location_t loc, tree type, tree arg)
2432 {
2433   tree orig = TREE_TYPE (arg);
2434   tree tem;
2435 
2436   if (type == orig)
2437     return arg;
2438 
2439   if (TREE_CODE (arg) == ERROR_MARK
2440       || TREE_CODE (type) == ERROR_MARK
2441       || TREE_CODE (orig) == ERROR_MARK)
2442     return error_mark_node;
2443 
2444   switch (TREE_CODE (type))
2445     {
2446     case POINTER_TYPE:
2447     case REFERENCE_TYPE:
2448       /* Handle conversions between pointers to different address spaces.  */
2449       if (POINTER_TYPE_P (orig)
2450 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2451 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2452 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2453       /* fall through */
2454 
2455     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2456     case OFFSET_TYPE:
2457       if (TREE_CODE (arg) == INTEGER_CST)
2458 	{
2459 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2460 	  if (tem != NULL_TREE)
2461 	    return tem;
2462 	}
2463       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2464 	  || TREE_CODE (orig) == OFFSET_TYPE)
2465 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2466       if (TREE_CODE (orig) == COMPLEX_TYPE)
2467 	return fold_convert_loc (loc, type,
2468 				 fold_build1_loc (loc, REALPART_EXPR,
2469 						  TREE_TYPE (orig), arg));
2470       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2471 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2472       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2473 
2474     case REAL_TYPE:
2475       if (TREE_CODE (arg) == INTEGER_CST)
2476 	{
2477 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2478 	  if (tem != NULL_TREE)
2479 	    return tem;
2480 	}
2481       else if (TREE_CODE (arg) == REAL_CST)
2482 	{
2483 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2484 	  if (tem != NULL_TREE)
2485 	    return tem;
2486 	}
2487       else if (TREE_CODE (arg) == FIXED_CST)
2488 	{
2489 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2490 	  if (tem != NULL_TREE)
2491 	    return tem;
2492 	}
2493 
2494       switch (TREE_CODE (orig))
2495 	{
2496 	case INTEGER_TYPE:
2497 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2498 	case POINTER_TYPE: case REFERENCE_TYPE:
2499 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2500 
2501 	case REAL_TYPE:
2502 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2503 
2504 	case FIXED_POINT_TYPE:
2505 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2506 
2507 	case COMPLEX_TYPE:
2508 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2509 	  return fold_convert_loc (loc, type, tem);
2510 
2511 	default:
2512 	  gcc_unreachable ();
2513 	}
2514 
2515     case FIXED_POINT_TYPE:
2516       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2517 	  || TREE_CODE (arg) == REAL_CST)
2518 	{
2519 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2520 	  if (tem != NULL_TREE)
2521 	    goto fold_convert_exit;
2522 	}
2523 
2524       switch (TREE_CODE (orig))
2525 	{
2526 	case FIXED_POINT_TYPE:
2527 	case INTEGER_TYPE:
2528 	case ENUMERAL_TYPE:
2529 	case BOOLEAN_TYPE:
2530 	case REAL_TYPE:
2531 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2532 
2533 	case COMPLEX_TYPE:
2534 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2535 	  return fold_convert_loc (loc, type, tem);
2536 
2537 	default:
2538 	  gcc_unreachable ();
2539 	}
2540 
2541     case COMPLEX_TYPE:
2542       switch (TREE_CODE (orig))
2543 	{
2544 	case INTEGER_TYPE:
2545 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2546 	case POINTER_TYPE: case REFERENCE_TYPE:
2547 	case REAL_TYPE:
2548 	case FIXED_POINT_TYPE:
2549 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2550 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2551 			      fold_convert_loc (loc, TREE_TYPE (type),
2552 					    integer_zero_node));
2553 	case COMPLEX_TYPE:
2554 	  {
2555 	    tree rpart, ipart;
2556 
2557 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2558 	      {
2559 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2560 				      TREE_OPERAND (arg, 0));
2561 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2562 				      TREE_OPERAND (arg, 1));
2563 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2564 	      }
2565 
2566 	    arg = save_expr (arg);
2567 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2568 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2569 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2570 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2571 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2572 	  }
2573 
2574 	default:
2575 	  gcc_unreachable ();
2576 	}
2577 
2578     case VECTOR_TYPE:
2579       if (integer_zerop (arg))
2580 	return build_zero_vector (type);
2581       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2582       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2583 		  || TREE_CODE (orig) == VECTOR_TYPE);
2584       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2585 
2586     case VOID_TYPE:
2587       tem = fold_ignored_result (arg);
2588       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2589 
2590     default:
2591       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2592 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2593       gcc_unreachable ();
2594     }
2595  fold_convert_exit:
2596   protected_set_expr_location_unshare (tem, loc);
2597   return tem;
2598 }
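
/* Usage sketch (illustrative, not upstream): fold_convert is the
   location-less wrapper, e.g.

     tree c = build_int_cst (integer_type_node, 3);
     tree d = fold_convert (double_type_node, c);

   For a constant operand the REAL_TYPE case tries fold_convert_const
   with FLOAT_EXPR first, so D should be the REAL_CST 3.0 (the
   conversion is exact) rather than a FLOAT_EXPR tree.  */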
2599 
2600 /* Return false if expr can be assumed not to be an lvalue, true
2601    otherwise.  */
2602 
2603 static bool
2604 maybe_lvalue_p (const_tree x)
2605 {
2606   /* We only need to wrap lvalue tree codes.  */
2607   switch (TREE_CODE (x))
2608   {
2609   case VAR_DECL:
2610   case PARM_DECL:
2611   case RESULT_DECL:
2612   case LABEL_DECL:
2613   case FUNCTION_DECL:
2614   case SSA_NAME:
2615 
2616   case COMPONENT_REF:
2617   case MEM_REF:
2618   case INDIRECT_REF:
2619   case ARRAY_REF:
2620   case ARRAY_RANGE_REF:
2621   case BIT_FIELD_REF:
2622   case OBJ_TYPE_REF:
2623 
2624   case REALPART_EXPR:
2625   case IMAGPART_EXPR:
2626   case PREINCREMENT_EXPR:
2627   case PREDECREMENT_EXPR:
2628   case SAVE_EXPR:
2629   case TRY_CATCH_EXPR:
2630   case WITH_CLEANUP_EXPR:
2631   case COMPOUND_EXPR:
2632   case MODIFY_EXPR:
2633   case TARGET_EXPR:
2634   case COND_EXPR:
2635   case BIND_EXPR:
2636   case VIEW_CONVERT_EXPR:
2637     break;
2638 
2639   default:
2640     /* Assume the worst for front-end tree codes.  */
2641     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2642       break;
2643     return false;
2644   }
2645 
2646   return true;
2647 }
2648 
2649 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2650 
2651 tree
2652 non_lvalue_loc (location_t loc, tree x)
2653 {
2654   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2655      us.  */
2656   if (in_gimple_form)
2657     return x;
2658 
2659   if (! maybe_lvalue_p (x))
2660     return x;
2661   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2662 }
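
/* Usage note (illustrative, not upstream): wrapping matters only for
   tree codes that might denote lvalues, so a VAR_DECL gets a
   NON_LVALUE_EXPR around it while something like a PLUS_EXPR, which
   maybe_lvalue_p rejects, is returned unchanged; in GIMPLE form the
   wrapper is meaningless and X is always returned as-is.  */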
2663 
2664 /* Given a tree comparison code, return the code that is the logical inverse.
2665    It is generally not safe to do this for floating-point comparisons, except
2666    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2667    ERROR_MARK in this case.  */
2668 
2669 enum tree_code
2670 invert_tree_comparison (enum tree_code code, bool honor_nans)
2671 {
2672   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2673       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2674     return ERROR_MARK;
2675 
2676   switch (code)
2677     {
2678     case EQ_EXPR:
2679       return NE_EXPR;
2680     case NE_EXPR:
2681       return EQ_EXPR;
2682     case GT_EXPR:
2683       return honor_nans ? UNLE_EXPR : LE_EXPR;
2684     case GE_EXPR:
2685       return honor_nans ? UNLT_EXPR : LT_EXPR;
2686     case LT_EXPR:
2687       return honor_nans ? UNGE_EXPR : GE_EXPR;
2688     case LE_EXPR:
2689       return honor_nans ? UNGT_EXPR : GT_EXPR;
2690     case LTGT_EXPR:
2691       return UNEQ_EXPR;
2692     case UNEQ_EXPR:
2693       return LTGT_EXPR;
2694     case UNGT_EXPR:
2695       return LE_EXPR;
2696     case UNGE_EXPR:
2697       return LT_EXPR;
2698     case UNLT_EXPR:
2699       return GE_EXPR;
2700     case UNLE_EXPR:
2701       return GT_EXPR;
2702     case ORDERED_EXPR:
2703       return UNORDERED_EXPR;
2704     case UNORDERED_EXPR:
2705       return ORDERED_EXPR;
2706     default:
2707       gcc_unreachable ();
2708     }
2709 }
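
/* Usage sketch (illustrative, not upstream):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   except that when both honor_nans and flag_trapping_math are set the
   initial check fires and ERROR_MARK is returned, since replacing the
   ordered comparison would drop a trap on NaN operands.  */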
2710 
2711 /* Similar, but return the comparison that results if the operands are
2712    swapped.  This is safe for floating-point.  */
2713 
2714 enum tree_code
2715 swap_tree_comparison (enum tree_code code)
2716 {
2717   switch (code)
2718     {
2719     case EQ_EXPR:
2720     case NE_EXPR:
2721     case ORDERED_EXPR:
2722     case UNORDERED_EXPR:
2723     case LTGT_EXPR:
2724     case UNEQ_EXPR:
2725       return code;
2726     case GT_EXPR:
2727       return LT_EXPR;
2728     case GE_EXPR:
2729       return LE_EXPR;
2730     case LT_EXPR:
2731       return GT_EXPR;
2732     case LE_EXPR:
2733       return GE_EXPR;
2734     case UNGT_EXPR:
2735       return UNLT_EXPR;
2736     case UNGE_EXPR:
2737       return UNLE_EXPR;
2738     case UNLT_EXPR:
2739       return UNGT_EXPR;
2740     case UNLE_EXPR:
2741       return UNGE_EXPR;
2742     default:
2743       gcc_unreachable ();
2744     }
2745 }
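
/* Usage note (illustrative, not upstream): swapping mirrors the
   operator around its operands, e.g. swap_tree_comparison (GT_EXPR)
   is LT_EXPR because x > y and y < x are the same test; EQ_EXPR,
   NE_EXPR, LTGT_EXPR, UNEQ_EXPR, ORDERED_EXPR and UNORDERED_EXPR are
   symmetric and map to themselves.  */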
2746 
2747 
2748 /* Convert a comparison tree code from an enum tree_code representation
2749    into a compcode bit-based encoding.  This function is the inverse of
2750    compcode_to_comparison.  */
2751 
2752 static enum comparison_code
2753 comparison_to_compcode (enum tree_code code)
2754 {
2755   switch (code)
2756     {
2757     case LT_EXPR:
2758       return COMPCODE_LT;
2759     case EQ_EXPR:
2760       return COMPCODE_EQ;
2761     case LE_EXPR:
2762       return COMPCODE_LE;
2763     case GT_EXPR:
2764       return COMPCODE_GT;
2765     case NE_EXPR:
2766       return COMPCODE_NE;
2767     case GE_EXPR:
2768       return COMPCODE_GE;
2769     case ORDERED_EXPR:
2770       return COMPCODE_ORD;
2771     case UNORDERED_EXPR:
2772       return COMPCODE_UNORD;
2773     case UNLT_EXPR:
2774       return COMPCODE_UNLT;
2775     case UNEQ_EXPR:
2776       return COMPCODE_UNEQ;
2777     case UNLE_EXPR:
2778       return COMPCODE_UNLE;
2779     case UNGT_EXPR:
2780       return COMPCODE_UNGT;
2781     case LTGT_EXPR:
2782       return COMPCODE_LTGT;
2783     case UNGE_EXPR:
2784       return COMPCODE_UNGE;
2785     default:
2786       gcc_unreachable ();
2787     }
2788 }
2789 
2790 /* Convert a compcode bit-based encoding of a comparison operator back
2791    to GCC's enum tree_code representation.  This function is the
2792    inverse of comparison_to_compcode.  */
2793 
2794 static enum tree_code
2795 compcode_to_comparison (enum comparison_code code)
2796 {
2797   switch (code)
2798     {
2799     case COMPCODE_LT:
2800       return LT_EXPR;
2801     case COMPCODE_EQ:
2802       return EQ_EXPR;
2803     case COMPCODE_LE:
2804       return LE_EXPR;
2805     case COMPCODE_GT:
2806       return GT_EXPR;
2807     case COMPCODE_NE:
2808       return NE_EXPR;
2809     case COMPCODE_GE:
2810       return GE_EXPR;
2811     case COMPCODE_ORD:
2812       return ORDERED_EXPR;
2813     case COMPCODE_UNORD:
2814       return UNORDERED_EXPR;
2815     case COMPCODE_UNLT:
2816       return UNLT_EXPR;
2817     case COMPCODE_UNEQ:
2818       return UNEQ_EXPR;
2819     case COMPCODE_UNLE:
2820       return UNLE_EXPR;
2821     case COMPCODE_UNGT:
2822       return UNGT_EXPR;
2823     case COMPCODE_LTGT:
2824       return LTGT_EXPR;
2825     case COMPCODE_UNGE:
2826       return UNGE_EXPR;
2827     default:
2828       gcc_unreachable ();
2829     }
2830 }
2831 
2832 /* Return true if COND1 tests the opposite condition of COND2.  */
2833 
2834 bool
2835 inverse_conditions_p (const_tree cond1, const_tree cond2)
2836 {
2837   return (COMPARISON_CLASS_P (cond1)
2838 	  && COMPARISON_CLASS_P (cond2)
2839 	  && (invert_tree_comparison
2840 	      (TREE_CODE (cond1),
2841 	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2842 	  && operand_equal_p (TREE_OPERAND (cond1, 0),
2843 			      TREE_OPERAND (cond2, 0), 0)
2844 	  && operand_equal_p (TREE_OPERAND (cond1, 1),
2845 			      TREE_OPERAND (cond2, 1), 0));
2846 }
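
/* Usage note (illustrative, not upstream): for float operands A and B
   with NaNs honored, A < B and A >= B are not inverses (both are false
   when either operand is NaN); the inverse of LT is then UNGE, which
   is what invert_tree_comparison reports, so this predicate correctly
   returns false for that pair.  */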
2847 
2848 /* Return a tree for the comparison which is the combination of
2849    doing the AND or OR (depending on CODE) of the two operations LCODE
2850    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2851    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2852    if this makes the transformation invalid.  */
2853 
2854 tree
2855 combine_comparisons (location_t loc,
2856 		     enum tree_code code, enum tree_code lcode,
2857 		     enum tree_code rcode, tree truth_type,
2858 		     tree ll_arg, tree lr_arg)
2859 {
2860   bool honor_nans = HONOR_NANS (ll_arg);
2861   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2862   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2863   int compcode;
2864 
2865   switch (code)
2866     {
2867     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2868       compcode = lcompcode & rcompcode;
2869       break;
2870 
2871     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2872       compcode = lcompcode | rcompcode;
2873       break;
2874 
2875     default:
2876       return NULL_TREE;
2877     }
2878 
2879   if (!honor_nans)
2880     {
2881       /* Eliminate unordered comparisons, as well as LTGT and ORD
2882 	 which are not used unless the mode has NaNs.  */
2883       compcode &= ~COMPCODE_UNORD;
2884       if (compcode == COMPCODE_LTGT)
2885 	compcode = COMPCODE_NE;
2886       else if (compcode == COMPCODE_ORD)
2887 	compcode = COMPCODE_TRUE;
2888     }
2889    else if (flag_trapping_math)
2890      {
2891 	/* Check that the original operation and the optimized ones will trap
2892 	   under the same condition.  */
2893 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2894 		     && (lcompcode != COMPCODE_EQ)
2895 		     && (lcompcode != COMPCODE_ORD);
2896 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2897 		     && (rcompcode != COMPCODE_EQ)
2898 		     && (rcompcode != COMPCODE_ORD);
2899 	bool trap = (compcode & COMPCODE_UNORD) == 0
2900 		    && (compcode != COMPCODE_EQ)
2901 		    && (compcode != COMPCODE_ORD);
2902 
2903         /* In a short-circuited boolean expression the LHS might be
2904 	   such that the RHS, if evaluated, will never trap.  For
2905 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2906 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2907 	   example, the expression above will never trap, hence
2908 	   optimizing it to x < y would be invalid).  */
2909         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2910             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2911           rtrap = false;
2912 
2913         /* If the comparison was short-circuited, and only the RHS
2914 	   trapped, we may now generate a spurious trap.  */
2915 	if (rtrap && !ltrap
2916 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2917 	  return NULL_TREE;
2918 
2919 	/* If we changed the conditions that cause a trap, we lose.  */
2920 	if ((ltrap || rtrap) != trap)
2921 	  return NULL_TREE;
2922       }
2923 
2924   if (compcode == COMPCODE_TRUE)
2925     return constant_boolean_node (true, truth_type);
2926   else if (compcode == COMPCODE_FALSE)
2927     return constant_boolean_node (false, truth_type);
2928   else
2929     {
2930       enum tree_code tcode;
2931 
2932       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2933       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2934     }
2935 }
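
/* Worked example (illustrative, not upstream), using the compcode bit
   encoding from fold-const.h where LT, EQ, GT and UNORD are separate
   bits: for (x < y) && (x == y), the AND of the LT and EQ bits is
   empty, i.e. COMPCODE_FALSE, so the call should fold to constant
   false; for (x < y) || (x == y) the union is LT|EQ == COMPCODE_LE
   and the result is built as x <= y.  */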
2936 
2937 /* Return nonzero if two operands (typically of the same tree node)
2938    are necessarily equal. FLAGS modifies behavior as follows:
2939 
2940    If OEP_ONLY_CONST is set, only return nonzero for constants.
2941    This function tests whether the operands are indistinguishable;
2942    it does not test whether they are equal using C's == operation.
2943    The distinction is important for IEEE floating point, because
2944    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2945    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2946 
2947    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2948    even though it may hold multiple values during a function.
2949    This is because a GCC tree node guarantees that nothing else is
2950    executed between the evaluation of its "operands" (which may often
2951    be evaluated in arbitrary order).  Hence if the operands themselves
2952    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2953    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2954    unset means assuming isochronic (or instantaneous) tree equivalence.
2955    Unless comparing arbitrary expression trees, such as from different
2956    statements, this flag can usually be left unset.
2957 
2958    If OEP_PURE_SAME is set, then pure functions with identical arguments
2959    are considered the same.  It is used when the caller has other ways
2960    to ensure that global memory is unchanged in between.
2961 
2962    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2963    not values of expressions.
2964 
2965    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2966    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2967 
2968    If OEP_BITWISE is set, then require the values to be bitwise identical
2969    rather than simply numerically equal.  Do not take advantage of things
2970    like math-related flags or undefined behavior; only return true for
2971    values that are provably bitwise identical in all circumstances.
2972 
2973    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2974    any operand with side effects.  This is unnecessarily conservative in the
2975    case we know that arg0 and arg1 are in disjoint code paths (such as in
2976    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2977    addresses with TREE_CONSTANT flag set so we know that &var == &var
2978    even if var is volatile.  */
2979 
2980 bool
2981 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2982 				  unsigned int flags)
2983 {
2984   bool r;
2985   if (verify_hash_value (arg0, arg1, flags, &r))
2986     return r;
2987 
2988   STRIP_ANY_LOCATION_WRAPPER (arg0);
2989   STRIP_ANY_LOCATION_WRAPPER (arg1);
2990 
2991   /* If either is ERROR_MARK, they aren't equal.  */
2992   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2993       || TREE_TYPE (arg0) == error_mark_node
2994       || TREE_TYPE (arg1) == error_mark_node)
2995     return false;
2996 
2997   /* Similarly, if either does not have a type (like a template id),
2998      they aren't equal.  */
2999   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3000     return false;
3001 
3002   /* Bitwise identity makes no sense if the values have different layouts.  */
3003   if ((flags & OEP_BITWISE)
3004       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3005     return false;
3006 
3007   /* We cannot consider pointers to different address space equal.  */
3008   if (POINTER_TYPE_P (TREE_TYPE (arg0))
3009       && POINTER_TYPE_P (TREE_TYPE (arg1))
3010       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3011 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3012     return false;
3013 
3014   /* Check equality of integer constants before bailing out due to
3015      precision differences.  */
3016   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3017     {
3018       /* Address of INTEGER_CST is not defined; check that we did not forget
3019 	 to drop the OEP_ADDRESS_OF flags.  */
3020       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3021       return tree_int_cst_equal (arg0, arg1);
3022     }
3023 
3024   if (!(flags & OEP_ADDRESS_OF))
3025     {
3026       /* If both types don't have the same signedness, then we can't consider
3027 	 them equal.  We must check this before the STRIP_NOPS calls
3028 	 because they may change the signedness of the arguments.  As pointers
3029 	 strictly don't have a signedness, require either two pointers or
3030 	 two non-pointers as well.  */
3031       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3032 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
3033 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
3034 	return false;
3035 
3036       /* If both types don't have the same precision, then it is not safe
3037 	 to strip NOPs.  */
3038       if (element_precision (TREE_TYPE (arg0))
3039 	  != element_precision (TREE_TYPE (arg1)))
3040 	return false;
3041 
3042       STRIP_NOPS (arg0);
3043       STRIP_NOPS (arg1);
3044     }
3045 #if 0
3046   /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
3047      sanity check once the issue is solved.  */
3048   else
3049     /* Addresses of conversions and SSA_NAMEs (and many other things)
3050        are not defined.  Check that we did not forget to drop the
3051        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
3052     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3053 			 && TREE_CODE (arg0) != SSA_NAME);
3054 #endif
3055 
3056   /* In case both args are comparisons but with different comparison
3057      code, try to swap the comparison operands of one arg to produce
3058      a match and compare that variant.  */
3059   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3060       && COMPARISON_CLASS_P (arg0)
3061       && COMPARISON_CLASS_P (arg1))
3062     {
3063       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3064 
3065       if (TREE_CODE (arg0) == swap_code)
3066 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3067 			        TREE_OPERAND (arg1, 1), flags)
3068 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3069 				   TREE_OPERAND (arg1, 0), flags);
3070     }
3071 
3072   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3073     {
3074       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3075       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3076 	;
3077       else if (flags & OEP_ADDRESS_OF)
3078 	{
3079 	  /* If we are interested in comparing addresses ignore
3080 	     MEM_REF wrappings of the base that can appear just for
3081 	     TBAA reasons.  */
3082 	  if (TREE_CODE (arg0) == MEM_REF
3083 	      && DECL_P (arg1)
3084 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3085 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3086 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3087 	    return true;
3088 	  else if (TREE_CODE (arg1) == MEM_REF
3089 		   && DECL_P (arg0)
3090 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3091 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3092 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3093 	    return true;
3094 	  return false;
3095 	}
3096       else
3097 	return false;
3098     }
3099 
3100   /* When not checking addresses, this is needed for conversions and for
3101      COMPONENT_REF.  Might as well play it safe and always test this.  */
3102   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3103       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3104       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3105 	  && !(flags & OEP_ADDRESS_OF)))
3106     return false;
3107 
3108   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3109      We don't care about side effects in that case because the SAVE_EXPR
3110      takes care of that for us. In all other cases, two expressions are
3111      equal if they have no side effects.  If we have two identical
3112      expressions with side effects that should be treated the same due
3113      to the only side effects being identical SAVE_EXPR's, that will
3114      be detected in the recursive calls below.
3115      If we are taking an invariant address of two identical objects
3116      they are necessarily equal as well.  */
3117   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3118       && (TREE_CODE (arg0) == SAVE_EXPR
3119 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3120 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3121     return true;
3122 
3123   /* Next handle constant cases, those for which we can return 1 even
3124      if ONLY_CONST is set.  */
3125   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3126     switch (TREE_CODE (arg0))
3127       {
3128       case INTEGER_CST:
3129 	return tree_int_cst_equal (arg0, arg1);
3130 
3131       case FIXED_CST:
3132 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3133 				       TREE_FIXED_CST (arg1));
3134 
3135       case REAL_CST:
3136 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3137 	  return true;
3138 
3139 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3140 	  {
3141 	    /* If we do not distinguish between signed and unsigned zero,
3142 	       consider them equal.  */
3143 	    if (real_zerop (arg0) && real_zerop (arg1))
3144 	      return true;
3145 	  }
3146 	return false;
3147 
3148       case VECTOR_CST:
3149 	{
3150 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3151 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3152 	    return false;
3153 
3154 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3155 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3156 	    return false;
3157 
3158 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3159 	  for (unsigned int i = 0; i < count; ++i)
3160 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3161 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3162 	      return false;
3163 	  return true;
3164 	}
3165 
3166       case COMPLEX_CST:
3167 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3168 				 flags)
3169 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3170 				    flags));
3171 
3172       case STRING_CST:
3173 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3174 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3175 			      TREE_STRING_POINTER (arg1),
3176 			      TREE_STRING_LENGTH (arg0)));
3177 
3178       case ADDR_EXPR:
3179 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3180 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3181 				flags | OEP_ADDRESS_OF
3182 				| OEP_MATCH_SIDE_EFFECTS);
3183       case CONSTRUCTOR:
3184 	/* In GIMPLE empty constructors are allowed in initializers of
3185 	   aggregates.  */
3186 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3187       default:
3188 	break;
3189       }
3190 
3191   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3192      two instances of undefined behavior will give identical results.  */
3193   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3194     return false;
3195 
3196 /* Define macros to test an operand from arg0 and arg1 for equality and a
3197    variant that allows null and views null as being different from any
3198    non-null value.  In the latter case, if either is null, they both
3199    must be; otherwise, do the normal comparison.  */
3200 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3201 				    TREE_OPERAND (arg1, N), flags)
3202 
3203 #define OP_SAME_WITH_NULL(N)				\
3204   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3205    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3206 
3207   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3208     {
3209     case tcc_unary:
3210       /* Two conversions are equal only if signedness and modes match.  */
3211       switch (TREE_CODE (arg0))
3212         {
3213 	CASE_CONVERT:
3214         case FIX_TRUNC_EXPR:
3215 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3216 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3217 	    return false;
3218 	  break;
3219 	default:
3220 	  break;
3221 	}
3222 
3223       return OP_SAME (0);
3224 
3225 
3226     case tcc_comparison:
3227     case tcc_binary:
3228       if (OP_SAME (0) && OP_SAME (1))
3229 	return true;
3230 
3231       /* For commutative ops, allow the other order.  */
3232       return (commutative_tree_code (TREE_CODE (arg0))
3233 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3234 				  TREE_OPERAND (arg1, 1), flags)
3235 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3236 				  TREE_OPERAND (arg1, 0), flags));
3237 
3238     case tcc_reference:
3239       /* If either of the pointer (or reference) expressions we are
3240 	 dereferencing contain a side effect, these cannot be equal,
3241 	 but their addresses can be.  */
3242       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3243 	  && (TREE_SIDE_EFFECTS (arg0)
3244 	      || TREE_SIDE_EFFECTS (arg1)))
3245 	return false;
3246 
3247       switch (TREE_CODE (arg0))
3248 	{
3249 	case INDIRECT_REF:
3250 	  if (!(flags & OEP_ADDRESS_OF))
3251 	    {
3252 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3253 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3254 		return false;
3255 	      /* Verify that the access types are compatible.  */
3256 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3257 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3258 		return false;
3259 	    }
3260 	  flags &= ~OEP_ADDRESS_OF;
3261 	  return OP_SAME (0);
3262 
3263 	case IMAGPART_EXPR:
3264 	  /* Require the same offset.  */
3265 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3266 				TYPE_SIZE (TREE_TYPE (arg1)),
3267 				flags & ~OEP_ADDRESS_OF))
3268 	    return false;
3269 
3270 	/* Fallthru.  */
3271 	case REALPART_EXPR:
3272 	case VIEW_CONVERT_EXPR:
3273 	  return OP_SAME (0);
3274 
3275 	case TARGET_MEM_REF:
3276 	case MEM_REF:
3277 	  if (!(flags & OEP_ADDRESS_OF))
3278 	    {
3279 	      /* Require equal access sizes.  */
3280 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3281 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3282 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3283 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3284 					   TYPE_SIZE (TREE_TYPE (arg1)),
3285 					   flags)))
3286 		return false;
3287 	      /* Verify that access happens in similar types.  */
3288 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3289 		return false;
3290 	      /* Verify that accesses are TBAA compatible.  */
3291 	      if (!alias_ptr_types_compatible_p
3292 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3293 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3294 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3295 		      != MR_DEPENDENCE_CLIQUE (arg1))
3296 		  || (MR_DEPENDENCE_BASE (arg0)
3297 		      != MR_DEPENDENCE_BASE (arg1)))
3298 		return false;
3299 	     /* Verify that alignment is compatible.  */
3300 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3301 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3302 		return false;
3303 	    }
3304 	  flags &= ~OEP_ADDRESS_OF;
3305 	  return (OP_SAME (0) && OP_SAME (1)
3306 		  /* TARGET_MEM_REFs require equal extra operands.  */
3307 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3308 		      || (OP_SAME_WITH_NULL (2)
3309 			  && OP_SAME_WITH_NULL (3)
3310 			  && OP_SAME_WITH_NULL (4))));
3311 
3312 	case ARRAY_REF:
3313 	case ARRAY_RANGE_REF:
3314 	  if (!OP_SAME (0))
3315 	    return false;
3316 	  flags &= ~OEP_ADDRESS_OF;
3317 	  /* Compare the array index by value first if it is constant, as the
3318 	     indexes may have different types but the same value here.  */
3319 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3320 				       TREE_OPERAND (arg1, 1))
3321 		   || OP_SAME (1))
3322 		  && OP_SAME_WITH_NULL (2)
3323 		  && OP_SAME_WITH_NULL (3)
3324 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3325 		     we have to account for the offset of the ref.  */
3326 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3327 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3328 		      || (operand_equal_p (array_ref_low_bound
3329 					     (CONST_CAST_TREE (arg0)),
3330 					   array_ref_low_bound
3331 					     (CONST_CAST_TREE (arg1)), flags)
3332 			  && operand_equal_p (array_ref_element_size
3333 					        (CONST_CAST_TREE (arg0)),
3334 					      array_ref_element_size
3335 					        (CONST_CAST_TREE (arg1)),
3336 					      flags))));
3337 
3338 	case COMPONENT_REF:
3339 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3340 	     may be NULL when we're called to compare MEM_EXPRs.  */
3341 	  if (!OP_SAME_WITH_NULL (0))
3342 	    return false;
3343 	  {
3344 	    bool compare_address = flags & OEP_ADDRESS_OF;
3345 
3346 	    /* Most of the time we only need to compare FIELD_DECLs for equality.
3347 	       However, when determining the address, look into actual offsets.
3348 	       These may match for unions and unshared record types.  */
3349 	    flags &= ~OEP_ADDRESS_OF;
3350 	    if (!OP_SAME (1))
3351 	      {
3352 		if (compare_address
3353 		    && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3354 		  {
3355 		    tree field0 = TREE_OPERAND (arg0, 1);
3356 		    tree field1 = TREE_OPERAND (arg1, 1);
3357 
3358 		    /* Non-FIELD_DECL operands can appear in C++ templates.  */
3359 		    if (TREE_CODE (field0) != FIELD_DECL
3360 			|| TREE_CODE (field1) != FIELD_DECL
3361 			|| !operand_equal_p (DECL_FIELD_OFFSET (field0),
3362 					     DECL_FIELD_OFFSET (field1), flags)
3363 			|| !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3364 					     DECL_FIELD_BIT_OFFSET (field1),
3365 					     flags))
3366 		      return false;
3367 		  }
3368 		else
3369 		  return false;
3370 	      }
3371 	  }
3372 	  return OP_SAME_WITH_NULL (2);
3373 
3374 	case BIT_FIELD_REF:
3375 	  if (!OP_SAME (0))
3376 	    return false;
3377 	  flags &= ~OEP_ADDRESS_OF;
3378 	  return OP_SAME (1) && OP_SAME (2);
3379 
3380 	default:
3381 	  return false;
3382 	}
3383 
3384     case tcc_expression:
3385       switch (TREE_CODE (arg0))
3386 	{
3387 	case ADDR_EXPR:
3388 	  /* Be sure we pass the right ADDRESS_OF flag.  */
3389 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3390 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3391 				  TREE_OPERAND (arg1, 0),
3392 				  flags | OEP_ADDRESS_OF);
3393 
3394 	case TRUTH_NOT_EXPR:
3395 	  return OP_SAME (0);
3396 
3397 	case TRUTH_ANDIF_EXPR:
3398 	case TRUTH_ORIF_EXPR:
3399 	  return OP_SAME (0) && OP_SAME (1);
3400 
3401 	case WIDEN_MULT_PLUS_EXPR:
3402 	case WIDEN_MULT_MINUS_EXPR:
3403 	  if (!OP_SAME (2))
3404 	    return false;
3405 	  /* The multiplication operands are commutative.  */
3406 	  /* FALLTHRU */
3407 
3408 	case TRUTH_AND_EXPR:
3409 	case TRUTH_OR_EXPR:
3410 	case TRUTH_XOR_EXPR:
3411 	  if (OP_SAME (0) && OP_SAME (1))
3412 	    return true;
3413 
3414 	  /* Otherwise take into account that this is a commutative operation.  */
3415 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3416 				   TREE_OPERAND (arg1, 1), flags)
3417 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3418 				      TREE_OPERAND (arg1, 0), flags));
3419 
3420 	case COND_EXPR:
3421 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3422 	    return false;
3423 	  flags &= ~OEP_ADDRESS_OF;
3424 	  return OP_SAME (0);
3425 
3426 	case BIT_INSERT_EXPR:
3427 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3428 	     of op1.  Need to check to make sure they are the same.  */
3429 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3430 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3431 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3432 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3433 	    return false;
3434 	  /* FALLTHRU */
3435 
3436 	case VEC_COND_EXPR:
3437 	case DOT_PROD_EXPR:
3438 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3439 
3440 	case MODIFY_EXPR:
3441 	case INIT_EXPR:
3442 	case COMPOUND_EXPR:
3443 	case PREDECREMENT_EXPR:
3444 	case PREINCREMENT_EXPR:
3445 	case POSTDECREMENT_EXPR:
3446 	case POSTINCREMENT_EXPR:
3447 	  if (flags & OEP_LEXICOGRAPHIC)
3448 	    return OP_SAME (0) && OP_SAME (1);
3449 	  return false;
3450 
3451 	case CLEANUP_POINT_EXPR:
3452 	case EXPR_STMT:
3453 	case SAVE_EXPR:
3454 	  if (flags & OEP_LEXICOGRAPHIC)
3455 	    return OP_SAME (0);
3456 	  return false;
3457 
3458 	case OBJ_TYPE_REF:
3459 	/* Virtual table reference.  */
3460 	if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3461 			      OBJ_TYPE_REF_EXPR (arg1), flags))
3462 	  return false;
3463 	flags &= ~OEP_ADDRESS_OF;
3464 	if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3465 	    != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3466 	  return false;
3467 	if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3468 			      OBJ_TYPE_REF_OBJECT (arg1), flags))
3469 	  return false;
3470 	if (virtual_method_call_p (arg0))
3471 	  {
3472 	    if (!virtual_method_call_p (arg1))
3473 	      return false;
3474 	    return types_same_for_odr (obj_type_ref_class (arg0),
3475 				       obj_type_ref_class (arg1));
3476 	  }
3477 	return false;
3478 
3479 	default:
3480 	  return false;
3481 	}
3482 
3483     case tcc_vl_exp:
3484       switch (TREE_CODE (arg0))
3485 	{
3486 	case CALL_EXPR:
3487 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3488 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3489 	    /* If the CALL_EXPRs are not both internal or both normal
3490 	       function calls, then they are not equal.  */
3491 	    return false;
3492 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3493 	    {
3494 	      /* If the CALL_EXPRs call different internal functions, then they
3495 		 are not equal.  */
3496 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3497 		return false;
3498 	    }
3499 	  else
3500 	    {
3501 	      /* If the CALL_EXPRs call different functions, then they are not
3502 		 equal.  */
3503 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3504 				     flags))
3505 		return false;
3506 	    }
3507 
3508 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3509 	  {
3510 	    unsigned int cef = call_expr_flags (arg0);
3511 	    if (flags & OEP_PURE_SAME)
3512 	      cef &= ECF_CONST | ECF_PURE;
3513 	    else
3514 	      cef &= ECF_CONST;
3515 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3516 	      return false;
3517 	  }
3518 
3519 	  /* Now see if all the arguments are the same.  */
3520 	  {
3521 	    const_call_expr_arg_iterator iter0, iter1;
3522 	    const_tree a0, a1;
3523 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3524 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3525 		 a0 && a1;
3526 		 a0 = next_const_call_expr_arg (&iter0),
3527 		   a1 = next_const_call_expr_arg (&iter1))
3528 	      if (! operand_equal_p (a0, a1, flags))
3529 		return false;
3530 
3531 	    /* If we get here and both argument lists are exhausted
3532 	       then the CALL_EXPRs are equal.  */
3533 	    return ! (a0 || a1);
3534 	  }
3535 	default:
3536 	  return false;
3537 	}
3538 
3539     case tcc_declaration:
3540       /* Consider __builtin_sqrt equal to sqrt.  */
3541       if (TREE_CODE (arg0) == FUNCTION_DECL)
3542 	return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3543 		&& DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3544 		&& (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3545 		    == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3546 
3547       if (DECL_P (arg0)
3548 	  && (flags & OEP_DECL_NAME)
3549 	  && (flags & OEP_LEXICOGRAPHIC))
3550 	{
3551 	  /* Consider decls with the same name equal.  The caller needs
3552 	     to make sure they refer to the same entity (such as a function
3553 	     formal parameter).  */
3554 	  tree a0name = DECL_NAME (arg0);
3555 	  tree a1name = DECL_NAME (arg1);
3556 	  const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3557 	  const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3558 	  return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3559 	}
3560       return false;
3561 
3562     case tcc_exceptional:
3563       if (TREE_CODE (arg0) == CONSTRUCTOR)
3564 	{
3565 	  if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3566 	    return false;
3567 
3568 	  /* In GIMPLE constructors are used only to build vectors from
3569 	     elements.  Individual elements in the constructor must be
3570 	     indexed in increasing order and form an initial sequence.
3571 
3572 	     We make no effort to compare constructors in GENERIC.
3573 	     (See sem_variable::equals in ipa-icf, which can do so for
3574 	      constants.)  */
3575 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3576 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3577 	    return false;
3578 
3579 	  /* Be sure that the constructed vectors have the same representation.
3580 	     We have only checked that element precision and modes match.
3581 	     Vectors may be BLKmode, so also check that the number of
3582 	     parts matches.  */
3583 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3584 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3585 	    return false;
3586 
3587 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3588 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3589 	  unsigned int len = vec_safe_length (v0);
3590 
3591 	  if (len != vec_safe_length (v1))
3592 	    return false;
3593 
3594 	  for (unsigned int i = 0; i < len; i++)
3595 	    {
3596 	      constructor_elt *c0 = &(*v0)[i];
3597 	      constructor_elt *c1 = &(*v1)[i];
3598 
3599 	      if (!operand_equal_p (c0->value, c1->value, flags)
3600 		  /* In GIMPLE the indexes can be either NULL or matching i.
3601 		     Double check this so we won't get false
3602 		     positives for GENERIC.  */
3603 		  || (c0->index
3604 		      && (TREE_CODE (c0->index) != INTEGER_CST
3605 			  || compare_tree_int (c0->index, i)))
3606 		  || (c1->index
3607 		      && (TREE_CODE (c1->index) != INTEGER_CST
3608 			  || compare_tree_int (c1->index, i))))
3609 		return false;
3610 	    }
3611 	  return true;
3612 	}
3613       else if (TREE_CODE (arg0) == STATEMENT_LIST
3614 	       && (flags & OEP_LEXICOGRAPHIC))
3615 	{
3616 	  /* Compare the STATEMENT_LISTs.  */
3617 	  tree_stmt_iterator tsi1, tsi2;
3618 	  tree body1 = CONST_CAST_TREE (arg0);
3619 	  tree body2 = CONST_CAST_TREE (arg1);
3620 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3621 	       tsi_next (&tsi1), tsi_next (&tsi2))
3622 	    {
3623 	      /* The lists don't have the same number of statements.  */
3624 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3625 		return false;
3626 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3627 		return true;
3628 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3629 				    flags & (OEP_LEXICOGRAPHIC
3630 					     | OEP_NO_HASH_CHECK)))
3631 		return false;
3632 	    }
3633 	}
3634       return false;
3635 
3636     case tcc_statement:
3637       switch (TREE_CODE (arg0))
3638 	{
3639 	case RETURN_EXPR:
3640 	  if (flags & OEP_LEXICOGRAPHIC)
3641 	    return OP_SAME_WITH_NULL (0);
3642 	  return false;
3643 	case DEBUG_BEGIN_STMT:
3644 	  if (flags & OEP_LEXICOGRAPHIC)
3645 	    return true;
3646 	  return false;
3647 	default:
3648 	  return false;
3649 	 }
3650 
3651     default:
3652       return false;
3653     }
3654 
3655 #undef OP_SAME
3656 #undef OP_SAME_WITH_NULL
3657 }
3658 
3659 /* Generate a hash value for an expression.  This can be used iteratively
3660    by passing a previous result as the HSTATE argument.  */
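/* A minimal usage sketch (hypothetical caller; external code would
   normally go through the inchash::add_expr wrapper defined below):

     inchash::hash hstate;
     inchash::add_expr (e1, hstate, 0);
     inchash::add_expr (e2, hstate, 0);
     hashval_t h = hstate.end ();

   The same HSTATE accumulates state across calls, so hashing E1 then E2
   generally differs from hashing E2 then E1.  */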
3661 
3662 void
3663 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3664 			       unsigned int flags)
3665 {
3666   int i;
3667   enum tree_code code;
3668   enum tree_code_class tclass;
3669 
3670   if (t == NULL_TREE || t == error_mark_node)
3671     {
3672       hstate.merge_hash (0);
3673       return;
3674     }
3675 
3676   STRIP_ANY_LOCATION_WRAPPER (t);
3677 
3678   if (!(flags & OEP_ADDRESS_OF))
3679     STRIP_NOPS (t);
3680 
3681   code = TREE_CODE (t);
3682 
3683   switch (code)
3684     {
3685     /* Alas, constants aren't shared, so we can't rely on pointer
3686        identity.  */
3687     case VOID_CST:
3688       hstate.merge_hash (0);
3689       return;
3690     case INTEGER_CST:
3691       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3692       for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3693 	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3694       return;
3695     case REAL_CST:
3696       {
3697 	unsigned int val2;
3698 	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3699 	  val2 = rvc_zero;
3700 	else
3701 	  val2 = real_hash (TREE_REAL_CST_PTR (t));
3702 	hstate.merge_hash (val2);
3703 	return;
3704       }
3705     case FIXED_CST:
3706       {
3707 	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3708 	hstate.merge_hash (val2);
3709 	return;
3710       }
3711     case STRING_CST:
3712       hstate.add ((const void *) TREE_STRING_POINTER (t),
3713 		  TREE_STRING_LENGTH (t));
3714       return;
3715     case COMPLEX_CST:
3716       hash_operand (TREE_REALPART (t), hstate, flags);
3717       hash_operand (TREE_IMAGPART (t), hstate, flags);
3718       return;
3719     case VECTOR_CST:
3720       {
3721 	hstate.add_int (VECTOR_CST_NPATTERNS (t));
3722 	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3723 	unsigned int count = vector_cst_encoded_nelts (t);
3724 	for (unsigned int i = 0; i < count; ++i)
3725 	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3726 	return;
3727       }
3728     case SSA_NAME:
3729       /* We can just compare by pointer.  */
3730       hstate.add_hwi (SSA_NAME_VERSION (t));
3731       return;
3732     case PLACEHOLDER_EXPR:
3733       /* The node itself doesn't matter.  */
3734       return;
3735     case BLOCK:
3736     case OMP_CLAUSE:
3737       /* Ignore.  */
3738       return;
3739     case TREE_LIST:
3740       /* A list of expressions, for a CALL_EXPR or as the elements of a
3741 	 VECTOR_CST.  */
3742       for (; t; t = TREE_CHAIN (t))
3743 	hash_operand (TREE_VALUE (t), hstate, flags);
3744       return;
3745     case CONSTRUCTOR:
3746       {
3747 	unsigned HOST_WIDE_INT idx;
3748 	tree field, value;
3749 	flags &= ~OEP_ADDRESS_OF;
3750 	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3751 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3752 	  {
3753 	    /* In GIMPLE the indexes can be either NULL or matching i.  */
3754 	    if (field == NULL_TREE)
3755 	      field = bitsize_int (idx);
3756 	    hash_operand (field, hstate, flags);
3757 	    hash_operand (value, hstate, flags);
3758 	  }
3759 	return;
3760       }
3761     case STATEMENT_LIST:
3762       {
3763 	tree_stmt_iterator i;
3764 	for (i = tsi_start (CONST_CAST_TREE (t));
3765 	     !tsi_end_p (i); tsi_next (&i))
3766 	  hash_operand (tsi_stmt (i), hstate, flags);
3767 	return;
3768       }
3769     case TREE_VEC:
3770       for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3771 	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3772       return;
3773     case IDENTIFIER_NODE:
3774       hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3775       return;
3776     case FUNCTION_DECL:
3777       /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3778 	 Otherwise nodes that compare equal according to operand_equal_p might
3779 	 get different hash codes.  However, don't do this for machine specific
3780 	 or front end builtins, since the function code is overloaded in those
3781 	 cases.  */
3782       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3783 	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3784 	{
3785 	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3786 	  code = TREE_CODE (t);
3787 	}
3788       /* FALL THROUGH */
3789     default:
3790       if (POLY_INT_CST_P (t))
3791 	{
3792 	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3793 	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3794 	  return;
3795 	}
3796       tclass = TREE_CODE_CLASS (code);
3797 
3798       if (tclass == tcc_declaration)
3799 	{
3800 	  /* DECLs have a unique ID.  */
3801 	  hstate.add_hwi (DECL_UID (t));
3802 	}
3803       else if (tclass == tcc_comparison && !commutative_tree_code (code))
3804 	{
3805 	  /* For comparisons that can be swapped, use the lower
3806 	     tree code.  */
3807 	  enum tree_code ccode = swap_tree_comparison (code);
3808 	  if (code < ccode)
3809 	    ccode = code;
3810 	  hstate.add_object (ccode);
3811 	  hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3812 	  hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3813 	}
3814       else if (CONVERT_EXPR_CODE_P (code))
3815 	{
3816 	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
3817 	     operand_equal_p.  */
3818 	  enum tree_code ccode = NOP_EXPR;
3819 	  hstate.add_object (ccode);
3820 
3821 	  /* Don't hash the type, that can lead to having nodes which
3822 	     compare equal according to operand_equal_p, but which
3823 	     have different hash codes.  Make sure to include signedness
3824 	     in the hash computation.  */
3825 	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3826 	  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3827 	}
3828       /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
3829       else if (code == MEM_REF
3830 	       && (flags & OEP_ADDRESS_OF) != 0
3831 	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3832 	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3833 	       && integer_zerop (TREE_OPERAND (t, 1)))
3834 	hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3835 		      hstate, flags);
3836       /* Don't ICE on FE specific trees, or their arguments etc.
3837 	 during operand_equal_p hash verification.  */
3838       else if (!IS_EXPR_CODE_CLASS (tclass))
3839 	gcc_assert (flags & OEP_HASH_CHECK);
3840       else
3841 	{
3842 	  unsigned int sflags = flags;
3843 
3844 	  hstate.add_object (code);
3845 
3846 	  switch (code)
3847 	    {
3848 	    case ADDR_EXPR:
3849 	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3850 	      flags |= OEP_ADDRESS_OF;
3851 	      sflags = flags;
3852 	      break;
3853 
3854 	    case INDIRECT_REF:
3855 	    case MEM_REF:
3856 	    case TARGET_MEM_REF:
3857 	      flags &= ~OEP_ADDRESS_OF;
3858 	      sflags = flags;
3859 	      break;
3860 
3861 	    case COMPONENT_REF:
3862 	      if (sflags & OEP_ADDRESS_OF)
3863 		{
3864 		  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3865 		  hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3866 				hstate, flags & ~OEP_ADDRESS_OF);
3867 		  hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3868 				hstate, flags & ~OEP_ADDRESS_OF);
3869 		  return;
3870 		}
3871 	      break;
3872 	    case ARRAY_REF:
3873 	    case ARRAY_RANGE_REF:
3874 	    case BIT_FIELD_REF:
3875 	      sflags &= ~OEP_ADDRESS_OF;
3876 	      break;
3877 
3878 	    case COND_EXPR:
3879 	      flags &= ~OEP_ADDRESS_OF;
3880 	      break;
3881 
3882 	    case WIDEN_MULT_PLUS_EXPR:
3883 	    case WIDEN_MULT_MINUS_EXPR:
3884 	      {
3885 		/* The multiplication operands are commutative.  */
3886 		inchash::hash one, two;
3887 		hash_operand (TREE_OPERAND (t, 0), one, flags);
3888 		hash_operand (TREE_OPERAND (t, 1), two, flags);
3889 		hstate.add_commutative (one, two);
3890 		hash_operand (TREE_OPERAND (t, 2), two, flags);
3891 		return;
3892 	      }
3893 
3894 	    case CALL_EXPR:
3895 	      if (CALL_EXPR_FN (t) == NULL_TREE)
3896 		hstate.add_int (CALL_EXPR_IFN (t));
3897 	      break;
3898 
3899 	    case TARGET_EXPR:
3900 	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3901 		 Usually different TARGET_EXPRs should just use
3902 		 different temporaries in their slots.  */
3903 	      hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3904 	      return;
3905 
3906 	    case OBJ_TYPE_REF:
3907 	    /* Virtual table reference.  */
3908 	      inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3909 	      flags &= ~OEP_ADDRESS_OF;
3910 	      inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3911 	      inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3912 	      if (!virtual_method_call_p (t))
3913 		return;
3914 	      if (tree c = obj_type_ref_class (t))
3915 		{
3916 		  c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3917 		  /* We compute mangled names only when free_lang_data is run.
3918 		     In that case we can hash precisely.  */
3919 		  if (TREE_CODE (c) == TYPE_DECL
3920 		      && DECL_ASSEMBLER_NAME_SET_P (c))
3921 		    hstate.add_object
3922 			   (IDENTIFIER_HASH_VALUE
3923 				   (DECL_ASSEMBLER_NAME (c)));
3924 		}
3925 	      return;
3926 	    default:
3927 	      break;
3928 	    }
3929 
3930 	  /* Don't hash the type, that can lead to having nodes which
3931 	     compare equal according to operand_equal_p, but which
3932 	     have different hash codes.  */
3933 	  if (code == NON_LVALUE_EXPR)
3934 	    {
3935 	      /* Make sure to include signedness in the hash computation.  */
3936 	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3937 	      hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3938 	    }
3939 
3940 	  else if (commutative_tree_code (code))
3941 	    {
3942 	      /* It's a commutative expression.  We want to hash it the same
3943 		 however it appears.  We do this by first hashing both operands
3944 		 and then rehashing based on the order of their independent
3945 		 hashes.  */
3946 	      inchash::hash one, two;
3947 	      hash_operand (TREE_OPERAND (t, 0), one, flags);
3948 	      hash_operand (TREE_OPERAND (t, 1), two, flags);
3949 	      hstate.add_commutative (one, two);
3950 	    }
3951 	  else
3952 	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3953 	      hash_operand (TREE_OPERAND (t, i), hstate,
3954 			    i == 0 ? flags : sflags);
3955 	}
3956       return;
3957     }
3958 }
3959 
3960 bool
3961 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3962 				    unsigned int flags, bool *ret)
3963 {
3964   /* When checking and unless comparing DECL names, verify that if
3965      the outermost operand_equal_p call returns non-zero then ARG0
3966      and ARG1 have the same hash value.  */
3967   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3968     {
3969       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3970 	{
3971 	  if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3972 	    {
3973 	      inchash::hash hstate0 (0), hstate1 (0);
3974 	      hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3975 	      hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3976 	      hashval_t h0 = hstate0.end ();
3977 	      hashval_t h1 = hstate1.end ();
3978 	      gcc_assert (h0 == h1);
3979 	    }
3980 	  *ret = true;
3981 	}
3982       else
3983 	*ret = false;
3984 
3985       return true;
3986     }
3987 
3988   return false;
3989 }
3990 
3991 
3992 static operand_compare default_compare_instance;
3993 
3994 /* Convenience wrapper around the operand_compare class, since usually we
3995    do not need to play with the valueizer.  */
3996 
3997 bool
3998 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3999 {
4000   return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4001 }
4002 
4003 namespace inchash
4004 {
4005 
4006 /* Generate a hash value for an expression.  This can be used iteratively
4007    by passing a previous result as the HSTATE argument.
4008 
4009    This function is intended to produce the same hash for expressions which
4010    would compare equal using operand_equal_p.  */
4011 void
4012 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4013 {
4014   default_compare_instance.hash_operand (t, hstate, flags);
4015 }
4016 
4017 }
4018 
4019 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4020    with a different signedness or a narrower precision.  */
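/* Illustrative examples, assuming 'int i': 'i' and '(unsigned) i'
   compare equal here because the conversion changes only signedness,
   not the mode, and 'i' (as ARG0) matches '(long) i' (as ARG1) via the
   single discarded widening conversion.  */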
4021 
4022 static bool
4023 operand_equal_for_comparison_p (tree arg0, tree arg1)
4024 {
4025   if (operand_equal_p (arg0, arg1, 0))
4026     return true;
4027 
4028   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4029       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4030     return false;
4031 
4032   /* Discard any conversions that don't change the modes of ARG0 and ARG1
4033      and see if the inner values are the same.  This removes any
4034      signedness comparison, which doesn't matter here.  */
4035   tree op0 = arg0;
4036   tree op1 = arg1;
4037   STRIP_NOPS (op0);
4038   STRIP_NOPS (op1);
4039   if (operand_equal_p (op0, op1, 0))
4040     return true;
4041 
4042   /* Discard a single widening conversion from ARG1 and see if the inner
4043      value is the same as ARG0.  */
4044   if (CONVERT_EXPR_P (arg1)
4045       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4046       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4047          < TYPE_PRECISION (TREE_TYPE (arg1))
4048       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4049     return true;
4050 
4051   return false;
4052 }
4053 
4054 /* See if ARG is an expression that is either a comparison or is performing
4055    arithmetic on comparisons.  The comparisons must only be comparing
4056    two different values, which will be stored in *CVAL1 and *CVAL2; if
4057    they are nonzero it means that some operands have already been found.
4058    No variables may be used anywhere else in the expression except in the
4059    comparisons.
4060 
4061    If this is true, return true.  Otherwise, return false.  */
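/* For example (illustrative): for ARG = 'a < b || a == b' this returns
   true with *CVAL1 = a and *CVAL2 = b, since only A and B are compared;
   ARG = 'a < b || a == c' fails because three distinct values occur.  */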
4062 
4063 static bool
4064 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4065 {
4066   enum tree_code code = TREE_CODE (arg);
4067   enum tree_code_class tclass = TREE_CODE_CLASS (code);
4068 
4069   /* We can handle some of the tcc_expression cases here.  */
4070   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4071     tclass = tcc_unary;
4072   else if (tclass == tcc_expression
4073 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4074 	       || code == COMPOUND_EXPR))
4075     tclass = tcc_binary;
4076 
4077   switch (tclass)
4078     {
4079     case tcc_unary:
4080       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4081 
4082     case tcc_binary:
4083       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4084 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4085 
4086     case tcc_constant:
4087       return true;
4088 
4089     case tcc_expression:
4090       if (code == COND_EXPR)
4091 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4092 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4093 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4094       return false;
4095 
4096     case tcc_comparison:
4097       /* First see if we can handle the first operand, then the second.  For
4098 	 the second operand, we know *CVAL1 can't be zero.  It must be that
4099 	 one side of the comparison is each of the values; test for the
4100 	 case where this isn't true by failing if the two operands
4101 	 are the same.  */
4102 
4103       if (operand_equal_p (TREE_OPERAND (arg, 0),
4104 			   TREE_OPERAND (arg, 1), 0))
4105 	return false;
4106 
4107       if (*cval1 == 0)
4108 	*cval1 = TREE_OPERAND (arg, 0);
4109       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4110 	;
4111       else if (*cval2 == 0)
4112 	*cval2 = TREE_OPERAND (arg, 0);
4113       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4114 	;
4115       else
4116 	return false;
4117 
4118       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4119 	;
4120       else if (*cval2 == 0)
4121 	*cval2 = TREE_OPERAND (arg, 1);
4122       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4123 	;
4124       else
4125 	return false;
4126 
4127       return true;
4128 
4129     default:
4130       return false;
4131     }
4132 }
4133 
4134 /* ARG is a tree that is known to contain just arithmetic operations and
4135    comparisons.  Evaluate the operations in the tree substituting NEW0 for
4136    any occurrence of OLD0 as an operand of a comparison and likewise for
4137    NEW1 and OLD1.  */
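/* Illustrative sketch with hypothetical trees: for ARG = 'a < b || a == b',
   OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y, the result is the folded
   form of 'x < y || x == y'.  */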
4138 
4139 static tree
4140 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4141 	    tree old1, tree new1)
4142 {
4143   tree type = TREE_TYPE (arg);
4144   enum tree_code code = TREE_CODE (arg);
4145   enum tree_code_class tclass = TREE_CODE_CLASS (code);
4146 
4147   /* We can handle some of the tcc_expression cases here.  */
4148   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4149     tclass = tcc_unary;
4150   else if (tclass == tcc_expression
4151 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4152     tclass = tcc_binary;
4153 
4154   switch (tclass)
4155     {
4156     case tcc_unary:
4157       return fold_build1_loc (loc, code, type,
4158 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4159 				      old0, new0, old1, new1));
4160 
4161     case tcc_binary:
4162       return fold_build2_loc (loc, code, type,
4163 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4164 				      old0, new0, old1, new1),
4165 			  eval_subst (loc, TREE_OPERAND (arg, 1),
4166 				      old0, new0, old1, new1));
4167 
4168     case tcc_expression:
4169       switch (code)
4170 	{
4171 	case SAVE_EXPR:
4172 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4173 			     old1, new1);
4174 
4175 	case COMPOUND_EXPR:
4176 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4177 			     old1, new1);
4178 
4179 	case COND_EXPR:
4180 	  return fold_build3_loc (loc, code, type,
4181 			      eval_subst (loc, TREE_OPERAND (arg, 0),
4182 					  old0, new0, old1, new1),
4183 			      eval_subst (loc, TREE_OPERAND (arg, 1),
4184 					  old0, new0, old1, new1),
4185 			      eval_subst (loc, TREE_OPERAND (arg, 2),
4186 					  old0, new0, old1, new1));
4187 	default:
4188 	  break;
4189 	}
4190       /* Fall through - ???  */
4191 
4192     case tcc_comparison:
4193       {
4194 	tree arg0 = TREE_OPERAND (arg, 0);
4195 	tree arg1 = TREE_OPERAND (arg, 1);
4196 
4197 	/* We need to check both for exact equality and tree equality.  The
4198 	   former will be true if the operand has a side-effect.  In that
4199 	   case, we know the operand occurred exactly once.  */
4200 
4201 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4202 	  arg0 = new0;
4203 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4204 	  arg0 = new1;
4205 
4206 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4207 	  arg1 = new0;
4208 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4209 	  arg1 = new1;
4210 
4211 	return fold_build2_loc (loc, code, type, arg0, arg1);
4212       }
4213 
4214     default:
4215       return arg;
4216     }
4217 }
4218 
4219 /* Return a tree for the case when the result of an expression is RESULT
4220    converted to TYPE and OMITTED was previously an operand of the expression
4221    but is now not needed (e.g., we folded OMITTED * 0).
4222 
4223    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
4224    the conversion of RESULT to TYPE.  */
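/* For instance (illustrative): when 'f () * 0' is folded, RESULT is 0
   and OMITTED is the call 'f ()'.  Since the call has side effects, the
   result is the COMPOUND_EXPR '(f (), 0)' rather than plain 0, so the
   call is still evaluated.  */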
4225 
4226 tree
4227 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4228 {
4229   tree t = fold_convert_loc (loc, type, result);
4230 
4231   /* If the resulting operand is an empty statement, just return the omitted
4232      statement cast to void.  */
4233   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4234     return build1_loc (loc, NOP_EXPR, void_type_node,
4235 		       fold_ignored_result (omitted));
4236 
4237   if (TREE_SIDE_EFFECTS (omitted))
4238     return build2_loc (loc, COMPOUND_EXPR, type,
4239 		       fold_ignored_result (omitted), t);
4240 
4241   return non_lvalue_loc (loc, t);
4242 }
4243 
4244 /* Return a tree for the case when the result of an expression is RESULT
4245    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4246    of the expression but are now not needed.
4247 
4248    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4249    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4250    evaluated before OMITTED2.  Otherwise, if neither has side effects,
4251    just do the conversion of RESULT to TYPE.  */
4252 
4253 tree
4254 omit_two_operands_loc (location_t loc, tree type, tree result,
4255 		       tree omitted1, tree omitted2)
4256 {
4257   tree t = fold_convert_loc (loc, type, result);
4258 
4259   if (TREE_SIDE_EFFECTS (omitted2))
4260     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4261   if (TREE_SIDE_EFFECTS (omitted1))
4262     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4263 
4264   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4265 }
4266 
4267 
4268 /* Return a simplified tree node for the truth-negation of ARG.  This
4269    never alters ARG itself.  We assume that ARG is an operation that
4270    returns a truth value (0 or 1).
4271 
4272    FIXME: one would think we would fold the result, but it causes
4273    problems with the dominator optimizer.  */
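/* Illustrative examples: 'a < b' is inverted to 'a >= b' (unless
   trapping math forbids it for floating point), and by De Morgan
   'a && b' becomes '!a || !b'.  */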
4274 
4275 static tree
4276 fold_truth_not_expr (location_t loc, tree arg)
4277 {
4278   tree type = TREE_TYPE (arg);
4279   enum tree_code code = TREE_CODE (arg);
4280   location_t loc1, loc2;
4281 
4282   /* If this is a comparison, we can simply invert it, except for
4283      floating-point non-equality comparisons, in which case we just
4284      enclose a TRUTH_NOT_EXPR around what we have.  */
4285 
4286   if (TREE_CODE_CLASS (code) == tcc_comparison)
4287     {
4288       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4289       if (FLOAT_TYPE_P (op_type)
4290 	  && flag_trapping_math
4291 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
4292 	  && code != NE_EXPR && code != EQ_EXPR)
4293 	return NULL_TREE;
4294 
4295       code = invert_tree_comparison (code, HONOR_NANS (op_type));
4296       if (code == ERROR_MARK)
4297 	return NULL_TREE;
4298 
4299       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4300 			     TREE_OPERAND (arg, 1));
4301       copy_warning (ret, arg);
4302       return ret;
4303     }
4304 
4305   switch (code)
4306     {
4307     case INTEGER_CST:
4308       return constant_boolean_node (integer_zerop (arg), type);
4309 
4310     case TRUTH_AND_EXPR:
4311       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4312       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4313       return build2_loc (loc, TRUTH_OR_EXPR, type,
4314 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4315 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4316 
4317     case TRUTH_OR_EXPR:
4318       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4319       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4320       return build2_loc (loc, TRUTH_AND_EXPR, type,
4321 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4322 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4323 
4324     case TRUTH_XOR_EXPR:
4325       /* Here we can invert either operand.  We invert the first operand
4326 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
4327 	 result is the XOR of the first operand with the inside of the
4328 	 negation of the second operand.  */
4329 
4330       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4331 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4332 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4333       else
4334 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
4335 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4336 			   TREE_OPERAND (arg, 1));
4337 
4338     case TRUTH_ANDIF_EXPR:
4339       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4340       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4341       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4342 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4343 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4344 
4345     case TRUTH_ORIF_EXPR:
4346       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4347       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4348       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4349 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4350 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4351 
4352     case TRUTH_NOT_EXPR:
4353       return TREE_OPERAND (arg, 0);
4354 
4355     case COND_EXPR:
4356       {
4357 	tree arg1 = TREE_OPERAND (arg, 1);
4358 	tree arg2 = TREE_OPERAND (arg, 2);
4359 
4360 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4361 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4362 
4363 	/* A COND_EXPR may have a throw as one operand, which
4364 	   then has void type.  Just leave void operands
4365 	   as they are.  */
4366 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4367 			   VOID_TYPE_P (TREE_TYPE (arg1))
4368 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
4369 			   VOID_TYPE_P (TREE_TYPE (arg2))
4370 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
4371       }
4372 
4373     case COMPOUND_EXPR:
4374       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4375       return build2_loc (loc, COMPOUND_EXPR, type,
4376 			 TREE_OPERAND (arg, 0),
4377 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4378 
4379     case NON_LVALUE_EXPR:
4380       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4381       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4382 
4383     CASE_CONVERT:
4384       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4385 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4386 
4387       /* fall through */
4388 
4389     case FLOAT_EXPR:
4390       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391       return build1_loc (loc, TREE_CODE (arg), type,
4392 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4393 
4394     case BIT_AND_EXPR:
4395       if (!integer_onep (TREE_OPERAND (arg, 1)))
4396 	return NULL_TREE;
4397       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4398 
4399     case SAVE_EXPR:
4400       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4401 
4402     case CLEANUP_POINT_EXPR:
4403       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4404       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4405 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4406 
4407     default:
4408       return NULL_TREE;
4409     }
4410 }
4411 
4412 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
4413    assume that ARG is an operation that returns a truth value (0 or 1
4414    for scalars, 0 or -1 for vectors).  Return the folded expression if
4415    folding is successful.  Otherwise, return NULL_TREE.  */
4416 
4417 static tree
4418 fold_invert_truthvalue (location_t loc, tree arg)
4419 {
4420   tree type = TREE_TYPE (arg);
4421   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4422 			      ? BIT_NOT_EXPR
4423 			      : TRUTH_NOT_EXPR,
4424 			 type, arg);
4425 }
4426 
4427 /* Return a simplified tree node for the truth-negation of ARG.  This
4428    never alters ARG itself.  We assume that ARG is an operation that
4429    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
4430 
4431 tree
4432 invert_truthvalue_loc (location_t loc, tree arg)
4433 {
4434   if (TREE_CODE (arg) == ERROR_MARK)
4435     return arg;
4436 
4437   tree type = TREE_TYPE (arg);
4438   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4439 			       ? BIT_NOT_EXPR
4440 			       : TRUTH_NOT_EXPR,
4441 			  type, arg);
4442 }
4443 
4444 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4445    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
4446    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
4447    is the original memory reference used to preserve the alias set of
4448    the access.  */
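/* Illustrative sketch: a call with BITSIZE = 8 and BITPOS = 16 builds
   roughly 'BIT_FIELD_REF <inner, 8, 16>' converted to TYPE, i.e. an
   8-bit slice of INNER starting at bit 16.  */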
4449 
4450 static tree
4451 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4452 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
4453 		    int unsignedp, int reversep)
4454 {
4455   tree result, bftype;
4456 
4457   /* Attempt not to lose the access path if possible.  */
4458   if (TREE_CODE (orig_inner) == COMPONENT_REF)
4459     {
4460       tree ninner = TREE_OPERAND (orig_inner, 0);
4461       machine_mode nmode;
4462       poly_int64 nbitsize, nbitpos;
4463       tree noffset;
4464       int nunsignedp, nreversep, nvolatilep = 0;
4465       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4466 				       &noffset, &nmode, &nunsignedp,
4467 				       &nreversep, &nvolatilep);
4468       if (base == inner
4469 	  && noffset == NULL_TREE
4470 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4471 	  && !reversep
4472 	  && !nreversep
4473 	  && !nvolatilep)
4474 	{
4475 	  inner = ninner;
4476 	  bitpos -= nbitpos;
4477 	}
4478     }
4479 
4480   alias_set_type iset = get_alias_set (orig_inner);
4481   if (iset == 0 && get_alias_set (inner) != iset)
4482     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4483 			 build_fold_addr_expr (inner),
4484 			 build_int_cst (ptr_type_node, 0));
4485 
4486   if (known_eq (bitpos, 0) && !reversep)
4487     {
4488       tree size = TYPE_SIZE (TREE_TYPE (inner));
4489       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4490 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
4491 	  && tree_fits_shwi_p (size)
4492 	  && tree_to_shwi (size) == bitsize)
4493 	return fold_convert_loc (loc, type, inner);
4494     }
4495 
4496   bftype = type;
4497   if (TYPE_PRECISION (bftype) != bitsize
4498       || TYPE_UNSIGNED (bftype) == !unsignedp)
4499     bftype = build_nonstandard_integer_type (bitsize, 0);
4500 
4501   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4502 		       bitsize_int (bitsize), bitsize_int (bitpos));
4503   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4504 
4505   if (bftype != type)
4506     result = fold_convert_loc (loc, type, result);
4507 
4508   return result;
4509 }
4510 
4511 /* Optimize a bit-field compare.
4512 
4513    There are two cases:  First is a compare against a constant and the
4514    second is a comparison of two items where the fields are at the same
4515    bit position relative to the start of a chunk (byte, halfword, word)
4516    large enough to contain it.  In these cases we can avoid the shift
4517    implicit in bitfield extractions.
4518 
4519    For constants, we emit a compare of the shifted constant with the
4520    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4521    compared.  For two fields at the same position, we do the ANDs with the
4522    similar mask and compare the result of the ANDs.
4523 
4524    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4525    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4526    are the left and right operands of the comparison, respectively.
4527 
4528    If the optimization described above can be done, we return the resulting
4529    tree.  Otherwise we return zero.  */
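/* Illustrative sketch (hypothetical source):

     struct s { unsigned b : 3; } x;
     ... x.b == 3 ...

   can become roughly '(w & MASK) == (3 << POS)', where W is a mode-sized
   word containing the bit-field, MASK selects the field and POS is its
   bit position, avoiding the shift implicit in the extraction.  */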
4530 
4531 static tree
4532 optimize_bit_field_compare (location_t loc, enum tree_code code,
4533 			    tree compare_type, tree lhs, tree rhs)
4534 {
4535   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4536   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4537   tree type = TREE_TYPE (lhs);
4538   tree unsigned_type;
4539   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4540   machine_mode lmode, rmode;
4541   scalar_int_mode nmode;
4542   int lunsignedp, runsignedp;
4543   int lreversep, rreversep;
4544   int lvolatilep = 0, rvolatilep = 0;
4545   tree linner, rinner = NULL_TREE;
4546   tree mask;
4547   tree offset;
4548 
4549   /* Get all the information about the extractions being done.  If the bit size
4550      is the same as the size of the underlying object, we aren't doing an
4551      extraction at all and so can do nothing.  We also don't want to
4552      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4553      then will no longer be able to replace it.  */
4554   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4555 				&lunsignedp, &lreversep, &lvolatilep);
4556   if (linner == lhs
4557       || !known_size_p (plbitsize)
4558       || !plbitsize.is_constant (&lbitsize)
4559       || !plbitpos.is_constant (&lbitpos)
4560       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4561       || offset != 0
4562       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4563       || lvolatilep)
4564     return 0;
4565 
4566   if (const_p)
4567     rreversep = lreversep;
4568   else
4569    {
4570      /* If this is not a constant, we can only do something if bit positions,
4571 	sizes, signedness and storage order are the same.  */
4572      rinner
4573        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4574 			      &runsignedp, &rreversep, &rvolatilep);
4575 
4576      if (rinner == rhs
4577 	 || maybe_ne (lbitpos, rbitpos)
4578 	 || maybe_ne (lbitsize, rbitsize)
4579 	 || lunsignedp != runsignedp
4580 	 || lreversep != rreversep
4581 	 || offset != 0
4582 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4583 	 || rvolatilep)
4584        return 0;
4585    }
4586 
4587   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4588   poly_uint64 bitstart = 0;
4589   poly_uint64 bitend = 0;
4590   if (TREE_CODE (lhs) == COMPONENT_REF)
4591     {
4592       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4593       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4594 	return 0;
4595     }
4596 
4597   /* See if we can find a mode to refer to this field.  We should be able to,
4598      but fail if we can't.  */
4599   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4600 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4601 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4602 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4603 		      BITS_PER_WORD, false, &nmode))
4604     return 0;
4605 
4606   /* Set signed and unsigned types of the precision of this mode for the
4607      shifts below.  */
4608   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4609 
4610   /* Compute the bit position and size for the new reference and our offset
4611      within it. If the new reference is the same size as the original, we
4612      won't optimize anything, so return zero.  */
4613   nbitsize = GET_MODE_BITSIZE (nmode);
4614   nbitpos = lbitpos & ~ (nbitsize - 1);
4615   lbitpos -= nbitpos;
4616   if (nbitsize == lbitsize)
4617     return 0;
4618 
4619   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4620     lbitpos = nbitsize - lbitsize - lbitpos;
4621 
4622   /* Make the mask to be used against the extracted field.  */
4623   mask = build_int_cst_type (unsigned_type, -1);
4624   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4625   mask = const_binop (RSHIFT_EXPR, mask,
4626 		      size_int (nbitsize - lbitsize - lbitpos));
4627 
4628   if (! const_p)
4629     {
4630       if (nbitpos < 0)
4631 	return 0;
4632 
4633       /* If not comparing with constant, just rework the comparison
4634 	 and return.  */
4635       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4636 				    nbitsize, nbitpos, 1, lreversep);
4637       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4638       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4639 				    nbitsize, nbitpos, 1, rreversep);
4640       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4641       return fold_build2_loc (loc, code, compare_type, t1, t2);
4642     }
4643 
4644   /* Otherwise, we are handling the constant case.  See if the constant is too
4645      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4646      this not only for its own sake, but to avoid having to test for this
4647      error case below.  If we didn't, we might generate wrong code.
4648 
4649      For unsigned fields, the constant shifted right by the field length should
4650      be all zero.  For signed fields, the high-order bits should agree with
4651      the sign bit.  */
4652 
4653   if (lunsignedp)
4654     {
4655       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4656 	{
4657 	  warning (0, "comparison is always %d due to width of bit-field",
4658 		   code == NE_EXPR);
4659 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4660 	}
4661     }
4662   else
4663     {
4664       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4665       if (tem != 0 && tem != -1)
4666 	{
4667 	  warning (0, "comparison is always %d due to width of bit-field",
4668 		   code == NE_EXPR);
4669 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4670 	}
4671     }
4672 
4673   if (nbitpos < 0)
4674     return 0;
4675 
4676   /* Single-bit compares should always be against zero.  */
4677   if (lbitsize == 1 && ! integer_zerop (rhs))
4678     {
4679       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4680       rhs = build_int_cst (type, 0);
4681     }
4682 
4683   /* Make a new bitfield reference, shift the constant over the
4684      appropriate number of bits and mask it with the computed mask
4685      (in case this was a signed field).  If we changed it, make a new one.  */
4686   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4687 			    nbitsize, nbitpos, 1, lreversep);
4688 
4689   rhs = const_binop (BIT_AND_EXPR,
4690 		     const_binop (LSHIFT_EXPR,
4691 				  fold_convert_loc (loc, unsigned_type, rhs),
4692 				  size_int (lbitpos)),
4693 		     mask);
4694 
4695   lhs = build2_loc (loc, code, compare_type,
4696 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4697   return lhs;
4698 }
4699 
4700 /* Subroutine for fold_truth_andor_1: decode a field reference.
4701 
4702    If EXP is a comparison reference, we return the innermost reference.
4703 
4704    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4705    set to the starting bit number.
4706 
4707    If the innermost field can be completely contained in a mode-sized
4708    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4709 
4710    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4711    otherwise it is not changed.
4712 
4713    *PUNSIGNEDP is set to the signedness of the field.
4714 
4715    *PREVERSEP is set to the storage order of the field.
4716 
4717    *PMASK is set to the mask used.  This is either contained in a
4718    BIT_AND_EXPR or derived from the width of the field.
4719 
4720    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4721 
4722    Return 0 if this is not a component reference or is one that we can't
4723    do anything with.  */
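/* Illustrative example with hypothetical trees: for EXP of the form
   '((unsigned char) s.f) & 0x7', the BIT_AND_EXPR mask 0x7 ends up in
   *PAND_MASK, *PMASK is that mask merged with the field-width mask, and
   the innermost reference underlying 's.f' is returned.  */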
4724 
4725 static tree
4726 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4727 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4728 			int *punsignedp, int *preversep, int *pvolatilep,
4729 			tree *pmask, tree *pand_mask)
4730 {
4731   tree exp = *exp_;
4732   tree outer_type = 0;
4733   tree and_mask = 0;
4734   tree mask, inner, offset;
4735   tree unsigned_type;
4736   unsigned int precision;
4737 
4738   /* All the optimizations using this function assume integer fields.
4739      There are problems with FP fields since the type_for_size call
4740      below can fail for, e.g., XFmode.  */
4741   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4742     return NULL_TREE;
4743 
4744   /* We are interested in the bare arrangement of bits, so strip everything
4745      that doesn't affect the machine mode.  However, record the type of the
4746      outermost expression if it may matter below.  */
4747   if (CONVERT_EXPR_P (exp)
4748       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4749     outer_type = TREE_TYPE (exp);
4750   STRIP_NOPS (exp);
4751 
4752   if (TREE_CODE (exp) == BIT_AND_EXPR)
4753     {
4754       and_mask = TREE_OPERAND (exp, 1);
4755       exp = TREE_OPERAND (exp, 0);
4756       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4757       if (TREE_CODE (and_mask) != INTEGER_CST)
4758 	return NULL_TREE;
4759     }
4760 
4761   poly_int64 poly_bitsize, poly_bitpos;
4762   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4763 			       pmode, punsignedp, preversep, pvolatilep);
4764   if ((inner == exp && and_mask == 0)
4765       || !poly_bitsize.is_constant (pbitsize)
4766       || !poly_bitpos.is_constant (pbitpos)
4767       || *pbitsize < 0
4768       || offset != 0
4769       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4770       /* Reject out-of-bound accesses (PR79731).  */
4771       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4772 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4773 			       *pbitpos + *pbitsize) < 0))
4774     return NULL_TREE;
4775 
4776   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4777   if (unsigned_type == NULL_TREE)
4778     return NULL_TREE;
4779 
4780   *exp_ = exp;
4781 
4782   /* If the number of bits in the reference is the same as the bitsize of
4783      the outer type, then the outer type gives the signedness. Otherwise
4784      (in case of a small bitfield) the signedness is unchanged.  */
4785   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4786     *punsignedp = TYPE_UNSIGNED (outer_type);
4787 
4788   /* Compute the mask to access the bitfield.  */
4789   precision = TYPE_PRECISION (unsigned_type);
4790 
4791   mask = build_int_cst_type (unsigned_type, -1);
4792 
4793   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4794   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4795 
4796   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4797   if (and_mask != 0)
4798     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4799 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4800 
4801   *pmask = mask;
4802   *pand_mask = and_mask;
4803   return inner;
4804 }
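
/* As a hedged illustration of what gets decoded: given a hypothetical

     struct S { unsigned int a : 3; unsigned int b : 5; } s;

   a comparison involving `s.b' would typically decode to *PBITSIZE == 5
   and *PBITPOS == 3 (the exact numbering and layout are target-dependent),
   with the access performed in an unsigned type of that width and *PMASK
   being the constant with the low five bits set.  */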
4805 
4806 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4807    bit positions and the type of MASK is signed.  */
4808 
4809 static bool
4810 all_ones_mask_p (const_tree mask, unsigned int size)
4811 {
4812   tree type = TREE_TYPE (mask);
4813   unsigned int precision = TYPE_PRECISION (type);
4814 
4815   /* If this function returns true when the type of the mask is
4816      UNSIGNED, then there will be errors.  In particular see
4817      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4818      any documentation paper trail as to why this is so.  But the
4819      pre-wide-int code worked with that restriction and it has been preserved
4820      here.  */
4821   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4822     return false;
4823 
4824   return wi::mask (size, false, precision) == wi::to_wide (mask);
4825 }
4826 
4827 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4828    represents the sign bit of EXP's type.  If EXP represents a sign
4829    or zero extension, also test VAL against the unextended type.
4830    The return value is the (sub)expression whose sign bit is VAL,
4831    or NULL_TREE otherwise.  */
4832 
4833 tree
4834 sign_bit_p (tree exp, const_tree val)
4835 {
4836   int width;
4837   tree t;
4838 
4839   /* Tree EXP must have an integral type.  */
4840   t = TREE_TYPE (exp);
4841   if (! INTEGRAL_TYPE_P (t))
4842     return NULL_TREE;
4843 
4844   /* Tree VAL must be an integer constant.  */
4845   if (TREE_CODE (val) != INTEGER_CST
4846       || TREE_OVERFLOW (val))
4847     return NULL_TREE;
4848 
4849   width = TYPE_PRECISION (t);
4850   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4851     return exp;
4852 
4853   /* Handle extension from a narrower type.  */
4854   if (TREE_CODE (exp) == NOP_EXPR
4855       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4856     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4857 
4858   return NULL_TREE;
4859 }
4860 
4861 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4862    to be evaluated unconditionally.  */
4863 
4864 static bool
4865 simple_operand_p (const_tree exp)
4866 {
4867   /* Strip any conversions that don't change the machine mode.  */
4868   STRIP_NOPS (exp);
4869 
4870   return (CONSTANT_CLASS_P (exp)
4871   	  || TREE_CODE (exp) == SSA_NAME
4872 	  || (DECL_P (exp)
4873 	      && ! TREE_ADDRESSABLE (exp)
4874 	      && ! TREE_THIS_VOLATILE (exp)
4875 	      && ! DECL_NONLOCAL (exp)
4876 	      /* Don't regard global variables as simple.  They may be
4877 		 allocated in ways unknown to the compiler (shared memory,
4878 		 #pragma weak, etc).  */
4879 	      && ! TREE_PUBLIC (exp)
4880 	      && ! DECL_EXTERNAL (exp)
4881 	      /* Weakrefs are not safe to be read, since they can be NULL.
4882  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4883 		 have DECL_WEAK flag set.  */
4884 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4885 	      /* Loading a static variable is unduly expensive, but global
4886 		 registers aren't expensive.  */
4887 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4888 }
4889 
4890 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4891    to be evaluated unconditionally.
4892    In addition to simple_operand_p, we assume that comparisons, conversions,
4893    and logic-not operations are simple, if their operands are simple, too.  */
4894 
4895 static bool
4896 simple_operand_p_2 (tree exp)
4897 {
4898   enum tree_code code;
4899 
4900   if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4901     return false;
4902 
4903   while (CONVERT_EXPR_P (exp))
4904     exp = TREE_OPERAND (exp, 0);
4905 
4906   code = TREE_CODE (exp);
4907 
4908   if (TREE_CODE_CLASS (code) == tcc_comparison)
4909     return (simple_operand_p (TREE_OPERAND (exp, 0))
4910 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4911 
4912   if (code == TRUTH_NOT_EXPR)
4913       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4914 
4915   return simple_operand_p (exp);
4916 }
4917 
4918 
4919 /* The following functions are subroutines to fold_range_test and allow it to
4920    try to change a logical combination of comparisons into a range test.
4921 
4922    For example, both
4923 	X == 2 || X == 3 || X == 4 || X == 5
4924    and
4925 	X >= 2 && X <= 5
4926    are converted to
4927 	(unsigned) (X - 2) <= 3
4928 
4929    We describe each set of comparisons as being either inside or outside
4930    a range, using a variable named like IN_P, and then describe the
4931    range with a lower and upper bound.  If one of the bounds is omitted,
4932    it represents either the highest or lowest value of the type.
4933 
4934    In the comments below, we represent a range by two numbers in brackets
4935    preceded by a "+" to designate being inside that range, or a "-" to
4936    designate being outside that range, so the condition can be inverted by
4937    flipping the prefix.  An omitted bound is represented by a "-".  For
4938    example, "- [-, 10]" means being outside the range starting at the lowest
4939    possible value and ending at 10, in other words, being greater than 10.
4940    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4941    always false.
4942 
4943    We set up things so that the missing bounds are handled in a consistent
4944    manner so neither a missing bound nor "true" and "false" need to be
4945    handled using a special case.  */
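
/* As a concrete instance of the example above (a hedged, source-level
   sketch; the folder itself works on trees, not C text):

     x == 2 || x == 3 || x == 4 || x == 5   describes + [2, 5]
     (unsigned) (x - 2) <= 3                is the resulting check

   Subtracting the low bound rebases the range to + [0, 3]; values of X
   below 2 wrap around to very large unsigned numbers, so the single
   unsigned <= rejects both out-of-range directions at once.  */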
4946 
4947 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4948    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4949    and UPPER1_P are nonzero if the respective argument is an upper bound
4950    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4951    must be specified for a comparison.  ARG1 will be converted to ARG0's
4952    type if both are specified.  */
4953 
4954 static tree
4955 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4956 	     tree arg1, int upper1_p)
4957 {
4958   tree tem;
4959   int result;
4960   int sgn0, sgn1;
4961 
4962   /* If neither arg represents infinity, do the normal operation.
4963      Else, if not a comparison, return infinity.  Else handle the special
4964      comparison rules. Note that most of the cases below won't occur, but
4965      are handled for consistency.  */
4966 
4967   if (arg0 != 0 && arg1 != 0)
4968     {
4969       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4970 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4971       STRIP_NOPS (tem);
4972       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4973     }
4974 
4975   if (TREE_CODE_CLASS (code) != tcc_comparison)
4976     return 0;
4977 
4978   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4979      for neither.  In real maths, we cannot assume open ended ranges are
4980      the same. But, this is computer arithmetic, where numbers are finite.
4981      We can therefore model a missing upper bound with a value Z greater
4982      than any representable number, and a missing lower bound with -Z.
4983      This permits us to treat unbounded ranges as equal.  */
4984   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4985   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4986   switch (code)
4987     {
4988     case EQ_EXPR:
4989       result = sgn0 == sgn1;
4990       break;
4991     case NE_EXPR:
4992       result = sgn0 != sgn1;
4993       break;
4994     case LT_EXPR:
4995       result = sgn0 < sgn1;
4996       break;
4997     case LE_EXPR:
4998       result = sgn0 <= sgn1;
4999       break;
5000     case GT_EXPR:
5001       result = sgn0 > sgn1;
5002       break;
5003     case GE_EXPR:
5004       result = sgn0 >= sgn1;
5005       break;
5006     default:
5007       gcc_unreachable ();
5008     }
5009 
5010   return constant_boolean_node (result, type);
5011 }
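
/* For example (a hedged illustration): comparing a missing lower bound
   against a missing upper bound gives SGN0 == -1 and SGN1 == 1, so
   LT_EXPR yields true and GE_EXPR yields false, exactly as if the
   bounds were -Z and +Z.  */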
5012 
5013 /* Helper routine for make_range.  Perform one step for it, return
5014    new expression if the loop should continue or NULL_TREE if it should
5015    stop.  */
5016 
5017 tree
5018 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5019 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5020 		 bool *strict_overflow_p)
5021 {
5022   tree arg0_type = TREE_TYPE (arg0);
5023   tree n_low, n_high, low = *p_low, high = *p_high;
5024   int in_p = *p_in_p, n_in_p;
5025 
5026   switch (code)
5027     {
5028     case TRUTH_NOT_EXPR:
5029       /* We can only do something if the range is testing for zero.  */
5030       if (low == NULL_TREE || high == NULL_TREE
5031 	  || ! integer_zerop (low) || ! integer_zerop (high))
5032 	return NULL_TREE;
5033       *p_in_p = ! in_p;
5034       return arg0;
5035 
5036     case EQ_EXPR: case NE_EXPR:
5037     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5038       /* We can only do something if the range is testing for zero
5039 	 and if the second operand is an integer constant.  Note that
5040 	 saying something is "in" the range we make is done by
5041 	 complementing IN_P since it will set in the initial case of
5042 	 being not equal to zero; "out" is leaving it alone.  */
5043       if (low == NULL_TREE || high == NULL_TREE
5044 	  || ! integer_zerop (low) || ! integer_zerop (high)
5045 	  || TREE_CODE (arg1) != INTEGER_CST)
5046 	return NULL_TREE;
5047 
5048       switch (code)
5049 	{
5050 	case NE_EXPR:  /* - [c, c]  */
5051 	  low = high = arg1;
5052 	  break;
5053 	case EQ_EXPR:  /* + [c, c]  */
5054 	  in_p = ! in_p, low = high = arg1;
5055 	  break;
5056 	case GT_EXPR:  /* - [-, c] */
5057 	  low = 0, high = arg1;
5058 	  break;
5059 	case GE_EXPR:  /* + [c, -] */
5060 	  in_p = ! in_p, low = arg1, high = 0;
5061 	  break;
5062 	case LT_EXPR:  /* - [c, -] */
5063 	  low = arg1, high = 0;
5064 	  break;
5065 	case LE_EXPR:  /* + [-, c] */
5066 	  in_p = ! in_p, low = 0, high = arg1;
5067 	  break;
5068 	default:
5069 	  gcc_unreachable ();
5070 	}
5071 
5072       /* If this is an unsigned comparison, we also know that EXP is
5073 	 greater than or equal to zero.  We base the range tests we make
5074 	 on that fact, so we record it here so we can parse existing
5075 	 range tests.  We test arg0_type since often the return type
5076 	 of, e.g. EQ_EXPR, is boolean.  */
5077       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5078 	{
5079 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
5080 			      in_p, low, high, 1,
5081 			      build_int_cst (arg0_type, 0),
5082 			      NULL_TREE))
5083 	    return NULL_TREE;
5084 
5085 	  in_p = n_in_p, low = n_low, high = n_high;
5086 
5087 	  /* If the high bound is missing, but we have a nonzero low
5088 	     bound, reverse the range so it goes from zero to the low bound
5089 	     minus 1.  */
5090 	  if (high == 0 && low && ! integer_zerop (low))
5091 	    {
5092 	      in_p = ! in_p;
5093 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5094 				  build_int_cst (TREE_TYPE (low), 1), 0);
5095 	      low = build_int_cst (arg0_type, 0);
5096 	    }
5097 	}
5098 
5099       *p_low = low;
5100       *p_high = high;
5101       *p_in_p = in_p;
5102       return arg0;
5103 
5104     case NEGATE_EXPR:
5105       /* If flag_wrapv and ARG0_TYPE is signed, make sure
5106 	 low and high are non-NULL, then normalize will DTRT.  */
5107       if (!TYPE_UNSIGNED (arg0_type)
5108 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5109 	{
5110 	  if (low == NULL_TREE)
5111 	    low = TYPE_MIN_VALUE (arg0_type);
5112 	  if (high == NULL_TREE)
5113 	    high = TYPE_MAX_VALUE (arg0_type);
5114 	}
5115 
5116       /* (-x) IN [a,b] -> x in [-b, -a]  */
5117       n_low = range_binop (MINUS_EXPR, exp_type,
5118 			   build_int_cst (exp_type, 0),
5119 			   0, high, 1);
5120       n_high = range_binop (MINUS_EXPR, exp_type,
5121 			    build_int_cst (exp_type, 0),
5122 			    0, low, 0);
5123       if (n_high != 0 && TREE_OVERFLOW (n_high))
5124 	return NULL_TREE;
5125       goto normalize;
5126 
5127     case BIT_NOT_EXPR:
5128       /* ~ X -> -X - 1  */
5129       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5130 			 build_int_cst (exp_type, 1));
5131 
5132     case PLUS_EXPR:
5133     case MINUS_EXPR:
5134       if (TREE_CODE (arg1) != INTEGER_CST)
5135 	return NULL_TREE;
5136 
5137       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5138 	 move a constant to the other side.  */
5139       if (!TYPE_UNSIGNED (arg0_type)
5140 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5141 	return NULL_TREE;
5142 
5143       /* If EXP is signed, any overflow in the computation is undefined,
5144 	 so we don't worry about it so long as our computations on
5145 	 the bounds don't overflow.  For unsigned, overflow is defined
5146 	 and this is exactly the right thing.  */
5147       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5148 			   arg0_type, low, 0, arg1, 0);
5149       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5150 			    arg0_type, high, 1, arg1, 0);
5151       if ((n_low != 0 && TREE_OVERFLOW (n_low))
5152 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
5153 	return NULL_TREE;
5154 
5155       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5156 	*strict_overflow_p = true;
5157 
5158       normalize:
5159 	/* Check for an unsigned range which has wrapped around the maximum
5160 	   value thus making n_high < n_low, and normalize it.  */
5161 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5162 	  {
5163 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5164 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
5165 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5166 				build_int_cst (TREE_TYPE (n_low), 1), 0);
5167 
5168 	    /* If the range is of the form +/- [ x+1, x ], we won't
5169 	       be able to normalize it.  But then, it represents the
5170 	       whole range or the empty set, so make it
5171 	       +/- [ -, - ].  */
5172 	    if (tree_int_cst_equal (n_low, low)
5173 		&& tree_int_cst_equal (n_high, high))
5174 	      low = high = 0;
5175 	    else
5176 	      in_p = ! in_p;
5177 	  }
5178 	else
5179 	  low = n_low, high = n_high;
5180 
5181 	*p_low = low;
5182 	*p_high = high;
5183 	*p_in_p = in_p;
5184 	return arg0;
5185 
5186     CASE_CONVERT:
5187     case NON_LVALUE_EXPR:
5188       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5189 	return NULL_TREE;
5190 
5191       if (! INTEGRAL_TYPE_P (arg0_type)
5192 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
5193 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5194 	return NULL_TREE;
5195 
5196       n_low = low, n_high = high;
5197 
5198       if (n_low != 0)
5199 	n_low = fold_convert_loc (loc, arg0_type, n_low);
5200 
5201       if (n_high != 0)
5202 	n_high = fold_convert_loc (loc, arg0_type, n_high);
5203 
5204       /* If we're converting arg0 from an unsigned type, to exp,
5205 	 a signed type, we will be doing the comparison as unsigned.
5206 	 The tests above have already verified that LOW and HIGH
5207 	 are both positive.
5208 
5209 	 So we have to ensure that we will handle large unsigned
5210 	 values the same way that the current signed bounds treat
5211 	 negative values.  */
5212 
5213       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5214 	{
5215 	  tree high_positive;
5216 	  tree equiv_type;
5217 	  /* For fixed-point modes, we need to pass the saturating flag
5218 	     as the 2nd parameter.  */
5219 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5220 	    equiv_type
5221 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5222 						TYPE_SATURATING (arg0_type));
5223 	  else
5224 	    equiv_type
5225 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5226 
5227 	  /* A range without an upper bound is, naturally, unbounded.
5228 	     Since convert would have cropped a very large value, use
5229 	     the max value for the destination type.  */
5230 	  high_positive
5231 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5232 	      : TYPE_MAX_VALUE (arg0_type);
5233 
5234 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5235 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5236 					     fold_convert_loc (loc, arg0_type,
5237 							       high_positive),
5238 					     build_int_cst (arg0_type, 1));
5239 
5240 	  /* If the low bound is specified, "and" the range with the
5241 	     range for which the original unsigned value will be
5242 	     positive.  */
5243 	  if (low != 0)
5244 	    {
5245 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5246 				  1, fold_convert_loc (loc, arg0_type,
5247 						       integer_zero_node),
5248 				  high_positive))
5249 		return NULL_TREE;
5250 
5251 	      in_p = (n_in_p == in_p);
5252 	    }
5253 	  else
5254 	    {
5255 	      /* Otherwise, "or" the range with the range of the input
5256 		 that will be interpreted as negative.  */
5257 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5258 				  1, fold_convert_loc (loc, arg0_type,
5259 						       integer_zero_node),
5260 				  high_positive))
5261 		return NULL_TREE;
5262 
5263 	      in_p = (in_p != n_in_p);
5264 	    }
5265 	}
5266 
5267       /* Otherwise, if we are converting arg0 from signed type, to exp,
5268 	 an unsigned type, we will do the comparison as signed.  If
5269 	 high is non-NULL, we punt above if it doesn't fit in the signed
5270 	 type, so if we get through here, +[-, high] or +[low, high] are
5271 	 equivalent to +[-, n_high] or +[n_low, n_high].  Similarly,
5272 	 +[-, -] or -[-, -] are equivalent too.  But if low is specified and
5273 	 high is not, the +[low, -] range is equivalent to union of
5274 	 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5275 	 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5276 	 low being 0, which should be treated as [-, -].  */
5277       else if (TYPE_UNSIGNED (exp_type)
5278 	       && !TYPE_UNSIGNED (arg0_type)
5279 	       && low
5280 	       && !high)
5281 	{
5282 	  if (integer_zerop (low))
5283 	    n_low = NULL_TREE;
5284 	  else
5285 	    {
5286 	      n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5287 					n_low, build_int_cst (arg0_type, -1));
5288 	      n_low = build_zero_cst (arg0_type);
5289 	      in_p = !in_p;
5290 	    }
5291 	}
5292 
5293       *p_low = n_low;
5294       *p_high = n_high;
5295       *p_in_p = in_p;
5296       return arg0;
5297 
5298     default:
5299       return NULL_TREE;
5300     }
5301 }
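
/* One step, worked through (a hedged illustration): for EXP = (x <= 42)
   the caller starts from "EXP != 0", i.e. - [0, 0].  The LE_EXPR case
   above flips IN_P and takes the bounds from ARG1, leaving the range
   + [-, 42] on X itself, which later steps may refine further.  */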
5302 
5303 /* Given EXP, a logical expression, set the range it is testing into
5304    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
5305    actually being tested.  *PLOW and *PHIGH will be made of the same
5306    type as the returned expression.  If EXP is not a comparison, we
5307    will most likely not be returning a useful value and range.  Set
5308    *STRICT_OVERFLOW_P to true if the return value is only valid
5309    because signed overflow is undefined; otherwise, do not change
5310    *STRICT_OVERFLOW_P.  */
5311 
5312 tree
5313 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5314 	    bool *strict_overflow_p)
5315 {
5316   enum tree_code code;
5317   tree arg0, arg1 = NULL_TREE;
5318   tree exp_type, nexp;
5319   int in_p;
5320   tree low, high;
5321   location_t loc = EXPR_LOCATION (exp);
5322 
5323   /* Start with simply saying "EXP != 0" and then look at the code of EXP
5324      and see if we can refine the range.  Some of the cases below may not
5325      happen, but it doesn't seem worth worrying about this.  We "continue"
5326      happen, but it doesn't seem worth worrying about this.  We keep
5327      iterating as long as make_range_step refines the range and stop as
5328      soon as it returns NULL_TREE.  */
5329   in_p = 0;
5330   low = high = build_int_cst (TREE_TYPE (exp), 0);
5331 
5332   while (1)
5333     {
5334       code = TREE_CODE (exp);
5335       exp_type = TREE_TYPE (exp);
5336       arg0 = NULL_TREE;
5337 
5338       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5339 	{
5340 	  if (TREE_OPERAND_LENGTH (exp) > 0)
5341 	    arg0 = TREE_OPERAND (exp, 0);
5342 	  if (TREE_CODE_CLASS (code) == tcc_binary
5343 	      || TREE_CODE_CLASS (code) == tcc_comparison
5344 	      || (TREE_CODE_CLASS (code) == tcc_expression
5345 		  && TREE_OPERAND_LENGTH (exp) > 1))
5346 	    arg1 = TREE_OPERAND (exp, 1);
5347 	}
5348       if (arg0 == NULL_TREE)
5349 	break;
5350 
5351       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5352 			      &high, &in_p, strict_overflow_p);
5353       if (nexp == NULL_TREE)
5354 	break;
5355       exp = nexp;
5356     }
5357 
5358   /* If EXP is a constant, we can evaluate whether this is true or false.  */
5359   if (TREE_CODE (exp) == INTEGER_CST)
5360     {
5361       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5362 						 exp, 0, low, 0))
5363 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
5364 						    exp, 1, high, 1)));
5365       low = high = 0;
5366       exp = 0;
5367     }
5368 
5369   *pin_p = in_p, *plow = low, *phigh = high;
5370   return exp;
5371 }
5372 
5373 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5374    a bitwise check, i.e. when
5375      LOW  == 0xXX...X00...0
5376      HIGH == 0xXX...X11...1
5377    Return corresponding mask in MASK and stem in VALUE.  */
5378 
5379 static bool
5380 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5381 		  tree *value)
5382 {
5383   if (TREE_CODE (low) != INTEGER_CST
5384       || TREE_CODE (high) != INTEGER_CST)
5385     return false;
5386 
5387   unsigned prec = TYPE_PRECISION (type);
5388   wide_int lo = wi::to_wide (low, prec);
5389   wide_int hi = wi::to_wide (high, prec);
5390 
5391   wide_int end_mask = lo ^ hi;
5392   if ((end_mask & (end_mask + 1)) != 0
5393       || (lo & end_mask) != 0)
5394     return false;
5395 
5396   wide_int stem_mask = ~end_mask;
5397   wide_int stem = lo & stem_mask;
5398   if (stem != (hi & stem_mask))
5399     return false;
5400 
5401   *mask = wide_int_to_tree (type, stem_mask);
5402   *value = wide_int_to_tree (type, stem);
5403 
5404   return true;
5405 }
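
/* Worked example (hedged): for LOW == 0x20 and HIGH == 0x3f the bounds
   differ only in their low five bits, so END_MASK == 0x1f passes both
   tests above, STEM_MASK == ~0x1f and the stem is 0x20.  The check
   0x20 <= X && X <= 0x3f thus becomes (X & ~0x1f) == 0x20.  */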
5406 
5407 /* Helper routine for build_range_check and match.pd.  Return the type in
5408    which to perform the check, or NULL_TREE if it shouldn't be optimized.  */
5409 
5410 tree
5411 range_check_type (tree etype)
5412 {
5413   /* First make sure that arithmetic in this type is valid, then make sure
5414      that it wraps around.  */
5415   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5416     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5417 
5418   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5419     {
5420       tree utype, minv, maxv;
5421 
5422       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5423 	 for the type in question, as we rely on this here.  */
5424       utype = unsigned_type_for (etype);
5425       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5426       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5427 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
5428       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5429 
5430       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5431 				      minv, 1, maxv, 1)))
5432 	etype = utype;
5433       else
5434 	return NULL_TREE;
5435     }
5436   else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5437     etype = unsigned_type_for (etype);
5438   return etype;
5439 }
5440 
5441 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5442    type, TYPE, return an expression to test if EXP is in (or out of, depending
5443    on IN_P) the range.  Return 0 if the test couldn't be created.  */
5444 
5445 tree
5446 build_range_check (location_t loc, tree type, tree exp, int in_p,
5447 		   tree low, tree high)
5448 {
5449   tree etype = TREE_TYPE (exp), mask, value;
5450 
5451   /* Disable this optimization for function pointer expressions
5452      on targets that require function pointer canonicalization.  */
5453   if (targetm.have_canonicalize_funcptr_for_compare ()
5454       && POINTER_TYPE_P (etype)
5455       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5456     return NULL_TREE;
5457 
5458   if (! in_p)
5459     {
5460       value = build_range_check (loc, type, exp, 1, low, high);
5461       if (value != 0)
5462         return invert_truthvalue_loc (loc, value);
5463 
5464       return 0;
5465     }
5466 
5467   if (low == 0 && high == 0)
5468     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5469 
5470   if (low == 0)
5471     return fold_build2_loc (loc, LE_EXPR, type, exp,
5472 			    fold_convert_loc (loc, etype, high));
5473 
5474   if (high == 0)
5475     return fold_build2_loc (loc, GE_EXPR, type, exp,
5476 			    fold_convert_loc (loc, etype, low));
5477 
5478   if (operand_equal_p (low, high, 0))
5479     return fold_build2_loc (loc, EQ_EXPR, type, exp,
5480 			    fold_convert_loc (loc, etype, low));
5481 
5482   if (TREE_CODE (exp) == BIT_AND_EXPR
5483       && maskable_range_p (low, high, etype, &mask, &value))
5484     return fold_build2_loc (loc, EQ_EXPR, type,
5485 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
5486 					     exp, mask),
5487 			    value);
5488 
5489   if (integer_zerop (low))
5490     {
5491       if (! TYPE_UNSIGNED (etype))
5492 	{
5493 	  etype = unsigned_type_for (etype);
5494 	  high = fold_convert_loc (loc, etype, high);
5495 	  exp = fold_convert_loc (loc, etype, exp);
5496 	}
5497       return build_range_check (loc, type, exp, 1, 0, high);
5498     }
5499 
5500   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
5501   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5502     {
5503       int prec = TYPE_PRECISION (etype);
5504 
5505       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5506 	{
5507 	  if (TYPE_UNSIGNED (etype))
5508 	    {
5509 	      tree signed_etype = signed_type_for (etype);
5510 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5511 		etype
5512 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5513 	      else
5514 		etype = signed_etype;
5515 	      exp = fold_convert_loc (loc, etype, exp);
5516 	    }
5517 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5518 				  build_int_cst (etype, 0));
5519 	}
5520     }
5521 
5522   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5523      This requires wrap-around arithmetics for the type of the expression.  */
5524   etype = range_check_type (etype);
5525   if (etype == NULL_TREE)
5526     return NULL_TREE;
5527 
5528   high = fold_convert_loc (loc, etype, high);
5529   low = fold_convert_loc (loc, etype, low);
5530   exp = fold_convert_loc (loc, etype, exp);
5531 
5532   value = const_binop (MINUS_EXPR, high, low);
5533 
5534   if (value != 0 && !TREE_OVERFLOW (value))
5535     return build_range_check (loc, type,
5536 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5537 			      1, build_int_cst (etype, 0), value);
5538 
5539   return 0;
5540 }
5541 
5542 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5543 
5544 static tree
5545 range_predecessor (tree val)
5546 {
5547   tree type = TREE_TYPE (val);
5548 
5549   if (INTEGRAL_TYPE_P (type)
5550       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5551     return 0;
5552   else
5553     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5554 			build_int_cst (TREE_TYPE (val), 1), 0);
5555 }
5556 
5557 /* Return the successor of VAL in its type, handling the infinite case.  */
5558 
5559 static tree
5560 range_successor (tree val)
5561 {
5562   tree type = TREE_TYPE (val);
5563 
5564   if (INTEGRAL_TYPE_P (type)
5565       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5566     return 0;
5567   else
5568     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5569 			build_int_cst (TREE_TYPE (val), 1), 0);
5570 }
5571 
5572 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5573    can, 0 if we can't.  Set the output range into the specified parameters.  */
5574 
5575 bool
5576 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5577 	      tree high0, int in1_p, tree low1, tree high1)
5578 {
5579   int no_overlap;
5580   int subset;
5581   int temp;
5582   tree tem;
5583   int in_p;
5584   tree low, high;
5585   int lowequal = ((low0 == 0 && low1 == 0)
5586 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5587 						low0, 0, low1, 0)));
5588   int highequal = ((high0 == 0 && high1 == 0)
5589 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5590 						 high0, 1, high1, 1)));
5591 
5592   /* Make range 0 be the range that starts first, or ends last if they
5593      start at the same value.  Swap them if that isn't the case.  */
5594   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5595 				 low0, 0, low1, 0))
5596       || (lowequal
5597 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5598 					high1, 1, high0, 1))))
5599     {
5600       temp = in0_p, in0_p = in1_p, in1_p = temp;
5601       tem = low0, low0 = low1, low1 = tem;
5602       tem = high0, high0 = high1, high1 = tem;
5603     }
5604 
5605   /* If the second range is != high1 where high1 is the maximum value of
5606      its type, first try merging with the < high1 range.  */
5607   if (low1
5608       && high1
5609       && TREE_CODE (low1) == INTEGER_CST
5610       && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5611 	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5612 	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5613 			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5614       && operand_equal_p (low1, high1, 0))
5615     {
5616       if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5617 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5618 			   !in1_p, NULL_TREE, range_predecessor (low1)))
5619 	return true;
5620       /* Similarly, if the second range is != low1 where low1 is the minimum
5621 	 value of its type, first try merging with the > low1 range.  */
5622       if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5623 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5624 			   !in1_p, range_successor (low1), NULL_TREE))
5625 	return true;
5626     }
5627 
5628   /* Now flag two cases, whether the ranges are disjoint or whether the
5629      second range is totally subsumed in the first.  Note that the tests
5630      below are simplified by the ones above.  */
5631   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5632 					  high0, 1, low1, 0));
5633   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5634 				      high1, 1, high0, 1));
5635 
5636   /* We now have four cases, depending on whether we are including or
5637      excluding the two ranges.  */
5638   if (in0_p && in1_p)
5639     {
5640       /* If they don't overlap, the result is false.  If the second range
5641 	 is a subset it is the result.  Otherwise, the range is from the start
5642 	 of the second to the end of the first.  */
5643       if (no_overlap)
5644 	in_p = 0, low = high = 0;
5645       else if (subset)
5646 	in_p = 1, low = low1, high = high1;
5647       else
5648 	in_p = 1, low = low1, high = high0;
5649     }
5650 
5651   else if (in0_p && ! in1_p)
5652     {
5653       /* If they don't overlap, the result is the first range.  If they are
5654 	 equal, the result is false.  If the second range is a subset of the
5655 	 first, and the ranges begin at the same place, we go from just after
5656 	 the end of the second range to the end of the first.  If the second
5657 	 range is not a subset of the first, or if it is a subset and both
5658 	 ranges end at the same place, the range starts at the start of the
5659 	 first range and ends just before the second range.
5660 	 Otherwise, we can't describe this as a single range.  */
5661       if (no_overlap)
5662 	in_p = 1, low = low0, high = high0;
5663       else if (lowequal && highequal)
5664 	in_p = 0, low = high = 0;
5665       else if (subset && lowequal)
5666 	{
5667 	  low = range_successor (high1);
5668 	  high = high0;
5669 	  in_p = 1;
5670 	  if (low == 0)
5671 	    {
5672 	      /* We are in the weird situation where high0 > high1 but
5673 		 high1 has no successor.  Punt.  */
5674 	      return 0;
5675 	    }
5676 	}
5677       else if (! subset || highequal)
5678 	{
5679 	  low = low0;
5680 	  high = range_predecessor (low1);
5681 	  in_p = 1;
5682 	  if (high == 0)
5683 	    {
5684 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5685 	      return 0;
5686 	    }
5687 	}
5688       else
5689 	return 0;
5690     }
5691 
5692   else if (! in0_p && in1_p)
5693     {
5694       /* If they don't overlap, the result is the second range.  If the second
5695 	 is a subset of the first, the result is false.  Otherwise,
5696 	 the range starts just after the first range and ends at the
5697 	 end of the second.  */
5698       if (no_overlap)
5699 	in_p = 1, low = low1, high = high1;
5700       else if (subset || highequal)
5701 	in_p = 0, low = high = 0;
5702       else
5703 	{
5704 	  low = range_successor (high0);
5705 	  high = high1;
5706 	  in_p = 1;
5707 	  if (low == 0)
5708 	    {
5709 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5710 	      return 0;
5711 	    }
5712 	}
5713     }
5714 
5715   else
5716     {
5717       /* The case where we are excluding both ranges.  Here the complex case
5718 	 is if they don't overlap.  In that case, the only time we have a
5719 	 range is if they are adjacent.  If the second is a subset of the
5720 	 first, the result is the first.  Otherwise, the range to exclude
5721 	 starts at the beginning of the first range and ends at the end of the
5722 	 second.  */
5723       if (no_overlap)
5724 	{
5725 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5726 					 range_successor (high0),
5727 					 1, low1, 0)))
5728 	    in_p = 0, low = low0, high = high1;
5729 	  else
5730 	    {
5731 	      /* Canonicalize - [min, x] into - [-, x].  */
5732 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5733 		switch (TREE_CODE (TREE_TYPE (low0)))
5734 		  {
5735 		  case ENUMERAL_TYPE:
5736 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5737 				  GET_MODE_BITSIZE
5738 				    (TYPE_MODE (TREE_TYPE (low0)))))
5739 		      break;
5740 		    /* FALLTHROUGH */
5741 		  case INTEGER_TYPE:
5742 		    if (tree_int_cst_equal (low0,
5743 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5744 		      low0 = 0;
5745 		    break;
5746 		  case POINTER_TYPE:
5747 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5748 			&& integer_zerop (low0))
5749 		      low0 = 0;
5750 		    break;
5751 		  default:
5752 		    break;
5753 		  }
5754 
5755 	      /* Canonicalize - [x, max] into - [x, -].  */
5756 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5757 		switch (TREE_CODE (TREE_TYPE (high1)))
5758 		  {
5759 		  case ENUMERAL_TYPE:
5760 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5761 				  GET_MODE_BITSIZE
5762 				    (TYPE_MODE (TREE_TYPE (high1)))))
5763 		      break;
5764 		    /* FALLTHROUGH */
5765 		  case INTEGER_TYPE:
5766 		    if (tree_int_cst_equal (high1,
5767 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5768 		      high1 = 0;
5769 		    break;
5770 		  case POINTER_TYPE:
5771 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5772 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5773 						       high1, 1,
5774 						       build_int_cst (TREE_TYPE (high1), 1),
5775 						       1)))
5776 		      high1 = 0;
5777 		    break;
5778 		  default:
5779 		    break;
5780 		  }
5781 
5782 	      /* The ranges might also be adjacent between the maximum and
5783 	         minimum values of the given type.  For
5784 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5785 	         return + [x + 1, y - 1].  */
5786 	      if (low0 == 0 && high1 == 0)
5787 	        {
5788 		  low = range_successor (high0);
5789 		  high = range_predecessor (low1);
5790 		  if (low == 0 || high == 0)
5791 		    return 0;
5792 
5793 		  in_p = 1;
5794 		}
5795 	      else
5796 		return 0;
5797 	    }
5798 	}
5799       else if (subset)
5800 	in_p = 0, low = low0, high = high0;
5801       else
5802 	in_p = 0, low = low0, high = high1;
5803     }
5804 
5805   *pin_p = in_p, *plow = low, *phigh = high;
5806   return 1;
5807 }
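
/* Two worked examples (hedged): merging + [2, 10] with + [5, 20], both
   included, takes the last branch of the first case and yields
   + [5, 10]; merging + [2, 10] with - [5, 20] yields + [2, 4], the part
   of the first range that precedes the excluded one.  */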
5808 
5809 
5810 /* Subroutine of fold, looking inside expressions of the form
5811    A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5812    are the three operands of the COND_EXPR.  This function is
5813    also being used to optimize A op B ? C : A, by reversing the
5814    comparison first.
5815 
5816    Return a folded expression whose code is not a COND_EXPR
5817    anymore, or NULL_TREE if no folding opportunity is found.  */
5818 
5819 static tree
5820 fold_cond_expr_with_comparison (location_t loc, tree type,
5821 				enum tree_code comp_code,
5822 				tree arg00, tree arg01, tree arg1, tree arg2)
5823 {
5824   tree arg1_type = TREE_TYPE (arg1);
5825   tree tem;
5826 
5827   STRIP_NOPS (arg1);
5828   STRIP_NOPS (arg2);
5829 
5830   /* If we have A op 0 ? A : -A, consider applying the following
5831      transformations:
5832 
5833      A == 0? A : -A    same as -A
5834      A != 0? A : -A    same as A
5835      A >= 0? A : -A    same as abs (A)
5836      A > 0?  A : -A    same as abs (A)
5837      A <= 0? A : -A    same as -abs (A)
5838      A < 0?  A : -A    same as -abs (A)
5839 
5840      None of these transformations work for modes with signed
5841      zeros.  If A is +/-0, the first two transformations will
5842      change the sign of the result (from +0 to -0, or vice
5843      versa).  The last four will fix the sign of the result,
5844      even though the original expressions could be positive or
5845      negative, depending on the sign of A.
5846 
5847      Note that all these transformations are correct if A is
5848      NaN, since the two alternatives (A and -A) are also NaNs.  */
5849   if (!HONOR_SIGNED_ZEROS (type)
5850       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5851 	  ? real_zerop (arg01)
5852 	  : integer_zerop (arg01))
5853       && ((TREE_CODE (arg2) == NEGATE_EXPR
5854 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5855 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5856 	        have already been folded to Y-X, check for that. */
5857 	  || (TREE_CODE (arg1) == MINUS_EXPR
5858 	      && TREE_CODE (arg2) == MINUS_EXPR
5859 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5860 				  TREE_OPERAND (arg2, 1), 0)
5861 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5862 				  TREE_OPERAND (arg2, 0), 0))))
5863     switch (comp_code)
5864       {
5865       case EQ_EXPR:
5866       case UNEQ_EXPR:
5867 	tem = fold_convert_loc (loc, arg1_type, arg1);
5868 	return fold_convert_loc (loc, type, negate_expr (tem));
5869       case NE_EXPR:
5870       case LTGT_EXPR:
5871 	return fold_convert_loc (loc, type, arg1);
5872       case UNGE_EXPR:
5873       case UNGT_EXPR:
5874 	if (flag_trapping_math)
5875 	  break;
5876 	/* Fall through.  */
5877       case GE_EXPR:
5878       case GT_EXPR:
5879 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5880 	  break;
5881 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5882 	return fold_convert_loc (loc, type, tem);
5883       case UNLE_EXPR:
5884       case UNLT_EXPR:
5885 	if (flag_trapping_math)
5886 	  break;
5887 	/* FALLTHRU */
5888       case LE_EXPR:
5889       case LT_EXPR:
5890 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5891 	  break;
5892 	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5893 	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5894 	  {
5895 	    /* A <= 0 ? A : -A for A == INT_MIN is valid, but -abs(INT_MIN)
5896 	       is not: it invokes UB both in abs and in the negation of it.
5897 	       So, use ABSU_EXPR instead.  */
5898 	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
5899 	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5900 	    tem = negate_expr (tem);
5901 	    return fold_convert_loc (loc, type, tem);
5902 	  }
5903 	else
5904 	  {
5905 	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5906 	    return negate_expr (fold_convert_loc (loc, type, tem));
5907 	  }
5908       default:
5909 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5910 	break;
5911       }
5912 
5913   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5914      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5915      both transformations are correct when A is NaN: A != 0
5916      is then true, and A == 0 is false.  */
5917 
5918   if (!HONOR_SIGNED_ZEROS (type)
5919       && integer_zerop (arg01) && integer_zerop (arg2))
5920     {
5921       if (comp_code == NE_EXPR)
5922 	return fold_convert_loc (loc, type, arg1);
5923       else if (comp_code == EQ_EXPR)
5924 	return build_zero_cst (type);
5925     }
5926 
5927   /* Try some transformations of A op B ? A : B.
5928 
5929      A == B? A : B    same as B
5930      A != B? A : B    same as A
5931      A >= B? A : B    same as max (A, B)
5932      A > B?  A : B    same as max (B, A)
5933      A <= B? A : B    same as min (A, B)
5934      A < B?  A : B    same as min (B, A)
5935 
5936      As above, these transformations don't work in the presence
5937      of signed zeros.  For example, if A and B are zeros of
5938      opposite sign, the first two transformations will change
5939      the sign of the result.  In the last four, the original
5940      expressions give different results for (A=+0, B=-0) and
5941      (A=-0, B=+0), but the transformed expressions do not.
5942 
5943      The first two transformations are correct if either A or B
5944      is a NaN.  In the first transformation, the condition will
5945      be false, and B will indeed be chosen.  In the case of the
5946      second transformation, the condition A != B will be true,
5947      and A will be chosen.
5948 
5949      The conversions to max() and min() are not correct if B is
5950      a number and A is not.  The conditions in the original
5951      expressions will be false, so all four give B.  The min()
5952      and max() versions would give a NaN instead.  */
5953   if (!HONOR_SIGNED_ZEROS (type)
5954       && operand_equal_for_comparison_p (arg01, arg2)
5955       /* Avoid these transformations if the COND_EXPR may be used
5956 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5957       && (in_gimple_form
5958 	  || VECTOR_TYPE_P (type)
5959 	  || (! lang_GNU_CXX ()
5960 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5961 	  || ! maybe_lvalue_p (arg1)
5962 	  || ! maybe_lvalue_p (arg2)))
5963     {
5964       tree comp_op0 = arg00;
5965       tree comp_op1 = arg01;
5966       tree comp_type = TREE_TYPE (comp_op0);
5967 
5968       switch (comp_code)
5969 	{
5970 	case EQ_EXPR:
5971 	  return fold_convert_loc (loc, type, arg2);
5972 	case NE_EXPR:
5973 	  return fold_convert_loc (loc, type, arg1);
5974 	case LE_EXPR:
5975 	case LT_EXPR:
5976 	case UNLE_EXPR:
5977 	case UNLT_EXPR:
5978 	  /* In C++ a ?: expression can be an lvalue, so put the
5979 	     operand which will be used if they are equal first
5980 	     so that we can convert this back to the
5981 	     corresponding COND_EXPR.  */
5982 	  if (!HONOR_NANS (arg1))
5983 	    {
5984 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5985 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5986 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5987 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5988 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5989 				   comp_op1, comp_op0);
5990 	      return fold_convert_loc (loc, type, tem);
5991 	    }
5992 	  break;
5993 	case GE_EXPR:
5994 	case GT_EXPR:
5995 	case UNGE_EXPR:
5996 	case UNGT_EXPR:
5997 	  if (!HONOR_NANS (arg1))
5998 	    {
5999 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6000 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6001 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6002 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6003 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
6004 				   comp_op1, comp_op0);
6005 	      return fold_convert_loc (loc, type, tem);
6006 	    }
6007 	  break;
6008 	case UNEQ_EXPR:
6009 	  if (!HONOR_NANS (arg1))
6010 	    return fold_convert_loc (loc, type, arg2);
6011 	  break;
6012 	case LTGT_EXPR:
6013 	  if (!HONOR_NANS (arg1))
6014 	    return fold_convert_loc (loc, type, arg1);
6015 	  break;
6016 	default:
6017 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6018 	  break;
6019 	}
6020     }
6021 
6022   return NULL_TREE;
6023 }
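
/* For instance, with NaNs and signed zeros out of the picture,
   "x < y ? x : y" becomes MIN_EXPR <y, x> and "x > y ? x : y" becomes
   MAX_EXPR <y, x>, per the table above; the operand chosen on equality
   is placed first so the result can be converted back to the
   corresponding COND_EXPR lvalue in C++.  */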
6024 
6025 
6026 
6027 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6028 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6029   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6030 		false) >= 2)
6031 #endif
6032 
6033 /* EXP is some logical combination of boolean tests.  See if we can
6034    merge it into some range test.  Return the new tree if so.  */
6035 
6036 static tree
6037 fold_range_test (location_t loc, enum tree_code code, tree type,
6038 		 tree op0, tree op1)
6039 {
6040   int or_op = (code == TRUTH_ORIF_EXPR
6041 	       || code == TRUTH_OR_EXPR);
6042   int in0_p, in1_p, in_p;
6043   tree low0, low1, low, high0, high1, high;
6044   bool strict_overflow_p = false;
6045   tree tem, lhs, rhs;
6046   const char * const warnmsg = G_("assuming signed overflow does not occur "
6047 				  "when simplifying range test");
6048 
6049   if (!INTEGRAL_TYPE_P (type))
6050     return 0;
6051 
6052   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6053   /* If op0 is known true or false and this is a short-circuiting
6054      operation we must not merge with op1 since that makes side-effects
6055      unconditional.  So special-case this.  */
6056   if (!lhs
6057       && ((code == TRUTH_ORIF_EXPR && in0_p)
6058 	  || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6059     return op0;
6060   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6061 
6062   /* If this is an OR operation, invert both sides; we will invert
6063      again at the end.  */
6064   if (or_op)
6065     in0_p = ! in0_p, in1_p = ! in1_p;
6066 
6067   /* If both expressions are the same, if we can merge the ranges, and we
6068      can build the range test, return it or it inverted.  If one of the
6069      ranges is always true or always false, consider it to be the same
6070      expression as the other.  */
6071   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6072       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6073 		       in1_p, low1, high1)
6074       && (tem = (build_range_check (loc, type,
6075 				    lhs != 0 ? lhs
6076 				    : rhs != 0 ? rhs : integer_zero_node,
6077 				    in_p, low, high))) != 0)
6078     {
6079       if (strict_overflow_p)
6080 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6081       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6082     }
6083 
6084   /* On machines where the branch cost is expensive, if this is a
6085      short-circuited branch and the underlying object on both sides
6086      is the same, make a non-short-circuit operation.  */
6087   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6088   if (param_logical_op_non_short_circuit != -1)
6089     logical_op_non_short_circuit
6090       = param_logical_op_non_short_circuit;
6091   if (logical_op_non_short_circuit
6092       && !sanitize_coverage_p ()
6093       && lhs != 0 && rhs != 0
6094       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6095       && operand_equal_p (lhs, rhs, 0))
6096     {
6097       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
6098 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6099 	 which cases we can't do this.  */
6100       if (simple_operand_p (lhs))
6101 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6102 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6103 			   type, op0, op1);
6104 
6105       else if (!lang_hooks.decls.global_bindings_p ()
6106 	       && !CONTAINS_PLACEHOLDER_P (lhs))
6107 	{
6108 	  tree common = save_expr (lhs);
6109 
6110 	  if ((lhs = build_range_check (loc, type, common,
6111 					or_op ? ! in0_p : in0_p,
6112 					low0, high0)) != 0
6113 	      && (rhs = build_range_check (loc, type, common,
6114 					   or_op ? ! in1_p : in1_p,
6115 					   low1, high1)) != 0)
6116 	    {
6117 	      if (strict_overflow_p)
6118 		fold_overflow_warning (warnmsg,
6119 				       WARN_STRICT_OVERFLOW_COMPARISON);
6120 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6121 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6122 				 type, lhs, rhs);
6123 	    }
6124 	}
6125     }
6126 
6127   return 0;
6128 }
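
/* For example (hedged): "x == 2 || x == 5" yields ranges that cannot be
   merged, but both comparisons test the same simple operand X, so when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds the TRUTH_ORIF_EXPR is rewritten
   into the branch-free TRUTH_OR_EXPR "x == 2 | x == 5".  */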
6129 
6130 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
6131    P-bit value.  Arrange things so the extra bits will be set to zero if and
6132    only if C is sign-extended to its full width.  If MASK is nonzero,
6133    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
6134 
6135 static tree
6136 unextend (tree c, int p, int unsignedp, tree mask)
6137 {
6138   tree type = TREE_TYPE (c);
6139   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6140   tree temp;
6141 
6142   if (p == modesize || unsignedp)
6143     return c;
6144 
6145   /* We work by getting just the sign bit into the low-order bit, then
6146      into the high-order bit, then sign-extend.  We then XOR that value
6147      with C.  */
6148   temp = build_int_cst (TREE_TYPE (c),
6149 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6150 
6151   /* We must use a signed type in order to get an arithmetic right shift.
6152      However, we must also avoid introducing accidental overflows, so that
6153      a subsequent call to integer_zerop will work.  Hence we must
6154      do the type conversion here.  At this point, the constant is either
6155      zero or one, and the conversion to a signed type can never overflow.
6156      We could get an overflow if this conversion is done anywhere else.  */
6157   if (TYPE_UNSIGNED (type))
6158     temp = fold_convert (signed_type_for (type), temp);
6159 
6160   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6161   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6162   if (mask != 0)
6163     temp = const_binop (BIT_AND_EXPR, temp,
6164 			fold_convert (TREE_TYPE (c), mask));
6165   /* If necessary, convert the type back to match the type of C.  */
6166   if (TYPE_UNSIGNED (type))
6167     temp = fold_convert (type, temp);
6168 
6169   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6170 }
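
/* Worked example (hedged): with an 8-bit mode and P == 4, C == 0x0a has
   bit P-1 set, so TEMP becomes 0x80 after the left shift and 0xf0 after
   the arithmetic right shift.  XORing gives 0xfa for C == 0x0a (upper
   bits nonzero: C was not sign-extended) but 0x0a for C == 0xfa (upper
   bits zero: C was the sign extension of its low four bits).  */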
6171 
6172 /* For an expression that has the form
6173      (A && B) || ~B
6174    or
6175      (A || B) && ~B,
6176    we can drop one of the inner expressions and simplify to
6177      A || ~B
6178    or
6179      A && ~B
6180    LOC is the location of the resulting expression.  OP is the inner
6181    logical operation; the left-hand side in the examples above, while CMPOP
6182    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
6183    removing a condition that guards another, as in
6184      (A != NULL && A->...) || A == NULL
6185    which we must not transform.  If RHS_ONLY is true, only eliminate the
6186    right-most operand of the inner logical operation.  */
6187 
6188 static tree
6189 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6190 				 bool rhs_only)
6191 {
6192   enum tree_code code = TREE_CODE (cmpop);
6193   enum tree_code truthop_code = TREE_CODE (op);
6194   tree lhs = TREE_OPERAND (op, 0);
6195   tree rhs = TREE_OPERAND (op, 1);
6196   tree orig_lhs = lhs, orig_rhs = rhs;
6197   enum tree_code rhs_code = TREE_CODE (rhs);
6198   enum tree_code lhs_code = TREE_CODE (lhs);
6199   enum tree_code inv_code;
6200 
6201   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6202     return NULL_TREE;
6203 
6204   if (TREE_CODE_CLASS (code) != tcc_comparison)
6205     return NULL_TREE;
6206 
6207   tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6208 
6209   if (rhs_code == truthop_code)
6210     {
6211       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6212       if (newrhs != NULL_TREE)
6213 	{
6214 	  rhs = newrhs;
6215 	  rhs_code = TREE_CODE (rhs);
6216 	}
6217     }
6218   if (lhs_code == truthop_code && !rhs_only)
6219     {
6220       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6221       if (newlhs != NULL_TREE)
6222 	{
6223 	  lhs = newlhs;
6224 	  lhs_code = TREE_CODE (lhs);
6225 	}
6226     }
6227 
6228   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6229   if (inv_code == rhs_code
6230       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6231       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6232     return lhs;
6233   if (!rhs_only && inv_code == lhs_code
6234       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6235       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6236     return rhs;
6237   if (rhs != orig_rhs || lhs != orig_lhs)
6238     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6239 			    lhs, rhs);
6240   return NULL_TREE;
6241 }
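
/* For example (hedged): in (x > 0 && y != 0) || y == 0 the inner
   y != 0 is exactly the inversion of the right-hand comparison, so it
   is dropped and the whole expression simplifies to x > 0 || y == 0,
   matching the first pattern in the comment above.  */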
6242 
6243 /* Find ways of folding logical expressions of LHS and RHS:
6244    Try to merge two comparisons to the same innermost item.
6245    Look for range tests like "ch >= '0' && ch <= '9'".
6246    Look for combinations of simple terms on machines with expensive branches
6247    and evaluate the RHS unconditionally.
6248 
6249    For example, if we have p->a == 2 && p->b == 4 and we can make an
6250    object large enough to span both A and B, we can do this with a comparison
6251    against the object ANDed with a mask.
6252 
6253    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6254    operations to do this with one comparison.
6255 
6256    We check for both normal comparisons and the BIT_AND_EXPRs made by this
6257    function and the one above.
6258 
6259    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
6260    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6261 
6262    TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6263    two operands.
6264 
6265    We return the simplified tree or 0 if no optimization is possible.  */
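
/* As an illustration (the struct layout here is hypothetical): given
     struct S { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can be merged, on a little-endian
   target, into a single byte-sized load and compare, roughly
     (*(unsigned char *) p) == (2 | (4 << 4)).  */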
6266 
6267 static tree
6268 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6269 		    tree lhs, tree rhs)
6270 {
6271   /* If this is the "or" of two comparisons, we can do something if
6272      the comparisons are NE_EXPR.  If this is the "and", we can do something
6273      if the comparisons are EQ_EXPR.  I.e.,
6274 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
6275 
6276      WANTED_CODE is this operation code.  For single bit fields, we can
6277      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6278      comparison for one-bit fields.  */
6279 
6280   enum tree_code wanted_code;
6281   enum tree_code lcode, rcode;
6282   tree ll_arg, lr_arg, rl_arg, rr_arg;
6283   tree ll_inner, lr_inner, rl_inner, rr_inner;
6284   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6285   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6286   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6287   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6288   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6289   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6290   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6291   scalar_int_mode lnmode, rnmode;
6292   tree ll_mask, lr_mask, rl_mask, rr_mask;
6293   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6294   tree l_const, r_const;
6295   tree lntype, rntype, result;
6296   HOST_WIDE_INT first_bit, end_bit;
6297   int volatilep;
6298 
6299   /* Start by getting the comparison codes.  Fail if anything is volatile.
6300      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6301      it were surrounded with a NE_EXPR.  */
6302 
6303   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6304     return 0;
6305 
6306   lcode = TREE_CODE (lhs);
6307   rcode = TREE_CODE (rhs);
6308 
6309   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6310     {
6311       lhs = build2 (NE_EXPR, truth_type, lhs,
6312 		    build_int_cst (TREE_TYPE (lhs), 0));
6313       lcode = NE_EXPR;
6314     }
6315 
6316   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6317     {
6318       rhs = build2 (NE_EXPR, truth_type, rhs,
6319 		    build_int_cst (TREE_TYPE (rhs), 0));
6320       rcode = NE_EXPR;
6321     }
6322 
6323   if (TREE_CODE_CLASS (lcode) != tcc_comparison
6324       || TREE_CODE_CLASS (rcode) != tcc_comparison)
6325     return 0;
6326 
6327   ll_arg = TREE_OPERAND (lhs, 0);
6328   lr_arg = TREE_OPERAND (lhs, 1);
6329   rl_arg = TREE_OPERAND (rhs, 0);
6330   rr_arg = TREE_OPERAND (rhs, 1);
6331 
6332   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
6333   if (simple_operand_p (ll_arg)
6334       && simple_operand_p (lr_arg))
6335     {
6336       if (operand_equal_p (ll_arg, rl_arg, 0)
6337           && operand_equal_p (lr_arg, rr_arg, 0))
6338 	{
6339           result = combine_comparisons (loc, code, lcode, rcode,
6340 					truth_type, ll_arg, lr_arg);
6341 	  if (result)
6342 	    return result;
6343 	}
6344       else if (operand_equal_p (ll_arg, rr_arg, 0)
6345                && operand_equal_p (lr_arg, rl_arg, 0))
6346 	{
6347           result = combine_comparisons (loc, code, lcode,
6348 					swap_tree_comparison (rcode),
6349 					truth_type, ll_arg, lr_arg);
6350 	  if (result)
6351 	    return result;
6352 	}
6353     }
6354 
6355   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6356 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6357 
6358   /* If the RHS can be evaluated unconditionally and its operands are
6359      simple, it wins to evaluate the RHS unconditionally on machines
6360      with expensive branches.  In this case, this isn't a comparison
6361      that can be merged.  */
6362 
6363   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6364 		   false) >= 2
6365       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6366       && simple_operand_p (rl_arg)
6367       && simple_operand_p (rr_arg))
6368     {
6369       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
6370       if (code == TRUTH_OR_EXPR
6371 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
6372 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
6373 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6374 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6375 	return build2_loc (loc, NE_EXPR, truth_type,
6376 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6377 				   ll_arg, rl_arg),
6378 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6379 
6380       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
6381       if (code == TRUTH_AND_EXPR
6382 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
6383 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
6384 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6385 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6386 	return build2_loc (loc, EQ_EXPR, truth_type,
6387 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6388 				   ll_arg, rl_arg),
6389 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6390     }
6391 
6392   /* See if the comparisons can be merged.  Then get all the parameters for
6393      each side.  */
6394 
6395   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6396       || (rcode != EQ_EXPR && rcode != NE_EXPR))
6397     return 0;
6398 
6399   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6400   volatilep = 0;
6401   ll_inner = decode_field_reference (loc, &ll_arg,
6402 				     &ll_bitsize, &ll_bitpos, &ll_mode,
6403 				     &ll_unsignedp, &ll_reversep, &volatilep,
6404 				     &ll_mask, &ll_and_mask);
6405   lr_inner = decode_field_reference (loc, &lr_arg,
6406 				     &lr_bitsize, &lr_bitpos, &lr_mode,
6407 				     &lr_unsignedp, &lr_reversep, &volatilep,
6408 				     &lr_mask, &lr_and_mask);
6409   rl_inner = decode_field_reference (loc, &rl_arg,
6410 				     &rl_bitsize, &rl_bitpos, &rl_mode,
6411 				     &rl_unsignedp, &rl_reversep, &volatilep,
6412 				     &rl_mask, &rl_and_mask);
6413   rr_inner = decode_field_reference (loc, &rr_arg,
6414 				     &rr_bitsize, &rr_bitpos, &rr_mode,
6415 				     &rr_unsignedp, &rr_reversep, &volatilep,
6416 				     &rr_mask, &rr_and_mask);
6417 
6418   /* The inner operation on the lhs of each comparison must be the
6419      same if we are to be able to do anything.  Then see if we have
6420      constants.  If not, the same must be true for the rhs's of the
6421      comparisons.  */
6422   if (volatilep
6423       || ll_reversep != rl_reversep
6424       || ll_inner == 0 || rl_inner == 0
6425       || ! operand_equal_p (ll_inner, rl_inner, 0))
6426     return 0;
6427 
6428   if (TREE_CODE (lr_arg) == INTEGER_CST
6429       && TREE_CODE (rr_arg) == INTEGER_CST)
6430     {
6431       l_const = lr_arg, r_const = rr_arg;
6432       lr_reversep = ll_reversep;
6433     }
6434   else if (lr_reversep != rr_reversep
6435 	   || lr_inner == 0 || rr_inner == 0
6436 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
6437     return 0;
6438   else
6439     l_const = r_const = 0;
6440 
6441   /* If either comparison code is not correct for our logical operation,
6442      fail.  However, we can convert a one-bit comparison against zero into
6443      the opposite comparison against that bit being set in the field.  */
6444 
6445   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6446   if (lcode != wanted_code)
6447     {
6448       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6449 	{
6450 	  /* Make the left operand unsigned, since we are only interested
6451 	     in the value of one bit.  Otherwise we are doing the wrong
6452 	     thing below.  */
6453 	  ll_unsignedp = 1;
6454 	  l_const = ll_mask;
6455 	}
6456       else
6457 	return 0;
6458     }
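
  /* E.g. with TRUTH_AND_EXPR the wanted code is EQ_EXPR, so an operand
     of the form (x & 8) != 0 is recast just above as the equivalent
     test (x & 8) == 8, taking the mask itself as the constant.  */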
6459 
6460   /* This is analogous to the code for l_const above.  */
6461   if (rcode != wanted_code)
6462     {
6463       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6464 	{
6465 	  rl_unsignedp = 1;
6466 	  r_const = rl_mask;
6467 	}
6468       else
6469 	return 0;
6470     }
6471 
6472   /* See if we can find a mode that contains both fields being compared on
6473      the left.  If we can't, fail.  Otherwise, update all constants and masks
6474      to be relative to a field of that size.  */
6475   first_bit = MIN (ll_bitpos, rl_bitpos);
6476   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6477   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6478 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6479 		      volatilep, &lnmode))
6480     return 0;
6481 
6482   lnbitsize = GET_MODE_BITSIZE (lnmode);
6483   lnbitpos = first_bit & ~ (lnbitsize - 1);
6484   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6485   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6486 
6487   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6488     {
6489       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6490       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6491     }
6492 
6493   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6494 			 size_int (xll_bitpos));
6495   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6496 			 size_int (xrl_bitpos));
6497   if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6498     return 0;
6499 
6500   if (l_const)
6501     {
6502       l_const = fold_convert_loc (loc, lntype, l_const);
6503       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6504       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6505       if (l_const == NULL_TREE)
6506 	return 0;
6507       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6508 					fold_build1_loc (loc, BIT_NOT_EXPR,
6509 							 lntype, ll_mask))))
6510 	{
6511 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6512 
6513 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6514 	}
6515     }
6516   if (r_const)
6517     {
6518       r_const = fold_convert_loc (loc, lntype, r_const);
6519       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6520       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6521       if (r_const == NULL_TREE)
6522 	return 0;
6523       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6524 					fold_build1_loc (loc, BIT_NOT_EXPR,
6525 							 lntype, rl_mask))))
6526 	{
6527 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6528 
6529 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6530 	}
6531     }
6532 
6533   /* If the right sides are not constant, do the same for them.  Also,
6534      disallow this optimization if a size, signedness or storage order
6535      mismatch occurs between the left and right sides.  */
6536   if (l_const == 0)
6537     {
6538       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6539 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6540 	  || ll_reversep != lr_reversep
6541 	  /* Make sure the two fields on the right
6542 	     correspond to the left without being swapped.  */
6543 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6544 	return 0;
6545 
6546       first_bit = MIN (lr_bitpos, rr_bitpos);
6547       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6548       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6549 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6550 			  volatilep, &rnmode))
6551 	return 0;
6552 
6553       rnbitsize = GET_MODE_BITSIZE (rnmode);
6554       rnbitpos = first_bit & ~ (rnbitsize - 1);
6555       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6556       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6557 
6558       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6559 	{
6560 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6561 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6562 	}
6563 
6564       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6565 							    rntype, lr_mask),
6566 			     size_int (xlr_bitpos));
6567       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6568 							    rntype, rr_mask),
6569 			     size_int (xrr_bitpos));
6570       if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6571 	return 0;
6572 
6573       /* Make a mask that corresponds to both fields being compared.
6574 	 Do this for both items being compared.  If the operands are the
6575 	 same size and the bits being compared are in the same position
6576 	 then we can do this by masking both and comparing the masked
6577 	 results.  */
6578       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6579       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6580       if (lnbitsize == rnbitsize
6581 	  && xll_bitpos == xlr_bitpos
6582 	  && lnbitpos >= 0
6583 	  && rnbitpos >= 0)
6584 	{
6585 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6586 				    lntype, lnbitsize, lnbitpos,
6587 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6588 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6589 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6590 
6591 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6592 				    rntype, rnbitsize, rnbitpos,
6593 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6594 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6595 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6596 
6597 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6598 	}
6599 
6600       /* There is still another way we can do something:  If both pairs of
6601 	 fields being compared are adjacent, we may be able to make a wider
6602 	 field containing them both.
6603 
6604 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6605 	 the mask must be shifted to account for the shift done by
6606 	 make_bit_field_ref.  */
6607       if (((ll_bitsize + ll_bitpos == rl_bitpos
6608 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6609 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6610 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6611 	  && ll_bitpos >= 0
6612 	  && rl_bitpos >= 0
6613 	  && lr_bitpos >= 0
6614 	  && rr_bitpos >= 0)
6615 	{
6616 	  tree type;
6617 
6618 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6619 				    ll_bitsize + rl_bitsize,
6620 				    MIN (ll_bitpos, rl_bitpos),
6621 				    ll_unsignedp, ll_reversep);
6622 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6623 				    lr_bitsize + rr_bitsize,
6624 				    MIN (lr_bitpos, rr_bitpos),
6625 				    lr_unsignedp, lr_reversep);
6626 
6627 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6628 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6629 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6630 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6631 	  if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6632 	    return 0;
6633 
6634 	  /* Convert to the smaller type before masking out unwanted bits.  */
6635 	  type = lntype;
6636 	  if (lntype != rntype)
6637 	    {
6638 	      if (lnbitsize > rnbitsize)
6639 		{
6640 		  lhs = fold_convert_loc (loc, rntype, lhs);
6641 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6642 		  type = rntype;
6643 		}
6644 	      else if (lnbitsize < rnbitsize)
6645 		{
6646 		  rhs = fold_convert_loc (loc, lntype, rhs);
6647 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6648 		  type = lntype;
6649 		}
6650 	    }
6651 
6652 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6653 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6654 
6655 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6656 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6657 
6658 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6659 	}
6660 
6661       return 0;
6662     }
6663 
6664   /* Handle the case of comparisons with constants.  If there is something in
6665      common between the masks, those bits of the constants must be the same.
6666      If not, the condition is always false.  Test for this to avoid generating
6667      incorrect code below.  */
6668   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6669   if (! integer_zerop (result)
6670       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6671 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6672     {
6673       if (wanted_code == NE_EXPR)
6674 	{
6675 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6676 	  return constant_boolean_node (true, truth_type);
6677 	}
6678       else
6679 	{
6680 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6681 	  return constant_boolean_node (false, truth_type);
6682 	}
6683     }
6684 
6685   if (lnbitpos < 0)
6686     return 0;
6687 
6688   /* Construct the expression we will return.  First get the component
6689      reference we will make.  Unless the mask is all ones the width of
6690      that field, perform the mask operation.  Then compare with the
6691      merged constant.  */
6692   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6693 			       lntype, lnbitsize, lnbitpos,
6694 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6695 
6696   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6697   if (! all_ones_mask_p (ll_mask, lnbitsize))
6698     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6699 
6700   return build2_loc (loc, wanted_code, truth_type, result,
6701 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6702 }
6703 
6704 /* T is an integer expression that is being multiplied or divided by, or
6705    taken modulo, a constant C (CODE says which, and what kind of divide or
6706    modulus).  See if we can eliminate that operation by folding it with
6707    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6708    should be used for the computation if wider than our type.
6709 
6710    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6711    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6712    expression would not overflow or that overflow is undefined for the type
6713    in the language in question.
6714 
6715    If we return a non-null expression, it is an equivalent form of the
6716    original computation, but need not be in the original type.
6717 
6718    We set *STRICT_OVERFLOW_P to true if the return value depends on
6719    signed overflow being undefined.  Otherwise we do not change
6720    *STRICT_OVERFLOW_P.  */
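
/* E.g. (x * 4) / 4 simplifies to x for signed x, but only because
   signed overflow is undefined: the two forms differ when x * 4 wraps,
   which is why *STRICT_OVERFLOW_P is set for that fold.  */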
6721 
6722 static tree
6723 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6724 		bool *strict_overflow_p)
6725 {
6726   /* To avoid exponential search depth, refuse to allow recursion past
6727      three levels.  Beyond that (1) it's highly unlikely that we'll find
6728      something interesting and (2) we've probably processed it before
6729      when we built the inner expression.  */
6730 
6731   static int depth;
6732   tree ret;
6733 
6734   if (depth > 3)
6735     return NULL;
6736 
6737   depth++;
6738   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6739   depth--;
6740 
6741   return ret;
6742 }
6743 
6744 static tree
6745 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6746 		  bool *strict_overflow_p)
6747 {
6748   tree type = TREE_TYPE (t);
6749   enum tree_code tcode = TREE_CODE (t);
6750   tree ctype = (wide_type != 0
6751 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6752 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6753 		? wide_type : type);
6754   tree t1, t2;
6755   int same_p = tcode == code;
6756   tree op0 = NULL_TREE, op1 = NULL_TREE;
6757   bool sub_strict_overflow_p;
6758 
6759   /* Don't deal with constants of zero here; they confuse the code below.  */
6760   if (integer_zerop (c))
6761     return NULL_TREE;
6762 
6763   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6764     op0 = TREE_OPERAND (t, 0);
6765 
6766   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6767     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6768 
6769   /* Note that we need not handle conditional operations here since fold
6770      already handles those cases.  So just do arithmetic here.  */
6771   switch (tcode)
6772     {
6773     case INTEGER_CST:
6774       /* For a constant, we can always simplify if we are a multiply
6775 	 or (for divide and modulus) if it is a multiple of our constant.  */
6776       if (code == MULT_EXPR
6777 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6778 				TYPE_SIGN (type)))
6779 	{
6780 	  tree tem = const_binop (code, fold_convert (ctype, t),
6781 				  fold_convert (ctype, c));
6782 	  /* If the multiplication overflowed, we lost information on it.
6783 	     See PR68142 and PR69845.  */
6784 	  if (TREE_OVERFLOW (tem))
6785 	    return NULL_TREE;
6786 	  return tem;
6787 	}
6788       break;
6789 
6790     CASE_CONVERT: case NON_LVALUE_EXPR:
6791       if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6792 	break;
6793       /* If op0 is an expression ...  */
6794       if ((COMPARISON_CLASS_P (op0)
6795 	   || UNARY_CLASS_P (op0)
6796 	   || BINARY_CLASS_P (op0)
6797 	   || VL_EXP_CLASS_P (op0)
6798 	   || EXPRESSION_CLASS_P (op0))
6799 	  /* ... and has wrapping overflow, and its type is smaller
6800 	     than ctype, then we cannot pass through as widening.  */
6801 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6802 	       && (TYPE_PRECISION (ctype)
6803 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6804 	      /* ... or this is a truncation (t is narrower than op0),
6805 		 then we cannot pass through this narrowing.  */
6806 	      || (TYPE_PRECISION (type)
6807 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6808 	      /* ... or signedness changes for division or modulus,
6809 		 then we cannot pass through this conversion.  */
6810 	      || (code != MULT_EXPR
6811 		  && (TYPE_UNSIGNED (ctype)
6812 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6813 	      /* ... or has undefined overflow while the converted to
6814 		 type has not, we cannot do the operation in the inner type
6815 		 as that would introduce undefined overflow.  */
6816 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6817 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6818 	break;
6819 
6820       /* Pass the constant down and see if we can make a simplification.  If
6821 	 we can, replace this expression with the inner simplification for
6822 	 possible later conversion to our or some other type.  */
6823       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6824 	  && TREE_CODE (t2) == INTEGER_CST
6825 	  && !TREE_OVERFLOW (t2)
6826 	  && (t1 = extract_muldiv (op0, t2, code,
6827 				   code == MULT_EXPR ? ctype : NULL_TREE,
6828 				   strict_overflow_p)) != 0)
6829 	return t1;
6830       break;
6831 
6832     case ABS_EXPR:
6833       /* If widening the type changes it from signed to unsigned, then we
6834          must avoid building ABS_EXPR itself as unsigned.  */
6835       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6836         {
6837           tree cstype = (*signed_type_for) (ctype);
6838           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6839 	      != 0)
6840             {
6841               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6842               return fold_convert (ctype, t1);
6843             }
6844           break;
6845         }
6846       /* If the constant is negative, we cannot simplify this.  */
6847       if (tree_int_cst_sgn (c) == -1)
6848         break;
6849       /* FALLTHROUGH */
6850     case NEGATE_EXPR:
6851       /* For division and modulus, type can't be unsigned, as e.g.
6852 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6853 	 For signed types, even with wrapping overflow, this is fine.  */
6854       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6855 	break;
6856       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6857 	  != 0)
6858 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6859       break;
6860 
6861     case MIN_EXPR:  case MAX_EXPR:
6862       /* If widening the type changes the signedness, then we can't perform
6863 	 this optimization as that changes the result.  */
6864       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6865 	break;
6866 
6867       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6868       sub_strict_overflow_p = false;
6869       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6870 				&sub_strict_overflow_p)) != 0
6871 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6872 				   &sub_strict_overflow_p)) != 0)
6873 	{
6874 	  if (tree_int_cst_sgn (c) < 0)
6875 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6876 	  if (sub_strict_overflow_p)
6877 	    *strict_overflow_p = true;
6878 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6879 			      fold_convert (ctype, t2));
6880 	}
6881       break;
6882 
6883     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6884       /* If the second operand is constant, this is a multiplication
6885 	 or floor division by a power of two, so we can treat it that
6886 	 way unless the multiplier or divisor overflows.  Signed
6887 	 left-shift overflow is implementation-defined rather than
6888 	 undefined in C90, so do not convert signed left shift into
6889 	 multiplication.  */
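      /* E.g. an unsigned x << 3 is rewritten below as x * 8, and
	 x >> 2 as the floor division x / 4, and the simplification is
	 then retried on the rewritten tree.  */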
6890       if (TREE_CODE (op1) == INTEGER_CST
6891 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6892 	  /* const_binop may not detect overflow correctly,
6893 	     so check for it explicitly here.  */
6894 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6895 			wi::to_wide (op1))
6896 	  && (t1 = fold_convert (ctype,
6897 				 const_binop (LSHIFT_EXPR, size_one_node,
6898 					      op1))) != 0
6899 	  && !TREE_OVERFLOW (t1))
6900 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6901 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6902 				       ctype,
6903 				       fold_convert (ctype, op0),
6904 				       t1),
6905 			       c, code, wide_type, strict_overflow_p);
6906       break;
6907 
6908     case PLUS_EXPR:  case MINUS_EXPR:
6909       /* See if we can eliminate the operation on both sides.  If we can, we
6910 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6911 	 cases where we can do anything are if the second operand is a
6912 	 constant.  */
6913       sub_strict_overflow_p = false;
6914       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6915       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6916       if (t1 != 0 && t2 != 0
6917 	  && TYPE_OVERFLOW_WRAPS (ctype)
6918 	  && (code == MULT_EXPR
6919 	      /* If not multiplication, we can only do this if both operands
6920 		 are divisible by c.  */
6921 	      || (multiple_of_p (ctype, op0, c)
6922 	          && multiple_of_p (ctype, op1, c))))
6923 	{
6924 	  if (sub_strict_overflow_p)
6925 	    *strict_overflow_p = true;
6926 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6927 			      fold_convert (ctype, t2));
6928 	}
6929 
6930       /* If this was a subtraction, negate OP1 and set it to be an addition.
6931 	 This simplifies the logic below.  */
6932       if (tcode == MINUS_EXPR)
6933 	{
6934 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6935 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6936 	  if (TREE_CODE (op0) == INTEGER_CST)
6937 	    {
6938 	      std::swap (op0, op1);
6939 	      std::swap (t1, t2);
6940 	    }
6941 	}
6942 
6943       if (TREE_CODE (op1) != INTEGER_CST)
6944 	break;
6945 
6946       /* If either OP1 or C is negative, this optimization is not safe for
6947 	 some of the division and remainder types while for others we need
6948 	 to change the code.  */
6949       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6950 	{
6951 	  if (code == CEIL_DIV_EXPR)
6952 	    code = FLOOR_DIV_EXPR;
6953 	  else if (code == FLOOR_DIV_EXPR)
6954 	    code = CEIL_DIV_EXPR;
6955 	  else if (code != MULT_EXPR
6956 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6957 	    break;
6958 	}
6959 
6960       /* If it's a multiply or a division/modulus operation of a multiple
6961          of our constant, do the operation and verify it doesn't overflow.  */
6962       if (code == MULT_EXPR
6963 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6964 				TYPE_SIGN (type)))
6965 	{
6966 	  op1 = const_binop (code, fold_convert (ctype, op1),
6967 			     fold_convert (ctype, c));
6968 	  /* We allow the constant to overflow with wrapping semantics.  */
6969 	  if (op1 == 0
6970 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6971 	    break;
6972 	}
6973       else
6974 	break;
6975 
6976       /* If we have an unsigned type, we cannot widen the operation since it
6977 	 will change the result if the original computation overflowed.  */
6978       if (TYPE_UNSIGNED (ctype) && ctype != type)
6979 	break;
6980 
6981       /* The last case is if we are a multiply.  In that case, we can
6982 	 apply the distributive law to commute the multiply and addition
6983 	 if the multiplication of the constants doesn't overflow
6984 	 and overflow is defined.  With undefined overflow
6985 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6986 	 But fold_plusminus_mult_expr would factor back any power-of-two
6987 	 value so do not distribute in the first place in this case.  */
6988       if (code == MULT_EXPR
6989 	  && TYPE_OVERFLOW_WRAPS (ctype)
6990 	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6991 	return fold_build2 (tcode, ctype,
6992 			    fold_build2 (code, ctype,
6993 					 fold_convert (ctype, op0),
6994 					 fold_convert (ctype, c)),
6995 			    op1);
6996 
6997       break;
6998 
6999     case MULT_EXPR:
7000       /* We have a special case here if we are doing something like
7001 	 (C * 8) % 4 since we know that's zero.  */
7002       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7003 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7004 	  /* If the multiplication can overflow we cannot optimize this.  */
7005 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7006 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7007 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7008 				TYPE_SIGN (type)))
7009 	{
7010 	  *strict_overflow_p = true;
7011 	  return omit_one_operand (type, integer_zero_node, op0);
7012 	}
7013 
7014       /* ... fall through ...  */
7015 
7016     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
7017     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
7018       /* If we can extract our operation from the LHS, do so and return a
7019 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
7020 	 do something only if the second operand is a constant.  */
7021       if (same_p
7022 	  && TYPE_OVERFLOW_WRAPS (ctype)
7023 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
7024 				   strict_overflow_p)) != 0)
7025 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7026 			    fold_convert (ctype, op1));
7027       else if (tcode == MULT_EXPR && code == MULT_EXPR
7028 	       && TYPE_OVERFLOW_WRAPS (ctype)
7029 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
7030 					strict_overflow_p)) != 0)
7031 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7032 			    fold_convert (ctype, t1));
7033       else if (TREE_CODE (op1) != INTEGER_CST)
7034 	return 0;
7035 
7036       /* If these are the same operation types, we can associate them
7037 	 assuming no overflow.  */
7038       if (tcode == code)
7039 	{
7040 	  bool overflow_p = false;
7041 	  wi::overflow_type overflow_mul;
7042 	  signop sign = TYPE_SIGN (ctype);
7043 	  unsigned prec = TYPE_PRECISION (ctype);
7044 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
7045 				  wi::to_wide (c, prec),
7046 				  sign, &overflow_mul);
7047 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7048 	  if (overflow_mul
7049 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7050 	    overflow_p = true;
7051 	  if (!overflow_p)
7052 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7053 				wide_int_to_tree (ctype, mul));
7054 	}
7055 
7056       /* If these operations "cancel" each other, we have the main
7057 	 optimizations of this pass, which occur when either constant is a
7058 	 multiple of the other, in which case we replace this with either an
7059 	 operation of CODE or TCODE.
7060 
7061 	 If we have an unsigned type, we cannot do this since it will change
7062 	 the result if the original computation overflowed.  */
7063       if (TYPE_OVERFLOW_UNDEFINED (ctype)
7064 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7065 	      || (tcode == MULT_EXPR
7066 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7067 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7068 		  && code != MULT_EXPR)))
7069 	{
7070 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7071 				 TYPE_SIGN (type)))
7072 	    {
7073 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
7074 		*strict_overflow_p = true;
7075 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7076 				  fold_convert (ctype,
7077 						const_binop (TRUNC_DIV_EXPR,
7078 							     op1, c)));
7079 	    }
7080 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7081 				      TYPE_SIGN (type)))
7082 	    {
7083 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
7084 		*strict_overflow_p = true;
7085 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
7086 				  fold_convert (ctype,
7087 						const_binop (TRUNC_DIV_EXPR,
7088 							     c, op1)));
7089 	    }
7090 	}
7091       break;
7092 
7093     default:
7094       break;
7095     }
7096 
7097   return 0;
7098 }
7099 
7100 /* Return a node which has the indicated constant VALUE (either 0 or
7101    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7102    and is of the indicated TYPE.  */
7103 
7104 tree
7105 constant_boolean_node (bool value, tree type)
7106 {
7107   if (type == integer_type_node)
7108     return value ? integer_one_node : integer_zero_node;
7109   else if (type == boolean_type_node)
7110     return value ? boolean_true_node : boolean_false_node;
7111   else if (TREE_CODE (type) == VECTOR_TYPE)
7112     return build_vector_from_val (type,
7113 				  build_int_cst (TREE_TYPE (type),
7114 						 value ? -1 : 0));
7115   else
7116     return fold_convert (type, value ? integer_one_node : integer_zero_node);
7117 }
7118 
7119 
7120 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7121    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
7122    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7123    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
7124    COND is the first argument to CODE; otherwise (as in the example
7125    given here), it is the second argument.  TYPE is the type of the
7126    original expression.  Return NULL_TREE if no simplification is
7127    possible.  */
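
/* For instance, (b ? 1 : 2) * 4, with COND the conditional and ARG the
   constant 4, folds to b ? 4 : 8, since both branches simplify to
   constants (a sketch; VEC_COND_EXPR is handled analogously).  */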
7128 
7129 static tree
7130 fold_binary_op_with_conditional_arg (location_t loc,
7131 				     enum tree_code code,
7132 				     tree type, tree op0, tree op1,
7133 				     tree cond, tree arg, int cond_first_p)
7134 {
7135   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7136   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7137   tree test, true_value, false_value;
7138   tree lhs = NULL_TREE;
7139   tree rhs = NULL_TREE;
7140   enum tree_code cond_code = COND_EXPR;
7141 
7142   /* Do not move possibly trapping operations into the conditional as this
7143      pessimizes code and causes gimplification issues when applied late.  */
7144   if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7145 			      ANY_INTEGRAL_TYPE_P (type)
7146 			      && TYPE_OVERFLOW_TRAPS (type), op1))
7147     return NULL_TREE;
7148 
7149   if (TREE_CODE (cond) == COND_EXPR
7150       || TREE_CODE (cond) == VEC_COND_EXPR)
7151     {
7152       test = TREE_OPERAND (cond, 0);
7153       true_value = TREE_OPERAND (cond, 1);
7154       false_value = TREE_OPERAND (cond, 2);
7155       /* If this operand is an expression with void type (such as a
7156 	 throw expression), it does not make sense to try to perform a
7157 	 logical or arithmetic operation involving it.  */
7158       if (VOID_TYPE_P (TREE_TYPE (true_value)))
7159 	lhs = true_value;
7160       if (VOID_TYPE_P (TREE_TYPE (false_value)))
7161 	rhs = false_value;
7162     }
7163   else if (!(TREE_CODE (type) != VECTOR_TYPE
7164 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7165     {
7166       tree testtype = TREE_TYPE (cond);
7167       test = cond;
7168       true_value = constant_boolean_node (true, testtype);
7169       false_value = constant_boolean_node (false, testtype);
7170     }
7171   else
7172     /* Detect the case of mixing vector and scalar types - bail out.  */
7173     return NULL_TREE;
7174 
7175   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7176     cond_code = VEC_COND_EXPR;
7177 
7178   /* This transformation is only worthwhile if we don't have to wrap ARG
7179      in a SAVE_EXPR and the operation can be simplified without recursing
7180      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7181   if (!TREE_CONSTANT (arg)
7182       && (TREE_SIDE_EFFECTS (arg)
7183 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7184 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7185     return NULL_TREE;
7186 
7187   arg = fold_convert_loc (loc, arg_type, arg);
7188   if (lhs == 0)
7189     {
7190       true_value = fold_convert_loc (loc, cond_type, true_value);
7191       if (cond_first_p)
7192 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
7193       else
7194 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
7195     }
7196   if (rhs == 0)
7197     {
7198       false_value = fold_convert_loc (loc, cond_type, false_value);
7199       if (cond_first_p)
7200 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
7201       else
7202 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
7203     }
7204 
7205   /* Check that we have simplified at least one of the branches.  */
7206   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7207     return NULL_TREE;
7208 
7209   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7210 }
7211 
7212 
7213 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7214 
7215    If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7216    type TYPE, ARG + ZERO_ARG is the same as ARG.  If NEGATE, return true
7217    if ARG - ZERO_ARG is the same as ARG.
7218 
7219    If ARG is NULL, check for any value of type TYPE.
7220 
7221    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7222    and finite.  The problematic cases are when X is zero, and its mode
7223    has signed zeros.  In the case of rounding towards -infinity,
7224    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
7225    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
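
/* Concretely, with X = -0.0 under the default rounding mode,
   X + 0.0 evaluates to +0.0, so folding it to X would flip the sign of
   the zero, while X - 0.0 evaluates to -0.0 and folding is safe; hence
   the NEGATE distinction below.  */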
7226 
7227 bool
7228 fold_real_zero_addition_p (const_tree type, const_tree arg,
7229                            const_tree zero_arg, int negate)
7230 {
7231   if (!real_zerop (zero_arg))
7232     return false;
7233 
7234   /* Don't allow the fold with -fsignaling-nans.  */
7235   if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7236     return false;
7237 
7238   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
7239   if (!HONOR_SIGNED_ZEROS (type))
7240     return true;
7241 
7242   /* There is no case that is safe for all rounding modes.  */
7243   if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7244     return false;
7245 
7246   /* In a vector or complex, we would need to check the sign of all zeros.  */
7247   if (TREE_CODE (zero_arg) == VECTOR_CST)
7248     zero_arg = uniform_vector_p (zero_arg);
7249   if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7250     return false;
7251 
7252   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
7253   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7254     negate = !negate;
7255 
7256   /* The mode has signed zeros, and we have to honor their sign.
7257      In this situation, there are only two cases we can return true for.
7258      (i) X - 0 is the same as X with default rounding.
7259      (ii) X + 0 is X when X can't possibly be -0.0.  */
7260   return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7261 }
7262 
7263 /* Subroutine of match.pd that optimizes comparisons of a division by
7264    a nonzero integer constant against an integer constant, i.e.
7265    X/C1 op C2.
7266 
7267    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7268    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
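
/* E.g. for unsigned X, the test X / 4 == 3 holds exactly when
   12 <= X <= 15, so *LO becomes 12 and *HI becomes 15; in that case
   PROD below is C1 * C2 = 12 and TMP is C1 - 1 = 3.  */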
7269 
7270 enum tree_code
7271 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7272 		  tree *hi, bool *neg_overflow)
7273 {
7274   tree prod, tmp, type = TREE_TYPE (c1);
7275   signop sign = TYPE_SIGN (type);
7276   wi::overflow_type overflow;
7277 
7278   /* We have to do this the hard way to detect unsigned overflow.
7279      prod = int_const_binop (MULT_EXPR, c1, c2);  */
7280   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7281   prod = force_fit_type (type, val, -1, overflow);
7282   *neg_overflow = false;
7283 
7284   if (sign == UNSIGNED)
7285     {
7286       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7287       *lo = prod;
7288 
7289       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
7290       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7291       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7292     }
7293   else if (tree_int_cst_sgn (c1) >= 0)
7294     {
7295       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7296       switch (tree_int_cst_sgn (c2))
7297 	{
7298 	case -1:
7299 	  *neg_overflow = true;
7300 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7301 	  *hi = prod;
7302 	  break;
7303 
7304 	case 0:
7305 	  *lo = fold_negate_const (tmp, type);
7306 	  *hi = tmp;
7307 	  break;
7308 
7309 	case 1:
7310 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7311 	  *lo = prod;
7312 	  break;
7313 
7314 	default:
7315 	  gcc_unreachable ();
7316 	}
7317     }
7318   else
7319     {
7320       /* A negative divisor reverses the relational operators.  */
7321       code = swap_tree_comparison (code);
7322 
7323       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7324       switch (tree_int_cst_sgn (c2))
7325 	{
7326 	case -1:
7327 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7328 	  *lo = prod;
7329 	  break;
7330 
7331 	case 0:
7332 	  *hi = fold_negate_const (tmp, type);
7333 	  *lo = tmp;
7334 	  break;
7335 
7336 	case 1:
7337 	  *neg_overflow = true;
7338 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7339 	  *hi = prod;
7340 	  break;
7341 
7342 	default:
7343 	  gcc_unreachable ();
7344 	}
7345     }
7346 
7347   if (code != EQ_EXPR && code != NE_EXPR)
7348     return code;
7349 
7350   if (TREE_OVERFLOW (*lo)
7351       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7352     *lo = NULL_TREE;
7353   if (TREE_OVERFLOW (*hi)
7354       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7355     *hi = NULL_TREE;
7356 
7357   return code;
7358 }
7359 
7360 
7361 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7362    equality/inequality test, then return a simplified form of the test
7363    using a sign testing.  Otherwise return NULL.  TYPE is the desired
7364    result type.  */
7365 
7366 static tree
7367 fold_single_bit_test_into_sign_test (location_t loc,
7368 				     enum tree_code code, tree arg0, tree arg1,
7369 				     tree result_type)
7370 {
7371   /* If this is testing a single bit, we can optimize the test.  */
7372   if ((code == NE_EXPR || code == EQ_EXPR)
7373       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7374       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7375     {
7376       /* If we have (A & C) != 0 where C is the sign bit of A, convert
7377 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
7378       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7379 
7380       if (arg00 != NULL_TREE
7381 	  /* This is only a win if casting to a signed type is cheap,
7382 	     i.e. when arg00's type is not a partial mode.  */
7383 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
7384 	{
7385 	  tree stype = signed_type_for (TREE_TYPE (arg00));
7386 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7387 			      result_type,
7388 			      fold_convert_loc (loc, stype, arg00),
7389 			      build_int_cst (stype, 0));
7390 	}
7391     }
7392 
7393   return NULL_TREE;
7394 }
7395 
7396 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7397    equality/inequality test, then return a simplified form of
7398    the test using shifts and logical operations.  Otherwise return
7399    NULL.  RESULT_TYPE is the desired result type.  */
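
/* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
   ((x >> 3) ^ 1) & 1, with the AND kept outermost so it can combine
   with surrounding code (a sketch; the shift is performed in an
   intermediate type chosen below).  */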
7400 
7401 tree
7402 fold_single_bit_test (location_t loc, enum tree_code code,
7403 		      tree arg0, tree arg1, tree result_type)
7404 {
7405   /* If this is testing a single bit, we can optimize the test.  */
7406   if ((code == NE_EXPR || code == EQ_EXPR)
7407       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7408       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7409     {
7410       tree inner = TREE_OPERAND (arg0, 0);
7411       tree type = TREE_TYPE (arg0);
7412       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7413       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7414       int ops_unsigned;
7415       tree signed_type, unsigned_type, intermediate_type;
7416       tree tem, one;
7417 
7418       /* First, see if we can fold the single bit test into a sign-bit
7419 	 test.  */
7420       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7421 						 result_type);
7422       if (tem)
7423 	return tem;
7424 
7425       /* Otherwise we have (A & C) != 0 where C is a single bit,
7426 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
7427 	 Similarly for (A & C) == 0.  */
7428 
7429       /* If INNER is a right shift of a constant and it plus BITNUM does
7430 	 not overflow, adjust BITNUM and INNER.  */
7431       if (TREE_CODE (inner) == RSHIFT_EXPR
7432 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7433 	  && bitnum < TYPE_PRECISION (type)
7434 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7435 			TYPE_PRECISION (type) - bitnum))
7436 	{
7437 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7438 	  inner = TREE_OPERAND (inner, 0);
7439 	}
7440 
7441       /* If we are going to be able to omit the AND below, we must do our
7442 	 operations as unsigned.  If we must use the AND, we have a choice.
7443 	 Normally unsigned is faster, but for some machines signed is.  */
7444       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7445 		      && !flag_syntax_only) ? 0 : 1;
7446 
7447       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7448       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7449       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7450       inner = fold_convert_loc (loc, intermediate_type, inner);
7451 
7452       if (bitnum != 0)
7453 	inner = build2 (RSHIFT_EXPR, intermediate_type,
7454 			inner, size_int (bitnum));
7455 
7456       one = build_int_cst (intermediate_type, 1);
7457 
7458       if (code == EQ_EXPR)
7459 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7460 
7461       /* Put the AND last so it can combine with more things.  */
7462       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7463 
7464       /* Make sure to return the proper type.  */
7465       inner = fold_convert_loc (loc, result_type, inner);
7466 
7467       return inner;
7468     }
7469   return NULL_TREE;
7470 }
7471 
7472 /* Test whether it is preferable to swap two operands, ARG0 and
7473    ARG1, for example because ARG0 is an integer constant and ARG1
7474    isn't.  */
7475 
7476 bool
7477 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7478 {
7479   if (CONSTANT_CLASS_P (arg1))
7480     return 0;
7481   if (CONSTANT_CLASS_P (arg0))
7482     return 1;
7483 
7484   STRIP_NOPS (arg0);
7485   STRIP_NOPS (arg1);
7486 
7487   if (TREE_CONSTANT (arg1))
7488     return 0;
7489   if (TREE_CONSTANT (arg0))
7490     return 1;
7491 
7492   /* It is preferable to swap two SSA_NAME to ensure a canonical form
7493      for commutative and comparison operators.  Ensuring a canonical
7494      form allows the optimizers to find additional redundancies without
7495      having to explicitly check for both orderings.  */
7496   if (TREE_CODE (arg0) == SSA_NAME
7497       && TREE_CODE (arg1) == SSA_NAME
7498       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7499     return 1;
7500 
7501   /* Put SSA_NAMEs last.  */
7502   if (TREE_CODE (arg1) == SSA_NAME)
7503     return 0;
7504   if (TREE_CODE (arg0) == SSA_NAME)
7505     return 1;
7506 
7507   /* Put variables last.  */
7508   if (DECL_P (arg1))
7509     return 0;
7510   if (DECL_P (arg0))
7511     return 1;
7512 
7513   return 0;
7514 }
7515 
7516 
7517 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7518    means A >= Y && A != MAX, but in this case we know that
7519    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
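
/* E.g. for unsigned a, the pair a < n && a + 1 > m folds to
   a < n && a >= m: the bound a < n guarantees a != UINT_MAX, so a + 1
   cannot wrap (a sketch; the code below verifies that the operand of
   INEQ exceeds that of BOUND by exactly one).  */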
7520 
7521 static tree
7522 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7523 {
7524   tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7525 
7526   if (TREE_CODE (bound) == LT_EXPR)
7527     a = TREE_OPERAND (bound, 0);
7528   else if (TREE_CODE (bound) == GT_EXPR)
7529     a = TREE_OPERAND (bound, 1);
7530   else
7531     return NULL_TREE;
7532 
7533   typea = TREE_TYPE (a);
7534   if (!INTEGRAL_TYPE_P (typea)
7535       && !POINTER_TYPE_P (typea))
7536     return NULL_TREE;
7537 
7538   if (TREE_CODE (ineq) == LT_EXPR)
7539     {
7540       a1 = TREE_OPERAND (ineq, 1);
7541       y = TREE_OPERAND (ineq, 0);
7542     }
7543   else if (TREE_CODE (ineq) == GT_EXPR)
7544     {
7545       a1 = TREE_OPERAND (ineq, 0);
7546       y = TREE_OPERAND (ineq, 1);
7547     }
7548   else
7549     return NULL_TREE;
7550 
7551   if (TREE_TYPE (a1) != typea)
7552     return NULL_TREE;
7553 
7554   if (POINTER_TYPE_P (typea))
7555     {
7556       /* Convert the pointer types into integers before taking the difference.  */
7557       tree ta = fold_convert_loc (loc, ssizetype, a);
7558       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7559       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7560     }
7561   else
7562     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7563 
7564   if (!diff || !integer_onep (diff))
7565     return NULL_TREE;
7566 
7567   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7568 }
7569 
7570 /* Fold a sum or difference of at least one multiplication.
7571    Returns the folded tree or NULL if no simplification could be made.  */
7572 
7573 static tree
7574 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7575 			  tree arg0, tree arg1)
7576 {
7577   tree arg00, arg01, arg10, arg11;
7578   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7579 
7580   /* (A * C) +- (B * C) -> (A+-B) * C.
7581      (A * C) +- A -> A * (C+-1).
7582      We are most concerned about the case where C is a constant,
7583      but other combinations show up during loop reduction.  Since
7584      it is not difficult, try all four possibilities.  */
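
  /* E.g. i * 12 + j * 4 has no identical multiplicand, but 4 divides
     12, so the sum can be refactored as (i * 3 + j) * 4; that is the
     common power-of-two case handled further below.  */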
7585 
7586   if (TREE_CODE (arg0) == MULT_EXPR)
7587     {
7588       arg00 = TREE_OPERAND (arg0, 0);
7589       arg01 = TREE_OPERAND (arg0, 1);
7590     }
7591   else if (TREE_CODE (arg0) == INTEGER_CST)
7592     {
7593       arg00 = build_one_cst (type);
7594       arg01 = arg0;
7595     }
7596   else
7597     {
7598       /* We cannot generate constant 1 for fract.  */
7599       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7600 	return NULL_TREE;
7601       arg00 = arg0;
7602       arg01 = build_one_cst (type);
7603     }
7604   if (TREE_CODE (arg1) == MULT_EXPR)
7605     {
7606       arg10 = TREE_OPERAND (arg1, 0);
7607       arg11 = TREE_OPERAND (arg1, 1);
7608     }
7609   else if (TREE_CODE (arg1) == INTEGER_CST)
7610     {
7611       arg10 = build_one_cst (type);
7612       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7613 	 the purpose of this canonicalization.  */
7614       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7615 	  && negate_expr_p (arg1)
7616 	  && code == PLUS_EXPR)
7617 	{
7618 	  arg11 = negate_expr (arg1);
7619 	  code = MINUS_EXPR;
7620 	}
7621       else
7622 	arg11 = arg1;
7623     }
7624   else
7625     {
7626       /* We cannot generate constant 1 for fract.  */
7627       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7628 	return NULL_TREE;
7629       arg10 = arg1;
7630       arg11 = build_one_cst (type);
7631     }
7632   same = NULL_TREE;
7633 
7634   /* Prefer factoring a common non-constant.  */
7635   if (operand_equal_p (arg00, arg10, 0))
7636     same = arg00, alt0 = arg01, alt1 = arg11;
7637   else if (operand_equal_p (arg01, arg11, 0))
7638     same = arg01, alt0 = arg00, alt1 = arg10;
7639   else if (operand_equal_p (arg00, arg11, 0))
7640     same = arg00, alt0 = arg01, alt1 = arg10;
7641   else if (operand_equal_p (arg01, arg10, 0))
7642     same = arg01, alt0 = arg00, alt1 = arg11;
7643 
7644   /* No identical multiplicands; see if we can find a common
7645      power-of-two factor in non-power-of-two multiplies.  This
7646      can help in multi-dimensional array access.  */
7647   else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7648     {
7649       HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7650       HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7651       HOST_WIDE_INT tmp;
7652       bool swap = false;
7653       tree maybe_same;
7654 
7655       /* Move min of absolute values to int11.  */
7656       if (absu_hwi (int01) < absu_hwi (int11))
7657         {
7658 	  tmp = int01, int01 = int11, int11 = tmp;
7659 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7660 	  maybe_same = arg01;
7661 	  swap = true;
7662 	}
7663       else
7664 	maybe_same = arg11;
7665 
7666       const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7667       if (factor > 1
7668 	  && pow2p_hwi (factor)
7669 	  && (int01 & (factor - 1)) == 0
7670 	  /* The remainder should not be a constant, otherwise we
7671 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7672 	     increased the number of multiplications necessary.  */
7673 	  && TREE_CODE (arg10) != INTEGER_CST)
7674         {
7675 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7676 			      build_int_cst (TREE_TYPE (arg00),
7677 					     int01 / int11));
7678 	  alt1 = arg10;
7679 	  same = maybe_same;
7680 	  if (swap)
7681 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7682 	}
7683     }
7684 
7685   if (!same)
7686     return NULL_TREE;
7687 
7688   if (! ANY_INTEGRAL_TYPE_P (type)
7689       || TYPE_OVERFLOW_WRAPS (type)
7690       /* We are neither factoring zero nor minus one.  */
7691       || TREE_CODE (same) == INTEGER_CST)
7692     return fold_build2_loc (loc, MULT_EXPR, type,
7693 			fold_build2_loc (loc, code, type,
7694 				     fold_convert_loc (loc, type, alt0),
7695 				     fold_convert_loc (loc, type, alt1)),
7696 			fold_convert_loc (loc, type, same));
7697 
7698   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7699      same may be minus one and thus the multiplication may overflow.  Perform
7700      the sum operation in an unsigned type.  */
7701   tree utype = unsigned_type_for (type);
7702   tree tem = fold_build2_loc (loc, code, utype,
7703 			      fold_convert_loc (loc, utype, alt0),
7704 			      fold_convert_loc (loc, utype, alt1));
7705   /* If the sum evaluated to a constant that is not -INF, the
7706      multiplication cannot overflow.  */
7707   if (TREE_CODE (tem) == INTEGER_CST
7708       && (wi::to_wide (tem)
7709 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7710     return fold_build2_loc (loc, MULT_EXPR, type,
7711 			    fold_convert (type, tem), same);
7712 
7713   /* Do not resort to unsigned multiplication because
7714      we lose the no-overflow property of the expression.  */
7715   return NULL_TREE;
7716 }
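
/* Worked example of the factoring above (editorial illustration, not
   part of GCC): assuming an integer type with wrapping overflow,
   (i * 12) + (i * 4) matches the identical-multiplicand case with
   SAME = i, ALT0 = 12, ALT1 = 4 and folds to i * 16, while
   (i * 12) + (j * 4) takes the power-of-two path with FACTOR = 4 and
   becomes ((i * 3) + j) * 4, replacing one of the two general
   multiplications by a cheap power-of-two one.  */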
7717 
7718 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7719    specified by EXPR into the buffer PTR of length LEN bytes.
7720    Return the number of bytes placed in the buffer, or zero
7721    upon failure.  */
7722 
7723 static int
7724 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7725 {
7726   tree type = TREE_TYPE (expr);
7727   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7728   int byte, offset, word, words;
7729   unsigned char value;
7730 
7731   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7732     return 0;
7733   if (off == -1)
7734     off = 0;
7735 
7736   if (ptr == NULL)
7737     /* Dry run.  */
7738     return MIN (len, total_bytes - off);
7739 
7740   words = total_bytes / UNITS_PER_WORD;
7741 
7742   for (byte = 0; byte < total_bytes; byte++)
7743     {
7744       int bitpos = byte * BITS_PER_UNIT;
7745       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7746 	 number of bytes.  */
7747       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7748 
7749       if (total_bytes > UNITS_PER_WORD)
7750 	{
7751 	  word = byte / UNITS_PER_WORD;
7752 	  if (WORDS_BIG_ENDIAN)
7753 	    word = (words - 1) - word;
7754 	  offset = word * UNITS_PER_WORD;
7755 	  if (BYTES_BIG_ENDIAN)
7756 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7757 	  else
7758 	    offset += byte % UNITS_PER_WORD;
7759 	}
7760       else
7761 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7762       if (offset >= off && offset - off < len)
7763 	ptr[offset - off] = value;
7764     }
7765   return MIN (len, total_bytes - off);
7766 }
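
/* A minimal sketch of the resulting layout (editorial illustration, not
   part of GCC), assuming CST is an INTEGER_CST of a 32-bit type with
   value 0x12345678 and 8-bit target bytes:

     unsigned char buf[4];
     int n = native_encode_int (cst, buf, sizeof buf, -1);

   On success N is 4 and BUF holds { 0x78, 0x56, 0x34, 0x12 } for a
   little-endian target, or { 0x12, 0x34, 0x56, 0x78 } for a big-endian
   one.  */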
7767 
7768 
7769 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7770    specified by EXPR into the buffer PTR of length LEN bytes.
7771    Return the number of bytes placed in the buffer, or zero
7772    upon failure.  */
7773 
7774 static int
7775 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7776 {
7777   tree type = TREE_TYPE (expr);
7778   scalar_mode mode = SCALAR_TYPE_MODE (type);
7779   int total_bytes = GET_MODE_SIZE (mode);
7780   FIXED_VALUE_TYPE value;
7781   tree i_value, i_type;
7782 
7783   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7784     return 0;
7785 
7786   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7787 
7788   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7789     return 0;
7790 
7791   value = TREE_FIXED_CST (expr);
7792   i_value = double_int_to_tree (i_type, value.data);
7793 
7794   return native_encode_int (i_value, ptr, len, off);
7795 }
7796 
7797 
7798 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7799    specified by EXPR into the buffer PTR of length LEN bytes.
7800    Return the number of bytes placed in the buffer, or zero
7801    upon failure.  */
7802 
7803 static int
7804 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7805 {
7806   tree type = TREE_TYPE (expr);
7807   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7808   int byte, offset, word, words, bitpos;
7809   unsigned char value;
7810 
7811   /* There are always 32 bits in each long, no matter the size of
7812      the host's long.  We handle floating point representations with
7813      up to 192 bits.  */
7814   long tmp[6];
7815 
7816   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7817     return 0;
7818   if (off == -1)
7819     off = 0;
7820 
7821   if (ptr == NULL)
7822     /* Dry run.  */
7823     return MIN (len, total_bytes - off);
7824 
7825   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7826 
7827   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7828 
7829   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7830        bitpos += BITS_PER_UNIT)
7831     {
7832       byte = (bitpos / BITS_PER_UNIT) & 3;
7833       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7834 
7835       if (UNITS_PER_WORD < 4)
7836 	{
7837 	  word = byte / UNITS_PER_WORD;
7838 	  if (WORDS_BIG_ENDIAN)
7839 	    word = (words - 1) - word;
7840 	  offset = word * UNITS_PER_WORD;
7841 	  if (BYTES_BIG_ENDIAN)
7842 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7843 	  else
7844 	    offset += byte % UNITS_PER_WORD;
7845 	}
7846       else
7847 	{
7848 	  offset = byte;
7849 	  if (BYTES_BIG_ENDIAN)
7850 	    {
7851 	      /* Reverse bytes within each long, or within the entire float
7852 		 if it's smaller than a long (for HFmode).  */
7853 	      offset = MIN (3, total_bytes - 1) - offset;
7854 	      gcc_assert (offset >= 0);
7855 	    }
7856 	}
7857       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7858       if (offset >= off
7859 	  && offset - off < len)
7860 	ptr[offset - off] = value;
7861     }
7862   return MIN (len, total_bytes - off);
7863 }
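
/* Editorial illustration (not part of GCC): real_to_target always hands
   back the image in 32-bit chunks, so for an IEEE double 1.0 on a target
   with little-endian floating point words the loop above sees
   tmp[0] = 0x00000000 and tmp[1] = 0x3ff00000, and scatters those chunks
   into target byte order.  */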
7864 
7865 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7866    specified by EXPR into the buffer PTR of length LEN bytes.
7867    Return the number of bytes placed in the buffer, or zero
7868    upon failure.  */
7869 
7870 static int
7871 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7872 {
7873   int rsize, isize;
7874   tree part;
7875 
7876   part = TREE_REALPART (expr);
7877   rsize = native_encode_expr (part, ptr, len, off);
7878   if (off == -1 && rsize == 0)
7879     return 0;
7880   part = TREE_IMAGPART (expr);
7881   if (off != -1)
7882     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7883   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7884 			      len - rsize, off);
7885   if (off == -1 && isize != rsize)
7886     return 0;
7887   return rsize + isize;
7888 }
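
/* Editorial illustration (not part of GCC): a COMPLEX_CST is encoded as
   its real part immediately followed by its imaginary part, so a
   _Complex double constant 1.0 + 2.0i occupies 16 bytes, the first 8
   holding the representation of 1.0 and the next 8 holding 2.0,
   assuming 8-byte doubles.  */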
7889 
7890 /* Like native_encode_vector, but only encode the first COUNT elements.
7891    The other arguments are as for native_encode_vector.  */
7892 
7893 static int
7894 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7895 			   int off, unsigned HOST_WIDE_INT count)
7896 {
7897   tree itype = TREE_TYPE (TREE_TYPE (expr));
7898   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7899       && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7900     {
7901       /* This is the only case in which elements can be smaller than a byte.
7902 	 Element 0 is always in the lsb of the containing byte.  */
7903       unsigned int elt_bits = TYPE_PRECISION (itype);
7904       int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7905       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7906 	return 0;
7907 
7908       if (off == -1)
7909 	off = 0;
7910 
7911       /* Zero the buffer and then set bits later where necessary.  */
7912       int extract_bytes = MIN (len, total_bytes - off);
7913       if (ptr)
7914 	memset (ptr, 0, extract_bytes);
7915 
7916       unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7917       unsigned int first_elt = off * elts_per_byte;
7918       unsigned int extract_elts = extract_bytes * elts_per_byte;
7919       for (unsigned int i = 0; i < extract_elts; ++i)
7920 	{
7921 	  tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7922 	  if (TREE_CODE (elt) != INTEGER_CST)
7923 	    return 0;
7924 
7925 	  if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7926 	    {
7927 	      unsigned int bit = i * elt_bits;
7928 	      ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7929 	    }
7930 	}
7931       return extract_bytes;
7932     }
7933 
7934   int offset = 0;
7935   int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7936   for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7937     {
7938       if (off >= size)
7939 	{
7940 	  off -= size;
7941 	  continue;
7942 	}
7943       tree elem = VECTOR_CST_ELT (expr, i);
7944       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7945 				    len - offset, off);
7946       if ((off == -1 && res != size) || res == 0)
7947 	return 0;
7948       offset += res;
7949       if (offset >= len)
7950 	return (off == -1 && i < count - 1) ? 0 : offset;
7951       if (off != -1)
7952 	off = 0;
7953     }
7954   return offset;
7955 }
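
/* Worked example for the sub-byte path above (editorial illustration,
   not part of GCC): a boolean vector with 1-bit elements
   { 1, 0, 1, 1, 0, 0, 0, 1 } packs into the single byte 0x8d, element 0
   occupying the least significant bit.  */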
7956 
7957 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7958    specified by EXPR into the buffer PTR of length LEN bytes.
7959    Return the number of bytes placed in the buffer, or zero
7960    upon failure.  */
7961 
7962 static int
7963 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7964 {
7965   unsigned HOST_WIDE_INT count;
7966   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7967     return 0;
7968   return native_encode_vector_part (expr, ptr, len, off, count);
7969 }
7970 
7971 
7972 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7973    specified by EXPR into the buffer PTR of length LEN bytes.
7974    Return the number of bytes placed in the buffer, or zero
7975    upon failure.  */
7976 
7977 static int
7978 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7979 {
7980   tree type = TREE_TYPE (expr);
7981 
7982   /* Wide-char strings are encoded in target byte-order so native
7983      encoding them is trivial.  */
7984   if (BITS_PER_UNIT != CHAR_BIT
7985       || TREE_CODE (type) != ARRAY_TYPE
7986       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7987       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7988     return 0;
7989 
7990   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7991   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7992     return 0;
7993   if (off == -1)
7994     off = 0;
7995   len = MIN (total_bytes - off, len);
7996   if (ptr == NULL)
7997     /* Dry run.  */;
7998   else
7999     {
8000       int written = 0;
8001       if (off < TREE_STRING_LENGTH (expr))
8002 	{
8003 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8004 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8005 	}
8006       memset (ptr + written, 0, len - written);
8007     }
8008   return len;
8009 }
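
/* Editorial illustration (not part of GCC): for a STRING_CST "ab" of
   type char[4], native_encode_string yields { 'a', 'b', 0, 0 }; bytes
   past TREE_STRING_LENGTH are zero-filled up to the array size.  */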
8010 
8011 
8012 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST, REAL_CST,
8013    FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8014    the buffer PTR of size LEN bytes.  If PTR is NULL, don't actually store
8015    anything, just do a dry run.  Fail either if OFF is -1 and LEN isn't
8016    sufficient to encode the entire EXPR, or if OFF is out of bounds.
8017    Otherwise, start at byte offset OFF and encode at most LEN bytes.
8018    Return the number of bytes placed in the buffer, or zero upon failure.  */
8019 
8020 int
8021 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8022 {
8023   /* We don't support starting at negative offset and -1 is special.  */
8024   if (off < -1)
8025     return 0;
8026 
8027   switch (TREE_CODE (expr))
8028     {
8029     case INTEGER_CST:
8030       return native_encode_int (expr, ptr, len, off);
8031 
8032     case REAL_CST:
8033       return native_encode_real (expr, ptr, len, off);
8034 
8035     case FIXED_CST:
8036       return native_encode_fixed (expr, ptr, len, off);
8037 
8038     case COMPLEX_CST:
8039       return native_encode_complex (expr, ptr, len, off);
8040 
8041     case VECTOR_CST:
8042       return native_encode_vector (expr, ptr, len, off);
8043 
8044     case STRING_CST:
8045       return native_encode_string (expr, ptr, len, off);
8046 
8047     default:
8048       return 0;
8049     }
8050 }
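
/* A minimal usage sketch (editorial illustration, not part of GCC),
   assuming EXPR is one of the constants handled above:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof buf, -1);
     tree back = len ? native_interpret_expr (TREE_TYPE (expr), buf, len)
		     : NULL_TREE;

   If non-NULL, BACK is a constant with the same target representation
   as EXPR.  fold_view_convert_expr below uses this same encode/interpret
   pairing, but reinterprets the bytes as a different type.  */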
8051 
8052 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8053    and larger than or equal to FIELDSIZE bytes, with underlying mode
8054    precision/size a multiple of BITS_PER_UNIT.  As native_{interpret,encode}_int
8055    works in terms of machine modes, we can't just use build_nonstandard_integer_type.  */
8056 
8057 tree
8058 find_bitfield_repr_type (int fieldsize, int len)
8059 {
8060   machine_mode mode;
8061   for (int pass = 0; pass < 2; pass++)
8062     {
8063       enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8064       FOR_EACH_MODE_IN_CLASS (mode, mclass)
8065 	if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8066 	    && known_eq (GET_MODE_PRECISION (mode),
8067 			 GET_MODE_BITSIZE (mode))
8068 	    && known_le (GET_MODE_SIZE (mode), len))
8069 	  {
8070 	    tree ret = lang_hooks.types.type_for_mode (mode, 1);
8071 	    if (ret && TYPE_MODE (ret) == mode)
8072 	      return ret;
8073 	  }
8074     }
8075 
8076   for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8077     if (int_n_enabled_p[i]
8078 	&& int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8079 	&& int_n_trees[i].unsigned_type)
8080       {
8081 	tree ret = int_n_trees[i].unsigned_type;
8082 	mode = TYPE_MODE (ret);
8083 	if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8084 	    && known_eq (GET_MODE_PRECISION (mode),
8085 			 GET_MODE_BITSIZE (mode))
8086 	    && known_le (GET_MODE_SIZE (mode), len))
8087 	  return ret;
8088       }
8089 
8090   return NULL_TREE;
8091 }
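
/* Example (editorial illustration, not part of GCC): on a typical target
   with 1-, 2- and 4-byte integer modes, find_bitfield_repr_type (3, 16)
   returns the unsigned type of the smallest integral mode that covers 3
   bytes and still fits in the 16-byte buffer, i.e. the 4-byte one.  */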
8092 
8093 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8094    NON_LVALUE_EXPRs and nops.  If MASK is non-NULL (in which case PTR
8095    has to be non-NULL and OFF zero), then in addition to filling the
8096    bytes pointed to by PTR with the value, also clear any bits pointed
8097    to by MASK that are known to be initialized; bits for e.g.
8098    uninitialized padding or uninitialized fields are kept as-is.  */
8099 
8100 int
8101 native_encode_initializer (tree init, unsigned char *ptr, int len,
8102 			   int off, unsigned char *mask)
8103 {
8104   int r;
8105 
8106   /* We don't support starting at negative offset and -1 is special.  */
8107   if (off < -1 || init == NULL_TREE)
8108     return 0;
8109 
8110   gcc_assert (mask == NULL || (off == 0 && ptr));
8111 
8112   STRIP_NOPS (init);
8113   switch (TREE_CODE (init))
8114     {
8115     case VIEW_CONVERT_EXPR:
8116     case NON_LVALUE_EXPR:
8117       return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8118 					mask);
8119     default:
8120       r = native_encode_expr (init, ptr, len, off);
8121       if (mask)
8122 	memset (mask, 0, r);
8123       return r;
8124     case CONSTRUCTOR:
8125       tree type = TREE_TYPE (init);
8126       HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8127       if (total_bytes < 0)
8128 	return 0;
8129       if ((off == -1 && total_bytes > len) || off >= total_bytes)
8130 	return 0;
8131       int o = off == -1 ? 0 : off;
8132       if (TREE_CODE (type) == ARRAY_TYPE)
8133 	{
8134 	  tree min_index;
8135 	  unsigned HOST_WIDE_INT cnt;
8136 	  HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8137 	  constructor_elt *ce;
8138 
8139 	  if (!TYPE_DOMAIN (type)
8140 	      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8141 	    return 0;
8142 
8143 	  fieldsize = int_size_in_bytes (TREE_TYPE (type));
8144 	  if (fieldsize <= 0)
8145 	    return 0;
8146 
8147 	  min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8148 	  if (ptr)
8149 	    memset (ptr, '\0', MIN (total_bytes - off, len));
8150 
8151 	  for (cnt = 0; ; cnt++)
8152 	    {
8153 	      tree val = NULL_TREE, index = NULL_TREE;
8154 	      HOST_WIDE_INT pos = curpos, count = 0;
8155 	      bool full = false;
8156 	      if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8157 		{
8158 		  val = ce->value;
8159 		  index = ce->index;
8160 		}
8161 	      else if (mask == NULL
8162 		       || CONSTRUCTOR_NO_CLEARING (init)
8163 		       || curpos >= total_bytes)
8164 		break;
8165 	      else
8166 		pos = total_bytes;
8167 
8168 	      if (index && TREE_CODE (index) == RANGE_EXPR)
8169 		{
8170 		  if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8171 		      || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8172 		    return 0;
8173 		  offset_int wpos
8174 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8175 				- wi::to_offset (min_index),
8176 				TYPE_PRECISION (sizetype));
8177 		  wpos *= fieldsize;
8178 		  if (!wi::fits_shwi_p (wpos))
8179 		    return 0;
8180 		  pos = wpos.to_shwi ();
8181 		  offset_int wcount
8182 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8183 				- wi::to_offset (TREE_OPERAND (index, 0)),
8184 				TYPE_PRECISION (sizetype));
8185 		  if (!wi::fits_shwi_p (wcount))
8186 		    return 0;
8187 		  count = wcount.to_shwi ();
8188 		}
8189 	      else if (index)
8190 		{
8191 		  if (TREE_CODE (index) != INTEGER_CST)
8192 		    return 0;
8193 		  offset_int wpos
8194 		    = wi::sext (wi::to_offset (index)
8195 				- wi::to_offset (min_index),
8196 				TYPE_PRECISION (sizetype));
8197 		  wpos *= fieldsize;
8198 		  if (!wi::fits_shwi_p (wpos))
8199 		    return 0;
8200 		  pos = wpos.to_shwi ();
8201 		}
8202 
8203 	      if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8204 		{
8205 		  if (valueinit == -1)
8206 		    {
8207 		      tree zero = build_zero_cst (TREE_TYPE (type));
8208 		      r = native_encode_initializer (zero, ptr + curpos,
8209 						     fieldsize, 0,
8210 						     mask + curpos);
8211 		      if (TREE_CODE (zero) == CONSTRUCTOR)
8212 			ggc_free (zero);
8213 		      if (!r)
8214 			return 0;
8215 		      valueinit = curpos;
8216 		      curpos += fieldsize;
8217 		    }
8218 		  while (curpos != pos)
8219 		    {
8220 		      memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8221 		      memcpy (mask + curpos, mask + valueinit, fieldsize);
8222 		      curpos += fieldsize;
8223 		    }
8224 		}
8225 
8226 	      curpos = pos;
8227 	      if (val)
8228 		do
8229 		  {
8230 		    if (off == -1
8231 			|| (curpos >= off
8232 			    && (curpos + fieldsize
8233 				<= (HOST_WIDE_INT) off + len)))
8234 		      {
8235 			if (full)
8236 			  {
8237 			    if (ptr)
8238 			      memcpy (ptr + (curpos - o), ptr + (pos - o),
8239 				      fieldsize);
8240 			    if (mask)
8241 			      memcpy (mask + curpos, mask + pos, fieldsize);
8242 			  }
8243 			else if (!native_encode_initializer (val,
8244 							     ptr
8245 							     ? ptr + curpos - o
8246 							     : NULL,
8247 							     fieldsize,
8248 							     off == -1 ? -1
8249 								       : 0,
8250 							     mask
8251 							     ? mask + curpos
8252 							     : NULL))
8253 			  return 0;
8254 			else
8255 			  {
8256 			    full = true;
8257 			    pos = curpos;
8258 			  }
8259 		      }
8260 		    else if (curpos + fieldsize > off
8261 			     && curpos < (HOST_WIDE_INT) off + len)
8262 		      {
8263 			/* Partial overlap.  */
8264 			unsigned char *p = NULL;
8265 			int no = 0;
8266 			int l;
8267 			gcc_assert (mask == NULL);
8268 			if (curpos >= off)
8269 			  {
8270 			    if (ptr)
8271 			      p = ptr + curpos - off;
8272 			    l = MIN ((HOST_WIDE_INT) off + len - curpos,
8273 				     fieldsize);
8274 			  }
8275 			else
8276 			  {
8277 			    p = ptr;
8278 			    no = off - curpos;
8279 			    l = len;
8280 			  }
8281 			if (!native_encode_initializer (val, p, l, no, NULL))
8282 			  return 0;
8283 		      }
8284 		    curpos += fieldsize;
8285 		  }
8286 		while (count-- != 0);
8287 	    }
8288 	  return MIN (total_bytes - off, len);
8289 	}
8290       else if (TREE_CODE (type) == RECORD_TYPE
8291 	       || TREE_CODE (type) == UNION_TYPE)
8292 	{
8293 	  unsigned HOST_WIDE_INT cnt;
8294 	  constructor_elt *ce;
8295 	  tree fld_base = TYPE_FIELDS (type);
8296 	  tree to_free = NULL_TREE;
8297 
8298 	  gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8299 	  if (ptr != NULL)
8300 	    memset (ptr, '\0', MIN (total_bytes - o, len));
8301 	  for (cnt = 0; ; cnt++)
8302 	    {
8303 	      tree val = NULL_TREE, field = NULL_TREE;
8304 	      HOST_WIDE_INT pos = 0, fieldsize;
8305 	      unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8306 
8307 	      if (to_free)
8308 		{
8309 		  ggc_free (to_free);
8310 		  to_free = NULL_TREE;
8311 		}
8312 
8313 	      if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8314 		{
8315 		  val = ce->value;
8316 		  field = ce->index;
8317 		  if (field == NULL_TREE)
8318 		    return 0;
8319 
8320 		  pos = int_byte_position (field);
8321 		  if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8322 		    continue;
8323 		}
8324 	      else if (mask == NULL
8325 		       || CONSTRUCTOR_NO_CLEARING (init))
8326 		break;
8327 	      else
8328 		pos = total_bytes;
8329 
8330 	      if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8331 		{
8332 		  tree fld;
8333 		  for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8334 		    {
8335 		      if (TREE_CODE (fld) != FIELD_DECL)
8336 			continue;
8337 		      if (fld == field)
8338 			break;
8339 		      if (DECL_PADDING_P (fld))
8340 			continue;
8341 		      if (DECL_SIZE_UNIT (fld) == NULL_TREE
8342 			  || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8343 			return 0;
8344 		      if (integer_zerop (DECL_SIZE_UNIT (fld)))
8345 			continue;
8346 		      break;
8347 		    }
8348 		  if (fld == NULL_TREE)
8349 		    {
8350 		      if (ce == NULL)
8351 			break;
8352 		      return 0;
8353 		    }
8354 		  fld_base = DECL_CHAIN (fld);
8355 		  if (fld != field)
8356 		    {
8357 		      cnt--;
8358 		      field = fld;
8359 		      pos = int_byte_position (field);
8360 		      val = build_zero_cst (TREE_TYPE (fld));
8361 		      if (TREE_CODE (val) == CONSTRUCTOR)
8362 			to_free = val;
8363 		    }
8364 		}
8365 
8366 	      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8367 		  && TYPE_DOMAIN (TREE_TYPE (field))
8368 		  && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8369 		{
8370 		  if (mask || off != -1)
8371 		    return 0;
8372 		  if (val == NULL_TREE)
8373 		    continue;
8374 		  if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8375 		    return 0;
8376 		  fieldsize = int_size_in_bytes (TREE_TYPE (val));
8377 		  if (fieldsize < 0
8378 		      || (int) fieldsize != fieldsize
8379 		      || (pos + fieldsize) > INT_MAX)
8380 		    return 0;
8381 		  if (pos + fieldsize > total_bytes)
8382 		    {
8383 		      if (ptr != NULL && total_bytes < len)
8384 			memset (ptr + total_bytes, '\0',
8385 				MIN (pos + fieldsize, len) - total_bytes);
8386 		      total_bytes = pos + fieldsize;
8387 		    }
8388 		}
8389 	      else
8390 		{
8391 		  if (DECL_SIZE_UNIT (field) == NULL_TREE
8392 		      || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8393 		    return 0;
8394 		  fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8395 		}
8396 	      if (fieldsize == 0)
8397 		continue;
8398 
8399 	      if (DECL_BIT_FIELD (field))
8400 		{
8401 		  if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8402 		    return 0;
8403 		  fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8404 		  bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8405 		  if (bpos % BITS_PER_UNIT)
8406 		    bpos %= BITS_PER_UNIT;
8407 		  else
8408 		    bpos = 0;
8409 		  fieldsize += bpos;
8410 		  epos = fieldsize % BITS_PER_UNIT;
8411 		  fieldsize += BITS_PER_UNIT - 1;
8412 		  fieldsize /= BITS_PER_UNIT;
8413 		}
8414 
8415 	      if (off != -1 && pos + fieldsize <= off)
8416 		continue;
8417 
8418 	      if (val == NULL_TREE)
8419 		continue;
8420 
8421 	      if (DECL_BIT_FIELD (field))
8422 		{
8423 		  /* FIXME: Handle PDP endian.  */
8424 		  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8425 		    return 0;
8426 
8427 		  if (TREE_CODE (val) == NON_LVALUE_EXPR)
8428 		    val = TREE_OPERAND (val, 0);
8429 		  if (TREE_CODE (val) != INTEGER_CST)
8430 		    return 0;
8431 
8432 		  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8433 		  tree repr_type = NULL_TREE;
8434 		  HOST_WIDE_INT rpos = 0;
8435 		  if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8436 		    {
8437 		      rpos = int_byte_position (repr);
8438 		      repr_type = TREE_TYPE (repr);
8439 		    }
8440 		  else
8441 		    {
8442 		      repr_type = find_bitfield_repr_type (fieldsize, len);
8443 		      if (repr_type == NULL_TREE)
8444 			return 0;
8445 		      HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8446 		      gcc_assert (repr_size > 0 && repr_size <= len);
8447 		      if (pos + repr_size <= o + len)
8448 			rpos = pos;
8449 		      else
8450 			{
8451 			  rpos = o + len - repr_size;
8452 			  gcc_assert (rpos <= pos);
8453 			}
8454 		    }
8455 
8456 		  if (rpos > pos)
8457 		    return 0;
8458 		  wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8459 		  int diff = (TYPE_PRECISION (repr_type)
8460 			      - TYPE_PRECISION (TREE_TYPE (field)));
8461 		  HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8462 		  if (!BYTES_BIG_ENDIAN)
8463 		    w = wi::lshift (w, bitoff);
8464 		  else
8465 		    w = wi::lshift (w, diff - bitoff);
8466 		  val = wide_int_to_tree (repr_type, w);
8467 
8468 		  unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8469 				    / BITS_PER_UNIT + 1];
8470 		  int l = native_encode_int (val, buf, sizeof buf, 0);
8471 		  if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8472 		    return 0;
8473 
8474 		  if (ptr == NULL)
8475 		    continue;
8476 
8477 		  /* If the bitfield does not start at byte boundary, handle
8478 		     the partial byte at the start.  */
8479 		  if (bpos
8480 		      && (off == -1 || (pos >= off && len >= 1)))
8481 		    {
8482 		      if (!BYTES_BIG_ENDIAN)
8483 			{
8484 			  int msk = (1 << bpos) - 1;
8485 			  buf[pos - rpos] &= ~msk;
8486 			  buf[pos - rpos] |= ptr[pos - o] & msk;
8487 			  if (mask)
8488 			    {
8489 			      if (fieldsize > 1 || epos == 0)
8490 				mask[pos] &= msk;
8491 			      else
8492 				mask[pos] &= (msk | ~((1 << epos) - 1));
8493 			    }
8494 			}
8495 		      else
8496 			{
8497 			  int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8498 			  buf[pos - rpos] &= msk;
8499 			  buf[pos - rpos] |= ptr[pos - o] & ~msk;
8500 			  if (mask)
8501 			    {
8502 			      if (fieldsize > 1 || epos == 0)
8503 				mask[pos] &= ~msk;
8504 			      else
8505 				mask[pos] &= (~msk
8506 					      | ((1 << (BITS_PER_UNIT - epos))
8507 						 - 1));
8508 			    }
8509 			}
8510 		    }
8511 		  /* If the bitfield does not end at byte boundary, handle
8512 		     the partial byte at the end.  */
8513 		  if (epos
8514 		      && (off == -1
8515 			  || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8516 		    {
8517 		      if (!BYTES_BIG_ENDIAN)
8518 			{
8519 			  int msk = (1 << epos) - 1;
8520 			  buf[pos - rpos + fieldsize - 1] &= msk;
8521 			  buf[pos - rpos + fieldsize - 1]
8522 			    |= ptr[pos + fieldsize - 1 - o] & ~msk;
8523 			  if (mask && (fieldsize > 1 || bpos == 0))
8524 			    mask[pos + fieldsize - 1] &= ~msk;
8525 			}
8526 		       else
8527 			{
8528 			  int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8529 			  buf[pos - rpos + fieldsize - 1] &= ~msk;
8530 			  buf[pos - rpos + fieldsize - 1]
8531 			    |= ptr[pos + fieldsize - 1 - o] & msk;
8532 			  if (mask && (fieldsize > 1 || bpos == 0))
8533 			    mask[pos + fieldsize - 1] &= msk;
8534 			}
8535 		    }
8536 		  if (off == -1
8537 		      || (pos >= off
8538 			  && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8539 		    {
8540 		      memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8541 		      if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8542 			memset (mask + pos + (bpos != 0), 0,
8543 				fieldsize - (bpos != 0) - (epos != 0));
8544 		    }
8545 		  else
8546 		    {
8547 		      /* Partial overlap.  */
8548 		      HOST_WIDE_INT fsz = fieldsize;
8549 		      gcc_assert (mask == NULL);
8550 		      if (pos < off)
8551 			{
8552 			  fsz -= (off - pos);
8553 			  pos = off;
8554 			}
8555 		      if (pos + fsz > (HOST_WIDE_INT) off + len)
8556 			fsz = (HOST_WIDE_INT) off + len - pos;
8557 		      memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8558 		    }
8559 		  continue;
8560 		}
8561 
8562 	      if (off == -1
8563 		  || (pos >= off
8564 		      && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8565 		{
8566 		  int fldsize = fieldsize;
8567 		  if (off == -1)
8568 		    {
8569 		      tree fld = DECL_CHAIN (field);
8570 		      while (fld)
8571 			{
8572 			  if (TREE_CODE (fld) == FIELD_DECL)
8573 			    break;
8574 			  fld = DECL_CHAIN (fld);
8575 			}
8576 		      if (fld == NULL_TREE)
8577 			fldsize = len - pos;
8578 		    }
8579 		  r = native_encode_initializer (val, ptr ? ptr + pos - o
8580 							  : NULL,
8581 						 fldsize,
8582 						 off == -1 ? -1 : 0,
8583 						 mask ? mask + pos : NULL);
8584 		  if (!r)
8585 		    return 0;
8586 		  if (off == -1
8587 		      && fldsize != fieldsize
8588 		      && r > fieldsize
8589 		      && pos + r > total_bytes)
8590 		    total_bytes = pos + r;
8591 		}
8592 	      else
8593 		{
8594 		  /* Partial overlap.  */
8595 		  unsigned char *p = NULL;
8596 		  int no = 0;
8597 		  int l;
8598 		  gcc_assert (mask == NULL);
8599 		  if (pos >= off)
8600 		    {
8601 		      if (ptr)
8602 			p = ptr + pos - off;
8603 		      l = MIN ((HOST_WIDE_INT) off + len - pos,
8604 				fieldsize);
8605 		    }
8606 		  else
8607 		    {
8608 		      p = ptr;
8609 		      no = off - pos;
8610 		      l = len;
8611 		    }
8612 		  if (!native_encode_initializer (val, p, l, no, NULL))
8613 		    return 0;
8614 		}
8615 	    }
8616 	  return MIN (total_bytes - off, len);
8617 	}
8618       return 0;
8619     }
8620 }
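
/* Editorial sketch of the MASK semantics (not part of GCC), assuming a
   layout where int is 4 bytes aligned to 4:

     struct S { char c; int i; };

   Encoding a CONSTRUCTOR for S with MASK initially all-ones clears the
   mask bytes covering C and I, which are known to be initialized, while
   the three padding bytes between them keep their 0xff mask values,
   recording that their contents are undefined.  */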
8621 
8622 
8623 /* Subroutine of native_interpret_expr.  Interpret the contents of
8624    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8625    If the buffer cannot be interpreted, return NULL_TREE.  */
8626 
8627 static tree
8628 native_interpret_int (tree type, const unsigned char *ptr, int len)
8629 {
8630   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8631 
8632   if (total_bytes > len
8633       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8634     return NULL_TREE;
8635 
8636   wide_int result = wi::from_buffer (ptr, total_bytes);
8637 
8638   return wide_int_to_tree (type, result);
8639 }
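
/* Example (editorial illustration, not part of GCC): on a little-endian
   target, native_interpret_int applied to the bytes
   { 0x78, 0x56, 0x34, 0x12 } with a 32-bit TYPE yields the INTEGER_CST
   0x12345678 -- the inverse of the native_encode_int example above.  */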
8640 
8641 
8642 /* Subroutine of native_interpret_expr.  Interpret the contents of
8643    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8644    If the buffer cannot be interpreted, return NULL_TREE.  */
8645 
8646 static tree
8647 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8648 {
8649   scalar_mode mode = SCALAR_TYPE_MODE (type);
8650   int total_bytes = GET_MODE_SIZE (mode);
8651   double_int result;
8652   FIXED_VALUE_TYPE fixed_value;
8653 
8654   if (total_bytes > len
8655       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8656     return NULL_TREE;
8657 
8658   result = double_int::from_buffer (ptr, total_bytes);
8659   fixed_value = fixed_from_double_int (result, mode);
8660 
8661   return build_fixed (type, fixed_value);
8662 }
8663 
8664 
8665 /* Subroutine of native_interpret_expr.  Interpret the contents of
8666    the buffer PTR of length LEN as a REAL_CST of type TYPE.
8667    If the buffer cannot be interpreted, return NULL_TREE.  */
8668 
8669 tree
8670 native_interpret_real (tree type, const unsigned char *ptr, int len)
8671 {
8672   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8673   int total_bytes = GET_MODE_SIZE (mode);
8674   unsigned char value;
8675   /* There are always 32 bits in each long, no matter the size of
8676      the host's long.  We handle floating point representations with
8677      up to 192 bits.  */
8678   REAL_VALUE_TYPE r;
8679   long tmp[6];
8680 
8681   if (total_bytes > len || total_bytes > 24)
8682     return NULL_TREE;
8683   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8684 
8685   memset (tmp, 0, sizeof (tmp));
8686   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8687        bitpos += BITS_PER_UNIT)
8688     {
8689       /* Both OFFSET and BYTE index within a long;
8690 	 bitpos indexes the whole float.  */
8691       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8692       if (UNITS_PER_WORD < 4)
8693 	{
8694 	  int word = byte / UNITS_PER_WORD;
8695 	  if (WORDS_BIG_ENDIAN)
8696 	    word = (words - 1) - word;
8697 	  offset = word * UNITS_PER_WORD;
8698 	  if (BYTES_BIG_ENDIAN)
8699 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8700 	  else
8701 	    offset += byte % UNITS_PER_WORD;
8702 	}
8703       else
8704 	{
8705 	  offset = byte;
8706 	  if (BYTES_BIG_ENDIAN)
8707 	    {
8708 	      /* Reverse bytes within each long, or within the entire float
8709 		 if it's smaller than a long (for HFmode).  */
8710 	      offset = MIN (3, total_bytes - 1) - offset;
8711 	      gcc_assert (offset >= 0);
8712 	    }
8713 	}
8714       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8715 
8716       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8717     }
8718 
8719   real_from_target (&r, tmp, mode);
8720   return build_real (type, r);
8721 }
8722 
8723 
8724 /* Subroutine of native_interpret_expr.  Interpret the contents of
8725    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8726    If the buffer cannot be interpreted, return NULL_TREE.  */
8727 
8728 static tree
8729 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8730 {
8731   tree etype, rpart, ipart;
8732   int size;
8733 
8734   etype = TREE_TYPE (type);
8735   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8736   if (size * 2 > len)
8737     return NULL_TREE;
8738   rpart = native_interpret_expr (etype, ptr, size);
8739   if (!rpart)
8740     return NULL_TREE;
8741   ipart = native_interpret_expr (etype, ptr+size, size);
8742   if (!ipart)
8743     return NULL_TREE;
8744   return build_complex (type, rpart, ipart);
8745 }
8746 
8747 /* Read a vector of type TYPE from the target memory image given by BYTES,
8748    which contains LEN bytes.  The vector is known to be encodable using
8749    NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8750 
8751    Return the vector on success, otherwise return null.  */
8752 
8753 static tree
8754 native_interpret_vector_part (tree type, const unsigned char *bytes,
8755 			      unsigned int len, unsigned int npatterns,
8756 			      unsigned int nelts_per_pattern)
8757 {
8758   tree elt_type = TREE_TYPE (type);
8759   if (VECTOR_BOOLEAN_TYPE_P (type)
8760       && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8761     {
8762       /* This is the only case in which elements can be smaller than a byte.
8763 	 Element 0 is always in the lsb of the containing byte.  */
8764       unsigned int elt_bits = TYPE_PRECISION (elt_type);
8765       if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8766 	return NULL_TREE;
8767 
8768       tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8769       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8770 	{
8771 	  unsigned int bit_index = i * elt_bits;
8772 	  unsigned int byte_index = bit_index / BITS_PER_UNIT;
8773 	  unsigned int lsb = bit_index % BITS_PER_UNIT;
8774 	  builder.quick_push (bytes[byte_index] & (1 << lsb)
8775 			      ? build_all_ones_cst (elt_type)
8776 			      : build_zero_cst (elt_type));
8777 	}
8778       return builder.build ();
8779     }
8780 
8781   unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8782   if (elt_bytes * npatterns * nelts_per_pattern > len)
8783     return NULL_TREE;
8784 
8785   tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8786   for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8787     {
8788       tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8789       if (!elt)
8790 	return NULL_TREE;
8791       builder.quick_push (elt);
8792       bytes += elt_bytes;
8793     }
8794   return builder.build ();
8795 }
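
/* Editorial note (not part of GCC): with NPATTERNS == 2 and
   NELTS_PER_PATTERN == 3, the six elements read from BYTES are pushed in
   the interleaved order { a0, b0, a1, b1, a2, b2 }, yielding the two
   patterns { a0, a1, a2, ... } and { b0, b1, b2, ... } of the
   variable-length vector encoding.  */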
8796 
8797 /* Subroutine of native_interpret_expr.  Interpret the contents of
8798    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8799    If the buffer cannot be interpreted, return NULL_TREE.  */
8800 
8801 static tree
8802 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8803 {
8804   tree etype;
8805   unsigned int size;
8806   unsigned HOST_WIDE_INT count;
8807 
8808   etype = TREE_TYPE (type);
8809   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8810   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8811       || size * count > len)
8812     return NULL_TREE;
8813 
8814   return native_interpret_vector_part (type, ptr, len, count, 1);
8815 }
8816 
8817 
8818 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
8819    the buffer PTR of length LEN as a constant of type TYPE.  For
8820    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8821    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8822    return NULL_TREE.  */
8823 
8824 tree
8825 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8826 {
8827   switch (TREE_CODE (type))
8828     {
8829     case INTEGER_TYPE:
8830     case ENUMERAL_TYPE:
8831     case BOOLEAN_TYPE:
8832     case POINTER_TYPE:
8833     case REFERENCE_TYPE:
8834     case OFFSET_TYPE:
8835       return native_interpret_int (type, ptr, len);
8836 
8837     case REAL_TYPE:
8838       if (tree ret = native_interpret_real (type, ptr, len))
8839 	{
8840 	  /* For floating point values in composite modes, punt if this
8841 	     folding doesn't preserve bit representation.  As the mode doesn't
8842 	     have fixed precision while GCC pretends it does, there could be
8843 	     valid values that GCC can't really represent accurately.
8844 	     See PR95450.  Even for other modes, e.g. x86 XFmode can have some
8845 	     bit combinations which GCC doesn't preserve.  */
8846 	  unsigned char buf[24 * 2];
8847 	  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8848 	  int total_bytes = GET_MODE_SIZE (mode);
8849 	  memcpy (buf + 24, ptr, total_bytes);
8850 	  clear_type_padding_in_mask (type, buf + 24);
8851 	  if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8852 	      || memcmp (buf + 24, buf, total_bytes) != 0)
8853 	    return NULL_TREE;
8854 	  return ret;
8855 	}
8856       return NULL_TREE;
8857 
8858     case FIXED_POINT_TYPE:
8859       return native_interpret_fixed (type, ptr, len);
8860 
8861     case COMPLEX_TYPE:
8862       return native_interpret_complex (type, ptr, len);
8863 
8864     case VECTOR_TYPE:
8865       return native_interpret_vector (type, ptr, len);
8866 
8867     default:
8868       return NULL_TREE;
8869     }
8870 }
8871 
8872 /* Returns true if we can interpret the contents of a native encoding
8873    as TYPE.  */
8874 
8875 bool
8876 can_native_interpret_type_p (tree type)
8877 {
8878   switch (TREE_CODE (type))
8879     {
8880     case INTEGER_TYPE:
8881     case ENUMERAL_TYPE:
8882     case BOOLEAN_TYPE:
8883     case POINTER_TYPE:
8884     case REFERENCE_TYPE:
8885     case FIXED_POINT_TYPE:
8886     case REAL_TYPE:
8887     case COMPLEX_TYPE:
8888     case VECTOR_TYPE:
8889     case OFFSET_TYPE:
8890       return true;
8891     default:
8892       return false;
8893     }
8894 }
8895 
8896 /* Attempt to interpret an aggregate of TYPE from LEN bytes encoded in
8897    target byte order starting at PTR + OFF.  Does not handle unions.  */
8898 
8899 tree
8900 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8901 			    int len)
8902 {
8903   vec<constructor_elt, va_gc> *elts = NULL;
8904   if (TREE_CODE (type) == ARRAY_TYPE)
8905     {
8906       HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8907       if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8908 	return NULL_TREE;
8909 
8910       HOST_WIDE_INT cnt = 0;
8911       if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8912 	{
8913 	  if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8914 	    return NULL_TREE;
8915 	  cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8916 	}
8917       if (eltsz == 0)
8918 	cnt = 0;
8919       HOST_WIDE_INT pos = 0;
8920       for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8921 	{
8922 	  tree v = NULL_TREE;
8923 	  if (pos >= len || pos + eltsz > len)
8924 	    return NULL_TREE;
8925 	  if (can_native_interpret_type_p (TREE_TYPE (type)))
8926 	    {
8927 	      v = native_interpret_expr (TREE_TYPE (type),
8928 					 ptr + off + pos, eltsz);
8929 	      if (v == NULL_TREE)
8930 		return NULL_TREE;
8931 	    }
8932 	  else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8933 		   || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8934 	    v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8935 					    eltsz);
8936 	  if (v == NULL_TREE)
8937 	    return NULL_TREE;
8938 	  CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8939 	}
8940       return build_constructor (type, elts);
8941     }
8942   if (TREE_CODE (type) != RECORD_TYPE)
8943     return NULL_TREE;
8944   for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8945     {
8946       if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8947 	continue;
8948       tree fld = field;
8949       HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8950       int diff = 0;
8951       tree v = NULL_TREE;
8952       if (DECL_BIT_FIELD (field))
8953 	{
8954 	  fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8955 	  if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8956 	    {
8957 	      poly_int64 bitoffset;
8958 	      poly_uint64 field_offset, fld_offset;
8959 	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8960 		  && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8961 		bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8962 	      else
8963 		bitoffset = 0;
8964 	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8965 			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8966 	      diff = (TYPE_PRECISION (TREE_TYPE (fld))
8967 		      - TYPE_PRECISION (TREE_TYPE (field)));
8968 	      if (!bitoffset.is_constant (&bitoff)
8969 		  || bitoff < 0
8970 		  || bitoff > diff)
8971 		return NULL_TREE;
8972 	    }
8973 	  else
8974 	    {
8975 	      if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8976 		return NULL_TREE;
8977 	      int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8978 	      int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8979 	      bpos %= BITS_PER_UNIT;
8980 	      fieldsize += bpos;
8981 	      fieldsize += BITS_PER_UNIT - 1;
8982 	      fieldsize /= BITS_PER_UNIT;
8983 	      tree repr_type = find_bitfield_repr_type (fieldsize, len);
8984 	      if (repr_type == NULL_TREE)
8985 		return NULL_TREE;
8986 	      sz = int_size_in_bytes (repr_type);
8987 	      if (sz < 0 || sz > len)
8988 		return NULL_TREE;
8989 	      pos = int_byte_position (field);
8990 	      if (pos < 0 || pos > len || pos + fieldsize > len)
8991 		return NULL_TREE;
8992 	      HOST_WIDE_INT rpos;
8993 	      if (pos + sz <= len)
8994 		rpos = pos;
8995 	      else
8996 		{
8997 		  rpos = len - sz;
8998 		  gcc_assert (rpos <= pos);
8999 		}
9000 	      bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9001 	      pos = rpos;
9002 	      diff = (TYPE_PRECISION (repr_type)
9003 		      - TYPE_PRECISION (TREE_TYPE (field)));
9004 	      v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9005 	      if (v == NULL_TREE)
9006 		return NULL_TREE;
9007 	      fld = NULL_TREE;
9008 	    }
9009 	}
9010 
9011       if (fld)
9012 	{
9013 	  sz = int_size_in_bytes (TREE_TYPE (fld));
9014 	  if (sz < 0 || sz > len)
9015 	    return NULL_TREE;
9016 	  tree byte_pos = byte_position (fld);
9017 	  if (!tree_fits_shwi_p (byte_pos))
9018 	    return NULL_TREE;
9019 	  pos = tree_to_shwi (byte_pos);
9020 	  if (pos < 0 || pos > len || pos + sz > len)
9021 	    return NULL_TREE;
9022 	}
9023       if (fld == NULL_TREE)
9024 	/* Already handled above.  */;
9025       else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9026 	{
9027 	  v = native_interpret_expr (TREE_TYPE (fld),
9028 				     ptr + off + pos, sz);
9029 	  if (v == NULL_TREE)
9030 	    return NULL_TREE;
9031 	}
9032       else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9033 	       || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9034 	v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9035       if (v == NULL_TREE)
9036 	return NULL_TREE;
9037       if (fld != field)
9038 	{
9039 	  if (TREE_CODE (v) != INTEGER_CST)
9040 	    return NULL_TREE;
9041 
9042 	  /* FIXME: Figure out how to handle PDP endian bitfields.  */
9043 	  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9044 	    return NULL_TREE;
9045 	  if (!BYTES_BIG_ENDIAN)
9046 	    v = wide_int_to_tree (TREE_TYPE (field),
9047 				  wi::lrshift (wi::to_wide (v), bitoff));
9048 	  else
9049 	    v = wide_int_to_tree (TREE_TYPE (field),
9050 				  wi::lrshift (wi::to_wide (v),
9051 					       diff - bitoff));
9052 	}
9053       CONSTRUCTOR_APPEND_ELT (elts, field, v);
9054     }
9055   return build_constructor (type, elts);
9056 }
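
/* Editorial sketch (not part of GCC): for the bytes of

     struct S { char c; int i; };

   native_interpret_aggregate builds a CONSTRUCTOR with one element per
   FIELD_DECL, interpreting C at offset 0 and I at its byte position,
   and returns NULL_TREE for anything it cannot decode (e.g. unions or
   fields whose bytes fall outside LEN).  */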
9057 
9058 /* Routines for manipulating native_encode_expr encoded data when the
9059    encoded or extracted constant positions and/or sizes aren't byte-aligned.  */
9060 
9061 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9062    bits between adjacent elements.  AMNT should be within
9063    [0, BITS_PER_UNIT).
9064    Example, AMNT = 2:
9065    00011111|11100000 << 2 = 01111111|10000000
9066    PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */
9067 
9068 void
9069 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9070 			   unsigned int amnt)
9071 {
9072   if (amnt == 0)
9073     return;
9074 
9075   unsigned char carry_over = 0U;
9076   unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9077   unsigned char clear_mask = (~0U) << amnt;
9078 
9079   for (unsigned int i = 0; i < sz; i++)
9080     {
9081       unsigned prev_carry_over = carry_over;
9082       carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9083 
9084       ptr[i] <<= amnt;
9085       if (i != 0)
9086 	{
9087 	  ptr[i] &= clear_mask;
9088 	  ptr[i] |= prev_carry_over;
9089 	}
9090     }
9091 }
9092 
9093 /* Like shift_bytes_in_array_left but for big-endian.
9094    Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9095    bits between adjacent elements.  AMNT should be within
9096    [0, BITS_PER_UNIT).
9097    Example, AMNT = 2:
9098    00011111|11100000 >> 2 = 00000111|11111000
9099    PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */
9100 
9101 void
9102 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9103 			    unsigned int amnt)
9104 {
9105   if (amnt == 0)
9106     return;
9107 
9108   unsigned char carry_over = 0U;
9109   unsigned char carry_mask = ~(~0U << amnt);
9110 
9111   for (unsigned int i = 0; i < sz; i++)
9112     {
9113       unsigned prev_carry_over = carry_over;
9114       carry_over = ptr[i] & carry_mask;
9115 
9116       carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9117       ptr[i] >>= amnt;
9118       ptr[i] |= prev_carry_over;
9119     }
9120 }
9121 
9122 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9123    directly on the VECTOR_CST encoding, in a way that works for variable-
9124    length vectors.  Return the resulting VECTOR_CST on success or null
9125    on failure.  */
9126 
9127 static tree
9128 fold_view_convert_vector_encoding (tree type, tree expr)
9129 {
9130   tree expr_type = TREE_TYPE (expr);
9131   poly_uint64 type_bits, expr_bits;
9132   if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9133       || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9134     return NULL_TREE;
9135 
9136   poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9137   poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9138   unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9139   unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9140 
9141   /* We can only preserve the semantics of a stepped pattern if the new
9142      vector element is an integer of the same size.  */
9143   if (VECTOR_CST_STEPPED_P (expr)
9144       && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9145     return NULL_TREE;
9146 
9147   /* The number of bits needed to encode one element from every pattern
9148      of the original vector.  */
9149   unsigned int expr_sequence_bits
9150     = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9151 
9152   /* The number of bits needed to encode one element from every pattern
9153      of the result.  */
9154   unsigned int type_sequence_bits
9155     = least_common_multiple (expr_sequence_bits, type_elt_bits);
9156 
9157   /* Don't try to read more bytes than are available, which can happen
9158      for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9159      The general VIEW_CONVERT handling can cope with that case, so there's
9160      no point complicating things here.  */
9161   unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9162   unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9163 				    BITS_PER_UNIT);
9164   unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9165   if (known_gt (buffer_bits, expr_bits))
9166     return NULL_TREE;
9167 
9168   /* Get enough bytes of EXPR to form the new encoding.  */
9169   auto_vec<unsigned char, 128> buffer (buffer_bytes);
9170   buffer.quick_grow (buffer_bytes);
9171   if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9172 				 buffer_bits / expr_elt_bits)
9173       != (int) buffer_bytes)
9174     return NULL_TREE;
9175 
9176   /* Reencode the bytes as TYPE.  */
9177   unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9178   return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9179 				       type_npatterns, nelts_per_pattern);
9180 }
9181 
9182 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9183    TYPE at compile-time.  If we're unable to perform the conversion
9184    return NULL_TREE.  */
9185 
9186 static tree
9187 fold_view_convert_expr (tree type, tree expr)
9188 {
9189   /* We support up to 512-bit values (for V8DFmode).  */
9190   unsigned char buffer[64];
9191   int len;
9192 
9193   /* Check that the host and target are sane.  */
9194   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9195     return NULL_TREE;
9196 
9197   if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9198     if (tree res = fold_view_convert_vector_encoding (type, expr))
9199       return res;
9200 
9201   len = native_encode_expr (expr, buffer, sizeof (buffer));
9202   if (len == 0)
9203     return NULL_TREE;
9204 
9205   return native_interpret_expr (type, buffer, len);
9206 }
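
/* Example (editorial illustration, not part of GCC): folding
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST into its four target
   bytes and reinterprets them as a 32-bit integer, yielding the
   INTEGER_CST 0x3f800000 on targets using the IEEE single-precision
   format.  */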
9207 
9208 /* Build an expression for the address of T.  Folds away INDIRECT_REF
9209    to avoid confusing the gimplify process.  */
9210 
9211 tree
9212 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9213 {
9214   /* The size of the object is not relevant when talking about its address.  */
9215   if (TREE_CODE (t) == WITH_SIZE_EXPR)
9216     t = TREE_OPERAND (t, 0);
9217 
9218   if (TREE_CODE (t) == INDIRECT_REF)
9219     {
9220       t = TREE_OPERAND (t, 0);
9221 
9222       if (TREE_TYPE (t) != ptrtype)
9223 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9224     }
9225   else if (TREE_CODE (t) == MEM_REF
9226 	   && integer_zerop (TREE_OPERAND (t, 1)))
9227     {
9228       t = TREE_OPERAND (t, 0);
9229 
9230       if (TREE_TYPE (t) != ptrtype)
9231 	t = fold_convert_loc (loc, ptrtype, t);
9232     }
9233   else if (TREE_CODE (t) == MEM_REF
9234 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9235     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9236 			TREE_OPERAND (t, 0),
9237 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9238   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9239     {
9240       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9241 
9242       if (TREE_TYPE (t) != ptrtype)
9243 	t = fold_convert_loc (loc, ptrtype, t);
9244     }
9245   else
9246     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9247 
9248   return t;
9249 }
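
/* Editorial illustration (not part of GCC): given a pointer P, applying
   build_fold_addr_expr_with_type_loc to the INDIRECT_REF *P folds the
   address-of away and yields P itself (converted if the requested
   pointer type differs), instead of building the expression &*P.  */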
9250 
9251 /* Build an expression for the address of T.  */
9252 
9253 tree
9254 build_fold_addr_expr_loc (location_t loc, tree t)
9255 {
9256   tree ptrtype = build_pointer_type (TREE_TYPE (t));
9257 
9258   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9259 }
9260 
9261 /* Fold a unary expression of code CODE and type TYPE with operand
9262    OP0.  Return the folded expression if folding is successful.
9263    Otherwise, return NULL_TREE.  */
9264 
9265 tree
9266 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9267 {
9268   tree tem;
9269   tree arg0;
9270   enum tree_code_class kind = TREE_CODE_CLASS (code);
9271 
9272   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9273 	      && TREE_CODE_LENGTH (code) == 1);
9274 
9275   arg0 = op0;
9276   if (arg0)
9277     {
9278       if (CONVERT_EXPR_CODE_P (code)
9279 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9280 	{
9281 	  /* Don't use STRIP_NOPS, because signedness of argument type
9282 	     matters.  */
9283 	  STRIP_SIGN_NOPS (arg0);
9284 	}
9285       else
9286 	{
9287 	  /* Strip any conversions that don't change the mode.  This
9288 	     is safe for every expression, except for a comparison
9289 	     expression because its signedness is derived from its
9290 	     operands.
9291 
9292 	     Note that this is done as an internal manipulation within
9293 	     the constant folder, in order to find the simplest
9294 	     representation of the arguments so that their form can be
9295 	     studied.  In any case, the appropriate type conversions
9296 	     should be put back in the tree that will get out of the
9297 	     constant folder.  */
9298 	  STRIP_NOPS (arg0);
9299 	}
9300 
9301       if (CONSTANT_CLASS_P (arg0))
9302 	{
9303 	  tree tem = const_unop (code, type, arg0);
9304 	  if (tem)
9305 	    {
9306 	      if (TREE_TYPE (tem) != type)
9307 		tem = fold_convert_loc (loc, type, tem);
9308 	      return tem;
9309 	    }
9310 	}
9311     }
9312 
9313   tem = generic_simplify (loc, code, type, op0);
9314   if (tem)
9315     return tem;
9316 
9317   if (TREE_CODE_CLASS (code) == tcc_unary)
9318     {
9319       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9320 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9321 		       fold_build1_loc (loc, code, type,
9322 				    fold_convert_loc (loc, TREE_TYPE (op0),
9323 						      TREE_OPERAND (arg0, 1))));
9324       else if (TREE_CODE (arg0) == COND_EXPR)
9325 	{
9326 	  tree arg01 = TREE_OPERAND (arg0, 1);
9327 	  tree arg02 = TREE_OPERAND (arg0, 2);
9328 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9329 	    arg01 = fold_build1_loc (loc, code, type,
9330 				 fold_convert_loc (loc,
9331 						   TREE_TYPE (op0), arg01));
9332 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9333 	    arg02 = fold_build1_loc (loc, code, type,
9334 				 fold_convert_loc (loc,
9335 						   TREE_TYPE (op0), arg02));
9336 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9337 			     arg01, arg02);
9338 
9339 	  /* If this was a conversion, and all we did was to move it
9340 	     inside the COND_EXPR, bring it back out.  But leave it if
9341 	     it is a conversion from integer to integer and the
9342 	     result precision is no wider than a word since such a
9343 	     conversion is cheap and may be optimized away by combine,
9344 	     while it couldn't if it were outside the COND_EXPR.  Then return
9345 	     so we don't get into an infinite recursion loop taking the
9346 	     conversion out and then back in.  */
9347 
9348 	  if ((CONVERT_EXPR_CODE_P (code)
9349 	       || code == NON_LVALUE_EXPR)
9350 	      && TREE_CODE (tem) == COND_EXPR
9351 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9352 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9353 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9354 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9355 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9356 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9357 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9358 		     && (INTEGRAL_TYPE_P
9359 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9360 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9361 		  || flag_syntax_only))
9362 	    tem = build1_loc (loc, code, type,
9363 			      build3 (COND_EXPR,
9364 				      TREE_TYPE (TREE_OPERAND
9365 						 (TREE_OPERAND (tem, 1), 0)),
9366 				      TREE_OPERAND (tem, 0),
9367 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9368 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
9369 						    0)));
9370 	  return tem;
9371 	}
9372    }
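  /* For illustration (hypothetical operands): with int x, y the
     distribution above turns -(b ? x : y) into b ? -x : -y, folding
     the unary operation into both arms of the COND_EXPR.  */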
9373 
9374   switch (code)
9375     {
9376     case NON_LVALUE_EXPR:
9377       if (!maybe_lvalue_p (op0))
9378 	return fold_convert_loc (loc, type, op0);
9379       return NULL_TREE;
9380 
9381     CASE_CONVERT:
9382     case FLOAT_EXPR:
9383     case FIX_TRUNC_EXPR:
9384       if (COMPARISON_CLASS_P (op0))
9385 	{
9386 	  /* If we have (type) (a CMP b) and type is an integral type, return
9387 	     new expression involving the new type.  Canonicalize
9388 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9389 	     non-integral type.
9390 	     Do not fold the result as that would not simplify further, also
9391 	     folding again results in recursions.  */
9392 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
9393 	    return build2_loc (loc, TREE_CODE (op0), type,
9394 			       TREE_OPERAND (op0, 0),
9395 			       TREE_OPERAND (op0, 1));
9396 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9397 		   && TREE_CODE (type) != VECTOR_TYPE)
9398 	    return build3_loc (loc, COND_EXPR, type, op0,
9399 			       constant_boolean_node (true, type),
9400 			       constant_boolean_node (false, type));
9401 	}
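      /* For illustration (hypothetical int operands a and b):

	     (_Bool) (a < b)  ->  a < b	    computed directly in _Bool
	     (float) (a < b)  ->  (a < b) ? 1.0f : 0.0f

	 while integral result types other than boolean fall through to
	 the transformations below.  */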
9402 
9403       /* Handle (T *)&A.B.C for A being of type T and B and C
9404 	 living at offset zero.  This occurs frequently in
9405 	 C++ upcasting and then accessing the base.  */
9406       if (TREE_CODE (op0) == ADDR_EXPR
9407 	  && POINTER_TYPE_P (type)
9408 	  && handled_component_p (TREE_OPERAND (op0, 0)))
9409         {
9410 	  poly_int64 bitsize, bitpos;
9411 	  tree offset;
9412 	  machine_mode mode;
9413 	  int unsignedp, reversep, volatilep;
9414 	  tree base
9415 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9416 				   &offset, &mode, &unsignedp, &reversep,
9417 				   &volatilep);
9418 	  /* If the reference was to a (constant) zero offset, we can use
9419 	     the address of the base if it has the same base type
9420 	     as the result type and the pointer type is unqualified.  */
9421 	  if (!offset
9422 	      && known_eq (bitpos, 0)
9423 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9424 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9425 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9426 	    return fold_convert_loc (loc, type,
9427 				     build_fold_addr_expr_loc (loc, base));
9428         }
9429 
9430       if (TREE_CODE (op0) == MODIFY_EXPR
9431 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9432 	  /* Detect assigning a bitfield.  */
9433 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9434 	       && DECL_BIT_FIELD
9435 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9436 	{
9437 	  /* Don't leave an assignment inside a conversion
9438 	     unless assigning a bitfield.  */
9439 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9440 	  /* First do the assignment, then return converted constant.  */
9441 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9442 	  suppress_warning (tem /* What warning? */);
9443 	  TREE_USED (tem) = 1;
9444 	  return tem;
9445 	}
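      /* For illustration (hypothetical operand): with int x the rule
	 above rewrites (long) (x = 5) as (x = 5, (long) 5), performing
	 the store first and then yielding the converted constant.  */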
9446 
9447       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9448 	 constant (if x has signed type, the sign bit cannot be set
9449 	 in c).  This folds extension into the BIT_AND_EXPR.
9450 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9451 	 very likely don't have maximal range for their precision and this
9452 	 transformation effectively doesn't preserve non-maximal ranges.  */
9453       if (TREE_CODE (type) == INTEGER_TYPE
9454 	  && TREE_CODE (op0) == BIT_AND_EXPR
9455 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9456 	{
9457 	  tree and_expr = op0;
9458 	  tree and0 = TREE_OPERAND (and_expr, 0);
9459 	  tree and1 = TREE_OPERAND (and_expr, 1);
9460 	  int change = 0;
9461 
9462 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9463 	      || (TYPE_PRECISION (type)
9464 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9465 	    change = 1;
9466 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
9467 		   <= HOST_BITS_PER_WIDE_INT
9468 		   && tree_fits_uhwi_p (and1))
9469 	    {
9470 	      unsigned HOST_WIDE_INT cst;
9471 
9472 	      cst = tree_to_uhwi (and1);
9473 	      cst &= HOST_WIDE_INT_M1U
9474 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9475 	      change = (cst == 0);
9476 	      if (change
9477 		  && !flag_syntax_only
9478 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9479 		      == ZERO_EXTEND))
9480 		{
9481 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
9482 		  and0 = fold_convert_loc (loc, uns, and0);
9483 		  and1 = fold_convert_loc (loc, uns, and1);
9484 		}
9485 	    }
9486 	  if (change)
9487 	    {
9488 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
9489 				    TREE_OVERFLOW (and1));
9490 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
9491 				      fold_convert_loc (loc, type, and0), tem);
9492 	    }
9493 	}
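      /* For illustration (hypothetical operand): with int x,

	     (long) (x & 0x7f)  ->  (long) x & 0x7f

	 is safe because the sign bit of the mask is clear, so widening
	 before or after the AND yields the same value.  */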
9494 
9495       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9496 	 cast (T1)X will fold away.  We assume that this happens when X itself
9497 	 is a cast.  */
9498       if (POINTER_TYPE_P (type)
9499 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9500 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9501 	{
9502 	  tree arg00 = TREE_OPERAND (arg0, 0);
9503 	  tree arg01 = TREE_OPERAND (arg0, 1);
9504 
9505 	  /* If -fsanitize=alignment, avoid this optimization in GENERIC
9506 	     when the pointed type needs higher alignment than
9507 	     the p+ first operand's pointed type.  */
9508 	  if (!in_gimple_form
9509 	      && sanitize_flags_p (SANITIZE_ALIGNMENT)
9510 	      && (min_align_of_type (TREE_TYPE (type))
9511 		  > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9512 	    return NULL_TREE;
9513 
9514 	  /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9515 	     when type is a reference type and arg00's type is not,
9516 	     because arg00 could be validly nullptr and if arg01 doesn't return,
9517 	     we don't want false positive binding of reference to nullptr.  */
9518 	  if (TREE_CODE (type) == REFERENCE_TYPE
9519 	      && !in_gimple_form
9520 	      && sanitize_flags_p (SANITIZE_NULL)
9521 	      && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9522 	    return NULL_TREE;
9523 
9524 	  arg00 = fold_convert_loc (loc, type, arg00);
9525 	  return fold_build_pointer_plus_loc (loc, arg00, arg01);
9526 	}
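      /* For illustration (hypothetical operand): given void *p,

	     (char *) ((int *) p p+ 4)  ->  (char *) p p+ 4

	 because applying the outer cast to P directly folds away the
	 inner conversion.  */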
9527 
9528       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9529 	 of the same precision, and X is an integer type not narrower than
9530 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
9531       if (INTEGRAL_TYPE_P (type)
9532 	  && TREE_CODE (op0) == BIT_NOT_EXPR
9533 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9534 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9535 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9536 	{
9537 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9538 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9539 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9540 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9541 				fold_convert_loc (loc, type, tem));
9542 	}
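      /* For illustration (hypothetical operand): with int x,

	     (int) ~(unsigned) x  ->  ~x

	 since int and unsigned have equal precision and the inner cast
	 is not an extension.  */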
9543 
9544       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9545 	 type of X and Y (integer types only).  */
9546       if (INTEGRAL_TYPE_P (type)
9547 	  && TREE_CODE (op0) == MULT_EXPR
9548 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9549 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9550 	{
9551 	  /* Be careful not to introduce new overflows.  */
9552 	  tree mult_type;
9553           if (TYPE_OVERFLOW_WRAPS (type))
9554 	    mult_type = type;
9555 	  else
9556 	    mult_type = unsigned_type_for (type);
9557 
9558 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9559 	    {
9560 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9561 				 fold_convert_loc (loc, mult_type,
9562 						   TREE_OPERAND (op0, 0)),
9563 				 fold_convert_loc (loc, mult_type,
9564 						   TREE_OPERAND (op0, 1)));
9565 	      return fold_convert_loc (loc, type, tem);
9566 	    }
9567 	}
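      /* For illustration (hypothetical operands): with int x, y and a
	 signed result type,

	     (short) (x * y)
	       ->  (short) ((unsigned short) x * (unsigned short) y)

	 so that the narrower multiplication cannot introduce a new
	 signed overflow.  */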
9568 
9569       return NULL_TREE;
9570 
9571     case VIEW_CONVERT_EXPR:
9572       if (TREE_CODE (op0) == MEM_REF)
9573         {
9574 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9575 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9576 	  tem = fold_build2_loc (loc, MEM_REF, type,
9577 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9578 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9579 	  return tem;
9580 	}
9581 
9582       return NULL_TREE;
9583 
9584     case NEGATE_EXPR:
9585       tem = fold_negate_expr (loc, arg0);
9586       if (tem)
9587 	return fold_convert_loc (loc, type, tem);
9588       return NULL_TREE;
9589 
9590     case ABS_EXPR:
9591       /* Convert fabs((double)float) into (double)fabsf(float).  */
9592       if (TREE_CODE (arg0) == NOP_EXPR
9593 	  && TREE_CODE (type) == REAL_TYPE)
9594 	{
9595 	  tree targ0 = strip_float_extensions (arg0);
9596 	  if (targ0 != arg0)
9597 	    return fold_convert_loc (loc, type,
9598 				     fold_build1_loc (loc, ABS_EXPR,
9599 						  TREE_TYPE (targ0),
9600 						  targ0));
9601 	}
9602       return NULL_TREE;
9603 
9604     case BIT_NOT_EXPR:
9605       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
9606       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9607 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9608 				    fold_convert_loc (loc, type,
9609 						      TREE_OPERAND (arg0, 0)))))
9610 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9611 				fold_convert_loc (loc, type,
9612 						  TREE_OPERAND (arg0, 1)));
9613       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9614 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9615 			       	     fold_convert_loc (loc, type,
9616 						       TREE_OPERAND (arg0, 1)))))
9617 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9618 			    fold_convert_loc (loc, type,
9619 					      TREE_OPERAND (arg0, 0)), tem);
9620 
9621       return NULL_TREE;
9622 
9623     case TRUTH_NOT_EXPR:
9624       /* Note that the operand of this must be an int
9625 	 and its values must be 0 or 1.
9626 	 ("true" is a fixed value perhaps depending on the language,
9627 	 but we don't handle values other than 1 correctly yet.)  */
9628       tem = fold_truth_not_expr (loc, arg0);
9629       if (!tem)
9630 	return NULL_TREE;
9631       return fold_convert_loc (loc, type, tem);
9632 
9633     case INDIRECT_REF:
9634       /* Fold *&X to X if X is an lvalue.  */
9635       if (TREE_CODE (op0) == ADDR_EXPR)
9636 	{
9637 	  tree op00 = TREE_OPERAND (op0, 0);
9638 	  if ((VAR_P (op00)
9639 	       || TREE_CODE (op00) == PARM_DECL
9640 	       || TREE_CODE (op00) == RESULT_DECL)
9641 	      && !TREE_READONLY (op00))
9642 	    return op00;
9643 	}
9644       return NULL_TREE;
9645 
9646     default:
9647       return NULL_TREE;
9648     } /* switch (code) */
9649 }
9650 
9651 
9652 /* If the operation was a conversion do _not_ mark a resulting constant
9653    with TREE_OVERFLOW if the original constant was not.  These conversions
9654    have implementation defined behavior and retaining the TREE_OVERFLOW
9655    flag here would confuse later passes such as VRP.  */
9656 tree
9657 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9658 				tree type, tree op0)
9659 {
9660   tree res = fold_unary_loc (loc, code, type, op0);
9661   if (res
9662       && TREE_CODE (res) == INTEGER_CST
9663       && TREE_CODE (op0) == INTEGER_CST
9664       && CONVERT_EXPR_CODE_P (code))
9665     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9666 
9667   return res;
9668 }
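/* For illustration (a sketch of the intent, not an exhaustive rule):
   folding (int) 4294967295u yields the INTEGER_CST -1; the conversion
   has merely implementation-defined behavior, so the result's
   TREE_OVERFLOW flag is copied from the original operand rather than
   being left set by the constant folder.  */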
9669 
9670 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9671    operands OP0 and OP1.  LOC is the location of the resulting expression.
9672    ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9673    Return the folded expression if folding is successful.  Otherwise,
9674    return NULL_TREE.  */
9675 static tree
9676 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9677 		  tree arg0, tree arg1, tree op0, tree op1)
9678 {
9679   tree tem;
9680 
9681   /* We only do these simplifications if we are optimizing.  */
9682   if (!optimize)
9683     return NULL_TREE;
9684 
9685   /* Check for things like (A || B) && (A || C).  We can convert this
9686      to A || (B && C).  Note that either operator can be any of the four
9687      truth and/or operations and the transformation will still be
9688      valid.   Also note that we only care about order for the
9689      ANDIF and ORIF operators.  If B contains side effects, this
9690      might change the truth-value of A.  */
9691   if (TREE_CODE (arg0) == TREE_CODE (arg1)
9692       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9693 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9694 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
9695 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9696       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9697     {
9698       tree a00 = TREE_OPERAND (arg0, 0);
9699       tree a01 = TREE_OPERAND (arg0, 1);
9700       tree a10 = TREE_OPERAND (arg1, 0);
9701       tree a11 = TREE_OPERAND (arg1, 1);
9702       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9703 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9704 			 && (code == TRUTH_AND_EXPR
9705 			     || code == TRUTH_OR_EXPR));
9706 
9707       if (operand_equal_p (a00, a10, 0))
9708 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9709 			    fold_build2_loc (loc, code, type, a01, a11));
9710       else if (commutative && operand_equal_p (a00, a11, 0))
9711 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9712 			    fold_build2_loc (loc, code, type, a01, a10));
9713       else if (commutative && operand_equal_p (a01, a10, 0))
9714 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9715 			    fold_build2_loc (loc, code, type, a00, a11));
9716 
9717       /* This case is tricky because we must either have commutative
9718 	 operators or else A10 must not have side-effects.  */
9719 
9720       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9721 	       && operand_equal_p (a01, a11, 0))
9722 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
9723 			    fold_build2_loc (loc, code, type, a00, a10),
9724 			    a01);
9725     }
9726 
9727   /* See if we can build a range comparison.  */
9728   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9729     return tem;
9730 
9731   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9732       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9733     {
9734       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9735       if (tem)
9736 	return fold_build2_loc (loc, code, type, tem, arg1);
9737     }
9738 
9739   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9740       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9741     {
9742       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9743       if (tem)
9744 	return fold_build2_loc (loc, code, type, arg0, tem);
9745     }
9746 
9747   /* Check for the possibility of merging component references.  If our
9748      lhs is another similar operation, try to merge its rhs with our
9749      rhs.  Then try to merge our lhs and rhs.  */
9750   if (TREE_CODE (arg0) == code
9751       && (tem = fold_truth_andor_1 (loc, code, type,
9752 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
9753     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9754 
9755   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9756     return tem;
9757 
9758   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9759   if (param_logical_op_non_short_circuit != -1)
9760     logical_op_non_short_circuit
9761       = param_logical_op_non_short_circuit;
9762   if (logical_op_non_short_circuit
9763       && !sanitize_coverage_p ()
9764       && (code == TRUTH_AND_EXPR
9765           || code == TRUTH_ANDIF_EXPR
9766           || code == TRUTH_OR_EXPR
9767           || code == TRUTH_ORIF_EXPR))
9768     {
9769       enum tree_code ncode, icode;
9770 
9771       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9772 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9773       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9774 
9775       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9776 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9777 	 We don't want to pack more than two leaves into a non-IF AND/OR
9778 	 expression.
9779 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
9780 	 and isn't equal to IF-CODE, then we don't want to add the
9781 	 right-hand operand.  If the inner right-hand side of the
9782 	 left-hand operand has side effects, or isn't simple, then we
9783 	 can't add to it, as otherwise we might destroy the if-sequence.  */
9784       if (TREE_CODE (arg0) == icode
9785 	  && simple_operand_p_2 (arg1)
9786 	  /* Needed for sequence points to handle trappings, and
9787 	     side-effects.  */
9788 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9789 	{
9790 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9791 				 arg1);
9792 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9793 				  tem);
9794 	}
9795 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9796 	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
9797       else if (TREE_CODE (arg1) == icode
9798 	  && simple_operand_p_2 (arg0)
9799 	  /* Needed for sequence points to handle trappings, and
9800 	     side-effects.  */
9801 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9802 	{
9803 	  tem = fold_build2_loc (loc, ncode, type,
9804 				 arg0, TREE_OPERAND (arg1, 0));
9805 	  return fold_build2_loc (loc, icode, type, tem,
9806 				  TREE_OPERAND (arg1, 1));
9807 	}
9808       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9809 	 into (A OR B).
9810 	 For sequence point consistency, we need to check for trapping,
9811 	 and side-effects.  */
9812       else if (code == icode && simple_operand_p_2 (arg0)
9813                && simple_operand_p_2 (arg1))
9814 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
9815     }
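  /* For illustration (hypothetical operands a, b, c, all simple and
     non-trapping), in C-like notation:

	 (a && b) && c  ->  a && (b & c)
	 a && b	        ->  a & b

     where & stands for the unconditional TRUTH_AND_EXPR; the packing
     is only done when the packed operands are safe to evaluate
     unconditionally.  */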
9816 
9817   return NULL_TREE;
9818 }
9819 
9820 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9821    by changing CODE to reduce the magnitude of constants involved in
9822    ARG0 of the comparison.
9823    Returns a canonicalized comparison tree if a simplification was
9824    possible, otherwise returns NULL_TREE.
9825    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9826    valid if signed overflow is undefined.  */
9827 
9828 static tree
9829 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9830 				 tree arg0, tree arg1,
9831 				 bool *strict_overflow_p)
9832 {
9833   enum tree_code code0 = TREE_CODE (arg0);
9834   tree t, cst0 = NULL_TREE;
9835   int sgn0;
9836 
9837   /* Match A +- CST code arg1.  We can change this only if overflow
9838      is undefined.  */
9839   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9840 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9841 	/* In principle pointers also have undefined overflow behavior,
9842 	   but that causes problems elsewhere.  */
9843 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
9844 	&& (code0 == MINUS_EXPR
9845 	    || code0 == PLUS_EXPR)
9846 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9847     return NULL_TREE;
9848 
9849   /* Identify the constant in arg0 and its sign.  */
9850   cst0 = TREE_OPERAND (arg0, 1);
9851   sgn0 = tree_int_cst_sgn (cst0);
9852 
9853   /* Overflowed constants and zero will cause problems.  */
9854   if (integer_zerop (cst0)
9855       || TREE_OVERFLOW (cst0))
9856     return NULL_TREE;
9857 
9858   /* See if we can reduce the magnitude of the constant in
9859      arg0 by changing the comparison code.  */
9860   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9861   if (code == LT_EXPR
9862       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9863     code = LE_EXPR;
9864   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9865   else if (code == GT_EXPR
9866 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9867     code = GE_EXPR;
9868   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9869   else if (code == LE_EXPR
9870 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9871     code = LT_EXPR;
9872   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9873   else if (code == GE_EXPR
9874 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9875     code = GT_EXPR;
9876   else
9877     return NULL_TREE;
9878   *strict_overflow_p = true;
9879 
9880   /* Now build the constant reduced in magnitude.  But not if that
9881      would produce one outside of its type's range.  */
9882   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9883       && ((sgn0 == 1
9884 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9885 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9886 	  || (sgn0 == -1
9887 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9888 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9889     return NULL_TREE;
9890 
9891   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9892 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
9893   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9894   t = fold_convert (TREE_TYPE (arg1), t);
9895 
9896   return fold_build2_loc (loc, code, type, t, arg1);
9897 }
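/* For illustration (assuming signed overflow is undefined): for signed
   int x and y the canonicalization above gives

	x - 1 < y   ->   x - 0 <= y   ->   x <= y

   reducing the magnitude of the constant while trading a strict
   comparison for a non-strict one.  */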
9898 
9899 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9900    overflow further.  Try to decrease the magnitude of constants involved
9901    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9902    and put sole constants at the second argument position.
9903    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9904 
9905 static tree
9906 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9907 			       tree arg0, tree arg1)
9908 {
9909   tree t;
9910   bool strict_overflow_p;
9911   const char * const warnmsg = G_("assuming signed overflow does not occur "
9912 				  "when reducing constant in comparison");
9913 
9914   /* Try canonicalization by simplifying arg0.  */
9915   strict_overflow_p = false;
9916   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9917 				       &strict_overflow_p);
9918   if (t)
9919     {
9920       if (strict_overflow_p)
9921 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9922       return t;
9923     }
9924 
9925   /* Try canonicalization by simplifying arg1 using the swapped
9926      comparison.  */
9927   code = swap_tree_comparison (code);
9928   strict_overflow_p = false;
9929   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9930 				       &strict_overflow_p);
9931   if (t && strict_overflow_p)
9932     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9933   return t;
9934 }
9935 
9936 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9937    space.  This is used to avoid issuing overflow warnings for
9938    expressions like &p->x which cannot wrap.  */
9939 
9940 static bool
9941 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9942 {
9943   if (!POINTER_TYPE_P (TREE_TYPE (base)))
9944     return true;
9945 
9946   if (maybe_lt (bitpos, 0))
9947     return true;
9948 
9949   poly_wide_int wi_offset;
9950   int precision = TYPE_PRECISION (TREE_TYPE (base));
9951   if (offset == NULL_TREE)
9952     wi_offset = wi::zero (precision);
9953   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9954     return true;
9955   else
9956     wi_offset = wi::to_poly_wide (offset);
9957 
9958   wi::overflow_type overflow;
9959   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9960 				  precision);
9961   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9962   if (overflow)
9963     return true;
9964 
9965   poly_uint64 total_hwi, size;
9966   if (!total.to_uhwi (&total_hwi)
9967       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9968 			   &size)
9969       || known_eq (size, 0U))
9970     return true;
9971 
9972   if (known_le (total_hwi, size))
9973     return false;
9974 
9975   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9976      array.  */
9977   if (TREE_CODE (base) == ADDR_EXPR
9978       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9979 			  &size)
9980       && maybe_ne (size, 0U)
9981       && known_le (total_hwi, size))
9982     return false;
9983 
9984   return true;
9985 }
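/* For illustration (hypothetical declaration, typical 32-bit int): for
   struct S { int a; int b; } *p, the address &p->b has BITPOS 32 and no
   variable OFFSET; the resulting 4 bytes lie within sizeof (struct S)
   == 8, so the address cannot wrap and false is returned.  */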
9986 
9987 /* Return a positive integer when the symbol DECL is known to have
9988    a nonzero address, zero when it's known not to (e.g., it's a weak
9989    symbol), and a negative integer when the symbol is not yet in the
9990    symbol table and so whether or not its address is zero is unknown.
9991    For function-local objects, always return a positive integer.  */
9992 static int
9993 maybe_nonzero_address (tree decl)
9994 {
9995   /* Normally, don't do anything for variables and functions before symtab is
9996      built; it is quite possible that DECL will be declared weak later.
9997      But if folding_initializer, we need a constant answer now, so create
9998      the symtab entry and prevent later weak declaration.  */
9999   if (DECL_P (decl) && decl_in_symtab_p (decl))
10000     if (struct symtab_node *symbol
10001 	= (folding_initializer
10002 	   ? symtab_node::get_create (decl)
10003 	   : symtab_node::get (decl)))
10004       return symbol->nonzero_address ();
10005 
10006   /* Function local objects are never NULL.  */
10007   if (DECL_P (decl)
10008       && (DECL_CONTEXT (decl)
10009       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10010       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10011     return 1;
10012 
10013   return -1;
10014 }
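/* For illustration (hypothetical declarations): for a weak symbol such
   as extern int w __attribute__ ((weak)), the symtab node may report
   that the address can be null, yielding 0, whereas an automatic
   variable of the current function always yields 1.  */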
10015 
10016 /* Subroutine of fold_binary.  This routine performs all of the
10017    transformations that are common to the equality/inequality
10018    operators (EQ_EXPR and NE_EXPR) and the ordering operators
10019    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
10020    fold_binary should call fold_binary.  Fold a comparison with
10021    tree code CODE and type TYPE with operands OP0 and OP1.  Return
10022    the folded comparison or NULL_TREE.  */
10023 
10024 static tree
10025 fold_comparison (location_t loc, enum tree_code code, tree type,
10026 		 tree op0, tree op1)
10027 {
10028   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10029   tree arg0, arg1, tem;
10030 
10031   arg0 = op0;
10032   arg1 = op1;
10033 
10034   STRIP_SIGN_NOPS (arg0);
10035   STRIP_SIGN_NOPS (arg1);
10036 
10037   /* For comparisons of pointers we can decompose them into a compile-time
10038      comparison of the base objects and the offsets into the object.
10039      This requires at least one operand being an ADDR_EXPR or a
10040      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
10041   if (POINTER_TYPE_P (TREE_TYPE (arg0))
10042       && (TREE_CODE (arg0) == ADDR_EXPR
10043 	  || TREE_CODE (arg1) == ADDR_EXPR
10044 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10045 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10046     {
10047       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10048       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10049       machine_mode mode;
10050       int volatilep, reversep, unsignedp;
10051       bool indirect_base0 = false, indirect_base1 = false;
10052 
10053       /* Get base and offset for the access.  Strip ADDR_EXPR for
10054 	 get_inner_reference, but put it back by stripping INDIRECT_REF
10055 	 off the base object if possible.  indirect_baseN will be true
10056 	 if baseN is not an address but refers to the object itself.  */
10057       base0 = arg0;
10058       if (TREE_CODE (arg0) == ADDR_EXPR)
10059 	{
10060 	  base0
10061 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
10062 				   &bitsize, &bitpos0, &offset0, &mode,
10063 				   &unsignedp, &reversep, &volatilep);
10064 	  if (TREE_CODE (base0) == INDIRECT_REF)
10065 	    base0 = TREE_OPERAND (base0, 0);
10066 	  else
10067 	    indirect_base0 = true;
10068 	}
10069       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10070 	{
10071 	  base0 = TREE_OPERAND (arg0, 0);
10072 	  STRIP_SIGN_NOPS (base0);
10073 	  if (TREE_CODE (base0) == ADDR_EXPR)
10074 	    {
10075 	      base0
10076 		= get_inner_reference (TREE_OPERAND (base0, 0),
10077 				       &bitsize, &bitpos0, &offset0, &mode,
10078 				       &unsignedp, &reversep, &volatilep);
10079 	      if (TREE_CODE (base0) == INDIRECT_REF)
10080 		base0 = TREE_OPERAND (base0, 0);
10081 	      else
10082 		indirect_base0 = true;
10083 	    }
10084 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
10085 	    offset0 = TREE_OPERAND (arg0, 1);
10086 	  else
10087 	    offset0 = size_binop (PLUS_EXPR, offset0,
10088 				  TREE_OPERAND (arg0, 1));
10089 	  if (poly_int_tree_p (offset0))
10090 	    {
10091 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10092 					      TYPE_PRECISION (sizetype));
10093 	      tem <<= LOG2_BITS_PER_UNIT;
10094 	      tem += bitpos0;
10095 	      if (tem.to_shwi (&bitpos0))
10096 		offset0 = NULL_TREE;
10097 	    }
10098 	}
10099 
10100       base1 = arg1;
10101       if (TREE_CODE (arg1) == ADDR_EXPR)
10102 	{
10103 	  base1
10104 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
10105 				   &bitsize, &bitpos1, &offset1, &mode,
10106 				   &unsignedp, &reversep, &volatilep);
10107 	  if (TREE_CODE (base1) == INDIRECT_REF)
10108 	    base1 = TREE_OPERAND (base1, 0);
10109 	  else
10110 	    indirect_base1 = true;
10111 	}
10112       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10113 	{
10114 	  base1 = TREE_OPERAND (arg1, 0);
10115 	  STRIP_SIGN_NOPS (base1);
10116 	  if (TREE_CODE (base1) == ADDR_EXPR)
10117 	    {
10118 	      base1
10119 		= get_inner_reference (TREE_OPERAND (base1, 0),
10120 				       &bitsize, &bitpos1, &offset1, &mode,
10121 				       &unsignedp, &reversep, &volatilep);
10122 	      if (TREE_CODE (base1) == INDIRECT_REF)
10123 		base1 = TREE_OPERAND (base1, 0);
10124 	      else
10125 		indirect_base1 = true;
10126 	    }
10127 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
10128 	    offset1 = TREE_OPERAND (arg1, 1);
10129 	  else
10130 	    offset1 = size_binop (PLUS_EXPR, offset1,
10131 				  TREE_OPERAND (arg1, 1));
10132 	  if (poly_int_tree_p (offset1))
10133 	    {
10134 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10135 					      TYPE_PRECISION (sizetype));
10136 	      tem <<= LOG2_BITS_PER_UNIT;
10137 	      tem += bitpos1;
10138 	      if (tem.to_shwi (&bitpos1))
10139 		offset1 = NULL_TREE;
10140 	    }
10141 	}
10142 
10143       /* If we have equivalent bases we might be able to simplify.  */
10144       if (indirect_base0 == indirect_base1
10145 	  && operand_equal_p (base0, base1,
10146 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
10147 	{
10148 	  /* We can fold this expression to a constant if the non-constant
10149 	     offset parts are equal.  */
10150 	  if ((offset0 == offset1
10151 	       || (offset0 && offset1
10152 		   && operand_equal_p (offset0, offset1, 0)))
10153 	      && (equality_code
10154 		  || (indirect_base0
10155 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10156 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10157 	    {
10158 	      if (!equality_code
10159 		  && maybe_ne (bitpos0, bitpos1)
10160 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
10161 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
10162 		fold_overflow_warning (("assuming pointer wraparound does not "
10163 					"occur when comparing P +- C1 with "
10164 					"P +- C2"),
10165 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
10166 
10167 	      switch (code)
10168 		{
10169 		case EQ_EXPR:
10170 		  if (known_eq (bitpos0, bitpos1))
10171 		    return constant_boolean_node (true, type);
10172 		  if (known_ne (bitpos0, bitpos1))
10173 		    return constant_boolean_node (false, type);
10174 		  break;
10175 		case NE_EXPR:
10176 		  if (known_ne (bitpos0, bitpos1))
10177 		    return constant_boolean_node (true, type);
10178 		  if (known_eq (bitpos0, bitpos1))
10179 		    return constant_boolean_node (false, type);
10180 		  break;
10181 		case LT_EXPR:
10182 		  if (known_lt (bitpos0, bitpos1))
10183 		    return constant_boolean_node (true, type);
10184 		  if (known_ge (bitpos0, bitpos1))
10185 		    return constant_boolean_node (false, type);
10186 		  break;
10187 		case LE_EXPR:
10188 		  if (known_le (bitpos0, bitpos1))
10189 		    return constant_boolean_node (true, type);
10190 		  if (known_gt (bitpos0, bitpos1))
10191 		    return constant_boolean_node (false, type);
10192 		  break;
10193 		case GE_EXPR:
10194 		  if (known_ge (bitpos0, bitpos1))
10195 		    return constant_boolean_node (true, type);
10196 		  if (known_lt (bitpos0, bitpos1))
10197 		    return constant_boolean_node (false, type);
10198 		  break;
10199 		case GT_EXPR:
10200 		  if (known_gt (bitpos0, bitpos1))
10201 		    return constant_boolean_node (true, type);
10202 		  if (known_le (bitpos0, bitpos1))
10203 		    return constant_boolean_node (false, type);
10204 		  break;
10205 		default:;
10206 		}
10207 	    }
10208 	  /* We can simplify the comparison to a comparison of the variable
10209 	     offset parts if the constant offset parts are equal.
10210 	     Be careful to use signed sizetype here because otherwise we
10211 	     mess with array offsets in the wrong way.  This is possible
10212 	     because pointer arithmetic is restricted to remain within an
10213 	     object and overflow on pointer differences is undefined as of
10214 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
10215 	  else if (known_eq (bitpos0, bitpos1)
10216 		   && (equality_code
10217 		       || (indirect_base0
10218 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10219 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10220 	    {
10221 	      /* By converting to signed sizetype we cover middle-end pointer
10222 	         arithmetic which operates on unsigned pointer types of size
10223 	         type size and ARRAY_REF offsets which are properly sign or
10224 	         zero extended from their type in case it is narrower than
10225 	         sizetype.  */
10226 	      if (offset0 == NULL_TREE)
10227 		offset0 = build_int_cst (ssizetype, 0);
10228 	      else
10229 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
10230 	      if (offset1 == NULL_TREE)
10231 		offset1 = build_int_cst (ssizetype, 0);
10232 	      else
10233 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
10234 
10235 	      if (!equality_code
10236 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
10237 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
10238 		fold_overflow_warning (("assuming pointer wraparound does not "
10239 					"occur when comparing P +- C1 with "
10240 					"P +- C2"),
10241 				       WARN_STRICT_OVERFLOW_COMPARISON);
10242 
10243 	      return fold_build2_loc (loc, code, type, offset0, offset1);
10244 	    }
10245 	}
10246       /* For equal offsets we can simplify to a comparison of the
10247 	 base addresses.  */
10248       else if (known_eq (bitpos0, bitpos1)
10249 	       && (indirect_base0
10250 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10251 	       && (indirect_base1
10252 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10253 	       && ((offset0 == offset1)
10254 		   || (offset0 && offset1
10255 		       && operand_equal_p (offset0, offset1, 0))))
10256 	{
10257 	  if (indirect_base0)
10258 	    base0 = build_fold_addr_expr_loc (loc, base0);
10259 	  if (indirect_base1)
10260 	    base1 = build_fold_addr_expr_loc (loc, base1);
10261 	  return fold_build2_loc (loc, code, type, base0, base1);
10262 	}
10263       /* Comparison between an ordinary (non-weak) symbol and a null
10264 	 pointer can be eliminated since such symbols must have a non
10265 	 pointer can be eliminated since such symbols must have a
10266 	 non-null address.  In C, relational expressions between pointers
10267 	 below follow the C++ rules with the additional property that
10268 	 every object pointer compares greater than a null pointer.
10269       */
10270       else if (((DECL_P (base0)
10271 		 && maybe_nonzero_address (base0) > 0
10272 		 /* Avoid folding references to struct members at offset 0 to
10273 		    prevent tests like '&ptr->firstmember == 0' from getting
10274 		    eliminated.  When ptr is null, although the -> expression
10275 		    is strictly speaking invalid, GCC retains it as a matter
10276 		    of QoI.  See PR c/44555. */
10277 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10278 		|| CONSTANT_CLASS_P (base0))
10279 	       && indirect_base0
10280 	       /* The caller guarantees that when one of the arguments is
10281 		  constant (i.e., null in this case) it is second.  */
10282 	       && integer_zerop (arg1))
10283 	{
10284 	  switch (code)
10285 	    {
10286 	    case EQ_EXPR:
10287 	    case LE_EXPR:
10288 	    case LT_EXPR:
10289 	      return constant_boolean_node (false, type);
10290 	    case GE_EXPR:
10291 	    case GT_EXPR:
10292 	    case NE_EXPR:
10293 	      return constant_boolean_node (true, type);
10294 	    default:
10295 	      gcc_unreachable ();
10296 	    }
10297 	}
10298     }
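  /* For illustration (hypothetical operands): for char a[10] the
     decomposition above folds &a[2] < &a[5] to true outright, and
     reduces &a[i] == &a[j] to a comparison of the variable offsets,
     effectively i == j.  */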
10299 
10300   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10301      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
10302      the resulting offset is smaller in absolute value than the
10303      original one and has the same sign.  */
10304   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10305       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10306       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10307       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10308 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10309       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10310       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10311 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10312     {
10313       tree const1 = TREE_OPERAND (arg0, 1);
10314       tree const2 = TREE_OPERAND (arg1, 1);
10315       tree variable1 = TREE_OPERAND (arg0, 0);
10316       tree variable2 = TREE_OPERAND (arg1, 0);
10317       tree cst;
10318       const char * const warnmsg = G_("assuming signed overflow does not "
10319 				      "occur when combining constants around "
10320 				      "a comparison");
10321 
10322       /* Put the constant on the side where it doesn't overflow and is
10323 	 of lower absolute value and of the same sign as before.  */
10324       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10325 			     ? MINUS_EXPR : PLUS_EXPR,
10326 			     const2, const1);
10327       if (!TREE_OVERFLOW (cst)
10328 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10329 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10330 	{
10331 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10332 	  return fold_build2_loc (loc, code, type,
10333 				  variable1,
10334 				  fold_build2_loc (loc, TREE_CODE (arg1),
10335 						   TREE_TYPE (arg1),
10336 						   variable2, cst));
10337 	}
10338 
10339       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10340 			     ? MINUS_EXPR : PLUS_EXPR,
10341 			     const1, const2);
10342       if (!TREE_OVERFLOW (cst)
10343 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10344 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10345 	{
10346 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10347 	  return fold_build2_loc (loc, code, type,
10348 				  fold_build2_loc (loc, TREE_CODE (arg0),
10349 						   TREE_TYPE (arg0),
10350 						   variable1, cst),
10351 				  variable2);
10352 	}
10353     }
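  /* For illustration (signed int x and y, overflow undefined): the
     second combination above rewrites

	 x + 10 < y + 2   ->   x + 8 < y

     where the moved constant 8 is smaller in absolute value than, and
     of the same sign as, the original constant 10.  */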
10354 
10355   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10356   if (tem)
10357     return tem;
10358 
10359   /* If we are comparing an expression that just has comparisons
10360      of two integer values, arithmetic expressions of those comparisons,
10361      and constants, we can simplify it.  There are only three cases
10362      to check: the two values can either be equal, the first can be
10363      greater, or the second can be greater.  Fold the expression for
10364      those three values.  Since each value must be 0 or 1, we have
10365      eight possibilities, each of which corresponds to the constant 0
10366      or 1 or one of the six possible comparisons.
10367 
10368      This handles common cases like (a > b) == 0 but also handles
10369      expressions like  ((x > y) - (y > x)) > 0, which supposedly
10370      occur in macroized code.  */
10371 
10372   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10373     {
10374       tree cval1 = 0, cval2 = 0;
10375 
10376       if (twoval_comparison_p (arg0, &cval1, &cval2)
10377 	  /* Don't handle degenerate cases here; they should already
10378 	     have been handled anyway.  */
10379 	  && cval1 != 0 && cval2 != 0
10380 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10381 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10382 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10383 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10384 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10385 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10386 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10387 	{
10388 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10389 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10390 
10391 	  /* We can't just pass T to eval_subst in case cval1 or cval2
10392 	     was the same as ARG1.  */
10393 
10394 	  tree high_result
10395 		= fold_build2_loc (loc, code, type,
10396 			       eval_subst (loc, arg0, cval1, maxval,
10397 					   cval2, minval),
10398 			       arg1);
10399 	  tree equal_result
10400 		= fold_build2_loc (loc, code, type,
10401 			       eval_subst (loc, arg0, cval1, maxval,
10402 					   cval2, maxval),
10403 			       arg1);
10404 	  tree low_result
10405 		= fold_build2_loc (loc, code, type,
10406 			       eval_subst (loc, arg0, cval1, minval,
10407 					   cval2, maxval),
10408 			       arg1);
10409 
10410 	  /* All three of these results should be 0 or 1.  Confirm they are.
10411 	     Then use those values to select the proper code to use.  */
10412 
10413 	  if (TREE_CODE (high_result) == INTEGER_CST
10414 	      && TREE_CODE (equal_result) == INTEGER_CST
10415 	      && TREE_CODE (low_result) == INTEGER_CST)
10416 	    {
10417 	      /* Make a 3-bit mask with the high-order bit being the
10418 		 value for `>', the next for '=', and the low for '<'.  */
10419 	      switch ((integer_onep (high_result) * 4)
10420 		      + (integer_onep (equal_result) * 2)
10421 		      + integer_onep (low_result))
10422 		{
10423 		case 0:
10424 		  /* Always false.  */
10425 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10426 		case 1:
10427 		  code = LT_EXPR;
10428 		  break;
10429 		case 2:
10430 		  code = EQ_EXPR;
10431 		  break;
10432 		case 3:
10433 		  code = LE_EXPR;
10434 		  break;
10435 		case 4:
10436 		  code = GT_EXPR;
10437 		  break;
10438 		case 5:
10439 		  code = NE_EXPR;
10440 		  break;
10441 		case 6:
10442 		  code = GE_EXPR;
10443 		  break;
10444 		case 7:
10445 		  /* Always true.  */
10446 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10447 		}
10448 
10449 	      return fold_build2_loc (loc, code, type, cval1, cval2);
10450 	    }
10451 	}
10452     }
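  /* For illustration (a worked example): for ((x > y) - (y > x)) > 0
     the three substitutions give high_result = 1, equal_result = 0 and
     low_result = 0, i.e. the mask 4, so the expression folds to
     x > y.  */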
10453 
10454   return NULL_TREE;
10455 }
10456 
10457 
10458 /* Subroutine of fold_binary.  Optimize complex multiplications of the
10459    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
10460    argument EXPR represents the expression "z" of type TYPE.  */
10461 
10462 static tree
10463 fold_mult_zconjz (location_t loc, tree type, tree expr)
10464 {
10465   tree itype = TREE_TYPE (type);
10466   tree rpart, ipart, tem;
10467 
10468   if (TREE_CODE (expr) == COMPLEX_EXPR)
10469     {
10470       rpart = TREE_OPERAND (expr, 0);
10471       ipart = TREE_OPERAND (expr, 1);
10472     }
10473   else if (TREE_CODE (expr) == COMPLEX_CST)
10474     {
10475       rpart = TREE_REALPART (expr);
10476       ipart = TREE_IMAGPART (expr);
10477     }
10478   else
10479     {
10480       expr = save_expr (expr);
10481       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10482       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10483     }
10484 
10485   rpart = save_expr (rpart);
10486   ipart = save_expr (ipart);
10487   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10488 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10489 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10490   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10491 			  build_zero_cst (itype));
10492 }
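/* For illustration: for z = a + b*i the function above builds
   COMPLEX_EXPR <a*a + b*b, 0>, matching the identity
   z * conj (z) == |z|^2.  */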
10493 
10494 
10495 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
10496    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10497    true if successful.  */
10498 
10499 static bool
10500 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10501 {
10502   unsigned HOST_WIDE_INT i, nunits;
10503 
10504   if (TREE_CODE (arg) == VECTOR_CST
10505       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10506     {
10507       for (i = 0; i < nunits; ++i)
10508 	elts[i] = VECTOR_CST_ELT (arg, i);
10509     }
10510   else if (TREE_CODE (arg) == CONSTRUCTOR)
10511     {
10512       constructor_elt *elt;
10513 
10514       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10515 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10516 	  return false;
10517 	else
10518 	  elts[i] = elt->value;
10519     }
10520   else
10521     return false;
10522   for (; i < nelts; i++)
10523     elts[i]
10524       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10525   return true;
10526 }
10527 
10528 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10529    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10530    NULL_TREE otherwise.  */
10531 
10532 tree
10533 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10534 {
10535   unsigned int i;
10536   unsigned HOST_WIDE_INT nelts;
10537   bool need_ctor = false;
10538 
10539   if (!sel.length ().is_constant (&nelts))
10540     return NULL_TREE;
10541   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10542 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10543 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10544   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10545       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10546     return NULL_TREE;
10547 
10548   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10549   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10550       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10551     return NULL_TREE;
10552 
10553   tree_vector_builder out_elts (type, nelts, 1);
10554   for (i = 0; i < nelts; i++)
10555     {
10556       HOST_WIDE_INT index;
10557       if (!sel[i].is_constant (&index))
10558 	return NULL_TREE;
10559       if (!CONSTANT_CLASS_P (in_elts[index]))
10560 	need_ctor = true;
10561       out_elts.quick_push (unshare_expr (in_elts[index]));
10562     }
10563 
10564   if (need_ctor)
10565     {
10566       vec<constructor_elt, va_gc> *v;
10567       vec_alloc (v, nelts);
10568       for (i = 0; i < nelts; i++)
10569 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10570       return build_constructor (type, v);
10571     }
10572   else
10573     return out_elts.build ();
10574 }
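/* For illustration (hypothetical constants): with ARG0 = { 1, 2, 3, 4 },
   ARG1 = { 5, 6, 7, 8 } and SEL = { 0, 4, 1, 5 }, the selector indexes
   into the 8-element concatenation of ARG0 and ARG1, so the folded
   result is { 1, 5, 2, 6 }.  */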
10575 
10576 /* Try to fold a pointer difference of type TYPE between two address expressions of
10577    array references AREF0 and AREF1 using location LOC.  Return a
10578    simplified expression for the difference or NULL_TREE.  */
10579 
10580 static tree
10581 fold_addr_of_array_ref_difference (location_t loc, tree type,
10582 				   tree aref0, tree aref1,
10583 				   bool use_pointer_diff)
10584 {
10585   tree base0 = TREE_OPERAND (aref0, 0);
10586   tree base1 = TREE_OPERAND (aref1, 0);
10587   tree base_offset = build_int_cst (type, 0);
10588 
10589   /* If the bases are array references as well, recurse.  If the bases
10590      are pointer indirections compute the difference of the pointers.
10591      If the bases are equal, we are set.  */
10592   if ((TREE_CODE (base0) == ARRAY_REF
10593        && TREE_CODE (base1) == ARRAY_REF
10594        && (base_offset
10595 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10596 						use_pointer_diff)))
10597       || (INDIRECT_REF_P (base0)
10598 	  && INDIRECT_REF_P (base1)
10599 	  && (base_offset
10600 	        = use_pointer_diff
10601 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10602 				     TREE_OPERAND (base0, 0),
10603 				     TREE_OPERAND (base1, 0))
10604 		  : fold_binary_loc (loc, MINUS_EXPR, type,
10605 				     fold_convert (type,
10606 						   TREE_OPERAND (base0, 0)),
10607 				     fold_convert (type,
10608 						   TREE_OPERAND (base1, 0)))))
10609       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10610     {
10611       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10612       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10613       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10614       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10615       return fold_build2_loc (loc, PLUS_EXPR, type,
10616 			      base_offset,
10617 			      fold_build2_loc (loc, MULT_EXPR, type,
10618 					       diff, esz));
10619     }
10620   return NULL_TREE;
10621 }
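/* For illustration (hypothetical operands, 32-bit int): for int a[10],
   the function above folds the byte difference of &a[i] and &a[j] to
   0 + (i - j) * 4, the base offset recursing through any nested array
   references.  */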
10622 
10623 /* If the real or vector real constant CST of type TYPE has an exact
10624    inverse, return it, else return NULL.  */
10625 
10626 tree
10627 exact_inverse (tree type, tree cst)
10628 {
10629   REAL_VALUE_TYPE r;
10630   tree unit_type;
10631   machine_mode mode;
10632 
10633   switch (TREE_CODE (cst))
10634     {
10635     case REAL_CST:
10636       r = TREE_REAL_CST (cst);
10637 
10638       if (exact_real_inverse (TYPE_MODE (type), &r))
10639 	return build_real (type, r);
10640 
10641       return NULL_TREE;
10642 
10643     case VECTOR_CST:
10644       {
10645 	unit_type = TREE_TYPE (type);
10646 	mode = TYPE_MODE (unit_type);
10647 
10648 	tree_vector_builder elts;
10649 	if (!elts.new_unary_operation (type, cst, false))
10650 	  return NULL_TREE;
10651 	unsigned int count = elts.encoded_nelts ();
10652 	for (unsigned int i = 0; i < count; ++i)
10653 	  {
10654 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10655 	    if (!exact_real_inverse (mode, &r))
10656 	      return NULL_TREE;
10657 	    elts.quick_push (build_real (unit_type, r));
10658 	  }
10659 
10660 	return elts.build ();
10661       }
10662 
10663     default:
10664       return NULL_TREE;
10665     }
10666 }
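/* For illustration: for the REAL_CST 4.0 the result is 0.25, since 1/4
   is exactly representable in binary floating point, whereas 3.0 yields
   NULL_TREE because 1/3 is not.  */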
10667 
10668 /*  Mask out the tz least significant bits of X of type TYPE where
10669     tz is the number of trailing zeroes in Y.  */
10670 static wide_int
10671 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10672 {
10673   int tz = wi::ctz (y);
10674   if (tz > 0)
10675     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10676   return x;
10677 }
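/* For illustration (a worked example): with Y = 24 (binary 11000, three
   trailing zeroes) and X = 23 (binary 10111), the three low bits of X
   are cleared and 16 (binary 10000) is returned.  */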
10678 
10679 /* Return true when T is an address and is known to be nonzero.
10680    For floating point we further ensure that T is not denormal.
10681    Similar logic is present in nonzero_address in rtlanal.h.
10682 
10683    If the return value is based on the assumption that signed overflow
10684    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10685    change *STRICT_OVERFLOW_P.  */
10686 
10687 static bool
10688 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10689 {
10690   tree type = TREE_TYPE (t);
10691   enum tree_code code;
10692 
10693   /* Doing something useful for floating point would need more work.  */
10694   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10695     return false;
10696 
10697   code = TREE_CODE (t);
10698   switch (TREE_CODE_CLASS (code))
10699     {
10700     case tcc_unary:
10701       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10702 					      strict_overflow_p);
10703     case tcc_binary:
10704     case tcc_comparison:
10705       return tree_binary_nonzero_warnv_p (code, type,
10706 					       TREE_OPERAND (t, 0),
10707 					       TREE_OPERAND (t, 1),
10708 					       strict_overflow_p);
10709     case tcc_constant:
10710     case tcc_declaration:
10711     case tcc_reference:
10712       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10713 
10714     default:
10715       break;
10716     }
10717 
10718   switch (code)
10719     {
10720     case TRUTH_NOT_EXPR:
10721       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10722 					      strict_overflow_p);
10723 
10724     case TRUTH_AND_EXPR:
10725     case TRUTH_OR_EXPR:
10726     case TRUTH_XOR_EXPR:
10727       return tree_binary_nonzero_warnv_p (code, type,
10728 					       TREE_OPERAND (t, 0),
10729 					       TREE_OPERAND (t, 1),
10730 					       strict_overflow_p);
10731 
10732     case COND_EXPR:
10733     case CONSTRUCTOR:
10734     case OBJ_TYPE_REF:
10735     case ASSERT_EXPR:
10736     case ADDR_EXPR:
10737     case WITH_SIZE_EXPR:
10738     case SSA_NAME:
10739       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10740 
10741     case COMPOUND_EXPR:
10742     case MODIFY_EXPR:
10743     case BIND_EXPR:
10744       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10745 					strict_overflow_p);
10746 
10747     case SAVE_EXPR:
10748       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10749 					strict_overflow_p);
10750 
10751     case CALL_EXPR:
10752       {
10753 	tree fndecl = get_callee_fndecl (t);
10754 	if (!fndecl) return false;
10755 	if (flag_delete_null_pointer_checks && !flag_check_new
10756 	    && DECL_IS_OPERATOR_NEW_P (fndecl)
10757 	    && !TREE_NOTHROW (fndecl))
10758 	  return true;
10759 	if (flag_delete_null_pointer_checks
10760 	    && lookup_attribute ("returns_nonnull",
10761 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10762 	  return true;
10763 	return alloca_call_p (t);
10764       }
10765 
10766     default:
10767       break;
10768     }
10769   return false;
10770 }
10771 
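/* For instance, the cases above recognize the result of alloca, the
   result of a throwing operator new (given -fdelete-null-pointer-checks
   and no -fcheck-new), and calls declared with attribute
   returns_nonnull as nonzero.  */
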
10772 /* Return true when T is known to be nonzero.
10773    Handle warnings about undefined signed overflow.  */
10774 
10775 bool
10776 tree_expr_nonzero_p (tree t)
10777 {
10778   bool ret, strict_overflow_p;
10779 
10780   strict_overflow_p = false;
10781   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10782   if (strict_overflow_p)
10783     fold_overflow_warning (("assuming signed overflow does not occur when "
10784 			    "determining that expression is always "
10785 			    "non-zero"),
10786 			   WARN_STRICT_OVERFLOW_MISC);
10787   return ret;
10788 }
10789 
10790 /* Return true if T is known not to be equal to an integer W.  */
10791 
10792 bool
10793 expr_not_equal_to (tree t, const wide_int &w)
10794 {
10795   int_range_max vr;
10796   switch (TREE_CODE (t))
10797     {
10798     case INTEGER_CST:
10799       return wi::to_wide (t) != w;
10800 
10801     case SSA_NAME:
10802       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10803 	return false;
10804 
10805       if (cfun)
10806 	get_range_query (cfun)->range_of_expr (vr, t);
10807       else
10808 	get_global_range_query ()->range_of_expr (vr, t);
10809 
10810       if (!vr.undefined_p ()
10811 	  && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10812 	return true;
10813       /* If T has some known zero bits and W has any of those bits set,
10814 	 then T is known not to be equal to W.  */
10815       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10816 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
10817 	return true;
10818       return false;
10819 
10820     default:
10821       return false;
10822     }
10823 }
10824 
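/* Two example ways this succeeds: if the recorded range of an SSA name
   T is [0, 5], then T is known not to equal 7; and if the nonzero-bits
   mask shows T's low bit is always clear, T cannot equal any odd W.  */
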
10825 /* Fold a binary expression of code CODE and type TYPE with operands
10826    OP0 and OP1.  LOC is the location of the resulting expression.
10827    Return the folded expression if folding is successful.  Otherwise,
10828    return NULL_TREE.  */
10829 
10830 tree
10831 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10832 		 tree op0, tree op1)
10833 {
10834   enum tree_code_class kind = TREE_CODE_CLASS (code);
10835   tree arg0, arg1, tem;
10836   tree t1 = NULL_TREE;
10837   bool strict_overflow_p;
10838   unsigned int prec;
10839 
10840   gcc_assert (IS_EXPR_CODE_CLASS (kind)
10841 	      && TREE_CODE_LENGTH (code) == 2
10842 	      && op0 != NULL_TREE
10843 	      && op1 != NULL_TREE);
10844 
10845   arg0 = op0;
10846   arg1 = op1;
10847 
10848   /* Strip any conversions that don't change the mode.  This is
10849      safe for every expression, except for a comparison expression
10850      because its signedness is derived from its operands.  So, in
10851      the latter case, only strip conversions that don't change the
10852      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10853      preserved.
10854 
10855      Note that this is done as an internal manipulation within the
10856      constant folder, in order to find the simplest representation
10857      of the arguments so that their form can be studied.  In any
10858      case, the appropriate type conversions should be put back in
10859      the tree that will get out of the constant folder.  */
10860 
10861   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10862     {
10863       STRIP_SIGN_NOPS (arg0);
10864       STRIP_SIGN_NOPS (arg1);
10865     }
10866   else
10867     {
10868       STRIP_NOPS (arg0);
10869       STRIP_NOPS (arg1);
10870     }
10871 
10872   /* Note that TREE_CONSTANT isn't enough: static var addresses are
10873      constant but we can't do arithmetic on them.  */
10874   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10875     {
10876       tem = const_binop (code, type, arg0, arg1);
10877       if (tem != NULL_TREE)
10878 	{
10879 	  if (TREE_TYPE (tem) != type)
10880 	    tem = fold_convert_loc (loc, type, tem);
10881 	  return tem;
10882 	}
10883     }
10884 
10885   /* If this is a commutative operation, and ARG0 is a constant, move it
10886      to ARG1 to reduce the number of tests below.  */
10887   if (commutative_tree_code (code)
10888       && tree_swap_operands_p (arg0, arg1))
10889     return fold_build2_loc (loc, code, type, op1, op0);
10890 
10891   /* Likewise if this is a comparison, and ARG0 is a constant, move it
10892      to ARG1 to reduce the number of tests below.  */
10893   if (kind == tcc_comparison
10894       && tree_swap_operands_p (arg0, arg1))
10895     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10896 
10897   tem = generic_simplify (loc, code, type, op0, op1);
10898   if (tem)
10899     return tem;
10900 
10901   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10902 
10903      First check for cases where an arithmetic operation is applied to a
10904      compound, conditional, or comparison operation.  Push the arithmetic
10905      operation inside the compound or conditional to see if any folding
10906      can then be done.  Convert comparison to conditional for this purpose.
10907      This also optimizes non-constant cases that used to be done in
10908      expand_expr.
10909 
10910      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10911      where one of the operands is a comparison and the other is a comparison,
10912      a BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10913      code below would make the expression more complex.  Change it to a
10914      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10915      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10916 
10917   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10918        || code == EQ_EXPR || code == NE_EXPR)
10919       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10920       && ((truth_value_p (TREE_CODE (arg0))
10921 	   && (truth_value_p (TREE_CODE (arg1))
10922 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10923 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10924 	  || (truth_value_p (TREE_CODE (arg1))
10925 	      && (truth_value_p (TREE_CODE (arg0))
10926 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10927 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10928     {
10929       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10930 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10931 			 : TRUTH_XOR_EXPR,
10932 			 boolean_type_node,
10933 			 fold_convert_loc (loc, boolean_type_node, arg0),
10934 			 fold_convert_loc (loc, boolean_type_node, arg1));
10935 
10936       if (code == EQ_EXPR)
10937 	tem = invert_truthvalue_loc (loc, tem);
10938 
10939       return fold_convert_loc (loc, type, tem);
10940     }
10941 
10942   if (TREE_CODE_CLASS (code) == tcc_binary
10943       || TREE_CODE_CLASS (code) == tcc_comparison)
10944     {
10945       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10946 	{
10947 	  tem = fold_build2_loc (loc, code, type,
10948 			     fold_convert_loc (loc, TREE_TYPE (op0),
10949 					       TREE_OPERAND (arg0, 1)), op1);
10950 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10951 			     tem);
10952 	}
10953       if (TREE_CODE (arg1) == COMPOUND_EXPR)
10954 	{
10955 	  tem = fold_build2_loc (loc, code, type, op0,
10956 			     fold_convert_loc (loc, TREE_TYPE (op1),
10957 					       TREE_OPERAND (arg1, 1)));
10958 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10959 			     tem);
10960 	}
10961 
10962       if (TREE_CODE (arg0) == COND_EXPR
10963 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10964 	  || COMPARISON_CLASS_P (arg0))
10965 	{
10966 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10967 						     arg0, arg1,
10968 						     /*cond_first_p=*/1);
10969 	  if (tem != NULL_TREE)
10970 	    return tem;
10971 	}
10972 
10973       if (TREE_CODE (arg1) == COND_EXPR
10974 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10975 	  || COMPARISON_CLASS_P (arg1))
10976 	{
10977 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10978 						     arg1, arg0,
10979 					             /*cond_first_p=*/0);
10980 	  if (tem != NULL_TREE)
10981 	    return tem;
10982 	}
10983     }
10984 
10985   switch (code)
10986     {
10987     case MEM_REF:
10988       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10989       if (TREE_CODE (arg0) == ADDR_EXPR
10990 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10991 	{
10992 	  tree iref = TREE_OPERAND (arg0, 0);
10993 	  return fold_build2 (MEM_REF, type,
10994 			      TREE_OPERAND (iref, 0),
10995 			      int_const_binop (PLUS_EXPR, arg1,
10996 					       TREE_OPERAND (iref, 1)));
10997 	}
10998 
10999       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
11000       if (TREE_CODE (arg0) == ADDR_EXPR
11001 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
11002 	{
11003 	  tree base;
11004 	  poly_int64 coffset;
11005 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11006 						&coffset);
11007 	  if (!base)
11008 	    return NULL_TREE;
11009 	  return fold_build2 (MEM_REF, type,
11010 			      build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11011 			      int_const_binop (PLUS_EXPR, arg1,
11012 					       size_int (coffset)));
11013 	}
11014 
11015       return NULL_TREE;
11016 
11017     case POINTER_PLUS_EXPR:
11018       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
11019       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11020 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11021         return fold_convert_loc (loc, type,
11022 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11023 					      fold_convert_loc (loc, sizetype,
11024 								arg1),
11025 					      fold_convert_loc (loc, sizetype,
11026 								arg0)));
11027 
11028       return NULL_TREE;
11029 
11030     case PLUS_EXPR:
11031       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11032 	{
11033 	  /* X + (X / CST) * -CST is X % CST.  */
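	  /* For example, X + (X / 16) * -16 folds to X % 16, since the
	     two constants sum to zero.  */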
11034 	  if (TREE_CODE (arg1) == MULT_EXPR
11035 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11036 	      && operand_equal_p (arg0,
11037 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11038 	    {
11039 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11040 	      tree cst1 = TREE_OPERAND (arg1, 1);
11041 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11042 				      cst1, cst0);
11043 	      if (sum && integer_zerop (sum))
11044 		return fold_convert_loc (loc, type,
11045 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11046 						      TREE_TYPE (arg0), arg0,
11047 						      cst0));
11048 	    }
11049 	}
11050 
11051       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11052 	 equal to 1.  Make sure the type is not saturating and has the signedness of
11053 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11054 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11055       if ((TREE_CODE (arg0) == MULT_EXPR
11056 	   || TREE_CODE (arg1) == MULT_EXPR)
11057 	  && !TYPE_SATURATING (type)
11058 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11059 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11060 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
11061         {
11062 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11063 	  if (tem)
11064 	    return tem;
11065 	}
11066 
11067       if (! FLOAT_TYPE_P (type))
11068 	{
11069 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11070 	     (plus (plus (mult) (mult)) (foo)) so that we can
11071 	     take advantage of the factoring cases below.  */
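	  /* For example, (a*b + c) + d*e is rewritten as (a*b + d*e) + c,
	     bringing the two MULT_EXPRs together.  */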
11072 	  if (ANY_INTEGRAL_TYPE_P (type)
11073 	      && TYPE_OVERFLOW_WRAPS (type)
11074 	      && (((TREE_CODE (arg0) == PLUS_EXPR
11075 		    || TREE_CODE (arg0) == MINUS_EXPR)
11076 		   && TREE_CODE (arg1) == MULT_EXPR)
11077 		  || ((TREE_CODE (arg1) == PLUS_EXPR
11078 		       || TREE_CODE (arg1) == MINUS_EXPR)
11079 		      && TREE_CODE (arg0) == MULT_EXPR)))
11080 	    {
11081 	      tree parg0, parg1, parg, marg;
11082 	      enum tree_code pcode;
11083 
11084 	      if (TREE_CODE (arg1) == MULT_EXPR)
11085 		parg = arg0, marg = arg1;
11086 	      else
11087 		parg = arg1, marg = arg0;
11088 	      pcode = TREE_CODE (parg);
11089 	      parg0 = TREE_OPERAND (parg, 0);
11090 	      parg1 = TREE_OPERAND (parg, 1);
11091 	      STRIP_NOPS (parg0);
11092 	      STRIP_NOPS (parg1);
11093 
11094 	      if (TREE_CODE (parg0) == MULT_EXPR
11095 		  && TREE_CODE (parg1) != MULT_EXPR)
11096 		return fold_build2_loc (loc, pcode, type,
11097 				    fold_build2_loc (loc, PLUS_EXPR, type,
11098 						 fold_convert_loc (loc, type,
11099 								   parg0),
11100 						 fold_convert_loc (loc, type,
11101 								   marg)),
11102 				    fold_convert_loc (loc, type, parg1));
11103 	      if (TREE_CODE (parg0) != MULT_EXPR
11104 		  && TREE_CODE (parg1) == MULT_EXPR)
11105 		return
11106 		  fold_build2_loc (loc, PLUS_EXPR, type,
11107 			       fold_convert_loc (loc, type, parg0),
11108 			       fold_build2_loc (loc, pcode, type,
11109 					    fold_convert_loc (loc, type, marg),
11110 					    fold_convert_loc (loc, type,
11111 							      parg1)));
11112 	    }
11113 	}
11114       else
11115 	{
11116 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11117 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
11118 	     if signed zeros are involved.  */
11119 	  if (!HONOR_SNANS (arg0)
11120 	      && !HONOR_SIGNED_ZEROS (arg0)
11121 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11122 	    {
11123 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11124 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11125 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11126 	      bool arg0rz = false, arg0iz = false;
11127 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
11128 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
11129 		{
11130 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11131 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11132 		  if (arg0rz && arg1i && real_zerop (arg1i))
11133 		    {
11134 		      tree rp = arg1r ? arg1r
11135 				  : build1 (REALPART_EXPR, rtype, arg1);
11136 		      tree ip = arg0i ? arg0i
11137 				  : build1 (IMAGPART_EXPR, rtype, arg0);
11138 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11139 		    }
11140 		  else if (arg0iz && arg1r && real_zerop (arg1r))
11141 		    {
11142 		      tree rp = arg0r ? arg0r
11143 				  : build1 (REALPART_EXPR, rtype, arg0);
11144 		      tree ip = arg1i ? arg1i
11145 				  : build1 (IMAGPART_EXPR, rtype, arg1);
11146 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11147 		    }
11148 		}
11149 	    }
11150 
11151           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11152              We associate floats only if the user has specified
11153              -fassociative-math.  */
11154           if (flag_associative_math
11155               && TREE_CODE (arg1) == PLUS_EXPR
11156               && TREE_CODE (arg0) != MULT_EXPR)
11157             {
11158               tree tree10 = TREE_OPERAND (arg1, 0);
11159               tree tree11 = TREE_OPERAND (arg1, 1);
11160               if (TREE_CODE (tree11) == MULT_EXPR
11161 		  && TREE_CODE (tree10) == MULT_EXPR)
11162                 {
11163                   tree tree0;
11164                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11165                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11166                 }
11167             }
11168           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11169              We associate floats only if the user has specified
11170              -fassociative-math.  */
11171           if (flag_associative_math
11172               && TREE_CODE (arg0) == PLUS_EXPR
11173               && TREE_CODE (arg1) != MULT_EXPR)
11174             {
11175               tree tree00 = TREE_OPERAND (arg0, 0);
11176               tree tree01 = TREE_OPERAND (arg0, 1);
11177               if (TREE_CODE (tree01) == MULT_EXPR
11178 		  && TREE_CODE (tree00) == MULT_EXPR)
11179                 {
11180                   tree tree0;
11181                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11182                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11183                 }
11184             }
11185 	}
11186 
11187      bit_rotate:
11188       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
11189 	 is a rotate of A by C1 bits.  */
11190       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
11191 	 is a rotate of A by B bits.
11192 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11193 	 though in this case CODE must be | and not + or ^, otherwise
11194 	 it doesn't return A when B is 0.  */
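      /* As a concrete illustration, for a 32-bit unsigned A:
	   (A << 3) + (A >> 29)        becomes A lrotate 3, and
	   (A << B) | (A >> (-B & 31)) becomes A lrotate B,
	 the latter only for |, so that B == 0 still yields A.  */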
11195       {
11196 	enum tree_code code0, code1;
11197 	tree rtype;
11198 	code0 = TREE_CODE (arg0);
11199 	code1 = TREE_CODE (arg1);
11200 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11201 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11202 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
11203 			        TREE_OPERAND (arg1, 0), 0)
11204 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11205 	        TYPE_UNSIGNED (rtype))
11206 	    /* Only create rotates in complete modes.  Other cases are not
11207 	       expanded properly.  */
11208 	    && (element_precision (rtype)
11209 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11210 	  {
11211 	    tree tree01, tree11;
11212 	    tree orig_tree01, orig_tree11;
11213 	    enum tree_code code01, code11;
11214 
11215 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11216 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11217 	    STRIP_NOPS (tree01);
11218 	    STRIP_NOPS (tree11);
11219 	    code01 = TREE_CODE (tree01);
11220 	    code11 = TREE_CODE (tree11);
11221 	    if (code11 != MINUS_EXPR
11222 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11223 	      {
11224 		std::swap (code0, code1);
11225 		std::swap (code01, code11);
11226 		std::swap (tree01, tree11);
11227 		std::swap (orig_tree01, orig_tree11);
11228 	      }
11229 	    if (code01 == INTEGER_CST
11230 		&& code11 == INTEGER_CST
11231 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
11232 		    == element_precision (rtype)))
11233 	      {
11234 		tem = build2_loc (loc, LROTATE_EXPR,
11235 				  rtype, TREE_OPERAND (arg0, 0),
11236 				  code0 == LSHIFT_EXPR
11237 				  ? orig_tree01 : orig_tree11);
11238 		return fold_convert_loc (loc, type, tem);
11239 	      }
11240 	    else if (code11 == MINUS_EXPR)
11241 	      {
11242 		tree tree110, tree111;
11243 		tree110 = TREE_OPERAND (tree11, 0);
11244 		tree111 = TREE_OPERAND (tree11, 1);
11245 		STRIP_NOPS (tree110);
11246 		STRIP_NOPS (tree111);
11247 		if (TREE_CODE (tree110) == INTEGER_CST
11248 		    && compare_tree_int (tree110,
11249 					 element_precision (rtype)) == 0
11250 		    && operand_equal_p (tree01, tree111, 0))
11251 		  {
11252 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11253 					    ? LROTATE_EXPR : RROTATE_EXPR),
11254 				      rtype, TREE_OPERAND (arg0, 0),
11255 				      orig_tree01);
11256 		    return fold_convert_loc (loc, type, tem);
11257 		  }
11258 	      }
11259 	    else if (code == BIT_IOR_EXPR
11260 		     && code11 == BIT_AND_EXPR
11261 		     && pow2p_hwi (element_precision (rtype)))
11262 	      {
11263 		tree tree110, tree111;
11264 		tree110 = TREE_OPERAND (tree11, 0);
11265 		tree111 = TREE_OPERAND (tree11, 1);
11266 		STRIP_NOPS (tree110);
11267 		STRIP_NOPS (tree111);
11268 		if (TREE_CODE (tree110) == NEGATE_EXPR
11269 		    && TREE_CODE (tree111) == INTEGER_CST
11270 		    && compare_tree_int (tree111,
11271 					 element_precision (rtype) - 1) == 0
11272 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11273 		  {
11274 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11275 					    ? LROTATE_EXPR : RROTATE_EXPR),
11276 				      rtype, TREE_OPERAND (arg0, 0),
11277 				      orig_tree01);
11278 		    return fold_convert_loc (loc, type, tem);
11279 		  }
11280 	      }
11281 	  }
11282       }
11283 
11284     associate:
11285       /* In most languages, we can't associate operations on floats through
11286 	 parentheses.  Rather than remember where the parentheses were, we
11287 	 don't associate floats at all, unless the user has specified
11288 	 -fassociative-math.
11289 	 And, we need to make sure type is not saturating.  */
11290 
11291       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11292 	  && !TYPE_SATURATING (type))
11293 	{
11294 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11295 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11296 	  tree atype = type;
11297 	  bool ok = true;
11298 
11299 	  /* Split both trees into variables, constants, and literals.  Then
11300 	     associate each group together, the constants with literals,
11301 	     then the result with variables.  This increases the chances of
11302 	     literals being recombined later and of generating relocatable
11303 	     expressions for the sum of a constant and literal.  */
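	  /* For example, when folding (x + &g) + (y + 3), the variable
	     parts x and y, the address constant &g, and the literal 3
	     are each grouped separately, so that &g + 3 can survive as
	     a single relocatable expression.  */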
11304 	  var0 = split_tree (arg0, type, code,
11305 			     &minus_var0, &con0, &minus_con0,
11306 			     &lit0, &minus_lit0, 0);
11307 	  var1 = split_tree (arg1, type, code,
11308 			     &minus_var1, &con1, &minus_con1,
11309 			     &lit1, &minus_lit1, code == MINUS_EXPR);
11310 
11311 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
11312 	  if (code == MINUS_EXPR)
11313 	    code = PLUS_EXPR;
11314 
11315 	  /* With undefined overflow prefer doing association in a type
11316 	     which wraps on overflow, if that is one of the operand types.  */
11317 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11318 	      && !TYPE_OVERFLOW_WRAPS (type))
11319 	    {
11320 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11321 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11322 		atype = TREE_TYPE (arg0);
11323 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11324 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11325 		atype = TREE_TYPE (arg1);
11326 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11327 	    }
11328 
11329 	  /* With undefined overflow we can only associate constants with one
11330 	     variable, and constants whose association doesn't overflow.  */
11331 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11332 	      && !TYPE_OVERFLOW_WRAPS (atype))
11333 	    {
11334 	      if ((var0 && var1) || (minus_var0 && minus_var1))
11335 		{
11336 		  /* ???  If split_tree would handle NEGATE_EXPR we could
11337 		     simply reject these cases and the allowed cases would
11338 		     be the var0/minus_var1 ones.  */
11339 		  tree tmp0 = var0 ? var0 : minus_var0;
11340 		  tree tmp1 = var1 ? var1 : minus_var1;
11341 		  bool one_neg = false;
11342 
11343 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
11344 		    {
11345 		      tmp0 = TREE_OPERAND (tmp0, 0);
11346 		      one_neg = !one_neg;
11347 		    }
11348 		  if (CONVERT_EXPR_P (tmp0)
11349 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11350 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11351 			  <= TYPE_PRECISION (atype)))
11352 		    tmp0 = TREE_OPERAND (tmp0, 0);
11353 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
11354 		    {
11355 		      tmp1 = TREE_OPERAND (tmp1, 0);
11356 		      one_neg = !one_neg;
11357 		    }
11358 		  if (CONVERT_EXPR_P (tmp1)
11359 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11360 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11361 			  <= TYPE_PRECISION (atype)))
11362 		    tmp1 = TREE_OPERAND (tmp1, 0);
11363 		  /* The only case we can still associate with two variables
11364 		     is if they cancel out.  */
11365 		  if (!one_neg
11366 		      || !operand_equal_p (tmp0, tmp1, 0))
11367 		    ok = false;
11368 		}
11369 	      else if ((var0 && minus_var1
11370 			&& ! operand_equal_p (var0, minus_var1, 0))
11371 		       || (minus_var0 && var1
11372 			   && ! operand_equal_p (minus_var0, var1, 0)))
11373 		ok = false;
11374 	    }
11375 
11376 	  /* Only do something if we found more than two objects.  Otherwise,
11377 	     nothing has changed and we risk infinite recursion.  */
11378 	  if (ok
11379 	      && ((var0 != 0) + (var1 != 0)
11380 		  + (minus_var0 != 0) + (minus_var1 != 0)
11381 		  + (con0 != 0) + (con1 != 0)
11382 		  + (minus_con0 != 0) + (minus_con1 != 0)
11383 		  + (lit0 != 0) + (lit1 != 0)
11384 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11385 	    {
11386 	      var0 = associate_trees (loc, var0, var1, code, atype);
11387 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11388 					    code, atype);
11389 	      con0 = associate_trees (loc, con0, con1, code, atype);
11390 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11391 					    code, atype);
11392 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
11393 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11394 					    code, atype);
11395 
11396 	      if (minus_var0 && var0)
11397 		{
11398 		  var0 = associate_trees (loc, var0, minus_var0,
11399 					  MINUS_EXPR, atype);
11400 		  minus_var0 = 0;
11401 		}
11402 	      if (minus_con0 && con0)
11403 		{
11404 		  con0 = associate_trees (loc, con0, minus_con0,
11405 					  MINUS_EXPR, atype);
11406 		  minus_con0 = 0;
11407 		}
11408 
11409 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
11410 		 greater than the positive part.  Otherwise, the multiplicative
11411 		 folding code (i.e extract_muldiv) may be fooled in case
11412 		 folding code (i.e. extract_muldiv) may be fooled when
11413 		 unsigned constants are subtracted, as in the following
11414 	      if (minus_lit0 && lit0)
11415 		{
11416 		  if (TREE_CODE (lit0) == INTEGER_CST
11417 		      && TREE_CODE (minus_lit0) == INTEGER_CST
11418 		      && tree_int_cst_lt (lit0, minus_lit0)
11419 		      /* But avoid ending up with only negated parts.  */
11420 		      && (var0 || con0))
11421 		    {
11422 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11423 						    MINUS_EXPR, atype);
11424 		      lit0 = 0;
11425 		    }
11426 		  else
11427 		    {
11428 		      lit0 = associate_trees (loc, lit0, minus_lit0,
11429 					      MINUS_EXPR, atype);
11430 		      minus_lit0 = 0;
11431 		    }
11432 		}
11433 
11434 	      /* Don't introduce overflows through reassociation.  */
11435 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
11436 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11437 		return NULL_TREE;
11438 
11439 	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11440 	      con0 = associate_trees (loc, con0, lit0, code, atype);
11441 	      lit0 = 0;
11442 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11443 					    code, atype);
11444 	      minus_lit0 = 0;
11445 
11446 	      /* Eliminate minus_con0.  */
11447 	      if (minus_con0)
11448 		{
11449 		  if (con0)
11450 		    con0 = associate_trees (loc, con0, minus_con0,
11451 					    MINUS_EXPR, atype);
11452 		  else if (var0)
11453 		    var0 = associate_trees (loc, var0, minus_con0,
11454 					    MINUS_EXPR, atype);
11455 		  else
11456 		    gcc_unreachable ();
11457 		  minus_con0 = 0;
11458 		}
11459 
11460 	      /* Eliminate minus_var0.  */
11461 	      if (minus_var0)
11462 		{
11463 		  if (con0)
11464 		    con0 = associate_trees (loc, con0, minus_var0,
11465 					    MINUS_EXPR, atype);
11466 		  else
11467 		    gcc_unreachable ();
11468 		  minus_var0 = 0;
11469 		}
11470 
11471 	      return
11472 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11473 							      code, atype));
11474 	    }
11475 	}
11476 
11477       return NULL_TREE;
11478 
11479     case POINTER_DIFF_EXPR:
11480     case MINUS_EXPR:
11481       /* Fold &a[i] - &a[j] to i-j.  */
11482       if (TREE_CODE (arg0) == ADDR_EXPR
11483 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11484 	  && TREE_CODE (arg1) == ADDR_EXPR
11485 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11486         {
11487 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
11488 							TREE_OPERAND (arg0, 0),
11489 							TREE_OPERAND (arg1, 0),
11490 							code
11491 							== POINTER_DIFF_EXPR);
11492 	  if (tem)
11493 	    return tem;
11494 	}
11495 
11496       /* Further transformations are not for pointers.  */
11497       if (code == POINTER_DIFF_EXPR)
11498 	return NULL_TREE;
11499 
11500       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
11501       if (TREE_CODE (arg0) == NEGATE_EXPR
11502 	  && negate_expr_p (op1)
11503 	  /* If arg0 is e.g. unsigned int and type is int, then this could
11504 	     introduce UB, because if A is INT_MIN at runtime, the original
11505 	     expression can be well defined while the latter is not.
11506 	     See PR83269.  */
11507 	  && !(ANY_INTEGRAL_TYPE_P (type)
11508 	       && TYPE_OVERFLOW_UNDEFINED (type)
11509 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11510 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11511 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11512 			        fold_convert_loc (loc, type,
11513 						  TREE_OPERAND (arg0, 0)));
11514 
11515       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11516 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
11517 	 signed zeros are involved.  */
11518       if (!HONOR_SNANS (arg0)
11519 	  && !HONOR_SIGNED_ZEROS (arg0)
11520 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11521         {
11522 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11523 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11524 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11525 	  bool arg0rz = false, arg0iz = false;
11526 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
11527 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
11528 	    {
11529 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11530 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11531 	      if (arg0rz && arg1i && real_zerop (arg1i))
11532 	        {
11533 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11534 					 arg1r ? arg1r
11535 					 : build1 (REALPART_EXPR, rtype, arg1));
11536 		  tree ip = arg0i ? arg0i
11537 		    : build1 (IMAGPART_EXPR, rtype, arg0);
11538 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11539 		}
11540 	      else if (arg0iz && arg1r && real_zerop (arg1r))
11541 	        {
11542 		  tree rp = arg0r ? arg0r
11543 		    : build1 (REALPART_EXPR, rtype, arg0);
11544 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11545 					 arg1i ? arg1i
11546 					 : build1 (IMAGPART_EXPR, rtype, arg1));
11547 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11548 		}
11549 	    }
11550 	}
11551 
11552       /* A - B -> A + (-B) if B is easily negatable.  */
11553       if (negate_expr_p (op1)
11554 	  && ! TYPE_OVERFLOW_SANITIZED (type)
11555 	  && ((FLOAT_TYPE_P (type)
11556                /* Avoid this transformation if B is a positive REAL_CST.  */
11557 	       && (TREE_CODE (op1) != REAL_CST
11558 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11559 	      || INTEGRAL_TYPE_P (type)))
11560 	return fold_build2_loc (loc, PLUS_EXPR, type,
11561 				fold_convert_loc (loc, type, arg0),
11562 				negate_expr (op1));
11563 
11564       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11565 	 equal to 1.  Make sure the type is not saturating and has the signedness of
11566 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11567 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11568       if ((TREE_CODE (arg0) == MULT_EXPR
11569 	   || TREE_CODE (arg1) == MULT_EXPR)
11570 	  && !TYPE_SATURATING (type)
11571 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11572 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11573 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
11574         {
11575 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11576 	  if (tem)
11577 	    return tem;
11578 	}
11579 
11580       goto associate;
11581 
11582     case MULT_EXPR:
11583       if (! FLOAT_TYPE_P (type))
11584 	{
11585 	  /* Transform x * -C into -x * C if x is easily negatable.  */
11586 	  if (TREE_CODE (op1) == INTEGER_CST
11587 	      && tree_int_cst_sgn (op1) == -1
11588 	      && negate_expr_p (op0)
11589 	      && negate_expr_p (op1)
11590 	      && (tem = negate_expr (op1)) != op1
11591 	      && ! TREE_OVERFLOW (tem))
11592 	    return fold_build2_loc (loc, MULT_EXPR, type,
11593 				    fold_convert_loc (loc, type,
11594 						      negate_expr (op0)), tem);
11595 
11596 	  strict_overflow_p = false;
11597 	  if (TREE_CODE (arg1) == INTEGER_CST
11598 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11599 					&strict_overflow_p)) != 0)
11600 	    {
11601 	      if (strict_overflow_p)
11602 		fold_overflow_warning (("assuming signed overflow does not "
11603 					"occur when simplifying "
11604 					"multiplication"),
11605 				       WARN_STRICT_OVERFLOW_MISC);
11606 	      return fold_convert_loc (loc, type, tem);
11607 	    }
11608 
11609 	  /* Optimize z * conj(z) for integer complex numbers.  */
11610 	  if (TREE_CODE (arg0) == CONJ_EXPR
11611 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11612 	    return fold_mult_zconjz (loc, type, arg1);
11613 	  if (TREE_CODE (arg1) == CONJ_EXPR
11614 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11615 	    return fold_mult_zconjz (loc, type, arg0);
11616 	}
11617       else
11618 	{
11619 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11620 	     This is not the same for NaNs or if signed zeros are
11621 	     involved.  */
11622 	  if (!HONOR_NANS (arg0)
11623 	      && !HONOR_SIGNED_ZEROS (arg0)
11624 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11625 	      && TREE_CODE (arg1) == COMPLEX_CST
11626 	      && real_zerop (TREE_REALPART (arg1)))
11627 	    {
11628 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11629 	      if (real_onep (TREE_IMAGPART (arg1)))
11630 		return
11631 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11632 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11633 							     rtype, arg0)),
11634 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11635 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
11636 		return
11637 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11638 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11639 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11640 							     rtype, arg0)));
11641 	    }
11642 
11643 	  /* Optimize z * conj(z) for floating point complex numbers.
11644 	     Guarded by flag_unsafe_math_optimizations as non-finite
11645 	     imaginary components don't produce scalar results.  */
11646 	  if (flag_unsafe_math_optimizations
11647 	      && TREE_CODE (arg0) == CONJ_EXPR
11648 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11649 	    return fold_mult_zconjz (loc, type, arg1);
11650 	  if (flag_unsafe_math_optimizations
11651 	      && TREE_CODE (arg1) == CONJ_EXPR
11652 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11653 	    return fold_mult_zconjz (loc, type, arg0);
11654 	}
11655       goto associate;
11656 
11657     case BIT_IOR_EXPR:
11658       /* Canonicalize (X & C1) | C2.  */
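      /* For example, in an 8-bit type, (X & 0xF0) | 0xFC becomes plain
	 0xFC since C1 & C2 == C1, while (X & 0x3F) | 0xC0 becomes
	 X | 0xC0 since C1 | C2 covers the whole mode.  */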
11659       if (TREE_CODE (arg0) == BIT_AND_EXPR
11660 	  && TREE_CODE (arg1) == INTEGER_CST
11661 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11662 	{
11663 	  int width = TYPE_PRECISION (type), w;
11664 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11665 	  wide_int c2 = wi::to_wide (arg1);
11666 
11667 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11668 	  if ((c1 & c2) == c1)
11669 	    return omit_one_operand_loc (loc, type, arg1,
11670 					 TREE_OPERAND (arg0, 0));
11671 
11672 	  wide_int msk = wi::mask (width, false,
11673 				   TYPE_PRECISION (TREE_TYPE (arg1)));
11674 
11675 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11676 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
11677 	    {
11678 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11679 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11680 	    }
11681 
11682 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11683 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11684 	     mode which allows further optimizations.  */
11685 	  c1 &= msk;
11686 	  c2 &= msk;
11687 	  wide_int c3 = wi::bit_and_not (c1, c2);
11688 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11689 	    {
11690 	      wide_int mask = wi::mask (w, false,
11691 					TYPE_PRECISION (type));
11692 	      if (((c1 | c2) & mask) == mask
11693 		  && wi::bit_and_not (c1, mask) == 0)
11694 		{
11695 		  c3 = mask;
11696 		  break;
11697 		}
11698 	    }
11699 
11700 	  if (c3 != c1)
11701 	    {
11702 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11703 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11704 				     wide_int_to_tree (type, c3));
11705 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11706 	    }
11707 	}
11708 
11709       /* See if this can be simplified into a rotate first.  If that
11710 	 is unsuccessful continue in the association code.  */
11711       goto bit_rotate;
11712 
11713     case BIT_XOR_EXPR:
11714       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11715       if (TREE_CODE (arg0) == BIT_AND_EXPR
11716 	  && INTEGRAL_TYPE_P (type)
11717 	  && integer_onep (TREE_OPERAND (arg0, 1))
11718 	  && integer_onep (arg1))
11719 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11720 				build_zero_cst (TREE_TYPE (arg0)));
11721 
11722       /* See if this can be simplified into a rotate first.  If that
11723 	 is unsuccessful continue in the association code.  */
11724       goto bit_rotate;
11725 
11726     case BIT_AND_EXPR:
11727       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11728       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11729 	  && INTEGRAL_TYPE_P (type)
11730 	  && integer_onep (TREE_OPERAND (arg0, 1))
11731 	  && integer_onep (arg1))
11732 	{
11733 	  tree tem2;
11734 	  tem = TREE_OPERAND (arg0, 0);
11735 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11736 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11737 				  tem, tem2);
11738 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11739 				  build_zero_cst (TREE_TYPE (tem)));
11740 	}
11741       /* Fold ~X & 1 as (X & 1) == 0.  */
11742       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11743 	  && INTEGRAL_TYPE_P (type)
11744 	  && integer_onep (arg1))
11745 	{
11746 	  tree tem2;
11747 	  tem = TREE_OPERAND (arg0, 0);
11748 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11749 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11750 				  tem, tem2);
11751 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11752 				  build_zero_cst (TREE_TYPE (tem)));
11753 	}
11754       /* Fold !X & 1 as X == 0.  */
11755       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11756 	  && integer_onep (arg1))
11757 	{
11758 	  tem = TREE_OPERAND (arg0, 0);
11759 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11760 				  build_zero_cst (TREE_TYPE (tem)));
11761 	}
11762 
11763       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11764          multiple of 1 << CST.  */
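      /* For example, (X * 8) & -8 folds to X * 8: the product always
	 has its three low bits clear, so the mask removes nothing.  */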
11765       if (TREE_CODE (arg1) == INTEGER_CST)
11766 	{
11767 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11768 	  wide_int ncst1 = -cst1;
11769 	  if ((cst1 & ncst1) == ncst1
11770 	      && multiple_of_p (type, arg0,
11771 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11772 	    return fold_convert_loc (loc, type, arg0);
11773 	}
11774 
11775       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11776          bits from CST2.  */
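      /* For example, (X * 4) & 3 is always zero, while (X * 4) & 7
	 drops the bits that can never be set and becomes (X * 4) & 4.  */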
11777       if (TREE_CODE (arg1) == INTEGER_CST
11778 	  && TREE_CODE (arg0) == MULT_EXPR
11779 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11780 	{
11781 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11782 	  wide_int masked
11783 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11784 
11785 	  if (masked == 0)
11786 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11787 	                                  arg0, arg1);
11788 	  else if (masked != warg1)
11789 	    {
11790 	      /* Avoid the transform if arg1 is a mask of some
11791 	         mode which allows further optimizations.  */
11792 	      int pop = wi::popcount (warg1);
11793 	      if (!(pop >= BITS_PER_UNIT
11794 		    && pow2p_hwi (pop)
11795 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11796 		return fold_build2_loc (loc, code, type, op0,
11797 					wide_int_to_tree (type, masked));
11798 	    }
11799 	}
11800 
11801       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11802       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11803 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11804 	{
11805 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11806 
11807 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11808 	  if (mask == -1)
11809 	    return
11810 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11811 	}
11812 
11813       goto associate;
11814 
11815     case RDIV_EXPR:
11816       /* Don't touch a floating-point divide by zero unless the mode
11817 	 of the constant can represent infinity.  */
11818       if (TREE_CODE (arg1) == REAL_CST
11819 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11820 	  && real_zerop (arg1))
11821 	return NULL_TREE;
11822 
11823       /* (-A) / (-B) -> A / B  */
11824       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11825 	return fold_build2_loc (loc, RDIV_EXPR, type,
11826 			    TREE_OPERAND (arg0, 0),
11827 			    negate_expr (arg1));
11828       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11829 	return fold_build2_loc (loc, RDIV_EXPR, type,
11830 			    negate_expr (arg0),
11831 			    TREE_OPERAND (arg1, 0));
11832       return NULL_TREE;
11833 
11834     case TRUNC_DIV_EXPR:
11835       /* Fall through */
11836 
11837     case FLOOR_DIV_EXPR:
11838       /* Simplify A / (B << N) where A and B are positive and B is
11839 	 a power of 2, to A >> (N + log2(B)).  */
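      /* For example, with unsigned A, A / (4 << N) becomes
	 A >> (N + 2), folding log2(4) into the shift count.  */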
11840       strict_overflow_p = false;
11841       if (TREE_CODE (arg1) == LSHIFT_EXPR
11842 	  && (TYPE_UNSIGNED (type)
11843 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11844 	{
11845 	  tree sval = TREE_OPERAND (arg1, 0);
11846 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11847 	    {
11848 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11849 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11850 					 wi::exact_log2 (wi::to_wide (sval)));
11851 
11852 	      if (strict_overflow_p)
11853 		fold_overflow_warning (("assuming signed overflow does not "
11854 					"occur when simplifying A / (B << N)"),
11855 				       WARN_STRICT_OVERFLOW_MISC);
11856 
11857 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11858 					sh_cnt, pow2);
11859 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11860 				      fold_convert_loc (loc, type, arg0), sh_cnt);
11861 	    }
11862 	}
11863 
11864       /* Fall through */
11865 
11866     case ROUND_DIV_EXPR:
11867     case CEIL_DIV_EXPR:
11868     case EXACT_DIV_EXPR:
11869       if (integer_zerop (arg1))
11870 	return NULL_TREE;
11871 
11872       /* Convert -A / -B to A / B when the type is signed and overflow is
11873 	 undefined.  */
11874       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11875 	  && TREE_CODE (op0) == NEGATE_EXPR
11876 	  && negate_expr_p (op1))
11877 	{
11878 	  if (ANY_INTEGRAL_TYPE_P (type))
11879 	    fold_overflow_warning (("assuming signed overflow does not occur "
11880 				    "when distributing negation across "
11881 				    "division"),
11882 				   WARN_STRICT_OVERFLOW_MISC);
11883 	  return fold_build2_loc (loc, code, type,
11884 				  fold_convert_loc (loc, type,
11885 						    TREE_OPERAND (arg0, 0)),
11886 				  negate_expr (op1));
11887 	}
11888       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11889 	  && TREE_CODE (arg1) == NEGATE_EXPR
11890 	  && negate_expr_p (op0))
11891 	{
11892 	  if (ANY_INTEGRAL_TYPE_P (type))
11893 	    fold_overflow_warning (("assuming signed overflow does not occur "
11894 				    "when distributing negation across "
11895 				    "division"),
11896 				   WARN_STRICT_OVERFLOW_MISC);
11897 	  return fold_build2_loc (loc, code, type,
11898 				  negate_expr (op0),
11899 				  fold_convert_loc (loc, type,
11900 						    TREE_OPERAND (arg1, 0)));
11901 	}
11902 
11903       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11904 	 operation, EXACT_DIV_EXPR.
11905 
11906 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11907 	 At one time others generated faster code, but it's not clear if they
11908 	 still do after the last round of changes to the DIV code in expmed.cc.  */
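      /* For example, (X * 8) CEIL_DIV 4 becomes (X * 8) EXACT_DIV 4,
	 which needs no rounding adjustment when expanded.  */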
11909       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11910 	  && multiple_of_p (type, arg0, arg1))
11911 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11912 				fold_convert (type, arg0),
11913 				fold_convert (type, arg1));
11914 
11915       strict_overflow_p = false;
11916       if (TREE_CODE (arg1) == INTEGER_CST
11917 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11918 				    &strict_overflow_p)) != 0)
11919 	{
11920 	  if (strict_overflow_p)
11921 	    fold_overflow_warning (("assuming signed overflow does not occur "
11922 				    "when simplifying division"),
11923 				   WARN_STRICT_OVERFLOW_MISC);
11924 	  return fold_convert_loc (loc, type, tem);
11925 	}
11926 
11927       return NULL_TREE;
11928 
11929     case CEIL_MOD_EXPR:
11930     case FLOOR_MOD_EXPR:
11931     case ROUND_MOD_EXPR:
11932     case TRUNC_MOD_EXPR:
11933       strict_overflow_p = false;
11934       if (TREE_CODE (arg1) == INTEGER_CST
11935 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11936 				    &strict_overflow_p)) != 0)
11937 	{
11938 	  if (strict_overflow_p)
11939 	    fold_overflow_warning (("assuming signed overflow does not occur "
11940 				    "when simplifying modulus"),
11941 				   WARN_STRICT_OVERFLOW_MISC);
11942 	  return fold_convert_loc (loc, type, tem);
11943 	}
11944 
11945       return NULL_TREE;
11946 
11947     case LROTATE_EXPR:
11948     case RROTATE_EXPR:
11949     case RSHIFT_EXPR:
11950     case LSHIFT_EXPR:
11951       /* Since a negative shift count is not well-defined,
11952 	 don't try to compute it in the compiler.  */
11953       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11954 	return NULL_TREE;
11955 
11956       prec = element_precision (type);
11957 
11958       /* If we have a rotate of a bit operation with the rotate count and
11959 	 the second operand of the bit operation both constant,
11960 	 permute the two operations.  */
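      /* For example, in a 32-bit type, (X & 0xFF00) rrotate 8 becomes
	 (X rrotate 8) & 0xFF, rotating the constant mask as well.  */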
11961       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11962 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
11963 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
11964 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
11965 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11966 	{
11967 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11968 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11969 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
11970 				  fold_build2_loc (loc, code, type,
11971 						   arg00, arg1),
11972 				  fold_build2_loc (loc, code, type,
11973 						   arg01, arg1));
11974 	}
11975 
11976       /* Two consecutive rotates adding up to some integer
11977 	 multiple of the precision of the type can be ignored.  */
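      /* For example, (X rrotate 24) rrotate 8 folds back to plain X in
	 a 32-bit type, since 24 + 8 is a whole number of rotations.  */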
11978       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11979 	  && TREE_CODE (arg0) == RROTATE_EXPR
11980 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11981 	  && wi::umod_trunc (wi::to_wide (arg1)
11982 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
11983 			     prec) == 0)
11984 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11985 
11986       return NULL_TREE;
11987 
11988     case MIN_EXPR:
11989     case MAX_EXPR:
11990       goto associate;
11991 
11992     case TRUTH_ANDIF_EXPR:
11993       /* Note that the operands of this must be ints
11994 	 and their values must be 0 or 1.
11995 	 ("true" is a fixed value perhaps depending on the language.)  */
11996       /* If first arg is constant zero, return it.  */
11997       if (integer_zerop (arg0))
11998 	return fold_convert_loc (loc, type, arg0);
11999       /* FALLTHRU */
12000     case TRUTH_AND_EXPR:
12001       /* If either arg is constant true, drop it.  */
12002       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12003 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12004       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12005 	  /* Preserve sequence points.  */
12006 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12007 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12008       /* If second arg is constant zero, result is zero, but first arg
12009 	 must be evaluated.  */
12010       if (integer_zerop (arg1))
12011 	return omit_one_operand_loc (loc, type, arg1, arg0);
12012       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12013 	 case will be handled here.  */
12014       if (integer_zerop (arg0))
12015 	return omit_one_operand_loc (loc, type, arg0, arg1);
12016 
12017       /* !X && X is always false.  */
12018       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12019 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12020 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12021       /* X && !X is always false.  */
12022       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12023 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12024 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12025 
12026       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12027 	 means A >= Y && A != MAX, but in this case we know that
12028 	 A < X <= MAX.  */
12029 
12030       if (!TREE_SIDE_EFFECTS (arg0)
12031 	  && !TREE_SIDE_EFFECTS (arg1))
12032 	{
12033 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12034 	  if (tem && !operand_equal_p (tem, arg0, 0))
12035 	    return fold_convert (type,
12036 				 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12037 						  tem, arg1));
12038 
12039 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12040 	  if (tem && !operand_equal_p (tem, arg1, 0))
12041 	    return fold_convert (type,
12042 				 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12043 						  arg0, tem));
12044 	}
12045 
12046       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12047           != NULL_TREE)
12048         return tem;
12049 
12050       return NULL_TREE;
12051 
12052     case TRUTH_ORIF_EXPR:
12053       /* Note that the operands of this must be ints
12054 	 and their values must be 0 or true.
12055 	 ("true" is a fixed value perhaps depending on the language.)  */
12056       /* If first arg is constant true, return it.  */
12057       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12058 	return fold_convert_loc (loc, type, arg0);
12059       /* FALLTHRU */
12060     case TRUTH_OR_EXPR:
12061       /* If either arg is constant zero, drop it.  */
12062       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12063 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12064       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12065 	  /* Preserve sequence points.  */
12066 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12067 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12068       /* If second arg is constant true, result is true, but we must
12069 	 evaluate first arg.  */
12070       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12071 	return omit_one_operand_loc (loc, type, arg1, arg0);
12072       /* Likewise for first arg, but note this only occurs here for
12073 	 TRUTH_OR_EXPR.  */
12074       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12075 	return omit_one_operand_loc (loc, type, arg0, arg1);
12076 
12077       /* !X || X is always true.  */
12078       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12079 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12080 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12081       /* X || !X is always true.  */
12082       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12083 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12084 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12085 
12086       /* (X && !Y) || (!X && Y) is X ^ Y */
12087       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12088 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12089         {
12090 	  tree a0, a1, l0, l1, n0, n1;
12091 
12092 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12093 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12094 
12095 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12096 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12097 
12098 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12099 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12100 
12101 	  if ((operand_equal_p (n0, a0, 0)
12102 	       && operand_equal_p (n1, a1, 0))
12103 	      || (operand_equal_p (n0, a1, 0)
12104 		  && operand_equal_p (n1, a0, 0)))
12105 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12106 	}
12107 
12108       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12109           != NULL_TREE)
12110         return tem;
12111 
12112       return NULL_TREE;
12113 
12114     case TRUTH_XOR_EXPR:
12115       /* If the second arg is constant zero, drop it.  */
12116       if (integer_zerop (arg1))
12117 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12118       /* If the second arg is constant true, this is a logical inversion.  */
12119       if (integer_onep (arg1))
12120 	{
12121 	  tem = invert_truthvalue_loc (loc, arg0);
12122 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12123 	}
12124       /* Identical arguments cancel to zero.  */
12125       if (operand_equal_p (arg0, arg1, 0))
12126 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12127 
12128       /* !X ^ X is always true.  */
12129       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12130 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12131 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12132 
12133       /* X ^ !X is always true.  */
12134       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12135 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12136 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12137 
12138       return NULL_TREE;
12139 
12140     case EQ_EXPR:
12141     case NE_EXPR:
12142       STRIP_NOPS (arg0);
12143       STRIP_NOPS (arg1);
12144 
12145       tem = fold_comparison (loc, code, type, op0, op1);
12146       if (tem != NULL_TREE)
12147 	return tem;
12148 
12149       /* bool_var != 1 becomes !bool_var. */
12150       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12151           && code == NE_EXPR)
12152         return fold_convert_loc (loc, type,
12153 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12154 						  TREE_TYPE (arg0), arg0));
12155 
12156       /* bool_var == 0 becomes !bool_var. */
12157       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12158           && code == EQ_EXPR)
12159         return fold_convert_loc (loc, type,
12160 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12161 						  TREE_TYPE (arg0), arg0));
12162 
12163       /* !exp != 0 becomes !exp */
12164       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12165 	  && code == NE_EXPR)
12166         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12167 
12168       /* If this is an EQ or NE comparison with zero and ARG0 is
12169 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12170 	 two operations, but the latter can be done in one less insn
12171 	 on machines that have only two-operand insns or on which a
12172 	 constant cannot be the first operand.  */
12173       if (TREE_CODE (arg0) == BIT_AND_EXPR
12174 	  && integer_zerop (arg1))
12175 	{
12176 	  tree arg00 = TREE_OPERAND (arg0, 0);
12177 	  tree arg01 = TREE_OPERAND (arg0, 1);
12178 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12179 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12180 	    {
12181 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12182 					  arg01, TREE_OPERAND (arg00, 1));
12183 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12184 				     build_one_cst (TREE_TYPE (arg0)));
12185 	      return fold_build2_loc (loc, code, type,
12186 				      fold_convert_loc (loc, TREE_TYPE (arg1),
12187 							tem), arg1);
12188 	    }
12189 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12190 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12191 	    {
12192 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12193 					  arg00, TREE_OPERAND (arg01, 1));
12194 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12195 				     build_one_cst (TREE_TYPE (arg0)));
12196 	      return fold_build2_loc (loc, code, type,
12197 				      fold_convert_loc (loc, TREE_TYPE (arg1),
12198 							tem), arg1);
12199 	    }
12200 	}
12201 
12202       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12203 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12204 	 a single bit.  */
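      /* As a worked example of the rewrites below: with C1 == 2 and
	 C2 == 8, ((x >> 2) & 8) != 0 becomes (x & (8 << 2)) != 0,
	 i.e. (x & 32) != 0; for signed 32-bit x with C1 == 29, the
	 shifted mask would address the sign bit, so the comparison
	 becomes x < 0 instead.  */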
12205       if (TREE_CODE (arg0) == BIT_AND_EXPR
12206 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12207 	  && integer_zerop (arg1))
12208 	{
12209 	  tree arg00 = TREE_OPERAND (arg0, 0);
12210 	  STRIP_NOPS (arg00);
12211 	  if (TREE_CODE (arg00) == RSHIFT_EXPR
12212 	      && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12213 	    {
12214 	      tree itype = TREE_TYPE (arg00);
12215 	      tree arg001 = TREE_OPERAND (arg00, 1);
12216 	      prec = TYPE_PRECISION (itype);
12217 
12218 	      /* Check for a valid shift count.  */
12219 	      if (wi::ltu_p (wi::to_wide (arg001), prec))
12220 		{
12221 		  tree arg01 = TREE_OPERAND (arg0, 1);
12222 		  tree arg000 = TREE_OPERAND (arg00, 0);
12223 		  unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12224 		  /* If (C2 << C1) doesn't overflow, then
12225 		     ((X >> C1) & C2) != 0 can be rewritten as
12226 		     (X & (C2 << C1)) != 0.  */
12227 		  if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12228 		    {
12229 		      tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12230 					     arg01, arg001);
12231 		      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12232 					     arg000, tem);
12233 		      return fold_build2_loc (loc, code, type, tem,
12234 				fold_convert_loc (loc, itype, arg1));
12235 		    }
12236 		  /* Otherwise, for signed (arithmetic) shifts,
12237 		     ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12238 		     ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12239 		  else if (!TYPE_UNSIGNED (itype))
12240 		    return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12241 								 : LT_EXPR,
12242 					    type, arg000,
12243 					    build_int_cst (itype, 0));
12244 		  /* Otherwise, for unsigned (logical) shifts,
12245 		     ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12246 		     ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12247 		  else
12248 		    return omit_one_operand_loc (loc, type,
12249 					 code == EQ_EXPR ? integer_one_node
12250 							 : integer_zero_node,
12251 					 arg000);
12252 		}
12253 	    }
12254 	}
12255 
12256       /* If this is a comparison of a field, we may be able to simplify it.  */
12257       if ((TREE_CODE (arg0) == COMPONENT_REF
12258 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12259 	  /* Handle the constant case even without -O
12260 	     to make sure the warnings are given.  */
12261 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12262 	{
12263 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12264 	  if (t1)
12265 	    return t1;
12266 	}
12267 
12268       /* Optimize comparisons of strlen vs zero to a compare of the
12269 	 first character of the string vs zero.  To wit,
12270 		strlen(ptr) == 0   =>  *ptr == 0
12271 		strlen(ptr) != 0   =>  *ptr != 0
12272 	 Other cases should reduce to one of these two (or a constant)
12273 	 due to the return value of strlen being unsigned.  */
12274       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12275 	{
12276 	  tree fndecl = get_callee_fndecl (arg0);
12277 
12278 	  if (fndecl
12279 	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12280 	      && call_expr_nargs (arg0) == 1
12281 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12282 		  == POINTER_TYPE))
12283 	    {
12284 	      tree ptrtype
12285 		= build_pointer_type (build_qualified_type (char_type_node,
12286 							    TYPE_QUAL_CONST));
12287 	      tree ptr = fold_convert_loc (loc, ptrtype,
12288 					   CALL_EXPR_ARG (arg0, 0));
12289 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
12290 	      return fold_build2_loc (loc, code, type, iref,
12291 				      build_int_cst (TREE_TYPE (iref), 0));
12292 	    }
12293 	}
12294 
12295       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12296 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
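      /* E.g. for 32-bit signed x, (x >> 31) != 0 becomes x < 0 and
	 (x >> 31) == 0 becomes x >= 0; an unsigned operand is first
	 converted to the corresponding signed type below.  */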
12297       if (TREE_CODE (arg0) == RSHIFT_EXPR
12298 	  && integer_zerop (arg1)
12299 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12300 	{
12301 	  tree arg00 = TREE_OPERAND (arg0, 0);
12302 	  tree arg01 = TREE_OPERAND (arg0, 1);
12303 	  tree itype = TREE_TYPE (arg00);
12304 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
12305 	    {
12306 	      if (TYPE_UNSIGNED (itype))
12307 		{
12308 		  itype = signed_type_for (itype);
12309 		  arg00 = fold_convert_loc (loc, itype, arg00);
12310 		}
12311 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12312 				  type, arg00, build_zero_cst (itype));
12313 	    }
12314 	}
12315 
12316       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12317 	 (X & C) == 0 when C is a single bit.  */
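      /* E.g. (~x & 4) == 0 becomes (x & 4) != 0, since the single bit
	 is set in ~x exactly when it is clear in x.  */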
12318       if (TREE_CODE (arg0) == BIT_AND_EXPR
12319 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12320 	  && integer_zerop (arg1)
12321 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12322 	{
12323 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12324 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12325 				 TREE_OPERAND (arg0, 1));
12326 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12327 				  type, tem,
12328 				  fold_convert_loc (loc, TREE_TYPE (arg0),
12329 						    arg1));
12330 	}
12331 
12332       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12333 	 constant C is a power of two, i.e. a single bit.  */
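      /* E.g. ((x & 8) ^ 8) == 0 becomes (x & 8) != 0, since the XOR
	 yields zero exactly when the masked bit was set.  */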
12334       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12335 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12336 	  && integer_zerop (arg1)
12337 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12338 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12339 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12340 	{
12341 	  tree arg00 = TREE_OPERAND (arg0, 0);
12342 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12343 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
12344 	}
12345 
12346       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12347 	 when C is a power of two, i.e. a single bit.  */
12348       if (TREE_CODE (arg0) == BIT_AND_EXPR
12349 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12350 	  && integer_zerop (arg1)
12351 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12352 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12353 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12354 	{
12355 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12356 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12357 			     arg000, TREE_OPERAND (arg0, 1));
12358 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12359 			      tem, build_int_cst (TREE_TYPE (tem), 0));
12360 	}
12361 
12362       if (integer_zerop (arg1)
12363 	  && tree_expr_nonzero_p (arg0))
12364         {
12365 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12366 	  return omit_one_operand_loc (loc, type, res, arg0);
12367 	}
12368 
12369       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12370 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
12371 	{
12372 	  tree arg00 = TREE_OPERAND (arg0, 0);
12373 	  tree arg01 = TREE_OPERAND (arg0, 1);
12374 	  tree arg10 = TREE_OPERAND (arg1, 0);
12375 	  tree arg11 = TREE_OPERAND (arg1, 1);
12376 	  tree itype = TREE_TYPE (arg0);
12377 
12378 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12379 	     operand_equal_p guarantees no side-effects so we don't need
12380 	     to use omit_one_operand on Z.  */
12381 	  if (operand_equal_p (arg01, arg11, 0))
12382 	    return fold_build2_loc (loc, code, type, arg00,
12383 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12384 						      arg10));
12385 	  if (operand_equal_p (arg01, arg10, 0))
12386 	    return fold_build2_loc (loc, code, type, arg00,
12387 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12388 						      arg11));
12389 	  if (operand_equal_p (arg00, arg11, 0))
12390 	    return fold_build2_loc (loc, code, type, arg01,
12391 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12392 						      arg10));
12393 	  if (operand_equal_p (arg00, arg10, 0))
12394 	    return fold_build2_loc (loc, code, type, arg01,
12395 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12396 						      arg11));
12397 
12398 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
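	  /* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y,
	     because 5 ^ 3 == 6.  */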
12399 	  if (TREE_CODE (arg01) == INTEGER_CST
12400 	      && TREE_CODE (arg11) == INTEGER_CST)
12401 	    {
12402 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12403 				     fold_convert_loc (loc, itype, arg11));
12404 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12405 	      return fold_build2_loc (loc, code, type, tem,
12406 				      fold_convert_loc (loc, itype, arg10));
12407 	    }
12408 	}
12409 
12410       /* Attempt to simplify equality/inequality comparisons of complex
12411 	 values.  Only lower the comparison if the result is known or
12412 	 can be simplified to a single scalar comparison.  */
12413       if ((TREE_CODE (arg0) == COMPLEX_EXPR
12414 	   || TREE_CODE (arg0) == COMPLEX_CST)
12415 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
12416 	      || TREE_CODE (arg1) == COMPLEX_CST))
12417 	{
12418 	  tree real0, imag0, real1, imag1;
12419 	  tree rcond, icond;
12420 
12421 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
12422 	    {
12423 	      real0 = TREE_OPERAND (arg0, 0);
12424 	      imag0 = TREE_OPERAND (arg0, 1);
12425 	    }
12426 	  else
12427 	    {
12428 	      real0 = TREE_REALPART (arg0);
12429 	      imag0 = TREE_IMAGPART (arg0);
12430 	    }
12431 
12432 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
12433 	    {
12434 	      real1 = TREE_OPERAND (arg1, 0);
12435 	      imag1 = TREE_OPERAND (arg1, 1);
12436 	    }
12437 	  else
12438 	    {
12439 	      real1 = TREE_REALPART (arg1);
12440 	      imag1 = TREE_IMAGPART (arg1);
12441 	    }
12442 
12443 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
12444 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12445 	    {
12446 	      if (integer_zerop (rcond))
12447 		{
12448 		  if (code == EQ_EXPR)
12449 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12450 					      imag0, imag1);
12451 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12452 		}
12453 	      else
12454 		{
12455 		  if (code == NE_EXPR)
12456 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12457 					      imag0, imag1);
12458 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12459 		}
12460 	    }
12461 
12462 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
12463 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
12464 	    {
12465 	      if (integer_zerop (icond))
12466 		{
12467 		  if (code == EQ_EXPR)
12468 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12469 					      real0, real1);
12470 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12471 		}
12472 	      else
12473 		{
12474 		  if (code == NE_EXPR)
12475 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12476 					      real0, real1);
12477 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12478 		}
12479 	    }
12480 	}
12481 
12482       return NULL_TREE;
12483 
12484     case LT_EXPR:
12485     case GT_EXPR:
12486     case LE_EXPR:
12487     case GE_EXPR:
12488       tem = fold_comparison (loc, code, type, op0, op1);
12489       if (tem != NULL_TREE)
12490 	return tem;
12491 
12492       /* Transform comparisons of the form X +- C CMP X.  */
12493       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12494 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12495 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12496 	  && !HONOR_SNANS (arg0))
12497 	{
12498 	  tree arg01 = TREE_OPERAND (arg0, 1);
12499 	  enum tree_code code0 = TREE_CODE (arg0);
12500 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12501 
12502 	  /* (X - c) > X becomes false.  */
12503 	  if (code == GT_EXPR
12504 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12505 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12506 	    return constant_boolean_node (0, type);
12507 
12508 	  /* Likewise (X + c) < X becomes false.  */
12509 	  if (code == LT_EXPR
12510 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12511 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12512 	    return constant_boolean_node (0, type);
12513 
12514 	  /* Convert (X - c) <= X to true.  */
12515 	  if (!HONOR_NANS (arg1)
12516 	      && code == LE_EXPR
12517 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12518 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12519 	    return constant_boolean_node (1, type);
12520 
12521 	  /* Convert (X + c) >= X to true.  */
12522 	  if (!HONOR_NANS (arg1)
12523 	      && code == GE_EXPR
12524 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12525 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12526 	    return constant_boolean_node (1, type);
12527 	}
12528 
12529       /* If we are comparing an ABS_EXPR with a constant, we can
12530 	 convert all the cases into explicit comparisons, but they may
12531 	 well not be faster than doing the ABS and one comparison.
12532 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
12533 	 and a comparison, and is probably faster.  */
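      /* E.g. abs(x) <= 7 becomes x >= -7 && x <= 7 via the
	 TRUTH_ANDIF_EXPR built below.  */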
12534       if (code == LE_EXPR
12535 	  && TREE_CODE (arg1) == INTEGER_CST
12536 	  && TREE_CODE (arg0) == ABS_EXPR
12537 	  && ! TREE_SIDE_EFFECTS (arg0)
12538 	  && (tem = negate_expr (arg1)) != 0
12539 	  && TREE_CODE (tem) == INTEGER_CST
12540 	  && !TREE_OVERFLOW (tem))
12541 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12542 			    build2 (GE_EXPR, type,
12543 				    TREE_OPERAND (arg0, 0), tem),
12544 			    build2 (LE_EXPR, type,
12545 				    TREE_OPERAND (arg0, 0), arg1));
12546 
12547       /* Convert ABS_EXPR<x> >= 0 to true.  */
12548       strict_overflow_p = false;
12549       if (code == GE_EXPR
12550 	  && (integer_zerop (arg1)
12551 	      || (! HONOR_NANS (arg0)
12552 		  && real_zerop (arg1)))
12553 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12554 	{
12555 	  if (strict_overflow_p)
12556 	    fold_overflow_warning (("assuming signed overflow does not occur "
12557 				    "when simplifying comparison of "
12558 				    "absolute value and zero"),
12559 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
12560 	  return omit_one_operand_loc (loc, type,
12561 				       constant_boolean_node (true, type),
12562 				       arg0);
12563 	}
12564 
12565       /* Convert ABS_EXPR<x> < 0 to false.  */
12566       strict_overflow_p = false;
12567       if (code == LT_EXPR
12568 	  && (integer_zerop (arg1) || real_zerop (arg1))
12569 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12570 	{
12571 	  if (strict_overflow_p)
12572 	    fold_overflow_warning (("assuming signed overflow does not occur "
12573 				    "when simplifying comparison of "
12574 				    "absolute value and zero"),
12575 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
12576 	  return omit_one_operand_loc (loc, type,
12577 				       constant_boolean_node (false, type),
12578 				       arg0);
12579 	}
12580 
12581       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12582 	 and similarly for >= into !=.  */
12583       if ((code == LT_EXPR || code == GE_EXPR)
12584 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
12585 	  && TREE_CODE (arg1) == LSHIFT_EXPR
12586 	  && integer_onep (TREE_OPERAND (arg1, 0)))
12587 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12588 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12589 				   TREE_OPERAND (arg1, 1)),
12590 			   build_zero_cst (TREE_TYPE (arg0)));
12591 
12592       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
12593 	 otherwise Y might be >= # of bits in X's type and thus e.g.
12594 	 (unsigned char) (1 << Y) for Y 15 might be 0.
12595 	 If the cast is widening, then 1 << Y should have unsigned type,
12596 	 otherwise if Y is number of bits in the signed shift type minus 1,
12597 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
12598 	 31 might be 0xffffffff80000000.  */
12599       if ((code == LT_EXPR || code == GE_EXPR)
12600 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12601 	      || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12602 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
12603 	  && CONVERT_EXPR_P (arg1)
12604 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12605 	  && (element_precision (TREE_TYPE (arg1))
12606 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12607 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12608 	      || (element_precision (TREE_TYPE (arg1))
12609 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12610 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12611 	{
12612 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12613 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12614 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12615 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12616 			     build_zero_cst (TREE_TYPE (arg0)));
12617 	}
12618 
12619       return NULL_TREE;
12620 
12621     case UNORDERED_EXPR:
12622     case ORDERED_EXPR:
12623     case UNLT_EXPR:
12624     case UNLE_EXPR:
12625     case UNGT_EXPR:
12626     case UNGE_EXPR:
12627     case UNEQ_EXPR:
12628     case LTGT_EXPR:
12629       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
12630       {
12631 	tree targ0 = strip_float_extensions (arg0);
12632 	tree targ1 = strip_float_extensions (arg1);
12633 	tree newtype = TREE_TYPE (targ0);
12634 
12635 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12636 	  newtype = TREE_TYPE (targ1);
12637 
12638 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12639 	  return fold_build2_loc (loc, code, type,
12640 			      fold_convert_loc (loc, newtype, targ0),
12641 			      fold_convert_loc (loc, newtype, targ1));
12642       }
12643 
12644       return NULL_TREE;
12645 
12646     case COMPOUND_EXPR:
12647       /* When pedantic, a compound expression can be neither an lvalue
12648 	 nor an integer constant expression.  */
12649       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12650 	return NULL_TREE;
12651       /* Don't let (0, 0) be a null pointer constant.  */
12652       tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12653 				 : fold_convert_loc (loc, type, arg1);
12654       return tem;
12655 
12656     case ASSERT_EXPR:
12657       /* An ASSERT_EXPR should never be passed to fold_binary.  */
12658       gcc_unreachable ();
12659 
12660     default:
12661       return NULL_TREE;
12662     } /* switch (code) */
12663 }
12664 
12665 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12666    ((A & N) + B) & M -> (A + B) & M
12667    Similarly if (N & M) == 0,
12668    ((A | N) + B) & M -> (A + B) & M
12669    and for - instead of + (or unary - instead of +)
12670    and/or ^ instead of |.
12671    If B is constant and (B & M) == 0, fold into A & M.
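   E.g. with M == 7, ((A & 7) + B) & 7 and ((A | 8) + B) & 7 both
   simplify to (A + B) & 7, and ((A | 8) + 16) & 7 simplifies to A & 7
   because (16 & 7) == 0.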
12672 
12673    This function is a helper for match.pd patterns.  Return non-NULL
12674    type in which the simplified operation should be performed only
12675    if any optimization is possible.
12676 
12677    ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12678    then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12679    Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12680    +/-.  */
12681 tree
12682 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12683 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
12684 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
12685 		   tree *pmop)
12686 {
12687   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12688   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12689   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
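  /* M must have the form (1 << cst) - 1; for such values
     M & (M + 1) == 0, e.g. 0x0f & 0x10 == 0, whereas e.g.
     0xf0 & 0xf1 != 0 and is rejected below, as is an all-ones M.  */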
12690   if (~cst1 == 0
12691       || (cst1 & (cst1 + 1)) != 0
12692       || !INTEGRAL_TYPE_P (type)
12693       || (!TYPE_OVERFLOW_WRAPS (type)
12694 	  && TREE_CODE (type) != INTEGER_TYPE)
12695       || (wi::max_value (type) & cst1) != cst1)
12696     return NULL_TREE;
12697 
12698   enum tree_code codes[2] = { code00, code01 };
12699   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12700   int which = 0;
12701   wide_int cst0;
12702 
12703   /* Now we know that arg0 is (C + D) or (C - D) or -C and
12704      arg1 (M) is == (1LL << cst) - 1.
12705      Store C into PMOP[0] and D into PMOP[1].  */
12706   pmop[0] = arg00;
12707   pmop[1] = arg01;
12708   which = code != NEGATE_EXPR;
12709 
12710   for (; which >= 0; which--)
12711     switch (codes[which])
12712       {
12713       case BIT_AND_EXPR:
12714       case BIT_IOR_EXPR:
12715       case BIT_XOR_EXPR:
12716 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12717 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12718 	if (codes[which] == BIT_AND_EXPR)
12719 	  {
12720 	    if (cst0 != cst1)
12721 	      break;
12722 	  }
12723 	else if (cst0 != 0)
12724 	  break;
12725 	/* If C or D is of the form (A & N) where
12726 	   (N & M) == M, or of the form (A | N) or
12727 	   (A ^ N) where (N & M) == 0, replace it with A.  */
12728 	pmop[which] = arg0xx[2 * which];
12729 	break;
12730       case ERROR_MARK:
12731 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
12732 	  break;
12733 	/* If C or D is a constant N where (N & M) == 0, it can be
12734 	   omitted (replaced with 0).  */
12735 	if ((code == PLUS_EXPR
12736 	     || (code == MINUS_EXPR && which == 0))
12737 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
12738 	  pmop[which] = build_int_cst (type, 0);
12739 	/* Similarly, with C - N where (-N & M) == 0.  */
12740 	if (code == MINUS_EXPR
12741 	    && which == 1
12742 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
12743 	  pmop[which] = build_int_cst (type, 0);
12744 	break;
12745       default:
12746 	gcc_unreachable ();
12747       }
12748 
12749   /* Only build anything new if we optimized one or both arguments above.  */
12750   if (pmop[0] == arg00 && pmop[1] == arg01)
12751     return NULL_TREE;
12752 
12753   if (TYPE_OVERFLOW_WRAPS (type))
12754     return type;
12755   else
12756     return unsigned_type_for (type);
12757 }
12758 
12759 /* Used by contains_label_[p1].  */
12760 
12761 struct contains_label_data
12762 {
12763   hash_set<tree> *pset;
12764   bool inside_switch_p;
12765 };
12766 
12767 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
12768    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12769    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
12770 
12771 static tree
12772 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12773 {
12774   contains_label_data *d = (contains_label_data *) data;
12775   switch (TREE_CODE (*tp))
12776     {
12777     case LABEL_EXPR:
12778       return *tp;
12779 
12780     case CASE_LABEL_EXPR:
12781       if (!d->inside_switch_p)
12782 	return *tp;
12783       return NULL_TREE;
12784 
12785     case SWITCH_EXPR:
12786       if (!d->inside_switch_p)
12787 	{
12788 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12789 	    return *tp;
12790 	  d->inside_switch_p = true;
12791 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12792 	    return *tp;
12793 	  d->inside_switch_p = false;
12794 	  *walk_subtrees = 0;
12795 	}
12796       return NULL_TREE;
12797 
12798     case GOTO_EXPR:
12799       *walk_subtrees = 0;
12800       return NULL_TREE;
12801 
12802     default:
12803       return NULL_TREE;
12804     }
12805 }
12806 
12807 /* Return whether the sub-tree ST contains a label which is accessible from
12808    outside the sub-tree.  */
12809 
12810 static bool
12811 contains_label_p (tree st)
12812 {
12813   hash_set<tree> pset;
12814   contains_label_data data = { &pset, false };
12815   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12816 }
12817 
12818 /* Fold a ternary expression of code CODE and type TYPE with operands
12819    OP0, OP1, and OP2.  Return the folded expression if folding is
12820    successful.  Otherwise, return NULL_TREE.  */
12821 
12822 tree
12823 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12824 		  tree op0, tree op1, tree op2)
12825 {
12826   tree tem;
12827   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12828   enum tree_code_class kind = TREE_CODE_CLASS (code);
12829 
12830   gcc_assert (IS_EXPR_CODE_CLASS (kind)
12831 	      && TREE_CODE_LENGTH (code) == 3);
12832 
12833   /* If this is a commutative operation, and OP0 is a constant, move it
12834      to OP1 to reduce the number of tests below.  */
12835   if (commutative_ternary_tree_code (code)
12836       && tree_swap_operands_p (op0, op1))
12837     return fold_build3_loc (loc, code, type, op1, op0, op2);
12838 
12839   tem = generic_simplify (loc, code, type, op0, op1, op2);
12840   if (tem)
12841     return tem;
12842 
12843   /* Strip any conversions that don't change the mode.  This is safe
12844      for every expression, except for a comparison expression because
12845      its signedness is derived from its operands.  So, in the latter
12846      case, only strip conversions that don't change the signedness.
12847 
12848      Note that this is done as an internal manipulation within the
12849      constant folder, in order to find the simplest representation of
12850      the arguments so that their form can be studied.  In any case,
12851      the appropriate type conversions should be put back in the tree
12852      that will get out of the constant folder.  */
12853   if (op0)
12854     {
12855       arg0 = op0;
12856       STRIP_NOPS (arg0);
12857     }
12858 
12859   if (op1)
12860     {
12861       arg1 = op1;
12862       STRIP_NOPS (arg1);
12863     }
12864 
12865   if (op2)
12866     {
12867       arg2 = op2;
12868       STRIP_NOPS (arg2);
12869     }
12870 
12871   switch (code)
12872     {
12873     case COMPONENT_REF:
12874       if (TREE_CODE (arg0) == CONSTRUCTOR
12875 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12876 	{
12877 	  unsigned HOST_WIDE_INT idx;
12878 	  tree field, value;
12879 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12880 	    if (field == arg1)
12881 	      return value;
12882 	}
12883       return NULL_TREE;
12884 
12885     case COND_EXPR:
12886     case VEC_COND_EXPR:
12887       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12888 	 so all simple results must be passed through pedantic_non_lvalue.  */
12889       if (TREE_CODE (arg0) == INTEGER_CST)
12890 	{
12891 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
12892 	  tem = integer_zerop (arg0) ? op2 : op1;
12893 	  /* Only optimize constant conditions when the selected branch
12894 	     has the same type as the COND_EXPR.  This avoids optimizing
12895              away "c ? x : throw", where the throw has a void type.
12896              Avoid throwing away that operand which contains a label.  */
12897           if ((!TREE_SIDE_EFFECTS (unused_op)
12898                || !contains_label_p (unused_op))
12899               && (! VOID_TYPE_P (TREE_TYPE (tem))
12900                   || VOID_TYPE_P (type)))
12901 	    return protected_set_expr_location_unshare (tem, loc);
12902 	  return NULL_TREE;
12903 	}
12904       else if (TREE_CODE (arg0) == VECTOR_CST)
12905 	{
12906 	  unsigned HOST_WIDE_INT nelts;
12907 	  if ((TREE_CODE (arg1) == VECTOR_CST
12908 	       || TREE_CODE (arg1) == CONSTRUCTOR)
12909 	      && (TREE_CODE (arg2) == VECTOR_CST
12910 		  || TREE_CODE (arg2) == CONSTRUCTOR)
12911 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12912 	    {
12913 	      vec_perm_builder sel (nelts, nelts, 1);
12914 	      for (unsigned int i = 0; i < nelts; i++)
12915 		{
12916 		  tree val = VECTOR_CST_ELT (arg0, i);
12917 		  if (integer_all_onesp (val))
12918 		    sel.quick_push (i);
12919 		  else if (integer_zerop (val))
12920 		    sel.quick_push (nelts + i);
12921 		  else /* Currently unreachable.  */
12922 		    return NULL_TREE;
12923 		}
12924 	      vec_perm_indices indices (sel, 2, nelts);
12925 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
12926 	      if (t != NULL_TREE)
12927 		return t;
12928 	    }
12929 	}
12930 
12931       /* If we have A op B ? A : C, we may be able to convert this to a
12932 	 simpler expression, depending on the operation and the values
12933 	 of B and C.  Signed zeros prevent all of these transformations,
12934 	 for reasons given above each one.
12935 
12936          Also try swapping the arguments and inverting the conditional.  */
12937       if (COMPARISON_CLASS_P (arg0)
12938 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12939 	  && !HONOR_SIGNED_ZEROS (op1))
12940 	{
12941 	  tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12942 						TREE_OPERAND (arg0, 0),
12943 						TREE_OPERAND (arg0, 1),
12944 						op1, op2);
12945 	  if (tem)
12946 	    return tem;
12947 	}
12948 
12949       if (COMPARISON_CLASS_P (arg0)
12950 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12951 	  && !HONOR_SIGNED_ZEROS (op2))
12952 	{
12953 	  enum tree_code comp_code = TREE_CODE (arg0);
12954 	  tree arg00 = TREE_OPERAND (arg0, 0);
12955 	  tree arg01 = TREE_OPERAND (arg0, 1);
12956 	  comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12957 	  if (comp_code != ERROR_MARK)
12958 	    tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12959 						  arg00,
12960 						  arg01,
12961 						  op2, op1);
12962 	  if (tem)
12963 	    return tem;
12964 	}
12965 
12966       /* If the second operand is simpler than the third, swap them
12967 	 since that produces better jump optimization results.  */
12968       if (truth_value_p (TREE_CODE (arg0))
12969 	  && tree_swap_operands_p (op1, op2))
12970 	{
12971 	  location_t loc0 = expr_location_or (arg0, loc);
12972 	  /* See if this can be inverted.  If it can't, possibly because
12973 	     it was a floating-point inequality comparison, don't do
12974 	     anything.  */
12975 	  tem = fold_invert_truthvalue (loc0, arg0);
12976 	  if (tem)
12977 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
12978 	}
12979 
12980       /* Convert A ? 1 : 0 to simply A.  */
12981       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12982 				 : (integer_onep (op1)
12983 				    && !VECTOR_TYPE_P (type)))
12984 	  && integer_zerop (op2)
12985 	  /* If we try to convert OP0 to our type, the
12986 	     call to fold will try to move the conversion inside
12987 	     a COND, which will recurse.  In that case, the COND_EXPR
12988 	     is probably the best choice, so leave it alone.  */
12989 	  && type == TREE_TYPE (arg0))
12990 	return protected_set_expr_location_unshare (arg0, loc);
12991 
12992       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
12993 	 over COND_EXPR in cases such as floating point comparisons.  */
12994       if (integer_zerop (op1)
12995 	  && code == COND_EXPR
12996 	  && integer_onep (op2)
12997 	  && !VECTOR_TYPE_P (type)
12998 	  && truth_value_p (TREE_CODE (arg0)))
12999 	return fold_convert_loc (loc, type,
13000 				 invert_truthvalue_loc (loc, arg0));
13001 
13002       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
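      /* E.g. for 32-bit a, a < 0 ? 0x80000000 : 0 is just
	 a & 0x80000000, computed in a suitably signed or unsigned
	 type as adjusted below.  */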
13003       if (TREE_CODE (arg0) == LT_EXPR
13004 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13005 	  && integer_zerop (op2)
13006 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13007 	{
13008 	  /* sign_bit_p looks through both zero and sign extensions,
13009 	     but for this optimization only sign extensions are
13010 	     usable.  */
13011 	  tree tem2 = TREE_OPERAND (arg0, 0);
13012 	  while (tem != tem2)
13013 	    {
13014 	      if (TREE_CODE (tem2) != NOP_EXPR
13015 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13016 		{
13017 		  tem = NULL_TREE;
13018 		  break;
13019 		}
13020 	      tem2 = TREE_OPERAND (tem2, 0);
13021 	    }
13022 	  /* sign_bit_p only checks ARG1 bits within A's precision.
13023 	     If <sign bit of A> has wider type than A, bits outside
13024 	     of A's precision in <sign bit of A> need to be checked.
13025 	     If they are all 0, this optimization needs to be done
13026 	     in unsigned A's type; if they are all 1, in signed A's type;
13027 	     otherwise this can't be done.  */
13028 	  if (tem
13029 	      && TYPE_PRECISION (TREE_TYPE (tem))
13030 		 < TYPE_PRECISION (TREE_TYPE (arg1))
13031 	      && TYPE_PRECISION (TREE_TYPE (tem))
13032 		 < TYPE_PRECISION (type))
13033 	    {
13034 	      int inner_width, outer_width;
13035 	      tree tem_type;
13036 
13037 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13038 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13039 	      if (outer_width > TYPE_PRECISION (type))
13040 		outer_width = TYPE_PRECISION (type);
13041 
13042 	      wide_int mask = wi::shifted_mask
13043 		(inner_width, outer_width - inner_width, false,
13044 		 TYPE_PRECISION (TREE_TYPE (arg1)));
13045 
13046 	      wide_int common = mask & wi::to_wide (arg1);
13047 	      if (common == mask)
13048 		{
13049 		  tem_type = signed_type_for (TREE_TYPE (tem));
13050 		  tem = fold_convert_loc (loc, tem_type, tem);
13051 		}
13052 	      else if (common == 0)
13053 		{
13054 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13055 		  tem = fold_convert_loc (loc, tem_type, tem);
13056 		}
13057 	      else
13058 		tem = NULL;
13059 	    }
13060 
13061 	  if (tem)
13062 	    return
13063 	      fold_convert_loc (loc, type,
13064 				fold_build2_loc (loc, BIT_AND_EXPR,
13065 					     TREE_TYPE (tem), tem,
13066 					     fold_convert_loc (loc,
13067 							       TREE_TYPE (tem),
13068 							       arg1)));
13069 	}
13070 
13071       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13072 	 already handled above.  */
13073       if (TREE_CODE (arg0) == BIT_AND_EXPR
13074 	  && integer_onep (TREE_OPERAND (arg0, 1))
13075 	  && integer_zerop (op2)
13076 	  && integer_pow2p (arg1))
13077 	{
13078 	  tree tem = TREE_OPERAND (arg0, 0);
13079 	  STRIP_NOPS (tem);
13080 	  if (TREE_CODE (tem) == RSHIFT_EXPR
13081 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13082               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13083 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13084 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13085 				    fold_convert_loc (loc, type,
13086 						      TREE_OPERAND (tem, 0)),
13087 				    op1);
13088 	}
13089 
13090       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13091 	 is probably obsolete because the first operand should be a
13092 	 truth value (that's why we have the two cases above), but let's
13093 	 leave it in until we can confirm this for all front-ends.  */
13094       if (integer_zerop (op2)
13095 	  && TREE_CODE (arg0) == NE_EXPR
13096 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13097 	  && integer_pow2p (arg1)
13098 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13099 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13100 			      arg1, OEP_ONLY_CONST)
13101 	  /* operand_equal_p compares just value, not precision, so e.g.
13102 	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13103 	     second operand 32-bit -128, which is not a power of two (or vice
13104 	     versa).  */
13105 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13106 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13107 
13108       /* Disable the transformations below for vectors, since
13109 	 fold_binary_op_with_conditional_arg may undo them immediately,
13110 	 yielding an infinite loop.  */
13111       if (code == VEC_COND_EXPR)
13112 	return NULL_TREE;
13113 
13114       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13115       if (integer_zerop (op2)
13116 	  && truth_value_p (TREE_CODE (arg0))
13117 	  && truth_value_p (TREE_CODE (arg1))
13118 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13119 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13120 							   : TRUTH_ANDIF_EXPR,
13121 				type, fold_convert_loc (loc, type, arg0), op1);
13122 
13123       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13124       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13125 	  && truth_value_p (TREE_CODE (arg0))
13126 	  && truth_value_p (TREE_CODE (arg1))
13127 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13128 	{
13129 	  location_t loc0 = expr_location_or (arg0, loc);
13130 	  /* Only perform transformation if ARG0 is easily inverted.  */
13131 	  tem = fold_invert_truthvalue (loc0, arg0);
13132 	  if (tem)
13133 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13134 					 ? BIT_IOR_EXPR
13135 					 : TRUTH_ORIF_EXPR,
13136 				    type, fold_convert_loc (loc, type, tem),
13137 				    op1);
13138 	}
13139 
13140       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13141       if (integer_zerop (arg1)
13142 	  && truth_value_p (TREE_CODE (arg0))
13143 	  && truth_value_p (TREE_CODE (op2))
13144 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13145 	{
13146 	  location_t loc0 = expr_location_or (arg0, loc);
13147 	  /* Only perform transformation if ARG0 is easily inverted.  */
13148 	  tem = fold_invert_truthvalue (loc0, arg0);
13149 	  if (tem)
13150 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13151 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13152 				    type, fold_convert_loc (loc, type, tem),
13153 				    op2);
13154 	}
13155 
13156       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
13157       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13158 	  && truth_value_p (TREE_CODE (arg0))
13159 	  && truth_value_p (TREE_CODE (op2))
13160 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13161 	return fold_build2_loc (loc, code == VEC_COND_EXPR
13162 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13163 				type, fold_convert_loc (loc, type, arg0), op2);
13164 
13165       return NULL_TREE;
13166 
13167     case CALL_EXPR:
13168       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
13169 	 of fold_ternary on them.  */
13170       gcc_unreachable ();
13171 
13172     case BIT_FIELD_REF:
13173       if (TREE_CODE (arg0) == VECTOR_CST
13174 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
13175 	      || (VECTOR_TYPE_P (type)
13176 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13177 	  && tree_fits_uhwi_p (op1)
13178 	  && tree_fits_uhwi_p (op2))
13179 	{
13180 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13181 	  unsigned HOST_WIDE_INT width
13182 	    = (TREE_CODE (eltype) == BOOLEAN_TYPE
13183 	       ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13184 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13185 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13186 
13187 	  if (n != 0
13188 	      && (idx % width) == 0
13189 	      && (n % width) == 0
13190 	      && known_le ((idx + n) / width,
13191 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13192 	    {
13193 	      idx = idx / width;
13194 	      n = n / width;
13195 
13196 	      if (TREE_CODE (arg0) == VECTOR_CST)
13197 		{
13198 		  if (n == 1)
13199 		    {
13200 		      tem = VECTOR_CST_ELT (arg0, idx);
13201 		      if (VECTOR_TYPE_P (type))
13202 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13203 		      return tem;
13204 		    }
13205 
13206 		  tree_vector_builder vals (type, n, 1);
13207 		  for (unsigned i = 0; i < n; ++i)
13208 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13209 		  return vals.build ();
13210 		}
13211 	    }
13212 	}
13213 
13214       /* On constants we can use native encode/interpret to constant
13215          fold (nearly) all BIT_FIELD_REFs.  */
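      /* E.g. a byte-aligned 32-bit BIT_FIELD_REF of a constant is
	 folded by encoding the selected bytes with native_encode_expr
	 and reinterpreting them as TYPE with native_interpret_expr.  */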
13216       if (CONSTANT_CLASS_P (arg0)
13217 	  && can_native_interpret_type_p (type)
13218 	  && BITS_PER_UNIT == 8
13219 	  && tree_fits_uhwi_p (op1)
13220 	  && tree_fits_uhwi_p (op2))
13221 	{
13222 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13223 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13224 	  /* Limit us to a reasonable amount of work.  To relax the
13225 	     other limitations we need bit-shifting of the buffer
13226 	     and rounding up the size.  */
13227 	  if (bitpos % BITS_PER_UNIT == 0
13228 	      && bitsize % BITS_PER_UNIT == 0
13229 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13230 	    {
13231 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13232 	      unsigned HOST_WIDE_INT len
13233 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13234 				      bitpos / BITS_PER_UNIT);
13235 	      if (len > 0
13236 		  && len * BITS_PER_UNIT >= bitsize)
13237 		{
13238 		  tree v = native_interpret_expr (type, b,
13239 						  bitsize / BITS_PER_UNIT);
13240 		  if (v)
13241 		    return v;
13242 		}
13243 	    }
13244 	}
13245 
13246       return NULL_TREE;
13247 
13248     case VEC_PERM_EXPR:
13249       /* Perform constant folding of VEC_PERM_EXPR.  */
13250       if (TREE_CODE (arg2) == VECTOR_CST
13251 	  && TREE_CODE (op0) == VECTOR_CST
13252 	  && TREE_CODE (op1) == VECTOR_CST)
13253 	{
13254 	  /* Build a vector of integers from the tree mask.  */
13255 	  vec_perm_builder builder;
13256 	  if (!tree_to_vec_perm_builder (&builder, arg2))
13257 	    return NULL_TREE;
13258 
13259 	  /* Create a vec_perm_indices for the integer vector.  */
13260 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13261 	  bool single_arg = (op0 == op1);
13262 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13263 	  return fold_vec_perm (type, op0, op1, sel);
13264 	}
13265       return NULL_TREE;
13266 
13267     case BIT_INSERT_EXPR:
13268       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
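      /* E.g. inserting the 8-bit value 0xab at bit position 8 of the
	 32-bit constant 0x11223344 yields 0x1122ab44: mask out bits
	 [8,16), zero-extend the inserted value, shift it into place
	 and OR the two together.  */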
13269       if (TREE_CODE (arg0) == INTEGER_CST
13270 	  && TREE_CODE (arg1) == INTEGER_CST)
13271 	{
13272 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13273 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13274 	  wide_int tem = (wi::to_wide (arg0)
13275 			  & wi::shifted_mask (bitpos, bitsize, true,
13276 					      TYPE_PRECISION (type)));
13277 	  wide_int tem2
13278 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13279 				    bitsize), bitpos);
13280 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13281 	}
13282       else if (TREE_CODE (arg0) == VECTOR_CST
13283 	       && CONSTANT_CLASS_P (arg1)
13284 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13285 				      TREE_TYPE (arg1)))
13286 	{
13287 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13288 	  unsigned HOST_WIDE_INT elsize
13289 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13290 	  if (bitpos % elsize == 0)
13291 	    {
13292 	      unsigned k = bitpos / elsize;
13293 	      unsigned HOST_WIDE_INT nelts;
13294 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13295 		return arg0;
13296 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13297 		{
13298 		  tree_vector_builder elts (type, nelts, 1);
13299 		  elts.quick_grow (nelts);
13300 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13301 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13302 		  return elts.build ();
13303 		}
13304 	    }
13305 	}
13306       return NULL_TREE;
13307 
13308     default:
13309       return NULL_TREE;
13310     } /* switch (code) */
13311 }
13312 
13313 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13314    of an array (or vector).  *CTOR_IDX if non-NULL is updated with the
13315    constructor element index of the value returned.  If the element is
13316    not found NULL_TREE is returned and *CTOR_IDX is updated to
13317    the index of the element after the ACCESS_INDEX position (which
13318    may be outside of the CTOR array).  */
13319 
13320 tree
13321 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13322 				 unsigned *ctor_idx)
13323 {
13324   tree index_type = NULL_TREE;
13325   signop index_sgn = UNSIGNED;
13326   offset_int low_bound = 0;
13327 
13328   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13329     {
13330       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13331       if (domain_type && TYPE_MIN_VALUE (domain_type))
13332 	{
13333 	  /* Static constructors for variably sized objects make no sense.  */
13334 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13335 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13336 	  /* ???  When it is obvious that the range is signed, treat it so.  */
13337 	  if (TYPE_UNSIGNED (index_type)
13338 	      && TYPE_MAX_VALUE (domain_type)
13339 	      && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13340 				  TYPE_MIN_VALUE (domain_type)))
13341 	    {
13342 	      index_sgn = SIGNED;
13343 	      low_bound
13344 		= offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13345 				    SIGNED);
13346 	    }
13347 	  else
13348 	    {
13349 	      index_sgn = TYPE_SIGN (index_type);
13350 	      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13351 	    }
13352 	}
13353     }
13354 
13355   if (index_type)
13356     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13357 			    index_sgn);
13358 
13359   offset_int index = low_bound;
13360   if (index_type)
13361     index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13362 
13363   offset_int max_index = index;
13364   unsigned cnt;
13365   tree cfield, cval;
13366   bool first_p = true;
13367 
13368   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13369     {
13370       /* Array constructor might explicitly set index, or specify a range,
13371 	 or leave index NULL meaning that it is the next index after the
13372 	 previous one.  */
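      /* E.g. in { [3] = x, y } the element y has a NULL index and
	 occupies index 4.  */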
13373       if (cfield)
13374 	{
13375 	  if (TREE_CODE (cfield) == INTEGER_CST)
13376 	    max_index = index
13377 	      = offset_int::from (wi::to_wide (cfield), index_sgn);
13378 	  else
13379 	    {
13380 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13381 	      index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13382 					index_sgn);
13383 	      max_index
13384 	        = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13385 				    index_sgn);
13386 	      gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13387 	    }
13388 	}
13389       else if (!first_p)
13390 	{
13391 	  index = max_index + 1;
13392 	  if (index_type)
13393 	    index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13394 	  gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13395 	  max_index = index;
13396 	}
13397       else
13398 	first_p = false;
13399 
13400       /* Do we have a match?  */
13401       if (wi::cmp (access_index, index, index_sgn) >= 0)
13402 	{
13403 	  if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13404 	    {
13405 	      if (ctor_idx)
13406 		*ctor_idx = cnt;
13407 	      return cval;
13408 	    }
13409 	}
13410       else if (in_gimple_form)
13411 	/* We're past the element we search for.  Note during parsing
13412 	   the elements might not be sorted.
13413 	   ???  We should use a binary search and a flag on the
13414 	   CONSTRUCTOR as to whether elements are sorted in declaration
13415 	   order.  */
13416 	break;
13417     }
13418   if (ctor_idx)
13419     *ctor_idx = cnt;
13420   return NULL_TREE;
13421 }
13422 
13423 /* Perform constant folding and related simplification of EXPR.
13424    The related simplifications include x*1 => x, x*0 => 0, etc.,
13425    and application of the associative law.
13426    NOP_EXPR conversions may be removed freely (as long as we
13427    are careful not to change the type of the overall expression).
13428    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13429    but we can constant-fold them if they have constant operands.  */
13430 
13431 #ifdef ENABLE_FOLD_CHECKING
13432 # define fold(x) fold_1 (x)
13433 static tree fold_1 (tree);
13434 static
13435 #endif
13436 tree
13437 fold (tree expr)
13438 {
13439   const tree t = expr;
13440   enum tree_code code = TREE_CODE (t);
13441   enum tree_code_class kind = TREE_CODE_CLASS (code);
13442   tree tem;
13443   location_t loc = EXPR_LOCATION (expr);
13444 
13445   /* Return right away if a constant.  */
13446   if (kind == tcc_constant)
13447     return t;
13448 
13449   /* CALL_EXPR-like objects with variable numbers of operands are
13450      treated specially.  */
13451   if (kind == tcc_vl_exp)
13452     {
13453       if (code == CALL_EXPR)
13454 	{
13455 	  tem = fold_call_expr (loc, expr, false);
13456 	  return tem ? tem : expr;
13457 	}
13458       return expr;
13459     }
13460 
13461   if (IS_EXPR_CODE_CLASS (kind))
13462     {
13463       tree type = TREE_TYPE (t);
13464       tree op0, op1, op2;
13465 
13466       switch (TREE_CODE_LENGTH (code))
13467 	{
13468 	case 1:
13469 	  op0 = TREE_OPERAND (t, 0);
13470 	  tem = fold_unary_loc (loc, code, type, op0);
13471 	  return tem ? tem : expr;
13472 	case 2:
13473 	  op0 = TREE_OPERAND (t, 0);
13474 	  op1 = TREE_OPERAND (t, 1);
13475 	  tem = fold_binary_loc (loc, code, type, op0, op1);
13476 	  return tem ? tem : expr;
13477 	case 3:
13478 	  op0 = TREE_OPERAND (t, 0);
13479 	  op1 = TREE_OPERAND (t, 1);
13480 	  op2 = TREE_OPERAND (t, 2);
13481 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13482 	  return tem ? tem : expr;
13483 	default:
13484 	  break;
13485 	}
13486     }
13487 
13488   switch (code)
13489     {
13490     case ARRAY_REF:
13491       {
13492 	tree op0 = TREE_OPERAND (t, 0);
13493 	tree op1 = TREE_OPERAND (t, 1);
13494 
13495 	if (TREE_CODE (op1) == INTEGER_CST
13496 	    && TREE_CODE (op0) == CONSTRUCTOR
13497 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13498 	  {
13499 	    tree val = get_array_ctor_element_at_index (op0,
13500 							wi::to_offset (op1));
13501 	    if (val)
13502 	      return val;
13503 	  }
13504 
13505 	return t;
13506       }
13507 
13508       /* Return a VECTOR_CST if possible.  */
13509     case CONSTRUCTOR:
13510       {
13511 	tree type = TREE_TYPE (t);
13512 	if (TREE_CODE (type) != VECTOR_TYPE)
13513 	  return t;
13514 
13515 	unsigned i;
13516 	tree val;
13517 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13518 	  if (! CONSTANT_CLASS_P (val))
13519 	    return t;
13520 
13521 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13522       }
13523 
13524     case CONST_DECL:
13525       return fold (DECL_INITIAL (t));
13526 
13527     default:
13528       return t;
13529     } /* switch (code) */
13530 }
13531 
13532 #ifdef ENABLE_FOLD_CHECKING
13533 #undef fold
13534 
13535 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13536 				hash_table<nofree_ptr_hash<const tree_node> > *);
13537 static void fold_check_failed (const_tree, const_tree);
13538 void print_fold_checksum (const_tree);
13539 
13540 /* When --enable-checking=fold, compute a digest of expr before
13541    and after the actual fold call to see whether fold accidentally
13542    changed the original expr.  */
13543 
13544 tree
13545 fold (tree expr)
13546 {
13547   tree ret;
13548   struct md5_ctx ctx;
13549   unsigned char checksum_before[16], checksum_after[16];
13550   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13551 
13552   md5_init_ctx (&ctx);
13553   fold_checksum_tree (expr, &ctx, &ht);
13554   md5_finish_ctx (&ctx, checksum_before);
13555   ht.empty ();
13556 
13557   ret = fold_1 (expr);
13558 
13559   md5_init_ctx (&ctx);
13560   fold_checksum_tree (expr, &ctx, &ht);
13561   md5_finish_ctx (&ctx, checksum_after);
13562 
13563   if (memcmp (checksum_before, checksum_after, 16))
13564     fold_check_failed (expr, ret);
13565 
13566   return ret;
13567 }
13568 
13569 void
13570 print_fold_checksum (const_tree expr)
13571 {
13572   struct md5_ctx ctx;
13573   unsigned char checksum[16], cnt;
13574   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13575 
13576   md5_init_ctx (&ctx);
13577   fold_checksum_tree (expr, &ctx, &ht);
13578   md5_finish_ctx (&ctx, checksum);
13579   for (cnt = 0; cnt < 16; ++cnt)
13580     fprintf (stderr, "%02x", checksum[cnt]);
13581   putc ('\n', stderr);
13582 }
13583 
13584 static void
13585 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13586 {
13587   internal_error ("fold check: original tree changed by fold");
13588 }
13589 
13590 static void
13591 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13592 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
13593 {
13594   const tree_node **slot;
13595   enum tree_code code;
13596   union tree_node *buf;
13597   int i, len;
13598 
13599  recursive_label:
13600   if (expr == NULL)
13601     return;
13602   slot = ht->find_slot (expr, INSERT);
13603   if (*slot != NULL)
13604     return;
13605   *slot = expr;
13606   code = TREE_CODE (expr);
13607   if (TREE_CODE_CLASS (code) == tcc_declaration
13608       && HAS_DECL_ASSEMBLER_NAME_P (expr))
13609     {
13610       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
13611       size_t sz = tree_size (expr);
13612       buf = XALLOCAVAR (union tree_node, sz);
13613       memcpy ((char *) buf, expr, sz);
13614       SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13615       buf->decl_with_vis.symtab_node = NULL;
13616       buf->base.nowarning_flag = 0;
13617       expr = (tree) buf;
13618     }
13619   else if (TREE_CODE_CLASS (code) == tcc_type
13620 	   && (TYPE_POINTER_TO (expr)
13621 	       || TYPE_REFERENCE_TO (expr)
13622 	       || TYPE_CACHED_VALUES_P (expr)
13623 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13624 	       || TYPE_NEXT_VARIANT (expr)
13625 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
13626     {
13627       /* Allow these fields to be modified.  */
13628       tree tmp;
13629       size_t sz = tree_size (expr);
13630       buf = XALLOCAVAR (union tree_node, sz);
13631       memcpy ((char *) buf, expr, sz);
13632       expr = tmp = (tree) buf;
13633       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13634       TYPE_POINTER_TO (tmp) = NULL;
13635       TYPE_REFERENCE_TO (tmp) = NULL;
13636       TYPE_NEXT_VARIANT (tmp) = NULL;
13637       TYPE_ALIAS_SET (tmp) = -1;
13638       if (TYPE_CACHED_VALUES_P (tmp))
13639 	{
13640 	  TYPE_CACHED_VALUES_P (tmp) = 0;
13641 	  TYPE_CACHED_VALUES (tmp) = NULL;
13642 	}
13643     }
13644   else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13645     {
13646       /* Allow the no-warning bit to be set.  Perhaps we shouldn't allow
13647 	 that and change builtins.cc etc. instead - see PR89543.  */
13648       size_t sz = tree_size (expr);
13649       buf = XALLOCAVAR (union tree_node, sz);
13650       memcpy ((char *) buf, expr, sz);
13651       buf->base.nowarning_flag = 0;
13652       expr = (tree) buf;
13653     }
13654   md5_process_bytes (expr, tree_size (expr), ctx);
13655   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13656     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13657   if (TREE_CODE_CLASS (code) != tcc_type
13658       && TREE_CODE_CLASS (code) != tcc_declaration
13659       && code != TREE_LIST
13660       && code != SSA_NAME
13661       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13662     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13663   switch (TREE_CODE_CLASS (code))
13664     {
13665     case tcc_constant:
13666       switch (code)
13667 	{
13668 	case STRING_CST:
13669 	  md5_process_bytes (TREE_STRING_POINTER (expr),
13670 			     TREE_STRING_LENGTH (expr), ctx);
13671 	  break;
13672 	case COMPLEX_CST:
13673 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13674 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13675 	  break;
13676 	case VECTOR_CST:
13677 	  len = vector_cst_encoded_nelts (expr);
13678 	  for (i = 0; i < len; ++i)
13679 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13680 	  break;
13681 	default:
13682 	  break;
13683 	}
13684       break;
13685     case tcc_exceptional:
13686       switch (code)
13687 	{
13688 	case TREE_LIST:
13689 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13690 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13691 	  expr = TREE_CHAIN (expr);
13692 	  goto recursive_label;
13693 	  break;
13694 	case TREE_VEC:
13695 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13696 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13697 	  break;
13698 	default:
13699 	  break;
13700 	}
13701       break;
13702     case tcc_expression:
13703     case tcc_reference:
13704     case tcc_comparison:
13705     case tcc_unary:
13706     case tcc_binary:
13707     case tcc_statement:
13708     case tcc_vl_exp:
13709       len = TREE_OPERAND_LENGTH (expr);
13710       for (i = 0; i < len; ++i)
13711 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13712       break;
13713     case tcc_declaration:
13714       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13715       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13716       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13717 	{
13718 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13719 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13720 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13721 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13722 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13723 	}
13724 
13725       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13726 	{
13727 	  if (TREE_CODE (expr) == FUNCTION_DECL)
13728 	    {
13729 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13730 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13731 	    }
13732 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13733 	}
13734       break;
13735     case tcc_type:
13736       if (TREE_CODE (expr) == ENUMERAL_TYPE)
13737         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13738       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13739       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13740       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13741       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13742       if (INTEGRAL_TYPE_P (expr)
13743           || SCALAR_FLOAT_TYPE_P (expr))
13744 	{
13745 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13746 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13747 	}
13748       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13749       if (TREE_CODE (expr) == RECORD_TYPE
13750 	  || TREE_CODE (expr) == UNION_TYPE
13751 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
13752 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13753       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13754       break;
13755     default:
13756       break;
13757     }
13758 }
13759 
13760 /* Helper function for outputting the checksum of a tree T.  When
13761    debugging with gdb, you can "define mynext" to be "next" followed
13762    by "call debug_fold_checksum (op0)", then just trace down till the
13763    outputs differ.  */
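
/* A sketch of that gdb recipe (illustrative; substitute whatever operand
   is visible in your frame for op0):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext

   Stepping with mynext prints a checksum after every statement, so the
   first step whose output differs pinpoints the code that modified the
   tree.  */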
13764 
13765 DEBUG_FUNCTION void
13766 debug_fold_checksum (const_tree t)
13767 {
13768   int i;
13769   unsigned char checksum[16];
13770   struct md5_ctx ctx;
13771   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13772 
13773   md5_init_ctx (&ctx);
13774   fold_checksum_tree (t, &ctx, &ht);
13775   md5_finish_ctx (&ctx, checksum);
13776   ht.empty ();
13777 
13778   for (i = 0; i < 16; i++)
13779     fprintf (stderr, "%d ", checksum[i]);
13780 
13781   fprintf (stderr, "\n");
13782 }
13783 
13784 #endif
13785 
13786 /* Fold a unary tree expression with code CODE of type TYPE with an
13787    operand OP0.  LOC is the location of the resulting expression.
13788    Return a folded expression if successful.  Otherwise, return a tree
13789    expression with code CODE of type TYPE with an operand OP0.  */
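
/* A typical call (illustrative): fold_build1_loc (loc, NEGATE_EXPR, type, x)
   yields e.g. the INTEGER_CST -5 when x is the constant 5, and otherwise
   builds a fresh NEGATE_EXPR <x> at LOC.  */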
13790 
13791 tree
13792 fold_build1_loc (location_t loc,
13793 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13794 {
13795   tree tem;
13796 #ifdef ENABLE_FOLD_CHECKING
13797   unsigned char checksum_before[16], checksum_after[16];
13798   struct md5_ctx ctx;
13799   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13800 
13801   md5_init_ctx (&ctx);
13802   fold_checksum_tree (op0, &ctx, &ht);
13803   md5_finish_ctx (&ctx, checksum_before);
13804   ht.empty ();
13805 #endif
13806 
13807   tem = fold_unary_loc (loc, code, type, op0);
13808   if (!tem)
13809     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13810 
13811 #ifdef ENABLE_FOLD_CHECKING
13812   md5_init_ctx (&ctx);
13813   fold_checksum_tree (op0, &ctx, &ht);
13814   md5_finish_ctx (&ctx, checksum_after);
13815 
13816   if (memcmp (checksum_before, checksum_after, 16))
13817     fold_check_failed (op0, tem);
13818 #endif
13819   return tem;
13820 }
13821 
13822 /* Fold a binary tree expression with code CODE of type TYPE with
13823    operands OP0 and OP1.  LOC is the location of the resulting
13824    expression.  Return a folded expression if successful.  Otherwise,
13825    return a tree expression with code CODE of type TYPE with operands
13826    OP0 and OP1.  */
13827 
13828 tree
13829 fold_build2_loc (location_t loc,
13830 		 enum tree_code code, tree type, tree op0, tree op1
13831 		 MEM_STAT_DECL)
13832 {
13833   tree tem;
13834 #ifdef ENABLE_FOLD_CHECKING
13835   unsigned char checksum_before_op0[16],
13836                 checksum_before_op1[16],
13837 		checksum_after_op0[16],
13838 		checksum_after_op1[16];
13839   struct md5_ctx ctx;
13840   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13841 
13842   md5_init_ctx (&ctx);
13843   fold_checksum_tree (op0, &ctx, &ht);
13844   md5_finish_ctx (&ctx, checksum_before_op0);
13845   ht.empty ();
13846 
13847   md5_init_ctx (&ctx);
13848   fold_checksum_tree (op1, &ctx, &ht);
13849   md5_finish_ctx (&ctx, checksum_before_op1);
13850   ht.empty ();
13851 #endif
13852 
13853   tem = fold_binary_loc (loc, code, type, op0, op1);
13854   if (!tem)
13855     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13856 
13857 #ifdef ENABLE_FOLD_CHECKING
13858   md5_init_ctx (&ctx);
13859   fold_checksum_tree (op0, &ctx, &ht);
13860   md5_finish_ctx (&ctx, checksum_after_op0);
13861   ht.empty ();
13862 
13863   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13864     fold_check_failed (op0, tem);
13865 
13866   md5_init_ctx (&ctx);
13867   fold_checksum_tree (op1, &ctx, &ht);
13868   md5_finish_ctx (&ctx, checksum_after_op1);
13869 
13870   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13871     fold_check_failed (op1, tem);
13872 #endif
13873   return tem;
13874 }
13875 
13876 /* Fold a ternary tree expression with code CODE of type TYPE with
13877    operands OP0, OP1, and OP2.  Return a folded expression if
13878    successful.  Otherwise, return a tree expression with code CODE of
13879    type TYPE with operands OP0, OP1, and OP2.  */
13880 
13881 tree
13882 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13883 		 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13884 {
13885   tree tem;
13886 #ifdef ENABLE_FOLD_CHECKING
13887   unsigned char checksum_before_op0[16],
13888                 checksum_before_op1[16],
13889                 checksum_before_op2[16],
13890 		checksum_after_op0[16],
13891 		checksum_after_op1[16],
13892 		checksum_after_op2[16];
13893   struct md5_ctx ctx;
13894   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13895 
13896   md5_init_ctx (&ctx);
13897   fold_checksum_tree (op0, &ctx, &ht);
13898   md5_finish_ctx (&ctx, checksum_before_op0);
13899   ht.empty ();
13900 
13901   md5_init_ctx (&ctx);
13902   fold_checksum_tree (op1, &ctx, &ht);
13903   md5_finish_ctx (&ctx, checksum_before_op1);
13904   ht.empty ();
13905 
13906   md5_init_ctx (&ctx);
13907   fold_checksum_tree (op2, &ctx, &ht);
13908   md5_finish_ctx (&ctx, checksum_before_op2);
13909   ht.empty ();
13910 #endif
13911 
13912   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13913   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13914   if (!tem)
13915     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13916 
13917 #ifdef ENABLE_FOLD_CHECKING
13918   md5_init_ctx (&ctx);
13919   fold_checksum_tree (op0, &ctx, &ht);
13920   md5_finish_ctx (&ctx, checksum_after_op0);
13921   ht.empty ();
13922 
13923   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13924     fold_check_failed (op0, tem);
13925 
13926   md5_init_ctx (&ctx);
13927   fold_checksum_tree (op1, &ctx, &ht);
13928   md5_finish_ctx (&ctx, checksum_after_op1);
13929   ht.empty ();
13930 
13931   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13932     fold_check_failed (op1, tem);
13933 
13934   md5_init_ctx (&ctx);
13935   fold_checksum_tree (op2, &ctx, &ht);
13936   md5_finish_ctx (&ctx, checksum_after_op2);
13937 
13938   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13939     fold_check_failed (op2, tem);
13940 #endif
13941   return tem;
13942 }
13943 
13944 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13945    arguments in ARGARRAY, and a null static chain.
13946    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
13947    of type TYPE from the given operands as constructed by build_call_array.  */
13948 
13949 tree
13950 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13951 			   int nargs, tree *argarray)
13952 {
13953   tree tem;
13954 #ifdef ENABLE_FOLD_CHECKING
13955   unsigned char checksum_before_fn[16],
13956                 checksum_before_arglist[16],
13957 		checksum_after_fn[16],
13958 		checksum_after_arglist[16];
13959   struct md5_ctx ctx;
13960   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13961   int i;
13962 
13963   md5_init_ctx (&ctx);
13964   fold_checksum_tree (fn, &ctx, &ht);
13965   md5_finish_ctx (&ctx, checksum_before_fn);
13966   ht.empty ();
13967 
13968   md5_init_ctx (&ctx);
13969   for (i = 0; i < nargs; i++)
13970     fold_checksum_tree (argarray[i], &ctx, &ht);
13971   md5_finish_ctx (&ctx, checksum_before_arglist);
13972   ht.empty ();
13973 #endif
13974 
13975   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13976   if (!tem)
13977     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13978 
13979 #ifdef ENABLE_FOLD_CHECKING
13980   md5_init_ctx (&ctx);
13981   fold_checksum_tree (fn, &ctx, &ht);
13982   md5_finish_ctx (&ctx, checksum_after_fn);
13983   ht.empty ();
13984 
13985   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13986     fold_check_failed (fn, tem);
13987 
13988   md5_init_ctx (&ctx);
13989   for (i = 0; i < nargs; i++)
13990     fold_checksum_tree (argarray[i], &ctx, &ht);
13991   md5_finish_ctx (&ctx, checksum_after_arglist);
13992 
13993   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13994     fold_check_failed (NULL_TREE, tem);
13995 #endif
13996   return tem;
13997 }
13998 
13999 /* Perform constant folding and related simplification of initializer
14000    expression EXPR.  These behave identically to "fold_buildN" but ignore
14001    potential run-time traps and exceptions that fold must preserve.  */
14002 
14003 #define START_FOLD_INIT \
14004   int saved_signaling_nans = flag_signaling_nans;\
14005   int saved_trapping_math = flag_trapping_math;\
14006   int saved_rounding_math = flag_rounding_math;\
14007   int saved_trapv = flag_trapv;\
14008   int saved_folding_initializer = folding_initializer;\
14009   flag_signaling_nans = 0;\
14010   flag_trapping_math = 0;\
14011   flag_rounding_math = 0;\
14012   flag_trapv = 0;\
14013   folding_initializer = 1;
14014 
14015 #define END_FOLD_INIT \
14016   flag_signaling_nans = saved_signaling_nans;\
14017   flag_trapping_math = saved_trapping_math;\
14018   flag_rounding_math = saved_rounding_math;\
14019   flag_trapv = saved_trapv;\
14020   folding_initializer = saved_folding_initializer;
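
/* For example (illustrative): under -frounding-math, fold must leave
   1.0 / 3.0 unfolded because the run-time rounding mode is unknown, but
   a static initializer is evaluated in the default mode, so fold_init
   clears flag_rounding_math and folds the division to a constant.  */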
14021 
14022 tree
14023 fold_init (tree expr)
14024 {
14025   tree result;
14026   START_FOLD_INIT;
14027 
14028   result = fold (expr);
14029 
14030   END_FOLD_INIT;
14031   return result;
14032 }
14033 
14034 tree
14035 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14036 			     tree type, tree op)
14037 {
14038   tree result;
14039   START_FOLD_INIT;
14040 
14041   result = fold_build1_loc (loc, code, type, op);
14042 
14043   END_FOLD_INIT;
14044   return result;
14045 }
14046 
14047 tree
14048 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14049 			     tree type, tree op0, tree op1)
14050 {
14051   tree result;
14052   START_FOLD_INIT;
14053 
14054   result = fold_build2_loc (loc, code, type, op0, op1);
14055 
14056   END_FOLD_INIT;
14057   return result;
14058 }
14059 
14060 tree
14061 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14062 				       int nargs, tree *argarray)
14063 {
14064   tree result;
14065   START_FOLD_INIT;
14066 
14067   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14068 
14069   END_FOLD_INIT;
14070   return result;
14071 }
14072 
14073 tree
14074 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14075 			     tree lhs, tree rhs)
14076 {
14077   tree result;
14078   START_FOLD_INIT;
14079 
14080   result = fold_binary_loc (loc, code, type, lhs, rhs);
14081 
14082   END_FOLD_INIT;
14083   return result;
14084 }
14085 
14086 #undef START_FOLD_INIT
14087 #undef END_FOLD_INIT
14088 
14089 /* Determine if first argument is a multiple of second argument.  Return 0 if
14090    it is not, or if we cannot easily determine it to be.
14091 
14092    An example of the sort of thing we care about (at this point; this routine
14093    could surely be made more general, and expanded to do what the *_DIV_EXPR's
14094    fold cases do now) is discovering that
14095 
14096      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14097 
14098    is a multiple of
14099 
14100      SAVE_EXPR (J * 8)
14101 
14102    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14103 
14104    This code also handles discovering that
14105 
14106      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14107 
14108    is a multiple of 8 so we don't have to worry about dealing with a
14109    possible remainder.
14110 
14111    Note that we *look* inside a SAVE_EXPR only to determine how it was
14112    calculated; it is not safe for fold to do much of anything else with the
14113    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14114    at run time.  For example, the latter example above *cannot* be implemented
14115    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14116    evaluation time of the original SAVE_EXPR is not necessarily the same at
14117    the time the new expression is evaluated.  The only optimization of this
14118    sort that would be valid is changing
14119 
14120      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14121 
14122    divided by 8 to
14123 
14124      SAVE_EXPR (I) * SAVE_EXPR (J)
14125 
14126    (where the same SAVE_EXPR (J) is used in the original and the
14127    transformed version).
14128 
14129    NOWRAP specifies whether all outer operations in TYPE should
14130    be considered not wrapping.  Any type conversion within TOP acts
14131    as a barrier and we will fall back to NOWRAP being false.
14132    NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14133    as not wrapping even though they are generally using unsigned arithmetic.  */
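
/* For instance (illustrative): in a 32-bit unsigned TYPE, X * 12 cannot
   be proven a multiple of 12 when NOWRAP is false, because the product
   may wrap modulo 2^32 and 12 does not divide 2^32; with NOWRAP true,
   as for TYPE_SIZE arithmetic, the MULT_EXPR case below recurses and
   succeeds.  */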
14134 
14135 int
14136 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14137 {
14138   gimple *stmt;
14139   tree op1, op2;
14140 
14141   if (operand_equal_p (top, bottom, 0))
14142     return 1;
14143 
14144   if (TREE_CODE (type) != INTEGER_TYPE)
14145     return 0;
14146 
14147   switch (TREE_CODE (top))
14148     {
14149     case BIT_AND_EXPR:
14150       /* Bitwise and provides a power of two multiple.  If the mask is
14151 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
14152       if (!integer_pow2p (bottom))
14153 	return 0;
14154       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14155 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14156 
14157     case MULT_EXPR:
14158       /* If the multiplication can wrap, we cannot recurse further unless
14159 	 the bottom is a power of two, where wrapping does not
14160 	 matter.  */
14161       if (!nowrap
14162 	  && !TYPE_OVERFLOW_UNDEFINED (type)
14163 	  && !integer_pow2p (bottom))
14164 	return 0;
14165       if (TREE_CODE (bottom) == INTEGER_CST)
14166 	{
14167 	  op1 = TREE_OPERAND (top, 0);
14168 	  op2 = TREE_OPERAND (top, 1);
14169 	  if (TREE_CODE (op1) == INTEGER_CST)
14170 	    std::swap (op1, op2);
14171 	  if (TREE_CODE (op2) == INTEGER_CST)
14172 	    {
14173 	      if (multiple_of_p (type, op2, bottom, nowrap))
14174 		return 1;
14175 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
14176 	      if (multiple_of_p (type, bottom, op2, nowrap))
14177 		{
14178 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14179 						 wi::to_widest (op2));
14180 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14181 		    {
14182 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14183 		      return multiple_of_p (type, op1, op2, nowrap);
14184 		    }
14185 		}
14186 	      return multiple_of_p (type, op1, bottom, nowrap);
14187 	    }
14188 	}
14189       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14190 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14191 
14192     case LSHIFT_EXPR:
14193       /* Handle X << CST as X * (1 << CST) and only process the constant.  */
14194       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14195 	{
14196 	  op1 = TREE_OPERAND (top, 1);
14197 	  if (wi::to_widest (op1) < TYPE_PRECISION (type))
14198 	    {
14199 	      wide_int mul_op
14200 		= wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14201 	      return multiple_of_p (type,
14202 				    wide_int_to_tree (type, mul_op), bottom,
14203 				    nowrap);
14204 	    }
14205 	}
14206       return 0;
14207 
14208     case MINUS_EXPR:
14209     case PLUS_EXPR:
14210       /* If the addition or subtraction can wrap, we cannot recurse further
14211 	 unless bottom is a power of two, where wrapping does not
14212 	 matter.  */
14213       if (!nowrap
14214 	  && !TYPE_OVERFLOW_UNDEFINED (type)
14215 	  && !integer_pow2p (bottom))
14216 	return 0;
14217 
14218       /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14219 	 unsigned type.  For example, (X / 3) * 3 + 0xfffffffd is a multiple
14220 	 of 3, but 0xfffffffd is not.  */
14221       op1 = TREE_OPERAND (top, 1);
14222       if (TREE_CODE (top) == PLUS_EXPR
14223 	  && nowrap
14224 	  && TYPE_UNSIGNED (type)
14225 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14226 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
14227 
14228       /* It is impossible to prove if op0 +- op1 is multiple of bottom
14229 	 precisely, so be conservative here checking if both op0 and op1
14230 	 are multiple of bottom.  Note we check the second operand first
14231 	 since it's usually simpler.  */
14232       return (multiple_of_p (type, op1, bottom, nowrap)
14233 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14234 
14235     CASE_CONVERT:
14236       /* Can't handle conversions from non-integral or wider integral type.  */
14237       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14238 	  || (TYPE_PRECISION (type)
14239 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14240 	return 0;
14241       /* NOWRAP only extends to operations in the outermost type so
14242 	 make sure to strip it off here.  */
14243       return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14244 			    TREE_OPERAND (top, 0), bottom, false);
14245 
14246     case SAVE_EXPR:
14247       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14248 
14249     case COND_EXPR:
14250       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14251 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14252 
14253     case INTEGER_CST:
14254       if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14255 	return 0;
14256       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14257 				SIGNED);
14258 
14259     case SSA_NAME:
14260       if (TREE_CODE (bottom) == INTEGER_CST
14261 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14262 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
14263 	{
14264 	  enum tree_code code = gimple_assign_rhs_code (stmt);
14265 
14266 	  /* Check for special cases to see if top is defined as multiple
14267 	     of bottom:
14268 
14269 	       top = (X & ~(bottom - 1)) ; bottom is power of 2
14270 
14271 	     or
14272 
14273 	       Y = X % bottom
14274 	       top = X - Y.  */
14275 	  if (code == BIT_AND_EXPR
14276 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14277 	      && TREE_CODE (op2) == INTEGER_CST
14278 	      && integer_pow2p (bottom)
14279 	      && wi::multiple_of_p (wi::to_widest (op2),
14280 				    wi::to_widest (bottom), SIGNED))
14281 	    return 1;
14282 
14283 	  op1 = gimple_assign_rhs1 (stmt);
14284 	  if (code == MINUS_EXPR
14285 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14286 	      && TREE_CODE (op2) == SSA_NAME
14287 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14288 	      && gimple_code (stmt) == GIMPLE_ASSIGN
14289 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14290 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14291 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14292 	    return 1;
14293 	}
14294 
14295       /* fall through */
14296 
14297     default:
14298       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14299 	return multiple_p (wi::to_poly_widest (top),
14300 			   wi::to_poly_widest (bottom));
14301 
14302       return 0;
14303     }
14304 }
14305 
14306 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14307    This function returns true for integer expressions, and returns
14308    false if uncertain.  */
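
/* E.g. (illustrative): given int i, the tree ABS_EXPR <FLOAT_EXPR <i>> is
   finite by this predicate: the FLOAT_EXPR case below treats an
   integer-to-float conversion as finite, and ABS_EXPR merely forwards
   the query to its operand.  */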
14309 
14310 bool
14311 tree_expr_finite_p (const_tree x)
14312 {
14313   machine_mode mode = element_mode (x);
14314   if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14315     return true;
14316   switch (TREE_CODE (x))
14317     {
14318     case REAL_CST:
14319       return real_isfinite (TREE_REAL_CST_PTR (x));
14320     case COMPLEX_CST:
14321       return tree_expr_finite_p (TREE_REALPART (x))
14322 	     && tree_expr_finite_p (TREE_IMAGPART (x));
14323     case FLOAT_EXPR:
14324       return true;
14325     case ABS_EXPR:
14326     case CONVERT_EXPR:
14327     case NON_LVALUE_EXPR:
14328     case NEGATE_EXPR:
14329     case SAVE_EXPR:
14330       return tree_expr_finite_p (TREE_OPERAND (x, 0));
14331     case MIN_EXPR:
14332     case MAX_EXPR:
14333       return tree_expr_finite_p (TREE_OPERAND (x, 0))
14334 	     && tree_expr_finite_p (TREE_OPERAND (x, 1));
14335     case COND_EXPR:
14336       return tree_expr_finite_p (TREE_OPERAND (x, 1))
14337 	     && tree_expr_finite_p (TREE_OPERAND (x, 2));
14338     case CALL_EXPR:
14339       switch (get_call_combined_fn (x))
14340 	{
14341 	CASE_CFN_FABS:
14342 	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14343 	CASE_CFN_FMAX:
14344 	CASE_CFN_FMIN:
14345 	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14346 		 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14347 	default:
14348 	  return false;
14349 	}
14350 
14351     default:
14352       return false;
14353     }
14354 }
14355 
14356 /* Return true if expression X evaluates to an infinity.
14357    This function returns false for integer expressions.  */
14358 
14359 bool
14360 tree_expr_infinite_p (const_tree x)
14361 {
14362   if (!HONOR_INFINITIES (x))
14363     return false;
14364   switch (TREE_CODE (x))
14365     {
14366     case REAL_CST:
14367       return real_isinf (TREE_REAL_CST_PTR (x));
14368     case ABS_EXPR:
14369     case NEGATE_EXPR:
14370     case NON_LVALUE_EXPR:
14371     case SAVE_EXPR:
14372       return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14373     case COND_EXPR:
14374       return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14375 	     && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14376     default:
14377       return false;
14378     }
14379 }
14380 
14381 /* Return true if expression X could evaluate to an infinity.
14382    This function returns false for integer expressions, and returns
14383    true if uncertain.  */
14384 
14385 bool
14386 tree_expr_maybe_infinite_p (const_tree x)
14387 {
14388   if (!HONOR_INFINITIES (x))
14389     return false;
14390   switch (TREE_CODE (x))
14391     {
14392     case REAL_CST:
14393       return real_isinf (TREE_REAL_CST_PTR (x));
14394     case FLOAT_EXPR:
14395       return false;
14396     case ABS_EXPR:
14397     case NEGATE_EXPR:
14398       return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14399     case COND_EXPR:
14400       return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14401 	     || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14402     default:
14403       return true;
14404     }
14405 }
14406 
14407 /* Return true if expression X evaluates to a signaling NaN.
14408    This function returns false for integer expressions.  */
14409 
14410 bool
14411 tree_expr_signaling_nan_p (const_tree x)
14412 {
14413   if (!HONOR_SNANS (x))
14414     return false;
14415   switch (TREE_CODE (x))
14416     {
14417     case REAL_CST:
14418       return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14419     case NON_LVALUE_EXPR:
14420     case SAVE_EXPR:
14421       return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14422     case COND_EXPR:
14423       return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14424 	     && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14425     default:
14426       return false;
14427     }
14428 }
14429 
14430 /* Return true if expression X could evaluate to a signaling NaN.
14431    This function returns false for integer expressions, and returns
14432    true if uncertain.  */
14433 
14434 bool
14435 tree_expr_maybe_signaling_nan_p (const_tree x)
14436 {
14437   if (!HONOR_SNANS (x))
14438     return false;
14439   switch (TREE_CODE (x))
14440     {
14441     case REAL_CST:
14442       return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14443     case FLOAT_EXPR:
14444       return false;
14445     case ABS_EXPR:
14446     case CONVERT_EXPR:
14447     case NEGATE_EXPR:
14448     case NON_LVALUE_EXPR:
14449     case SAVE_EXPR:
14450       return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14451     case MIN_EXPR:
14452     case MAX_EXPR:
14453       return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14454 	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14455     case COND_EXPR:
14456       return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14457 	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14458     case CALL_EXPR:
14459       switch (get_call_combined_fn (x))
14460 	{
14461 	CASE_CFN_FABS:
14462 	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14463 	CASE_CFN_FMAX:
14464 	CASE_CFN_FMIN:
14465 	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14466 		 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14467 	default:
14468 	  return true;
14469 	}
14470     default:
14471       return true;
14472     }
14473 }
14474 
14475 /* Return true if expression X evaluates to a NaN.
14476    This function returns false for integer expressions.  */
14477 
14478 bool
14479 tree_expr_nan_p (const_tree x)
14480 {
14481   if (!HONOR_NANS (x))
14482     return false;
14483   switch (TREE_CODE (x))
14484     {
14485     case REAL_CST:
14486       return real_isnan (TREE_REAL_CST_PTR (x));
14487     case NON_LVALUE_EXPR:
14488     case SAVE_EXPR:
14489       return tree_expr_nan_p (TREE_OPERAND (x, 0));
14490     case COND_EXPR:
14491       return tree_expr_nan_p (TREE_OPERAND (x, 1))
14492 	     && tree_expr_nan_p (TREE_OPERAND (x, 2));
14493     default:
14494       return false;
14495     }
14496 }
14497 
14498 /* Return true if expression X could evaluate to a NaN.
14499    This function returns false for integer expressions, and returns
14500    true if uncertain.  */
14501 
14502 bool
14503 tree_expr_maybe_nan_p (const_tree x)
14504 {
14505   if (!HONOR_NANS (x))
14506     return false;
14507   switch (TREE_CODE (x))
14508     {
14509     case REAL_CST:
14510       return real_isnan (TREE_REAL_CST_PTR (x));
14511     case FLOAT_EXPR:
14512       return false;
14513     case PLUS_EXPR:
14514     case MINUS_EXPR:
14515     case MULT_EXPR:
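      /* Even when neither operand can itself be a NaN, Inf - Inf,
	 Inf + -Inf and 0 * Inf all produce NaNs, so both operands must
	 be known finite (explanatory note).  */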
14516       return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14517 	     || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14518     case ABS_EXPR:
14519     case CONVERT_EXPR:
14520     case NEGATE_EXPR:
14521     case NON_LVALUE_EXPR:
14522     case SAVE_EXPR:
14523       return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14524     case MIN_EXPR:
14525     case MAX_EXPR:
14526       return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14527 	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14528     case COND_EXPR:
14529       return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14530 	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14531     case CALL_EXPR:
14532       switch (get_call_combined_fn (x))
14533 	{
14534 	CASE_CFN_FABS:
14535 	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14536 	CASE_CFN_FMAX:
14537 	CASE_CFN_FMIN:
14538 	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14539 		 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14540 	default:
14541 	  return true;
14542 	}
14543     default:
14544       return true;
14545     }
14546 }
14547 
14548 /* Return true if expression X could evaluate to -0.0.
14549    This function returns true if uncertain.  */
14550 
14551 bool
14552 tree_expr_maybe_real_minus_zero_p (const_tree x)
14553 {
14554   if (!HONOR_SIGNED_ZEROS (x))
14555     return false;
14556   switch (TREE_CODE (x))
14557     {
14558     case REAL_CST:
14559       return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14560     case INTEGER_CST:
14561     case FLOAT_EXPR:
14562     case ABS_EXPR:
14563       return false;
14564     case NON_LVALUE_EXPR:
14565     case SAVE_EXPR:
14566       return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14567     case COND_EXPR:
14568       return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14569 	     || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14570     case CALL_EXPR:
14571       switch (get_call_combined_fn (x))
14572 	{
14573 	CASE_CFN_FABS:
14574 	  return false;
14575 	default:
14576 	  break;
14577 	}
14578     default:
14579       break;
14580     }
14581   /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14582      but currently those predicates require tree and not const_tree.  */
14583   return true;
14584 }
14585 
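/* The function-like macro below poisons direct recursive calls: any use
   of tree_expr_nonnegative_warnv_p (X, Y) in the functions that follow
   expands to a hard error, forcing recursion to go through RECURSE so
   the DEPTH counter advances.  RECURSE parenthesizes the function name,
   which suppresses function-like macro expansion.  For example
   (illustrative), writing

     return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);

   inside one of these functions fails to compile with
   "Use RECURSE for recursive calls".  */
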
14586 #define tree_expr_nonnegative_warnv_p(X, Y) \
14587   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14588 
14589 #define RECURSE(X) \
14590   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14591 
14592 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
14593 
14594 static bool
14595 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14596 {
14597   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14598       && truth_value_p (code))
14599     /* Truth values evaluate to 0 or 1, both of which are nonnegative,
14600        unless we have a signed:1 type (where the values are 0 and -1).  */
14601     return true;
14602   return false;
14603 }
14604 
14605 /* Return true if (CODE OP0) is known to be non-negative.  If the return
14606    value is based on the assumption that signed overflow is undefined,
14607    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14608    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14609 
14610 bool
14611 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14612 				bool *strict_overflow_p, int depth)
14613 {
14614   if (TYPE_UNSIGNED (type))
14615     return true;
14616 
14617   switch (code)
14618     {
14619     case ABS_EXPR:
14620       /* We can't return 1 if flag_wrapv is set because
14621 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
14622       if (!ANY_INTEGRAL_TYPE_P (type))
14623 	return true;
14624       if (TYPE_OVERFLOW_UNDEFINED (type))
14625 	{
14626 	  *strict_overflow_p = true;
14627 	  return true;
14628 	}
14629       break;
14630 
14631     case NON_LVALUE_EXPR:
14632     case FLOAT_EXPR:
14633     case FIX_TRUNC_EXPR:
14634       return RECURSE (op0);
14635 
14636     CASE_CONVERT:
14637       {
14638 	tree inner_type = TREE_TYPE (op0);
14639 	tree outer_type = type;
14640 
14641 	if (TREE_CODE (outer_type) == REAL_TYPE)
14642 	  {
14643 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14644 	      return RECURSE (op0);
14645 	    if (INTEGRAL_TYPE_P (inner_type))
14646 	      {
14647 		if (TYPE_UNSIGNED (inner_type))
14648 		  return true;
14649 		return RECURSE (op0);
14650 	      }
14651 	  }
14652 	else if (INTEGRAL_TYPE_P (outer_type))
14653 	  {
14654 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14655 	      return RECURSE (op0);
14656 	    if (INTEGRAL_TYPE_P (inner_type))
14657 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14658 		      && TYPE_UNSIGNED (inner_type);
14659 	  }
14660       }
14661       break;
14662 
14663     default:
14664       return tree_simple_nonnegative_warnv_p (code, type);
14665     }
14666 
14667   /* We don't know sign of `t', so be conservative and return false.  */
14668   return false;
14669 }
14670 
14671 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14672    value is based on the assumption that signed overflow is undefined,
14673    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14674    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14675 
14676 bool
14677 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14678 				 tree op1, bool *strict_overflow_p,
14679 				 int depth)
14680 {
14681   if (TYPE_UNSIGNED (type))
14682     return true;
14683 
14684   switch (code)
14685     {
14686     case POINTER_PLUS_EXPR:
14687     case PLUS_EXPR:
14688       if (FLOAT_TYPE_P (type))
14689 	return RECURSE (op0) && RECURSE (op1);
14690 
14691       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14692 	 both unsigned and at least 2 bits shorter than the result.  */
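      /* E.g. (illustrative): two 8-bit unsigned values zero-extended into
	 a 32-bit int sum to at most 255 + 255 = 510, which needs only 9
	 bits, so the 32-bit sign bit can never be set.  */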
14693       if (TREE_CODE (type) == INTEGER_TYPE
14694 	  && TREE_CODE (op0) == NOP_EXPR
14695 	  && TREE_CODE (op1) == NOP_EXPR)
14696 	{
14697 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14698 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14699 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14700 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14701 	    {
14702 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
14703 				       TYPE_PRECISION (inner2)) + 1;
14704 	      return prec < TYPE_PRECISION (type);
14705 	    }
14706 	}
14707       break;
14708 
14709     case MULT_EXPR:
14710       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14711 	{
14712 	  /* x * x is always non-negative for floating point x
14713 	     or without overflow.  */
14714 	  if (operand_equal_p (op0, op1, 0)
14715 	      || (RECURSE (op0) && RECURSE (op1)))
14716 	    {
14717 	      if (ANY_INTEGRAL_TYPE_P (type)
14718 		  && TYPE_OVERFLOW_UNDEFINED (type))
14719 		*strict_overflow_p = true;
14720 	      return true;
14721 	    }
14722 	}
14723 
14724       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14725 	 both unsigned and their combined precision is less than the result's.  */
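      /* E.g. (illustrative): two 8-bit unsigned values multiply to at
	 most 255 * 255 = 65025, which fits in 16 bits, so in a 32-bit
	 type the product cannot reach the sign bit.  */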
14726       if (TREE_CODE (type) == INTEGER_TYPE
14727 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14728 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14729 	{
14730 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14731 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
14732 	    : TREE_TYPE (op0);
14733 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14734 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
14735 	    : TREE_TYPE (op1);
14736 
14737 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
14738 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
14739 
14740 	  if (TREE_CODE (op0) == INTEGER_CST)
14741 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14742 
14743 	  if (TREE_CODE (op1) == INTEGER_CST)
14744 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14745 
14746 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14747 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14748 	    {
14749 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14750 		? tree_int_cst_min_precision (op0, UNSIGNED)
14751 		: TYPE_PRECISION (inner0);
14752 
14753 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14754 		? tree_int_cst_min_precision (op1, UNSIGNED)
14755 		: TYPE_PRECISION (inner1);
14756 
14757 	      return precision0 + precision1 < TYPE_PRECISION (type);
14758 	    }
14759 	}
14760       return false;
14761 
14762     case BIT_AND_EXPR:
14763       return RECURSE (op0) || RECURSE (op1);
14764 
14765     case MAX_EXPR:
14766       /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14767 	 things.  */
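      /* Illustrative: if either operand may be a NaN, MAX_EXPR may well
	 return that operand itself, so a non-negative other operand alone
	 proves nothing; both must be known non-negative.  */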
14768       if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14769 	return RECURSE (op0) && RECURSE (op1);
14770       return RECURSE (op0) || RECURSE (op1);
14771 
14772     case BIT_IOR_EXPR:
14773     case BIT_XOR_EXPR:
14774     case MIN_EXPR:
14775     case RDIV_EXPR:
14776     case TRUNC_DIV_EXPR:
14777     case CEIL_DIV_EXPR:
14778     case FLOOR_DIV_EXPR:
14779     case ROUND_DIV_EXPR:
14780       return RECURSE (op0) && RECURSE (op1);
14781 
14782     case TRUNC_MOD_EXPR:
14783       return RECURSE (op0);
14784 
14785     case FLOOR_MOD_EXPR:
14786       return RECURSE (op1);
14787 
14788     case CEIL_MOD_EXPR:
14789     case ROUND_MOD_EXPR:
14790     default:
14791       return tree_simple_nonnegative_warnv_p (code, type);
14792     }
14793 
14794   /* We don't know sign of `t', so be conservative and return false.  */
14795   return false;
14796 }
14797 
14798 /* Return true if T is known to be non-negative.  If the return
14799    value is based on the assumption that signed overflow is undefined,
14800    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14801    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14802 
14803 bool
14804 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14805 {
14806   if (TYPE_UNSIGNED (TREE_TYPE (t)))
14807     return true;
14808 
14809   switch (TREE_CODE (t))
14810     {
14811     case INTEGER_CST:
14812       return tree_int_cst_sgn (t) >= 0;
14813 
14814     case REAL_CST:
14815       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14816 
14817     case FIXED_CST:
14818       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14819 
14820     case COND_EXPR:
14821       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14822 
14823     case SSA_NAME:
14824       /* Limit the depth of recursion to avoid quadratic behavior.
14825 	 This is expected to catch almost all occurrences in practice.
14826 	 If this code misses important cases that unbounded recursion
14827 	 would not, passes that need this information could be revised
14828 	 to provide it through dataflow propagation.  */
14829       return (!name_registered_for_update_p (t)
14830 	      && depth < param_max_ssa_name_query_depth
14831 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14832 						  strict_overflow_p, depth));
14833 
14834     default:
14835       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14836     }
14837 }
14838 
14839 /* Return true if T is known to be non-negative.  If the return
14840    value is based on the assumption that signed overflow is undefined,
14841    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14842    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14843 
14844 bool
14845 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14846 			       bool *strict_overflow_p, int depth)
14847 {
14848   switch (fn)
14849     {
14850     CASE_CFN_ACOS:
14851     CASE_CFN_ACOSH:
14852     CASE_CFN_CABS:
14853     CASE_CFN_COSH:
14854     CASE_CFN_ERFC:
14855     CASE_CFN_EXP:
14856     CASE_CFN_EXP10:
14857     CASE_CFN_EXP2:
14858     CASE_CFN_FABS:
14859     CASE_CFN_FDIM:
14860     CASE_CFN_HYPOT:
14861     CASE_CFN_POW10:
14862     CASE_CFN_FFS:
14863     CASE_CFN_PARITY:
14864     CASE_CFN_POPCOUNT:
14865     CASE_CFN_CLRSB:
14866     case CFN_BUILT_IN_BSWAP16:
14867     case CFN_BUILT_IN_BSWAP32:
14868     case CFN_BUILT_IN_BSWAP64:
14869     case CFN_BUILT_IN_BSWAP128:
14870       /* Always true.  */
14871       return true;
14872 
14873     CASE_CFN_CLZ:
14874       if (fn != CFN_CLZ)
14875 	return true;
14876       else if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
14877 	{
14878 	  tree atype = TREE_TYPE (arg0);
14879 	  int val = 0;
14880 	  if (direct_internal_fn_supported_p (IFN_CLZ, atype,
14881 					      OPTIMIZE_FOR_BOTH)
14882 	      && CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (atype),
14883 					    val) == 2
14884 	      && val >= 0)
14885 	    return true;
14886 	}
14887       break;
14888 
14889     CASE_CFN_SQRT:
14890     CASE_CFN_SQRT_FN:
14891       /* sqrt(-0.0) is -0.0.  */
14892       if (!HONOR_SIGNED_ZEROS (type))
14893 	return true;
14894       return RECURSE (arg0);
14895 
14896     CASE_CFN_ASINH:
14897     CASE_CFN_ATAN:
14898     CASE_CFN_ATANH:
14899     CASE_CFN_CBRT:
14900     CASE_CFN_CEIL:
14901     CASE_CFN_CEIL_FN:
14902     CASE_CFN_ERF:
14903     CASE_CFN_EXPM1:
14904     CASE_CFN_FLOOR:
14905     CASE_CFN_FLOOR_FN:
14906     CASE_CFN_FMOD:
14907     CASE_CFN_FREXP:
14908     CASE_CFN_ICEIL:
14909     CASE_CFN_IFLOOR:
14910     CASE_CFN_IRINT:
14911     CASE_CFN_IROUND:
14912     CASE_CFN_LCEIL:
14913     CASE_CFN_LDEXP:
14914     CASE_CFN_LFLOOR:
14915     CASE_CFN_LLCEIL:
14916     CASE_CFN_LLFLOOR:
14917     CASE_CFN_LLRINT:
14918     CASE_CFN_LLROUND:
14919     CASE_CFN_LRINT:
14920     CASE_CFN_LROUND:
14921     CASE_CFN_MODF:
14922     CASE_CFN_NEARBYINT:
14923     CASE_CFN_NEARBYINT_FN:
14924     CASE_CFN_RINT:
14925     CASE_CFN_RINT_FN:
14926     CASE_CFN_ROUND:
14927     CASE_CFN_ROUND_FN:
14928     CASE_CFN_ROUNDEVEN:
14929     CASE_CFN_ROUNDEVEN_FN:
14930     CASE_CFN_SCALB:
14931     CASE_CFN_SCALBLN:
14932     CASE_CFN_SCALBN:
14933     CASE_CFN_SIGNBIT:
14934     CASE_CFN_SIGNIFICAND:
14935     CASE_CFN_SINH:
14936     CASE_CFN_TANH:
14937     CASE_CFN_TRUNC:
14938     CASE_CFN_TRUNC_FN:
14939       /* True if the 1st argument is nonnegative.  */
14940       return RECURSE (arg0);
14941 
14942     CASE_CFN_FMAX:
14943     CASE_CFN_FMAX_FN:
14944       /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14945 	 things.  In the presence of sNaNs, we're only guaranteed to be
14946 	 non-negative if both operands are non-negative.  In the presence
14947 	 of qNaNs, we're non-negative if either operand is non-negative
14948 	 and can't be a qNaN, or if both operands are non-negative.  */
14949       if (tree_expr_maybe_signaling_nan_p (arg0)
14950 	  || tree_expr_maybe_signaling_nan_p (arg1))
14951 	return RECURSE (arg0) && RECURSE (arg1);
14952       return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14953 			       || RECURSE (arg1))
14954 			    : (RECURSE (arg1)
14955 			       && !tree_expr_maybe_nan_p (arg1));
14956 
14957     CASE_CFN_FMIN:
14958     CASE_CFN_FMIN_FN:
14959       /* True if the 1st AND 2nd arguments are nonnegative.  */
14960       return RECURSE (arg0) && RECURSE (arg1);
14961 
14962     CASE_CFN_COPYSIGN:
14963     CASE_CFN_COPYSIGN_FN:
14964       /* True if the 2nd argument is nonnegative.  */
14965       return RECURSE (arg1);
14966 
14967     CASE_CFN_POWI:
14968       /* True if the 1st argument is nonnegative or the second
14969 	 argument is an even integer.  */
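      /* E.g. (illustrative): __builtin_powi (x, 4) = (x*x)*(x*x) is
	 non-negative for every real x, since the even exponent cancels
	 the sign.  */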
14970       if (TREE_CODE (arg1) == INTEGER_CST
14971 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14972 	return true;
14973       return RECURSE (arg0);
14974 
14975     CASE_CFN_POW:
14976       /* True if the 1st argument is nonnegative or the second
14977 	 argument is an even integer valued real.  */
14978       if (TREE_CODE (arg1) == REAL_CST)
14979 	{
14980 	  REAL_VALUE_TYPE c;
14981 	  HOST_WIDE_INT n;
14982 
14983 	  c = TREE_REAL_CST (arg1);
14984 	  n = real_to_integer (&c);
14985 	  if ((n & 1) == 0)
14986 	    {
14987 	      REAL_VALUE_TYPE cint;
14988 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
14989 	      if (real_identical (&c, &cint))
14990 		return true;
14991 	    }
14992 	}
14993       return RECURSE (arg0);
14994 
14995     default:
14996       break;
14997     }
14998   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14999 }
15000 
15001 /* Return true if T is known to be non-negative.  If the return
15002    value is based on the assumption that signed overflow is undefined,
15003    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15004    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
15005 
15006 static bool
15007 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15008 {
15009   enum tree_code code = TREE_CODE (t);
15010   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15011     return true;
15012 
15013   switch (code)
15014     {
15015     case TARGET_EXPR:
15016       {
15017 	tree temp = TARGET_EXPR_SLOT (t);
15018 	t = TARGET_EXPR_INITIAL (t);
15019 
15020 	/* If the initializer is non-void, then it's a normal expression
15021 	   that will be assigned to the slot.  */
15022 	if (!VOID_TYPE_P (t))
15023 	  return RECURSE (t);
15024 
15025 	/* Otherwise, the initializer sets the slot in some way.  One common
15026 	   way is an assignment statement at the end of the initializer.  */
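	/* E.g. (illustrative): for TARGET_EXPR <D.1, BIND_EXPR <...,
	   D.1 = ABS_EXPR <i>>> the loop below peels the BIND_EXPR down to
	   the trailing assignment and tests ABS_EXPR <i> instead.  */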
15027 	while (1)
15028 	  {
15029 	    if (TREE_CODE (t) == BIND_EXPR)
15030 	      t = expr_last (BIND_EXPR_BODY (t));
15031 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15032 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15033 	      t = expr_last (TREE_OPERAND (t, 0));
15034 	    else if (TREE_CODE (t) == STATEMENT_LIST)
15035 	      t = expr_last (t);
15036 	    else
15037 	      break;
15038 	  }
15039 	if (TREE_CODE (t) == MODIFY_EXPR
15040 	    && TREE_OPERAND (t, 0) == temp)
15041 	  return RECURSE (TREE_OPERAND (t, 1));
15042 
15043 	return false;
15044       }
15045 
15046     case CALL_EXPR:
15047       {
15048 	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15049 	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15050 
15051 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15052 					      get_call_combined_fn (t),
15053 					      arg0,
15054 					      arg1,
15055 					      strict_overflow_p, depth);
15056       }
15057     case COMPOUND_EXPR:
15058     case MODIFY_EXPR:
15059       return RECURSE (TREE_OPERAND (t, 1));
15060 
15061     case BIND_EXPR:
15062       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15063 
15064     case SAVE_EXPR:
15065       return RECURSE (TREE_OPERAND (t, 0));
15066 
15067     default:
15068       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15069     }
15070 }
15071 
15072 #undef RECURSE
15073 #undef tree_expr_nonnegative_warnv_p
15074 
15075 /* Return true if T is known to be non-negative.  If the return
15076    value is based on the assumption that signed overflow is undefined,
15077    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15078    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
15079 
15080 bool
15081 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15082 {
15083   enum tree_code code;
15084   if (t == error_mark_node)
15085     return false;
15086 
15087   code = TREE_CODE (t);
15088   switch (TREE_CODE_CLASS (code))
15089     {
15090     case tcc_binary:
15091     case tcc_comparison:
15092       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15093 					      TREE_TYPE (t),
15094 					      TREE_OPERAND (t, 0),
15095 					      TREE_OPERAND (t, 1),
15096 					      strict_overflow_p, depth);
15097 
15098     case tcc_unary:
15099       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15100 					     TREE_TYPE (t),
15101 					     TREE_OPERAND (t, 0),
15102 					     strict_overflow_p, depth);
15103 
15104     case tcc_constant:
15105     case tcc_declaration:
15106     case tcc_reference:
15107       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15108 
15109     default:
15110       break;
15111     }
15112 
15113   switch (code)
15114     {
15115     case TRUTH_AND_EXPR:
15116     case TRUTH_OR_EXPR:
15117     case TRUTH_XOR_EXPR:
15118       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15119 					      TREE_TYPE (t),
15120 					      TREE_OPERAND (t, 0),
15121 					      TREE_OPERAND (t, 1),
15122 					      strict_overflow_p, depth);
15123     case TRUTH_NOT_EXPR:
15124       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15125 					     TREE_TYPE (t),
15126 					     TREE_OPERAND (t, 0),
15127 					     strict_overflow_p, depth);
15128 
15129     case COND_EXPR:
15130     case CONSTRUCTOR:
15131     case OBJ_TYPE_REF:
15132     case ASSERT_EXPR:
15133     case ADDR_EXPR:
15134     case WITH_SIZE_EXPR:
15135     case SSA_NAME:
15136       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15137 
15138     default:
15139       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15140     }
15141 }
15142 
15143 /* Return true if `t' is known to be non-negative.  Handle warnings
15144    about undefined signed overflow.  */
15145 
15146 bool
15147 tree_expr_nonnegative_p (tree t)
15148 {
15149   bool ret, strict_overflow_p;
15150 
15151   strict_overflow_p = false;
15152   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15153   if (strict_overflow_p)
15154     fold_overflow_warning (("assuming signed overflow does not occur when "
15155 			    "determining that expression is always "
15156 			    "non-negative"),
15157 			   WARN_STRICT_OVERFLOW_MISC);
15158   return ret;
15159 }
15160 
15161 
15162 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15163    For floating point we further ensure that T is not denormal.
15164    Similar logic is present in nonzero_address in rtlanal.h.
15165 
15166    If the return value is based on the assumption that signed overflow
15167    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15168    change *STRICT_OVERFLOW_P.  */
15169 
15170 bool
15171 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15172 				 bool *strict_overflow_p)
15173 {
15174   switch (code)
15175     {
15176     case ABS_EXPR:
15177       return tree_expr_nonzero_warnv_p (op0,
15178 					strict_overflow_p);
15179 
15180     case NOP_EXPR:
15181       {
15182 	tree inner_type = TREE_TYPE (op0);
15183 	tree outer_type = type;
15184 
15185 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15186 		&& tree_expr_nonzero_warnv_p (op0,
15187 					      strict_overflow_p));
15188       }
15189       break;
15190 
15191     case NON_LVALUE_EXPR:
15192       return tree_expr_nonzero_warnv_p (op0,
15193 					strict_overflow_p);
15194 
15195     default:
15196       break;
15197   }
15198 
15199   return false;
15200 }
15201 
15202 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15203    For floating point we further ensure that T is not denormal.
15204    Similar logic is present in nonzero_address in rtlanal.h.
15205 
15206    If the return value is based on the assumption that signed overflow
15207    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15208    change *STRICT_OVERFLOW_P.  */
15209 
15210 bool
15211 tree_binary_nonzero_warnv_p (enum tree_code code,
15212 			     tree type,
15213 			     tree op0,
15214 			     tree op1, bool *strict_overflow_p)
15215 {
15216   bool sub_strict_overflow_p;
15217   switch (code)
15218     {
15219     case POINTER_PLUS_EXPR:
15220     case PLUS_EXPR:
15221       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15222 	{
15223 	  /* In the presence of negative values it is hard
15224 	     to say anything.  */
15225 	  sub_strict_overflow_p = false;
15226 	  if (!tree_expr_nonnegative_warnv_p (op0,
15227 					      &sub_strict_overflow_p)
15228 	      || !tree_expr_nonnegative_warnv_p (op1,
15229 						 &sub_strict_overflow_p))
15230 	    return false;
15231 	  /* One of the operands must be positive and the other non-negative.  */
15232 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15233 	     overflows, on a twos-complement machine the sum of two
15234 	     nonnegative numbers can never be zero.  */
15235 	  return (tree_expr_nonzero_warnv_p (op0,
15236 					     strict_overflow_p)
15237 		  || tree_expr_nonzero_warnv_p (op1,
15238 						strict_overflow_p));
15239 	}
15240       break;
15241 
15242     case MULT_EXPR:
15243       if (TYPE_OVERFLOW_UNDEFINED (type))
15244 	{
15245 	  if (tree_expr_nonzero_warnv_p (op0,
15246 					 strict_overflow_p)
15247 	      && tree_expr_nonzero_warnv_p (op1,
15248 					    strict_overflow_p))
15249 	    {
15250 	      *strict_overflow_p = true;
15251 	      return true;
15252 	    }
15253 	}
15254       break;
15255 
15256     case MIN_EXPR:
15257       sub_strict_overflow_p = false;
15258       if (tree_expr_nonzero_warnv_p (op0,
15259 				     &sub_strict_overflow_p)
15260 	  && tree_expr_nonzero_warnv_p (op1,
15261 					&sub_strict_overflow_p))
15262 	{
15263 	  if (sub_strict_overflow_p)
15264 	    *strict_overflow_p = true;
15265 	}
15266       break;
15267 
15268     case MAX_EXPR:
15269       sub_strict_overflow_p = false;
15270       if (tree_expr_nonzero_warnv_p (op0,
15271 				     &sub_strict_overflow_p))
15272 	{
15273 	  if (sub_strict_overflow_p)
15274 	    *strict_overflow_p = true;
15275 
15276 	  /* When both operands are nonzero, then MAX must be too.  */
15277 	  if (tree_expr_nonzero_warnv_p (op1,
15278 					 strict_overflow_p))
15279 	    return true;
15280 
15281 	  /* MAX where operand 0 is positive is positive.  */
15282 	  return tree_expr_nonnegative_warnv_p (op0,
15283 					       strict_overflow_p);
15284 	}
15285       /* MAX where operand 1 is positive is positive.  */
15286       else if (tree_expr_nonzero_warnv_p (op1,
15287 					  &sub_strict_overflow_p)
15288 	       && tree_expr_nonnegative_warnv_p (op1,
15289 						 &sub_strict_overflow_p))
15290 	{
15291 	  if (sub_strict_overflow_p)
15292 	    *strict_overflow_p = true;
15293 	  return true;
15294 	}
15295       break;
15296 
15297     case BIT_IOR_EXPR:
15298       return (tree_expr_nonzero_warnv_p (op1,
15299 					 strict_overflow_p)
15300 	      || tree_expr_nonzero_warnv_p (op0,
15301 					    strict_overflow_p));
15302 
15303     default:
15304       break;
15305   }
15306 
15307   return false;
15308 }
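
/* Worked example for the PLUS_EXPR case above (an annotation, not
   part of the original source): for signed n-bit operands with
   x >= 0, y >= 0 and at least one of them nonzero, x + y lies in
   [1, 2^n - 2], since each operand is at most 2^(n-1) - 1; the sum
   therefore can never be congruent to 0 modulo 2^n even if it wraps.
   That is why this conclusion deliberately does not set
   *STRICT_OVERFLOW_P, while the MULT_EXPR case must: with wrapping
   arithmetic two nonzero factors can multiply to zero
   (e.g. 2^(n-1) * 2), so nonzeroness of a product relies on
   overflow being undefined.  */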
15309 
15310 /* Return true when T is an address and is known to be nonzero.
15311    For floating point we further ensure that T is not denormal.
15312    Similar logic is present in nonzero_address in rtlanal.h.
15313 
15314    If the return value is based on the assumption that signed overflow
15315    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15316    change *STRICT_OVERFLOW_P.  */
15317 
15318 bool
15319 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15320 {
15321   bool sub_strict_overflow_p;
15322   switch (TREE_CODE (t))
15323     {
15324     case INTEGER_CST:
15325       return !integer_zerop (t);
15326 
15327     case ADDR_EXPR:
15328       {
15329 	tree base = TREE_OPERAND (t, 0);
15330 
15331 	if (!DECL_P (base))
15332 	  base = get_base_address (base);
15333 
15334 	if (base && TREE_CODE (base) == TARGET_EXPR)
15335 	  base = TARGET_EXPR_SLOT (base);
15336 
15337 	if (!base)
15338 	  return false;
15339 
15340 	/* For objects in the symbol table, check whether we know they are non-zero.
15341 	   Don't do anything for variables and functions before symtab is built;
15342 	   it is quite possible that they will be declared weak later.  */
15343 	int nonzero_addr = maybe_nonzero_address (base);
15344 	if (nonzero_addr >= 0)
15345 	  return nonzero_addr;
15346 
15347 	/* Constants are never weak.  */
15348 	if (CONSTANT_CLASS_P (base))
15349 	  return true;
15350 
15351 	return false;
15352       }
15353 
15354     case COND_EXPR:
15355       sub_strict_overflow_p = false;
15356       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15357 				     &sub_strict_overflow_p)
15358 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15359 					&sub_strict_overflow_p))
15360 	{
15361 	  if (sub_strict_overflow_p)
15362 	    *strict_overflow_p = true;
15363 	  return true;
15364 	}
15365       break;
15366 
15367     case SSA_NAME:
15368       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15369 	break;
15370       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15371 
15372     default:
15373       break;
15374     }
15375   return false;
15376 }
15377 
15378 #define integer_valued_real_p(X) \
15379   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15380 
15381 #define RECURSE(X) \
15382   ((integer_valued_real_p) (X, depth + 1))
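
/* How the pair of macros above works (an annotation, not part of the
   original source): _Pragma ("GCC error ...") turns any plain,
   unparenthesized call to integer_valued_real_p inside this block
   into a compile-time error, because such a call would forget to
   increment DEPTH.  RECURSE (X) instead expands to

     (integer_valued_real_p) (X, depth + 1)

   and the parentheses around the function name prevent function-like
   macro expansion, so the real function is called with the
   incremented depth.  */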
15383 
15384 /* Return true if the floating point result of (CODE OP0) has an
15385    integer value.  We also allow +Inf, -Inf and NaN to be considered
15386    integer values. Return false for signaling NaN.
15387 
15388    DEPTH is the current nesting depth of the query.  */
15389 
15390 bool
15391 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15392 {
15393   switch (code)
15394     {
15395     case FLOAT_EXPR:
15396       return true;
15397 
15398     case ABS_EXPR:
15399       return RECURSE (op0);
15400 
15401     CASE_CONVERT:
15402       {
15403 	tree type = TREE_TYPE (op0);
15404 	if (TREE_CODE (type) == INTEGER_TYPE)
15405 	  return true;
15406 	if (TREE_CODE (type) == REAL_TYPE)
15407 	  return RECURSE (op0);
15408 	break;
15409       }
15410 
15411     default:
15412       break;
15413     }
15414   return false;
15415 }
15416 
15417 /* Return true if the floating point result of (CODE OP0 OP1) has an
15418    integer value.  We also allow +Inf, -Inf and NaN to be considered
15419    integer values. Return false for signaling NaN.
15420 
15421    DEPTH is the current nesting depth of the query.  */
15422 
15423 bool
15424 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15425 {
15426   switch (code)
15427     {
15428     case PLUS_EXPR:
15429     case MINUS_EXPR:
15430     case MULT_EXPR:
15431     case MIN_EXPR:
15432     case MAX_EXPR:
15433       return RECURSE (op0) && RECURSE (op1);
15434 
15435     default:
15436       break;
15437     }
15438   return false;
15439 }
15440 
15441 /* Return true if the floating point result of calling FNDECL with arguments
15442    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
15443    considered integer values. Return false for signaling NaN.  If FNDECL
15444    takes fewer than 2 arguments, the remaining ARGn are null.
15445 
15446    DEPTH is the current nesting depth of the query.  */
15447 
15448 bool
15449 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15450 {
15451   switch (fn)
15452     {
15453     CASE_CFN_CEIL:
15454     CASE_CFN_CEIL_FN:
15455     CASE_CFN_FLOOR:
15456     CASE_CFN_FLOOR_FN:
15457     CASE_CFN_NEARBYINT:
15458     CASE_CFN_NEARBYINT_FN:
15459     CASE_CFN_RINT:
15460     CASE_CFN_RINT_FN:
15461     CASE_CFN_ROUND:
15462     CASE_CFN_ROUND_FN:
15463     CASE_CFN_ROUNDEVEN:
15464     CASE_CFN_ROUNDEVEN_FN:
15465     CASE_CFN_TRUNC:
15466     CASE_CFN_TRUNC_FN:
15467       return true;
15468 
15469     CASE_CFN_FMIN:
15470     CASE_CFN_FMIN_FN:
15471     CASE_CFN_FMAX:
15472     CASE_CFN_FMAX_FN:
15473       return RECURSE (arg0) && RECURSE (arg1);
15474 
15475     default:
15476       break;
15477     }
15478   return false;
15479 }
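
/* Example of what the classification above implies (an annotation,
   not part of the original source): calls in the first group are
   integer-valued by construction, calls in the second only when both
   arguments are.  E.g.

     floor (x)                // always integer-valued (or +-Inf/NaN)
     fmin (floor (x), 3.0)    // integer-valued: both operands are
     fmin (floor (x), 3.5)    // not provably integer-valued

   The 3.5 case fails because RECURSE on the REAL_CST reaches
   integer_valued_real_single_p, whose real_isinteger test rejects
   it.  */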
15480 
15481 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15482    has an integer value.  We also allow +Inf, -Inf and NaN to be
15483    considered integer values. Return false for signaling NaN.
15484 
15485    DEPTH is the current nesting depth of the query.  */
15486 
15487 bool
15488 integer_valued_real_single_p (tree t, int depth)
15489 {
15490   switch (TREE_CODE (t))
15491     {
15492     case REAL_CST:
15493       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15494 
15495     case COND_EXPR:
15496       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15497 
15498     case SSA_NAME:
15499       /* Limit the depth of recursion to avoid quadratic behavior.
15500 	 This is expected to catch almost all occurrences in practice.
15501 	 If this code misses important cases that unbounded recursion
15502 	 would not, passes that need this information could be revised
15503 	 to provide it through dataflow propagation.  */
15504       return (!name_registered_for_update_p (t)
15505 	      && depth < param_max_ssa_name_query_depth
15506 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15507 						    depth));
15508 
15509     default:
15510       break;
15511     }
15512   return false;
15513 }
15514 
15515 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15516    has an integer value.  We also allow +Inf, -Inf and NaN to be
15517    considered integer values. Return false for signaling NaN.
15518 
15519    DEPTH is the current nesting depth of the query.  */
15520 
15521 static bool
15522 integer_valued_real_invalid_p (tree t, int depth)
15523 {
15524   switch (TREE_CODE (t))
15525     {
15526     case COMPOUND_EXPR:
15527     case MODIFY_EXPR:
15528     case BIND_EXPR:
15529       return RECURSE (TREE_OPERAND (t, 1));
15530 
15531     case SAVE_EXPR:
15532       return RECURSE (TREE_OPERAND (t, 0));
15533 
15534     default:
15535       break;
15536     }
15537   return false;
15538 }
15539 
15540 #undef RECURSE
15541 #undef integer_valued_real_p
15542 
15543 /* Return true if the floating point expression T has an integer value.
15544    We also allow +Inf, -Inf and NaN to be considered integer values.
15545    Return false for signaling NaN.
15546 
15547    DEPTH is the current nesting depth of the query.  */
15548 
15549 bool
15550 integer_valued_real_p (tree t, int depth)
15551 {
15552   if (t == error_mark_node)
15553     return false;
15554 
15555   STRIP_ANY_LOCATION_WRAPPER (t);
15556 
15557   tree_code code = TREE_CODE (t);
15558   switch (TREE_CODE_CLASS (code))
15559     {
15560     case tcc_binary:
15561     case tcc_comparison:
15562       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15563 					   TREE_OPERAND (t, 1), depth);
15564 
15565     case tcc_unary:
15566       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15567 
15568     case tcc_constant:
15569     case tcc_declaration:
15570     case tcc_reference:
15571       return integer_valued_real_single_p (t, depth);
15572 
15573     default:
15574       break;
15575     }
15576 
15577   switch (code)
15578     {
15579     case COND_EXPR:
15580     case SSA_NAME:
15581       return integer_valued_real_single_p (t, depth);
15582 
15583     case CALL_EXPR:
15584       {
15585 	tree arg0 = (call_expr_nargs (t) > 0
15586 		     ? CALL_EXPR_ARG (t, 0)
15587 		     : NULL_TREE);
15588 	tree arg1 = (call_expr_nargs (t) > 1
15589 		     ? CALL_EXPR_ARG (t, 1)
15590 		     : NULL_TREE);
15591 	return integer_valued_real_call_p (get_call_combined_fn (t),
15592 					   arg0, arg1, depth);
15593       }
15594 
15595     default:
15596       return integer_valued_real_invalid_p (t, depth);
15597     }
15598 }
15599 
15600 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15601    attempt to fold the expression to a constant without modifying TYPE,
15602    OP0 or OP1.
15603 
15604    If the expression could be simplified to a constant, then return
15605    the constant.  If the expression would not be simplified to a
15606    constant, then return NULL_TREE.  */
15607 
15608 tree
15609 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15610 {
15611   tree tem = fold_binary (code, type, op0, op1);
15612   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15613 }
15614 
15615 /* Given the components of a unary expression CODE, TYPE and OP0,
15616    attempt to fold the expression to a constant without modifying
15617    TYPE or OP0.
15618 
15619    If the expression could be simplified to a constant, then return
15620    the constant.  If the expression would not be simplified to a
15621    constant, then return NULL_TREE.  */
15622 
15623 tree
15624 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15625 {
15626   tree tem = fold_unary (code, type, op0);
15627   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15628 }
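
/* Usage sketch for the two helpers above (an annotation, not part of
   the original source), assuming integer_type_node operands:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree six = fold_binary_to_constant (MULT_EXPR, integer_type_node,
					 two, three);

   SIX is the INTEGER_CST 6.  Had an operand been non-constant, the
   TREE_CONSTANT check would turn any partially simplified result
   into NULL_TREE rather than returning a non-constant tree.  */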
15629 
15630 /* If EXP represents referencing an element in a constant string
15631    (either via pointer arithmetic or array indexing), return the
15632    tree representing the value accessed, otherwise return NULL.  */
15633 
15634 tree
15635 fold_read_from_constant_string (tree exp)
15636 {
15637   if ((TREE_CODE (exp) == INDIRECT_REF
15638        || TREE_CODE (exp) == ARRAY_REF)
15639       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15640     {
15641       tree exp1 = TREE_OPERAND (exp, 0);
15642       tree index;
15643       tree string;
15644       location_t loc = EXPR_LOCATION (exp);
15645 
15646       if (TREE_CODE (exp) == INDIRECT_REF)
15647 	string = string_constant (exp1, &index, NULL, NULL);
15648       else
15649 	{
15650 	  tree low_bound = array_ref_low_bound (exp);
15651 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15652 
15653 	  /* Optimize the special-case of a zero lower bound.
15654 
15655 	     We convert the low_bound to sizetype to avoid some problems
15656 	     with constant folding.  (E.g. suppose the lower bound is 1,
15657 	     and its mode is QI.  Without the conversion, (ARRAY
15658 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15659 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15660 	  if (! integer_zerop (low_bound))
15661 	    index = size_diffop_loc (loc, index,
15662 				 fold_convert_loc (loc, sizetype, low_bound));
15663 
15664 	  string = exp1;
15665 	}
15666 
15667       scalar_int_mode char_mode;
15668       if (string
15669 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15670 	  && TREE_CODE (string) == STRING_CST
15671 	  && tree_fits_uhwi_p (index)
15672 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15673 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15674 			  &char_mode)
15675 	  && GET_MODE_SIZE (char_mode) == 1)
15676 	return build_int_cst_type (TREE_TYPE (exp),
15677 				   (TREE_STRING_POINTER (string)
15678 				    [TREE_INT_CST_LOW (index)]));
15679     }
15680   return NULL;
15681 }
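
/* Illustrative example (an annotation, not part of the original
   source): for the GENERIC form of "abc"[1] -- an ARRAY_REF whose
   base is the STRING_CST "abc" -- the checks above confirm that the
   index fits in a uhwi, is below TREE_STRING_LENGTH (4, counting the
   implicit NUL), and that the element mode is a one-byte integer
   mode; the function then folds the read to the character constant
   'b'.  An INDIRECT_REF such as *("abc" + 2) goes through
   string_constant to recover the same base and index.  */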
15682 
15683 /* Folds a read from vector element at IDX of vector ARG.  */
15684 
15685 tree
15686 fold_read_from_vector (tree arg, poly_uint64 idx)
15687 {
15688   unsigned HOST_WIDE_INT i;
15689   if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15690       && known_ge (idx, 0u)
15691       && idx.is_constant (&i))
15692     {
15693       if (TREE_CODE (arg) == VECTOR_CST)
15694 	return VECTOR_CST_ELT (arg, i);
15695       else if (TREE_CODE (arg) == CONSTRUCTOR)
15696 	{
15697 	  if (CONSTRUCTOR_NELTS (arg)
15698 	      && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15699 	    return NULL_TREE;
15700 	  if (i >= CONSTRUCTOR_NELTS (arg))
15701 	    return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15702 	  return CONSTRUCTOR_ELT (arg, i)->value;
15703 	}
15704     }
15705   return NULL_TREE;
15706 }
15707 
15708 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15709    an integer constant, real, or fixed-point constant.
15710 
15711    TYPE is the type of the result.  */
15712 
15713 static tree
15714 fold_negate_const (tree arg0, tree type)
15715 {
15716   tree t = NULL_TREE;
15717 
15718   switch (TREE_CODE (arg0))
15719     {
15720     case REAL_CST:
15721       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15722       break;
15723 
15724     case FIXED_CST:
15725       {
15726         FIXED_VALUE_TYPE f;
15727         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15728 					    &(TREE_FIXED_CST (arg0)), NULL,
15729 					    TYPE_SATURATING (type));
15730 	t = build_fixed (type, f);
15731 	/* Propagate overflow flags.  */
15732 	if (overflow_p | TREE_OVERFLOW (arg0))
15733 	  TREE_OVERFLOW (t) = 1;
15734 	break;
15735       }
15736 
15737     default:
15738       if (poly_int_tree_p (arg0))
15739 	{
15740 	  wi::overflow_type overflow;
15741 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15742 	  t = force_fit_type (type, res, 1,
15743 			      (overflow && ! TYPE_UNSIGNED (type))
15744 			      || TREE_OVERFLOW (arg0));
15745 	  break;
15746 	}
15747 
15748       gcc_unreachable ();
15749     }
15750 
15751   return t;
15752 }
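
/* Worked example for the integer path above (an annotation, not part
   of the original source): negating the 32-bit INTEGER_CST
   -2147483648 overflows, because +2147483648 is not representable.
   wi::neg reports this through its overflow out-parameter, and
   force_fit_type wraps the value back to -2147483648 while setting
   TREE_OVERFLOW on the new node, so later folders can tell the
   result came from an overflowing negation.  */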
15753 
15754 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15755    an integer constant or real constant.
15756 
15757    TYPE is the type of the result.  */
15758 
15759 tree
15760 fold_abs_const (tree arg0, tree type)
15761 {
15762   tree t = NULL_TREE;
15763 
15764   switch (TREE_CODE (arg0))
15765     {
15766     case INTEGER_CST:
15767       {
15768         /* If the value is unsigned or non-negative, then the absolute value
15769 	   is the same as the ordinary value.  */
15770 	wide_int val = wi::to_wide (arg0);
15771 	wi::overflow_type overflow = wi::OVF_NONE;
15772 	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15773 	  ;
15774 
15775 	/* If the value is negative, then the absolute value is
15776 	   its negation.  */
15777 	else
15778 	  val = wi::neg (val, &overflow);
15779 
15780 	/* Force to the destination type, set TREE_OVERFLOW for signed
15781 	   TYPE only.  */
15782 	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15783       }
15784     break;
15785 
15786     case REAL_CST:
15787       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15788 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15789       else
15790 	t = arg0;
15791       break;
15792 
15793     default:
15794       gcc_unreachable ();
15795     }
15796 
15797   return t;
15798 }
15799 
15800 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15801    constant.  TYPE is the type of the result.  */
15802 
15803 static tree
15804 fold_not_const (const_tree arg0, tree type)
15805 {
15806   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15807 
15808   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15809 }
15810 
15811 /* Given CODE, a relational operator, the target type, TYPE and two
15812    constant operands OP0 and OP1, return the result of the
15813    relational operation.  If the result is not a compile time
15814    constant, then return NULL_TREE.  */
15815 
15816 static tree
15817 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15818 {
15819   int result, invert;
15820 
15821   /* From here on, the only cases we handle are when the result is
15822      known to be a constant.  */
15823 
15824   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15825     {
15826       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15827       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15828 
15829       /* Handle the cases where either operand is a NaN.  */
15830       if (real_isnan (c0) || real_isnan (c1))
15831 	{
15832 	  switch (code)
15833 	    {
15834 	    case EQ_EXPR:
15835 	    case ORDERED_EXPR:
15836 	      result = 0;
15837 	      break;
15838 
15839 	    case NE_EXPR:
15840 	    case UNORDERED_EXPR:
15841 	    case UNLT_EXPR:
15842 	    case UNLE_EXPR:
15843 	    case UNGT_EXPR:
15844 	    case UNGE_EXPR:
15845 	    case UNEQ_EXPR:
15846               result = 1;
15847 	      break;
15848 
15849 	    case LT_EXPR:
15850 	    case LE_EXPR:
15851 	    case GT_EXPR:
15852 	    case GE_EXPR:
15853 	    case LTGT_EXPR:
15854 	      if (flag_trapping_math)
15855 		return NULL_TREE;
15856 	      result = 0;
15857 	      break;
15858 
15859 	    default:
15860 	      gcc_unreachable ();
15861 	    }
15862 
15863 	  return constant_boolean_node (result, type);
15864 	}
15865 
15866       return constant_boolean_node (real_compare (code, c0, c1), type);
15867     }
15868 
15869   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15870     {
15871       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15872       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15873       return constant_boolean_node (fixed_compare (code, c0, c1), type);
15874     }
15875 
15876   /* Handle equality/inequality of complex constants.  */
15877   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15878     {
15879       tree rcond = fold_relational_const (code, type,
15880 					  TREE_REALPART (op0),
15881 					  TREE_REALPART (op1));
15882       tree icond = fold_relational_const (code, type,
15883 					  TREE_IMAGPART (op0),
15884 					  TREE_IMAGPART (op1));
15885       if (code == EQ_EXPR)
15886 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15887       else if (code == NE_EXPR)
15888 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15889       else
15890 	return NULL_TREE;
15891     }
15892 
15893   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15894     {
15895       if (!VECTOR_TYPE_P (type))
15896 	{
15897 	  /* Have vector comparison with scalar boolean result.  */
15898 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15899 		      && known_eq (VECTOR_CST_NELTS (op0),
15900 				   VECTOR_CST_NELTS (op1)));
15901 	  unsigned HOST_WIDE_INT nunits;
15902 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15903 	    return NULL_TREE;
15904 	  for (unsigned i = 0; i < nunits; i++)
15905 	    {
15906 	      tree elem0 = VECTOR_CST_ELT (op0, i);
15907 	      tree elem1 = VECTOR_CST_ELT (op1, i);
15908 	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15909 	      if (tmp == NULL_TREE)
15910 		return NULL_TREE;
15911 	      if (integer_zerop (tmp))
15912 		return constant_boolean_node (code == NE_EXPR, type);
15913 	    }
15914 	  return constant_boolean_node (code == EQ_EXPR, type);
15915 	}
15916       tree_vector_builder elts;
15917       if (!elts.new_binary_operation (type, op0, op1, false))
15918 	return NULL_TREE;
15919       unsigned int count = elts.encoded_nelts ();
15920       for (unsigned i = 0; i < count; i++)
15921 	{
15922 	  tree elem_type = TREE_TYPE (type);
15923 	  tree elem0 = VECTOR_CST_ELT (op0, i);
15924 	  tree elem1 = VECTOR_CST_ELT (op1, i);
15925 
15926 	  tree tem = fold_relational_const (code, elem_type,
15927 					    elem0, elem1);
15928 
15929 	  if (tem == NULL_TREE)
15930 	    return NULL_TREE;
15931 
15932 	  elts.quick_push (build_int_cst (elem_type,
15933 					  integer_zerop (tem) ? 0 : -1));
15934 	}
15935 
15936       return elts.build ();
15937     }
15938 
15939   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15940 
15941      To compute GT, swap the arguments and do LT.
15942      To compute GE, do LT and invert the result.
15943      To compute LE, swap the arguments, do LT and invert the result.
15944      To compute NE, do EQ and invert the result.
15945 
15946      Therefore, the code below must handle only EQ and LT.  */
15947 
15948   if (code == LE_EXPR || code == GT_EXPR)
15949     {
15950       std::swap (op0, op1);
15951       code = swap_tree_comparison (code);
15952     }
15953 
15954   /* Note that it is safe to invert for real values here because we
15955      have already handled the one case that it matters.  */
15956 
15957   invert = 0;
15958   if (code == NE_EXPR || code == GE_EXPR)
15959     {
15960       invert = 1;
15961       code = invert_tree_comparison (code, false);
15962     }
15963 
15964   /* Compute a result for LT or EQ if args permit;
15965      otherwise return NULL_TREE.  */
15966   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15967     {
15968       if (code == EQ_EXPR)
15969 	result = tree_int_cst_equal (op0, op1);
15970       else
15971 	result = tree_int_cst_lt (op0, op1);
15972     }
15973   else
15974     return NULL_TREE;
15975 
15976   if (invert)
15977     result ^= 1;
15978   return constant_boolean_node (result, type);
15979 }
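
/* Worked example of the canonicalization above (an annotation, not
   part of the original source): folding 5 >= 3 rewrites GE_EXPR as
   "LT, then invert", so tree_int_cst_lt (5, 3) yields 0 and the
   inversion produces the true node.  Folding 5 > 3 instead swaps the
   operands and evaluates 3 < 5 directly.  Only EQ and LT ever reach
   the integer comparison at the end.  */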
15980 
15981 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15982    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
15983    itself.  */
15984 
15985 tree
15986 fold_build_cleanup_point_expr (tree type, tree expr)
15987 {
15988   /* If the expression does not have side effects then we don't have to wrap
15989      it with a cleanup point expression.  */
15990   if (!TREE_SIDE_EFFECTS (expr))
15991     return expr;
15992 
15993   /* If the expression is a return, check whether the expression inside the
15994      return, or the right-hand side of the modify expression inside the
15995      return, has no side effects.  If either has none, we don't need to wrap
15996      the expression in a cleanup point expression.  Note we don't check the
15997      left-hand side of the modify because it should always be a return decl.  */
15998   if (TREE_CODE (expr) == RETURN_EXPR)
15999     {
16000       tree op = TREE_OPERAND (expr, 0);
16001       if (!op || !TREE_SIDE_EFFECTS (op))
16002         return expr;
16003       op = TREE_OPERAND (op, 1);
16004       if (!TREE_SIDE_EFFECTS (op))
16005         return expr;
16006     }
16007 
16008   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16009 }
16010 
16011 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16012    of an indirection through OP0, or NULL_TREE if no simplification is
16013    possible.  */
16014 
16015 tree
16016 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16017 {
16018   tree sub = op0;
16019   tree subtype;
16020   poly_uint64 const_op01;
16021 
16022   STRIP_NOPS (sub);
16023   subtype = TREE_TYPE (sub);
16024   if (!POINTER_TYPE_P (subtype)
16025       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16026     return NULL_TREE;
16027 
16028   if (TREE_CODE (sub) == ADDR_EXPR)
16029     {
16030       tree op = TREE_OPERAND (sub, 0);
16031       tree optype = TREE_TYPE (op);
16032 
16033       /* *&CONST_DECL -> to the value of the const decl.  */
16034       if (TREE_CODE (op) == CONST_DECL)
16035 	return DECL_INITIAL (op);
16036       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16037       if (type == optype)
16038 	{
16039 	  tree fop = fold_read_from_constant_string (op);
16040 	  if (fop)
16041 	    return fop;
16042 	  else
16043 	    return op;
16044 	}
16045       /* *(foo *)&fooarray => fooarray[0] */
16046       else if (TREE_CODE (optype) == ARRAY_TYPE
16047 	       && type == TREE_TYPE (optype)
16048 	       && (!in_gimple_form
16049 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16050 	{
16051 	  tree type_domain = TYPE_DOMAIN (optype);
16052 	  tree min_val = size_zero_node;
16053 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
16054 	    min_val = TYPE_MIN_VALUE (type_domain);
16055 	  if (in_gimple_form
16056 	      && TREE_CODE (min_val) != INTEGER_CST)
16057 	    return NULL_TREE;
16058 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
16059 			     NULL_TREE, NULL_TREE);
16060 	}
16061       /* *(foo *)&complexfoo => __real__ complexfoo */
16062       else if (TREE_CODE (optype) == COMPLEX_TYPE
16063 	       && type == TREE_TYPE (optype))
16064 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
16065       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16066       else if (VECTOR_TYPE_P (optype)
16067 	       && type == TREE_TYPE (optype))
16068 	{
16069 	  tree part_width = TYPE_SIZE (type);
16070 	  tree index = bitsize_int (0);
16071 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16072 				  index);
16073 	}
16074     }
16075 
16076   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16077       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16078     {
16079       tree op00 = TREE_OPERAND (sub, 0);
16080       tree op01 = TREE_OPERAND (sub, 1);
16081 
16082       STRIP_NOPS (op00);
16083       if (TREE_CODE (op00) == ADDR_EXPR)
16084 	{
16085 	  tree op00type;
16086 	  op00 = TREE_OPERAND (op00, 0);
16087 	  op00type = TREE_TYPE (op00);
16088 
16089 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16090 	  if (VECTOR_TYPE_P (op00type)
16091 	      && type == TREE_TYPE (op00type)
16092 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16093 		 but we want to treat offsets with MSB set as negative.
16094 		 For the code below negative offsets are invalid and
16095 		 TYPE_SIZE of the element is something unsigned, so
16096 		 check whether op01 fits into poly_int64, which implies
16097 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16098 		 then just use poly_uint64 because we want to treat the
16099 		 value as unsigned.  */
16100 	      && tree_fits_poly_int64_p (op01))
16101 	    {
16102 	      tree part_width = TYPE_SIZE (type);
16103 	      poly_uint64 max_offset
16104 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
16105 		   * TYPE_VECTOR_SUBPARTS (op00type));
16106 	      if (known_lt (const_op01, max_offset))
16107 		{
16108 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16109 		  return fold_build3_loc (loc,
16110 					  BIT_FIELD_REF, type, op00,
16111 					  part_width, index);
16112 		}
16113 	    }
16114 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16115 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
16116 		   && type == TREE_TYPE (op00type))
16117 	    {
16118 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16119 			    const_op01))
16120 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16121 	    }
16122 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
16123 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
16124 		   && type == TREE_TYPE (op00type))
16125 	    {
16126 	      tree type_domain = TYPE_DOMAIN (op00type);
16127 	      tree min_val = size_zero_node;
16128 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
16129 		min_val = TYPE_MIN_VALUE (type_domain);
16130 	      poly_uint64 type_size, index;
16131 	      if (poly_int_tree_p (min_val)
16132 		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16133 		  && multiple_p (const_op01, type_size, &index))
16134 		{
16135 		  poly_offset_int off = index + wi::to_poly_offset (min_val);
16136 		  op01 = wide_int_to_tree (sizetype, off);
16137 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
16138 				     NULL_TREE, NULL_TREE);
16139 		}
16140 	    }
16141 	}
16142     }
16143 
16144   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16145   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16146       && type == TREE_TYPE (TREE_TYPE (subtype))
16147       && (!in_gimple_form
16148 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16149     {
16150       tree type_domain;
16151       tree min_val = size_zero_node;
16152       sub = build_fold_indirect_ref_loc (loc, sub);
16153       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16154       if (type_domain && TYPE_MIN_VALUE (type_domain))
16155 	min_val = TYPE_MIN_VALUE (type_domain);
16156       if (in_gimple_form
16157 	  && TREE_CODE (min_val) != INTEGER_CST)
16158 	return NULL_TREE;
16159       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16160 			 NULL_TREE);
16161     }
16162 
16163   return NULL_TREE;
16164 }
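
/* Illustrative examples of the patterns handled above (an annotation,
   not part of the original source), in C-level spelling:

     int a[4];          *(int *)&a       => a[0]
     _Complex float c;  *(float *)&c     => __real__ c
			((float *)&c)[1] => __imag__ c

   and for a vector type, ((int *)&v)[2] on a vector of four ints
   becomes a BIT_FIELD_REF of width 32 at bit offset 64 (the byte
   offset scaled by BITS_PER_UNIT).  */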
16165 
16166 /* Builds an expression for an indirection through T, simplifying some
16167    cases.  */
16168 
16169 tree
16170 build_fold_indirect_ref_loc (location_t loc, tree t)
16171 {
16172   tree type = TREE_TYPE (TREE_TYPE (t));
16173   tree sub = fold_indirect_ref_1 (loc, type, t);
16174 
16175   if (sub)
16176     return sub;
16177 
16178   return build1_loc (loc, INDIRECT_REF, type, t);
16179 }
16180 
16181 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16182 
16183 tree
16184 fold_indirect_ref_loc (location_t loc, tree t)
16185 {
16186   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16187 
16188   if (sub)
16189     return sub;
16190   else
16191     return t;
16192 }
16193 
16194 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16195    whose result is ignored.  The type of the returned tree need not be
16196    the same as the original expression.  */
16197 
16198 tree
16199 fold_ignored_result (tree t)
16200 {
16201   if (!TREE_SIDE_EFFECTS (t))
16202     return integer_zero_node;
16203 
16204   for (;;)
16205     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16206       {
16207       case tcc_unary:
16208 	t = TREE_OPERAND (t, 0);
16209 	break;
16210 
16211       case tcc_binary:
16212       case tcc_comparison:
16213 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16214 	  t = TREE_OPERAND (t, 0);
16215 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16216 	  t = TREE_OPERAND (t, 1);
16217 	else
16218 	  return t;
16219 	break;
16220 
16221       case tcc_expression:
16222 	switch (TREE_CODE (t))
16223 	  {
16224 	  case COMPOUND_EXPR:
16225 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16226 	      return t;
16227 	    t = TREE_OPERAND (t, 0);
16228 	    break;
16229 
16230 	  case COND_EXPR:
16231 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16232 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16233 	      return t;
16234 	    t = TREE_OPERAND (t, 0);
16235 	    break;
16236 
16237 	  default:
16238 	    return t;
16239 	  }
16240 	break;
16241 
16242       default:
16243 	return t;
16244       }
16245 }
16246 
16247 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16248 
16249 tree
16250 round_up_loc (location_t loc, tree value, unsigned int divisor)
16251 {
16252   tree div = NULL_TREE;
16253 
16254   if (divisor == 1)
16255     return value;
16256 
16257   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16258      have to do anything.  Only do this when we are not given a const,
16259      because in that case, this check is more expensive than just
16260      doing it.  */
16261   if (TREE_CODE (value) != INTEGER_CST)
16262     {
16263       div = build_int_cst (TREE_TYPE (value), divisor);
16264 
16265       if (multiple_of_p (TREE_TYPE (value), value, div))
16266 	return value;
16267     }
16268 
16269   /* If divisor is a power of two, simplify this to bit manipulation.  */
16270   if (pow2_or_zerop (divisor))
16271     {
16272       if (TREE_CODE (value) == INTEGER_CST)
16273 	{
16274 	  wide_int val = wi::to_wide (value);
16275 	  bool overflow_p;
16276 
16277 	  if ((val & (divisor - 1)) == 0)
16278 	    return value;
16279 
16280 	  overflow_p = TREE_OVERFLOW (value);
16281 	  val += divisor - 1;
16282 	  val &= (int) -divisor;
16283 	  if (val == 0)
16284 	    overflow_p = true;
16285 
16286 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16287 	}
16288       else
16289 	{
16290 	  tree t;
16291 
16292 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16293 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16294 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16295 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16296 	}
16297     }
16298   else
16299     {
16300       if (!div)
16301 	div = build_int_cst (TREE_TYPE (value), divisor);
16302       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16303       value = size_binop_loc (loc, MULT_EXPR, value, div);
16304     }
16305 
16306   return value;
16307 }
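
/* Worked example of the power-of-two path above (an annotation, not
   part of the original source): rounding VALUE = 21 up to a multiple
   of DIVISOR = 8 computes (21 + 7) & -8 = 28 & ~7 = 24.  Adding
   DIVISOR - 1 carries the value past the next boundary unless it is
   already aligned, and the mask clears the low log2 (DIVISOR) bits.
   The general path instead computes CEIL_DIV_EXPR followed by
   MULT_EXPR, which yields the same result for any positive
   divisor.  */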
16308 
16309 /* Likewise, but round down.  */
16310 
16311 tree
16312 round_down_loc (location_t loc, tree value, int divisor)
16313 {
16314   tree div = NULL_TREE;
16315 
16316   gcc_assert (divisor > 0);
16317   if (divisor == 1)
16318     return value;
16319 
16320   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16321      have to do anything.  Only do this when we are not given a const,
16322      because in that case, this check is more expensive than just
16323      doing it.  */
16324   if (TREE_CODE (value) != INTEGER_CST)
16325     {
16326       div = build_int_cst (TREE_TYPE (value), divisor);
16327 
16328       if (multiple_of_p (TREE_TYPE (value), value, div))
16329 	return value;
16330     }
16331 
16332   /* If divisor is a power of two, simplify this to bit manipulation.  */
16333   if (pow2_or_zerop (divisor))
16334     {
16335       tree t;
16336 
16337       t = build_int_cst (TREE_TYPE (value), -divisor);
16338       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16339     }
16340   else
16341     {
16342       if (!div)
16343 	div = build_int_cst (TREE_TYPE (value), divisor);
16344       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16345       value = size_binop_loc (loc, MULT_EXPR, value, div);
16346     }
16347 
16348   return value;
16349 }
16350 
16351 /* Returns the pointer to the base of the object addressed by EXP and
16352    extracts the information about the offset of the access, storing it
16353    to PBITPOS and POFFSET.  */
16354 
16355 static tree
16356 split_address_to_core_and_offset (tree exp,
16357 				  poly_int64_pod *pbitpos, tree *poffset)
16358 {
16359   tree core;
16360   machine_mode mode;
16361   int unsignedp, reversep, volatilep;
16362   poly_int64 bitsize;
16363   location_t loc = EXPR_LOCATION (exp);
16364 
16365   if (TREE_CODE (exp) == ADDR_EXPR)
16366     {
16367       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16368 				  poffset, &mode, &unsignedp, &reversep,
16369 				  &volatilep);
16370       core = build_fold_addr_expr_loc (loc, core);
16371     }
16372   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16373     {
16374       core = TREE_OPERAND (exp, 0);
16375       STRIP_NOPS (core);
16376       *pbitpos = 0;
16377       *poffset = TREE_OPERAND (exp, 1);
16378       if (poly_int_tree_p (*poffset))
16379 	{
16380 	  poly_offset_int tem
16381 	    = wi::sext (wi::to_poly_offset (*poffset),
16382 			TYPE_PRECISION (TREE_TYPE (*poffset)));
16383 	  tem <<= LOG2_BITS_PER_UNIT;
16384 	  if (tem.to_shwi (pbitpos))
16385 	    *poffset = NULL_TREE;
16386 	}
16387     }
16388   else
16389     {
16390       core = exp;
16391       *pbitpos = 0;
16392       *poffset = NULL_TREE;
16393     }
16394 
16395   return core;
16396 }
16397 
16398 /* Returns true if addresses of E1 and E2 differ by a constant, false
16399    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16400 
16401 bool
16402 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16403 {
16404   tree core1, core2;
16405   poly_int64 bitpos1, bitpos2;
16406   tree toffset1, toffset2, tdiff, type;
16407 
16408   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16409   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16410 
16411   poly_int64 bytepos1, bytepos2;
16412   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16413       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16414       || !operand_equal_p (core1, core2, 0))
16415     return false;
16416 
16417   if (toffset1 && toffset2)
16418     {
16419       type = TREE_TYPE (toffset1);
16420       if (type != TREE_TYPE (toffset2))
16421 	toffset2 = fold_convert (type, toffset2);
16422 
16423       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16424       if (!cst_and_fits_in_hwi (tdiff))
16425 	return false;
16426 
16427       *diff = int_cst_value (tdiff);
16428     }
16429   else if (toffset1 || toffset2)
16430     {
16431       /* If only one of the offsets is non-constant, the difference cannot
16432 	 be a constant.  */
16433       return false;
16434     }
16435   else
16436     *diff = 0;
16437 
16438   *diff += bytepos1 - bytepos2;
16439   return true;
16440 }
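
/* Usage sketch (an annotation, not part of the original source): for
   E1 = &a[3] and E2 = &a[1] with 4-byte elements, both expressions
   split to the same core &a with bit positions 96 and 32; the byte
   positions 12 and 4 then give *DIFF = 8.  If exactly one side
   carries a variable offset the function returns false, since the
   difference cannot then be a compile-time constant.  */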
16441 
16442 /* Return OFF converted to a pointer offset type suitable as offset for
16443    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
16444 tree
16445 convert_to_ptrofftype_loc (location_t loc, tree off)
16446 {
16447   if (ptrofftype_p (TREE_TYPE (off)))
16448     return off;
16449   return fold_convert_loc (loc, sizetype, off);
16450 }
16451 
16452 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
16453 tree
16454 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16455 {
16456   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16457 			  ptr, convert_to_ptrofftype_loc (loc, off));
16458 }
16459 
16460 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
16461 tree
16462 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16463 {
16464   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16465 			  ptr, size_int (off));
16466 }
16467 
16468 /* Return a pointer to a NUL-terminated string containing the sequence
16469    of bytes corresponding to the representation of the object referred to
16470    by SRC (or a subsequence of such bytes within it if SRC is a reference
16471    to an initialized constant array plus some constant offset).
16472    Set *STRSIZE the number of bytes in the constant sequence including
16473    the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
16474    where A is the array that stores the constant sequence that SRC points
16475    to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
16476    need not point to a string or even an array of characters but may point
16477    to an object of any type.  */
16478 
16479 const char *
16480 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16481 {
16482   /* The offset into the array A storing the string, and A's byte size.  */
16483   tree offset_node;
16484   tree mem_size;
16485 
16486   if (strsize)
16487     *strsize = 0;
16488 
16489   if (strsize)
16490     src = byte_representation (src, &offset_node, &mem_size, NULL);
16491   else
16492     src = string_constant (src, &offset_node, &mem_size, NULL);
16493   if (!src)
16494     return NULL;
16495 
16496   unsigned HOST_WIDE_INT offset = 0;
16497   if (offset_node != NULL_TREE)
16498     {
16499       if (!tree_fits_uhwi_p (offset_node))
16500 	return NULL;
16501       else
16502 	offset = tree_to_uhwi (offset_node);
16503     }
16504 
16505   if (!tree_fits_uhwi_p (mem_size))
16506     return NULL;
16507 
16508   /* ARRAY_SIZE is the byte size of the array the constant sequence
16509      is stored in and equal to sizeof A.  INIT_BYTES is the number
16510      of bytes in the constant sequence used to initialize the array,
16511      including any embedded NULs as well as the terminating NUL (for
16512      strings), but not including any trailing zeros/NULs past
16513      the terminating one appended implicitly to a string literal to
16514      zero out the remainder of the array it's stored in.  For example,
16515      given:
16516        const char a[7] = "abc\0d";
16517        n = strlen (a + 1);
16518      ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
16519      (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16520      is equal to strlen (A) + 1.  */
16521   const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16522   unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16523   const char *string = TREE_STRING_POINTER (src);
16524 
16525   /* Ideally this would turn into a gcc_checking_assert over time.  */
16526   if (init_bytes > array_size)
16527     init_bytes = array_size;
16528 
16529   if (init_bytes == 0 || offset >= array_size)
16530     return NULL;
16531 
16532   if (strsize)
16533     {
16534       /* Compute and store the number of characters from the beginning
16535 	 of the substring at OFFSET to the end, including the terminating
16536 	 nul.  Offsets past the initial length refer to null strings.  */
16537       if (offset < init_bytes)
16538 	*strsize = init_bytes - offset;
16539       else
16540 	*strsize = 1;
16541     }
16542   else
16543     {
16544       tree eltype = TREE_TYPE (TREE_TYPE (src));
16545       /* Support only properly NUL-terminated single byte strings.  */
16546       if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16547 	return NULL;
16548       if (string[init_bytes - 1] != '\0')
16549 	return NULL;
16550     }
16551 
16552   return offset < init_bytes ? string + offset : "";
16553 }
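
/* Illustrative example continuing the comment above (an annotation,
   not part of the original source): for

     const char a[7] = "abc\0d";

   a call with SRC = &a[1] returns a pointer to "bc" and sets
   *STRSIZE = 5 (INIT_BYTES 6 minus OFFSET 1), while SRC = &a[6]
   yields "" with *STRSIZE = 1, because offsets at or past the
   initialized bytes refer to the zero-filled tail of the array.  */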
16554 
16555 /* Return a pointer to a NUL-terminated string corresponding to
16556    the expression STR referencing a constant string, possibly
16557    involving a constant offset.  Return null if STR either doesn't
16558    reference a constant string or if it involves a nonconstant
16559    offset.  */
16560 
16561 const char *
16562 c_getstr (tree str)
16563 {
16564   return getbyterep (str, NULL);
16565 }
16566 
16567 /* Given a tree T, compute which bits in T may be nonzero.  */
16568 
16569 wide_int
16570 tree_nonzero_bits (const_tree t)
16571 {
16572   switch (TREE_CODE (t))
16573     {
16574     case INTEGER_CST:
16575       return wi::to_wide (t);
16576     case SSA_NAME:
16577       return get_nonzero_bits (t);
16578     case NON_LVALUE_EXPR:
16579     case SAVE_EXPR:
16580       return tree_nonzero_bits (TREE_OPERAND (t, 0));
16581     case BIT_AND_EXPR:
16582       return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16583 			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
16584     case BIT_IOR_EXPR:
16585     case BIT_XOR_EXPR:
16586       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16587 			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16588     case COND_EXPR:
16589       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16590 			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16591     CASE_CONVERT:
16592       return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16593 			     TYPE_PRECISION (TREE_TYPE (t)),
16594 			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16595     case PLUS_EXPR:
16596       if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16597 	{
16598 	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16599 	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16600 	  if (wi::bit_and (nzbits1, nzbits2) == 0)
16601 	    return wi::bit_or (nzbits1, nzbits2);
16602 	}
16603       break;
16604     case LSHIFT_EXPR:
16605       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16606 	{
16607 	  tree type = TREE_TYPE (t);
16608 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16609 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16610 				       TYPE_PRECISION (type));
16611 	  return wi::neg_p (arg1)
16612 		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16613 		 : wi::lshift (nzbits, arg1);
16614 	}
16615       break;
16616     case RSHIFT_EXPR:
16617       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16618         {
16619 	  tree type = TREE_TYPE (t);
16620 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16621 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16622 				       TYPE_PRECISION (type));
16623 	  return wi::neg_p (arg1)
16624 		 ? wi::lshift (nzbits, -arg1)
16625 		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16626         }
16627       break;
16628     default:
16629       break;
16630     }
16631 
16632   return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16633 }
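
/* Worked example (an annotation, not part of the original source):
   for (x & 0xff) << 4 the BIT_AND_EXPR case intersects the unknown
   bits of X (all ones unless X is an SSA_NAME with recorded nonzero
   bits) with 0xff, and the LSHIFT_EXPR case shifts that mask left,
   giving 0xff0: every bit outside the mask is provably zero.  Codes
   with no special handling fall through to the conservative
   all-ones answer.  */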
16634 
16635 /* Helper function for address compare simplifications in match.pd.
16636    OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16637    TYPE is the type of comparison operands.
16638    BASE0, BASE1, OFF0 and OFF1 are set by the function.
16639    GENERIC is true if GENERIC folding and false for GIMPLE folding.
16640    Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16641    1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16642    and 2 if unknown.  */
16643 
16644 int
16645 address_compare (tree_code code, tree type, tree op0, tree op1,
16646 		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16647 		 bool generic)
16648 {
16649   gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16650   gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16651   base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16652   base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16653   if (base0 && TREE_CODE (base0) == MEM_REF)
16654     {
16655       off0 += mem_ref_offset (base0).force_shwi ();
16656       base0 = TREE_OPERAND (base0, 0);
16657     }
16658   if (base1 && TREE_CODE (base1) == MEM_REF)
16659     {
16660       off1 += mem_ref_offset (base1).force_shwi ();
16661       base1 = TREE_OPERAND (base1, 0);
16662     }
16663   if (base0 == NULL_TREE || base1 == NULL_TREE)
16664     return 2;
16665 
16666   int equal = 2;
16667   /* Punt in GENERIC on variables with value expressions;
16668      the value expressions might point to fields/elements
16669      of other vars etc.  */
16670   if (generic
16671       && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16672 	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16673     return 2;
16674   else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16675     {
16676       symtab_node *node0 = symtab_node::get_create (base0);
16677       symtab_node *node1 = symtab_node::get_create (base1);
16678       equal = node0->equal_address_to (node1);
16679     }
16680   else if ((DECL_P (base0)
16681 	    || TREE_CODE (base0) == SSA_NAME
16682 	    || TREE_CODE (base0) == STRING_CST)
16683 	   && (DECL_P (base1)
16684 	       || TREE_CODE (base1) == SSA_NAME
16685 	       || TREE_CODE (base1) == STRING_CST))
16686     equal = (base0 == base1);
16687   /* Assume different STRING_CSTs with the same content will be
16688      merged.  */
16689   if (equal == 0
16690       && TREE_CODE (base0) == STRING_CST
16691       && TREE_CODE (base1) == STRING_CST
16692       && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16693       && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16694 		 TREE_STRING_LENGTH (base0)) == 0)
16695     equal = 1;
16696   if (equal == 1)
16697     {
16698       if (code == EQ_EXPR
16699 	  || code == NE_EXPR
16700 	  /* If the offsets are equal we can ignore overflow.  */
16701 	  || known_eq (off0, off1)
16702 	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16703 	  /* Or if we compare using pointers to decls or strings.  */
16704 	  || (POINTER_TYPE_P (type)
16705 	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16706 	return 1;
16707       return 2;
16708     }
16709   if (equal != 0)
16710     return equal;
16711   if (code != EQ_EXPR && code != NE_EXPR)
16712     return 2;
16713 
16714   /* At this point we know (or assume) the two pointers point at
16715      different objects.  */
16716   HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16717   off0.is_constant (&ioff0);
16718   off1.is_constant (&ioff1);
16719   /* Punt on non-zero offsets from functions.  */
16720   if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16721       || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16722     return 2;
16723   /* Or if the bases are neither decls nor string literals.  */
16724   if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16725     return 2;
16726   if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16727     return 2;
16728   /* For initializers, assume addresses of different functions are
16729      different.  */
16730   if (folding_initializer
16731       && TREE_CODE (base0) == FUNCTION_DECL
16732       && TREE_CODE (base1) == FUNCTION_DECL)
16733     return 0;
16734 
16735   /* Compute whether one address points to the start of one
16736      object and another one to the end of another one.  */
16737   poly_int64 size0 = 0, size1 = 0;
16738   if (TREE_CODE (base0) == STRING_CST)
16739     {
16740       if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16741 	equal = 2;
16742       else
16743 	size0 = TREE_STRING_LENGTH (base0);
16744     }
16745   else if (TREE_CODE (base0) == FUNCTION_DECL)
16746     size0 = 1;
16747   else
16748     {
16749       tree sz0 = DECL_SIZE_UNIT (base0);
16750       if (!tree_fits_poly_int64_p (sz0))
16751 	equal = 2;
16752       else
16753 	size0 = tree_to_poly_int64 (sz0);
16754     }
16755   if (TREE_CODE (base1) == STRING_CST)
16756     {
16757       if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16758 	equal = 2;
16759       else
16760 	size1 = TREE_STRING_LENGTH (base1);
16761     }
16762   else if (TREE_CODE (base1) == FUNCTION_DECL)
16763     size1 = 1;
16764   else
16765     {
16766       tree sz1 = DECL_SIZE_UNIT (base1);
16767       if (!tree_fits_poly_int64_p (sz1))
16768 	equal = 2;
16769       else
16770 	size1 = tree_to_poly_int64 (sz1);
16771     }
16772   if (equal == 0)
16773     {
16774       /* If one offset is pointing (or could be) to the beginning of one
16775 	 object and the other is pointing to one past the last byte of the
16776 	 other object, punt.  */
16777       if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16778 	equal = 2;
16779       else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16780 	equal = 2;
16781       /* If both offsets are the same, there are some cases where we know
16782 	 the comparison is still OK: either the offsets are known to be
16783 	 nonzero, or both sizes are known to be nonzero.  */
16784       if (equal == 2
16785 	  && known_eq (off0, off1)
16786 	  && (known_ne (off0, 0)
16787 	      || (known_ne (size0, 0) && known_ne (size1, 0))))
16788 	equal = 0;
16789     }
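  /* A sketch of the start-vs-end punt above: given
       int a, b;
     the comparison &b + 1 == &a must punt (2), because B + 1 points
     one past the last byte of B and may legitimately compare equal to
     &A when the two objects happen to be laid out adjacently.  */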
16790 
16791   /* At this point, equal is 2 if either one or both pointers are out of
16792      bounds of their object, or one points to start of its object and the
16793      other points to end of its object.  This is unspecified behavior
16794      e.g. in C++.  Otherwise equal is 0.  */
16795   if (folding_cxx_constexpr && equal)
16796     return equal;
16797 
16798   /* When both pointers point to string literals, even when equal is 0,
16799      due to tail merging of string literals the pointers might be the same.  */
16800   if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16801     {
16802       if (ioff0 < 0
16803 	  || ioff1 < 0
16804 	  || ioff0 > TREE_STRING_LENGTH (base0)
16805 	  || ioff1 > TREE_STRING_LENGTH (base1))
16806 	return 2;
16807 
16808       /* If the bytes in the string literals starting at the pointers
16809 	 differ, the pointers need to be different.  */
16810       if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16811 		  TREE_STRING_POINTER (base1) + ioff1,
16812 		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
16813 		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16814 	{
16815 	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16816 	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16817 		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16818 		      ioffmin) == 0)
16819 	    /* If even the bytes in the string literal before the
16820 	       pointers are the same, the string literals could be
16821 	       tail merged.  */
16822 	    return 2;
16823 	}
16824       return 0;
16825     }
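  /* E.g. "def" can be tail merged into "abcdef", making
       &"abcdef"[3] == &"def"[0]
     possibly true, hence the punt (2) above when the bytes at and
     before both offsets agree; folding to false (0) is safe only when
     some byte at or before the offsets provably differs.  */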
16826 
16827   if (folding_cxx_constexpr)
16828     return 0;
16829 
16830   /* If this is a pointer comparison, ignore for now even
16831      valid equalities where one pointer is at offset zero into
16832      one object and the other points one past the end of another.  */
16833   if (!INTEGRAL_TYPE_P (type))
16834     return 0;
16835 
16836   /* Assume that string literals can't be adjacent to variables
16837      (automatic or global).  */
16838   if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16839     return 0;
16840 
16841   /* Assume that automatic variables can't be adjacent to global
16842      variables.  */
16843   if (is_global_var (base0) != is_global_var (base1))
16844     return 0;
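  /* E.g. given
       int local;  static int global;
     &local == &global + 1 is folded to false: stack and static
     storage are assumed never to be adjacent, a layout assumption
     rather than a language rule.  */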
16845 
16846   return equal;
16847 }
16848 
16849 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */
16850 tree
16851 ctor_single_nonzero_element (const_tree t)
16852 {
16853   unsigned HOST_WIDE_INT idx;
16854   constructor_elt *ce;
16855   tree elt = NULL_TREE;
16856 
16857   if (TREE_CODE (t) != CONSTRUCTOR)
16858     return NULL_TREE;
16859   for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16860     if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16861       {
16862 	if (elt)
16863 	  return NULL_TREE;
16864 	elt = ce->value;
16865       }
16866   return elt;
16867 }
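/* E.g. for the CONSTRUCTOR { 0, 7, 0 } this returns the INTEGER_CST 7,
   while { 0, 7, 3 } (two nonzero elements) and { 0, 0, 0 } (none)
   both yield NULL_TREE.  */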
16868 
16869 #if CHECKING_P
16870 
16871 namespace selftest {
16872 
16873 /* Helper functions for writing tests of folding trees.  */
16874 
16875 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
16876 
16877 static void
16878 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16879 			     tree constant)
16880 {
16881   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16882 }
16883 
16884 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
16885    wrapping WRAPPED_EXPR.  */
16886 
16887 static void
16888 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16889 				 tree wrapped_expr)
16890 {
16891   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16892   ASSERT_NE (wrapped_expr, result);
16893   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16894   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16895 }
16896 
16897 /* Verify that various arithmetic binary operations are folded
16898    correctly.  */
16899 
16900 static void
16901 test_arithmetic_folding ()
16902 {
16903   tree type = integer_type_node;
16904   tree x = create_tmp_var_raw (type, "x");
16905   tree zero = build_zero_cst (type);
16906   tree one = build_int_cst (type, 1);
16907 
16908   /* Addition.  */
16909   /* 1 <-- (0 + 1) */
16910   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16911 			       one);
16912   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16913 			       one);
16914 
16915   /* (nonlvalue)x <-- (x + 0) */
16916   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16917 				   x);
16918 
16919   /* Subtraction.  */
16920   /* 0 <-- (x - x) */
16921   assert_binop_folds_to_const (x, MINUS_EXPR, x,
16922 			       zero);
16923   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16924 				   x);
16925 
16926   /* Multiplication.  */
16927   /* 0 <-- (x * 0) */
16928   assert_binop_folds_to_const (x, MULT_EXPR, zero,
16929 			       zero);
16930 
16931   /* (nonlvalue)x <-- (x * 1) */
16932   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16933 				   x);
16934 }
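/* The identities exercised above are only unconditionally valid for
   integer types; e.g. X - X -> 0 and X * 0 -> 0 do not hold for IEEE
   floating point, where X could be a NaN or an infinity.  */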
16935 
16936 /* Verify that various binary operations on vectors are folded
16937    correctly.  */
16938 
16939 static void
16940 test_vector_folding ()
16941 {
16942   tree inner_type = integer_type_node;
16943   tree type = build_vector_type (inner_type, 4);
16944   tree zero = build_zero_cst (type);
16945   tree one = build_one_cst (type);
16946   tree index = build_index_vector (type, 0, 1);
16947 
16948   /* Verify equality tests that return a scalar boolean result.  */
16949   tree res_type = boolean_type_node;
16950   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16951   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16952   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16953   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16954   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16955   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16956 					       index, one)));
16957   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16958 					       index, index)));
16959   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16960 					      index, index)));
16961 }
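/* build_index_vector above produces the constant { 0, 1, 2, 3 }, so
   INDEX != ONE folds to true: the scalar EQ_EXPR/NE_EXPR results
   compare all lanes, and here only lane 1 matches ONE.  */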
16962 
16963 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
16964 
16965 static void
16966 test_vec_duplicate_folding ()
16967 {
16968   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16969   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16970   /* This will be 1 if VEC_MODE isn't a vector mode.  */
16971   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16972 
16973   tree type = build_vector_type (ssizetype, nunits);
16974   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16975   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16976   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16977 }
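/* fold_unary above is expected to reduce a VEC_DUPLICATE_EXPR of a
   constant to the equivalent uniform VECTOR_CST, which is why it
   compares operand_equal_p to the vector built directly from the
   same value.  */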
16978 
16979 /* Run all of the selftests within this file.  */
16980 
16981 void
16982 fold_const_cc_tests ()
16983 {
16984   test_arithmetic_folding ();
16985   test_vector_folding ();
16986   test_vec_duplicate_folding ();
16987 }
16988 
16989 } // namespace selftest
16990 
16991 #endif /* CHECKING_P */
16992