/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
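
/* Illustration of the encoding: with the bits LT = 1, EQ = 2, GT = 4
   and UNORD = 8, each code above is simply the union of the outcomes
   it accepts, for example

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)		 (3)
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)		 (5)
     COMPCODE_UNEQ == (COMPCODE_UNORD | COMPCODE_EQ)		 (10)
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) (13)

   so ANDing or ORing two codes yields the code for the conjunction
   or disjunction of the corresponding comparisons.  */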

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
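
/* For instance, integer constants ARG1 == 12 and ARG2 == 4 yield the
   constant 3, while ARG1 == 7 and ARG2 == 2 yield NULL_TREE because
   the remainder is nonzero.  */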

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
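
/* A sketch of how the deferral API above is typically paired up
   (hypothetical caller; RESULT_IS_USED_P stands for whatever test
   decides that the folded value actually reaches user-visible code):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (RESULT_IS_USED_P (folded), stmt, 0);

   Passing 0 as the code means the deferred warning's own level is
   used.  */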

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
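
/* For example, sin is odd (-sin(x) == sin(-x)), so -sin(x) may be
   folded to sin(-x).  rint and friends only qualify when
   -frounding-math is off: under a directed rounding mode, rint(-x)
   need not equal -rint(x).  */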

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
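
/* E.g. for a 32-bit signed type this is false only for INT_MIN
   (0x80000000), the lone value whose negation is not representable;
   only_sign_bit_p detects exactly that bit pattern.  */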

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but it does once one of the
         operands is negated if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
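
/* Note the contract difference: fold_negate_expr may return NULL_TREE
   when no simplification applies, whereas negate_expr always produces
   a tree for non-null T, falling back to building an explicit
   NEGATE_EXPR when folding fails.  */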

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
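
/* As an example, with CODE == PLUS_EXPR and IN == a - 5, the tree
   decomposes into VAR == a with *MINUS_LITP == 5 (the subtracted
   literal moves to the MINUS slot rather than being negated); with
   NEGATE_P set the parts swap again, giving *LITP == 5 and
   *MINUS_VARP == a.  */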

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
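
/* Shifts and rotates are exempted from the signedness/precision/mode
   check above because their second operand is a shift count whose type
   need not match the type of the shifted operand.  */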

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}
      else
        tmp = arg2;

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
      else
	res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
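
/* A minimal usage sketch (assuming two same-precision wide_ints A and B):

     wide_int res;
     wi::overflow_type ovf;
     if (wide_int_binop (res, PLUS_EXPR, a, b, SIGNED, &ovf))
       ... res holds a + b and ovf records any signed overflow ...
     else
       ... CODE was not evaluable at compile time ...  */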

/* Combine two poly_ints ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
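
/* For example, adding two signed INTEGER_CSTs whose mathematical sum
   does not fit the type yields an INTEGER_CST with TREE_OVERFLOW set,
   since the expression above passes the overflow indication down to
   force_fit_type.  An OVERFLOWABLE of -1 requests the same treatment
   for unsigned types as well.  */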

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
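
/* LSHIFT_EXPR qualifies only in operand 1 because (a + b) << c equals
   (a << c) + (b << c) modulo wrapping, whereas c << (a + b) has no such
   decomposition.  */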
1220 
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1223    are the same kind of constant and the same machine mode.  Return zero if
1224    combining the constants is not allowed in the current operating mode.  */
1225 
1226 static tree
const_binop(enum tree_code code,tree arg1,tree arg2)1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229   /* Sanity check for the recursive cases.  */
1230   if (!arg1 || !arg2)
1231     return NULL_TREE;
1232 
1233   STRIP_NOPS (arg1);
1234   STRIP_NOPS (arg2);
1235 
1236   if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237     {
1238       if (code == POINTER_PLUS_EXPR)
1239 	return int_const_binop (PLUS_EXPR,
1240 				arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241 
1242       return int_const_binop (code, arg1, arg2);
1243     }
1244 
1245   if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246     {
1247       machine_mode mode;
1248       REAL_VALUE_TYPE d1;
1249       REAL_VALUE_TYPE d2;
1250       REAL_VALUE_TYPE value;
1251       REAL_VALUE_TYPE result;
1252       bool inexact;
1253       tree t, type;
1254 
1255       /* The following codes are handled by real_arithmetic.  */
1256       switch (code)
1257 	{
1258 	case PLUS_EXPR:
1259 	case MINUS_EXPR:
1260 	case MULT_EXPR:
1261 	case RDIV_EXPR:
1262 	case MIN_EXPR:
1263 	case MAX_EXPR:
1264 	  break;
1265 
1266 	default:
1267 	  return NULL_TREE;
1268 	}
1269 
1270       d1 = TREE_REAL_CST (arg1);
1271       d2 = TREE_REAL_CST (arg2);
1272 
1273       type = TREE_TYPE (arg1);
1274       mode = TYPE_MODE (type);
1275 
1276       /* Don't perform operation if we honor signaling NaNs and
1277 	 either operand is a signaling NaN.  */
1278       if (HONOR_SNANS (mode)
1279 	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 	return NULL_TREE;
1282 
1283       /* Don't perform operation if it would raise a division
1284 	 by zero exception.  */
1285       if (code == RDIV_EXPR
1286 	  && real_equal (&d2, &dconst0)
1287 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 	return NULL_TREE;
1289 
1290       /* If either operand is a NaN, just return it.  Otherwise, set up
1291 	 for floating-point trap; we return an overflow.  */
1292       if (REAL_VALUE_ISNAN (d1))
1293       {
1294 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1295 	   is off.  */
1296 	d1.signalling = 0;
1297 	t = build_real (type, d1);
1298 	return t;
1299       }
1300       else if (REAL_VALUE_ISNAN (d2))
1301       {
1302 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1303 	   is off.  */
1304 	d2.signalling = 0;
1305 	t = build_real (type, d2);
1306 	return t;
1307       }
1308 
1309       inexact = real_arithmetic (&value, code, &d1, &d2);
1310       real_convert (&result, mode, &value);
1311 
1312       /* Don't constant fold this floating point operation if
1313 	 both operands are not NaN but the result is NaN, and
1314 	 flag_trapping_math.  Such operations should raise an
1315 	 invalid operation exception.  */
1316       if (flag_trapping_math
1317 	  && MODE_HAS_NANS (mode)
1318 	  && REAL_VALUE_ISNAN (result)
1319 	  && !REAL_VALUE_ISNAN (d1)
1320 	  && !REAL_VALUE_ISNAN (d2))
1321 	return NULL_TREE;
1322 
1323       /* Don't constant fold this floating point operation if
1324 	 the result has overflowed and flag_trapping_math.  */
1325       if (flag_trapping_math
1326 	  && MODE_HAS_INFINITIES (mode)
1327 	  && REAL_VALUE_ISINF (result)
1328 	  && !REAL_VALUE_ISINF (d1)
1329 	  && !REAL_VALUE_ISINF (d2))
1330 	return NULL_TREE;
1331 
1332       /* Don't constant fold this floating point operation if the
1333 	 result may dependent upon the run-time rounding mode and
1334 	 flag_rounding_math is set, or if GCC's software emulation
1335 	 is unable to accurately represent the result.  */
1336       if ((flag_rounding_math
1337 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1338 	  && (inexact || !real_identical (&result, &value)))
1339 	return NULL_TREE;
1340 
1341       t = build_real (type, result);
1342 
1343       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1344       return t;
1345     }
1346 
1347   if (TREE_CODE (arg1) == FIXED_CST)
1348     {
1349       FIXED_VALUE_TYPE f1;
1350       FIXED_VALUE_TYPE f2;
1351       FIXED_VALUE_TYPE result;
1352       tree t, type;
1353       int sat_p;
1354       bool overflow_p;
1355 
1356       /* The following codes are handled by fixed_arithmetic.  */
1357       switch (code)
1358         {
1359 	case PLUS_EXPR:
1360 	case MINUS_EXPR:
1361 	case MULT_EXPR:
1362 	case TRUNC_DIV_EXPR:
1363 	  if (TREE_CODE (arg2) != FIXED_CST)
1364 	    return NULL_TREE;
1365 	  f2 = TREE_FIXED_CST (arg2);
1366 	  break;
1367 
1368 	case LSHIFT_EXPR:
1369 	case RSHIFT_EXPR:
1370 	  {
1371 	    if (TREE_CODE (arg2) != INTEGER_CST)
1372 	      return NULL_TREE;
1373 	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1374 	    f2.data.high = w2.elt (1);
1375 	    f2.data.low = w2.ulow ();
1376 	    f2.mode = SImode;
1377 	  }
1378 	  break;
1379 
1380         default:
1381 	  return NULL_TREE;
1382         }
1383 
1384       f1 = TREE_FIXED_CST (arg1);
1385       type = TREE_TYPE (arg1);
1386       sat_p = TYPE_SATURATING (type);
1387       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1388       t = build_fixed (type, result);
1389       /* Propagate overflow flags.  */
1390       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1391 	TREE_OVERFLOW (t) = 1;
1392       return t;
1393     }
1394 
1395   if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1396     {
1397       tree type = TREE_TYPE (arg1);
1398       tree r1 = TREE_REALPART (arg1);
1399       tree i1 = TREE_IMAGPART (arg1);
1400       tree r2 = TREE_REALPART (arg2);
1401       tree i2 = TREE_IMAGPART (arg2);
1402       tree real, imag;
1403 
1404       switch (code)
1405 	{
1406 	case PLUS_EXPR:
1407 	case MINUS_EXPR:
1408 	  real = const_binop (code, r1, r2);
1409 	  imag = const_binop (code, i1, i2);
1410 	  break;
1411 
1412 	case MULT_EXPR:
1413 	  if (COMPLEX_FLOAT_TYPE_P (type))
1414 	    return do_mpc_arg2 (arg1, arg2, type,
1415 				/* do_nonfinite= */ folding_initializer,
1416 				mpc_mul);
1417 
1418 	  real = const_binop (MINUS_EXPR,
1419 			      const_binop (MULT_EXPR, r1, r2),
1420 			      const_binop (MULT_EXPR, i1, i2));
1421 	  imag = const_binop (PLUS_EXPR,
1422 			      const_binop (MULT_EXPR, r1, i2),
1423 			      const_binop (MULT_EXPR, i1, r2));
1424 	  break;
1425 
1426 	case RDIV_EXPR:
1427 	  if (COMPLEX_FLOAT_TYPE_P (type))
1428 	    return do_mpc_arg2 (arg1, arg2, type,
1429                                 /* do_nonfinite= */ folding_initializer,
1430 				mpc_div);
1431 	  /* Fallthru. */
1432 	case TRUNC_DIV_EXPR:
1433 	case CEIL_DIV_EXPR:
1434 	case FLOOR_DIV_EXPR:
1435 	case ROUND_DIV_EXPR:
1436 	  if (flag_complex_method == 0)
1437 	  {
1438 	    /* Keep this algorithm in sync with
1439 	       tree-complex.c:expand_complex_div_straight().
1440 
1441 	       Expand complex division to scalars, straightforward algorithm.
1442 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1443 	       t = br*br + bi*bi
1444 	    */
1445 	    tree magsquared
1446 	      = const_binop (PLUS_EXPR,
1447 			     const_binop (MULT_EXPR, r2, r2),
1448 			     const_binop (MULT_EXPR, i2, i2));
1449 	    tree t1
1450 	      = const_binop (PLUS_EXPR,
1451 			     const_binop (MULT_EXPR, r1, r2),
1452 			     const_binop (MULT_EXPR, i1, i2));
1453 	    tree t2
1454 	      = const_binop (MINUS_EXPR,
1455 			     const_binop (MULT_EXPR, i1, r2),
1456 			     const_binop (MULT_EXPR, r1, i2));
1457 
1458 	    real = const_binop (code, t1, magsquared);
1459 	    imag = const_binop (code, t2, magsquared);
1460 	  }
1461 	  else
1462 	  {
1463 	    /* Keep this algorithm in sync with
1464                tree-complex.c:expand_complex_div_wide().
1465 
1466 	       Expand complex division to scalars, modified algorithm to minimize
1467 	       overflow with wide input ranges.  */
1468 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1469 					fold_abs_const (r2, TREE_TYPE (type)),
1470 					fold_abs_const (i2, TREE_TYPE (type)));
1471 
1472 	    if (integer_nonzerop (compare))
1473 	      {
1474 		/* In the TRUE branch, we compute
1475 		   ratio = br/bi;
1476 		   div = (br * ratio) + bi;
1477 		   tr = (ar * ratio) + ai;
1478 		   ti = (ai * ratio) - ar;
1479 		   tr = tr / div;
1480 		   ti = ti / div;  */
1481 		tree ratio = const_binop (code, r2, i2);
1482 		tree div = const_binop (PLUS_EXPR, i2,
1483 					const_binop (MULT_EXPR, r2, ratio));
1484 		real = const_binop (MULT_EXPR, r1, ratio);
1485 		real = const_binop (PLUS_EXPR, real, i1);
1486 		real = const_binop (code, real, div);
1487 
1488 		imag = const_binop (MULT_EXPR, i1, ratio);
1489 		imag = const_binop (MINUS_EXPR, imag, r1);
1490 		imag = const_binop (code, imag, div);
1491 	      }
1492 	    else
1493 	      {
1494 		/* In the FALSE branch, we compute
1495 		   ratio = d/c;
1496 		   divisor = (d * ratio) + c;
1497 		   tr = (b * ratio) + a;
1498 		   ti = b - (a * ratio);
1499 		   tr = tr / div;
1500 		   ti = ti / div;  */
1501 		tree ratio = const_binop (code, i2, r2);
1502 		tree div = const_binop (PLUS_EXPR, r2,
1503                                         const_binop (MULT_EXPR, i2, ratio));
1504 
1505 		real = const_binop (MULT_EXPR, i1, ratio);
1506 		real = const_binop (PLUS_EXPR, real, r1);
1507 		real = const_binop (code, real, div);
1508 
1509 		imag = const_binop (MULT_EXPR, r1, ratio);
1510 		imag = const_binop (MINUS_EXPR, i1, imag);
1511 		imag = const_binop (code, imag, div);
1512 	      }
1513 	  }
1514 	  break;
1515 
1516 	default:
1517 	  return NULL_TREE;
1518 	}
1519 
1520       if (real && imag)
1521 	return build_complex (type, real, imag);
1522     }
1523 
1524   if (TREE_CODE (arg1) == VECTOR_CST
1525       && TREE_CODE (arg2) == VECTOR_CST
1526       && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1527 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1528     {
1529       tree type = TREE_TYPE (arg1);
1530       bool step_ok_p;
1531       if (VECTOR_CST_STEPPED_P (arg1)
1532 	  && VECTOR_CST_STEPPED_P (arg2))
1533 	/* We can operate directly on the encoding if:
1534 
1535 	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1536 	    implies
1537 	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1538 
1539 	   Addition and subtraction are the supported operators
1540 	   for which this is true.  */
1541 	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1542       else if (VECTOR_CST_STEPPED_P (arg1))
1543 	/* We can operate directly on stepped encodings if:
1544 
1545 	     a3 - a2 == a2 - a1
1546 	   implies:
1547 	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1548 
1549 	   which is true if (x -> x op c) distributes over addition.  */
1550 	step_ok_p = distributes_over_addition_p (code, 1);
1551       else
1552 	/* Similarly in reverse.  */
1553 	step_ok_p = distributes_over_addition_p (code, 2);
1554       tree_vector_builder elts;
1555       if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1556 	return NULL_TREE;
1557       unsigned int count = elts.encoded_nelts ();
1558       for (unsigned int i = 0; i < count; ++i)
1559 	{
1560 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1561 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1562 
1563 	  tree elt = const_binop (code, elem1, elem2);
1564 
1565 	  /* It is possible that const_binop cannot handle the given
1566 	     code and return NULL_TREE */
1567 	  if (elt == NULL_TREE)
1568 	    return NULL_TREE;
1569 	  elts.quick_push (elt);
1570 	}
1571 
1572       return elts.build ();
1573     }
1574 
1575   /* Shifts allow a scalar offset for a vector.  */
1576   if (TREE_CODE (arg1) == VECTOR_CST
1577       && TREE_CODE (arg2) == INTEGER_CST)
1578     {
1579       tree type = TREE_TYPE (arg1);
1580       bool step_ok_p = distributes_over_addition_p (code, 1);
1581       tree_vector_builder elts;
1582       if (!elts.new_unary_operation (type, arg1, step_ok_p))
1583 	return NULL_TREE;
1584       unsigned int count = elts.encoded_nelts ();
1585       for (unsigned int i = 0; i < count; ++i)
1586 	{
1587 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1588 
1589 	  tree elt = const_binop (code, elem1, arg2);
1590 
1591 	  /* It is possible that const_binop cannot handle the given
1592 	     code and return NULL_TREE.  */
1593 	  if (elt == NULL_TREE)
1594 	    return NULL_TREE;
1595 	  elts.quick_push (elt);
1596 	}
1597 
1598       return elts.build ();
1599     }
1600   return NULL_TREE;
1601 }
1602 
1603 /* Overload that adds a TYPE parameter to be able to dispatch
1604    to fold_relational_const.  */
1605 
1606 tree
const_binop(enum tree_code code,tree type,tree arg1,tree arg2)1607 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1608 {
1609   if (TREE_CODE_CLASS (code) == tcc_comparison)
1610     return fold_relational_const (code, type, arg1, arg2);
1611 
1612   /* ???  Until we make the const_binop worker take the type of the
1613      result as argument put those cases that need it here.  */
1614   switch (code)
1615     {
1616     case VEC_SERIES_EXPR:
1617       if (CONSTANT_CLASS_P (arg1)
1618 	  && CONSTANT_CLASS_P (arg2))
1619 	return build_vec_series (type, arg1, arg2);
1620       return NULL_TREE;
1621 
1622     case COMPLEX_EXPR:
1623       if ((TREE_CODE (arg1) == REAL_CST
1624 	   && TREE_CODE (arg2) == REAL_CST)
1625 	  || (TREE_CODE (arg1) == INTEGER_CST
1626 	      && TREE_CODE (arg2) == INTEGER_CST))
1627 	return build_complex (type, arg1, arg2);
1628       return NULL_TREE;
1629 
1630     case POINTER_DIFF_EXPR:
1631       if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1632 	{
1633 	  poly_offset_int res = (wi::to_poly_offset (arg1)
1634 				 - wi::to_poly_offset (arg2));
1635 	  return force_fit_type (type, res, 1,
1636 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1637 	}
1638       return NULL_TREE;
1639 
1640     case VEC_PACK_TRUNC_EXPR:
1641     case VEC_PACK_FIX_TRUNC_EXPR:
1642     case VEC_PACK_FLOAT_EXPR:
1643       {
1644 	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1645 
1646 	if (TREE_CODE (arg1) != VECTOR_CST
1647 	    || TREE_CODE (arg2) != VECTOR_CST)
1648 	  return NULL_TREE;
1649 
1650 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1651 	  return NULL_TREE;
1652 
1653 	out_nelts = in_nelts * 2;
1654 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1655 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1656 
1657 	tree_vector_builder elts (type, out_nelts, 1);
1658 	for (i = 0; i < out_nelts; i++)
1659 	  {
1660 	    tree elt = (i < in_nelts
1661 			? VECTOR_CST_ELT (arg1, i)
1662 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1663 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1664 				      ? NOP_EXPR
1665 				      : code == VEC_PACK_FLOAT_EXPR
1666 				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1667 				      TREE_TYPE (type), elt);
1668 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1669 	      return NULL_TREE;
1670 	    elts.quick_push (elt);
1671 	  }
1672 
1673 	return elts.build ();
1674       }
1675 
1676     case VEC_WIDEN_MULT_LO_EXPR:
1677     case VEC_WIDEN_MULT_HI_EXPR:
1678     case VEC_WIDEN_MULT_EVEN_EXPR:
1679     case VEC_WIDEN_MULT_ODD_EXPR:
1680       {
1681 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1682 
1683 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1684 	  return NULL_TREE;
1685 
1686 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1687 	  return NULL_TREE;
1688 	out_nelts = in_nelts / 2;
1689 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1690 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1691 
1692 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1693 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1694 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1695 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1696 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1697 	  scale = 1, ofs = 0;
1698 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1699 	  scale = 1, ofs = 1;
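	/* Exposition-only note: output element OUT reads input element
	   (OUT << SCALE) + OFS, so with in_nelts == 8 and out_nelts == 4,
	   EVEN multiplies elements 0,2,4,6, ODD multiplies 1,3,5,7, and
	   LO/HI multiply one contiguous half, chosen by BYTES_BIG_ENDIAN.  */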
1700 
1701 	tree_vector_builder elts (type, out_nelts, 1);
1702 	for (out = 0; out < out_nelts; out++)
1703 	  {
1704 	    unsigned int in = (out << scale) + ofs;
1705 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1706 					  VECTOR_CST_ELT (arg1, in));
1707 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1708 					  VECTOR_CST_ELT (arg2, in));
1709 
1710 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1711 	      return NULL_TREE;
1712 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1713 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1714 	      return NULL_TREE;
1715 	    elts.quick_push (elt);
1716 	  }
1717 
1718 	return elts.build ();
1719       }
1720 
1721     default:;
1722     }
1723 
1724   if (TREE_CODE_CLASS (code) != tcc_binary)
1725     return NULL_TREE;
1726 
1727   /* Make sure TYPE and ARG1 have the same saturating flag.  */
1728   gcc_checking_assert (TYPE_SATURATING (type)
1729 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1730 
1731   return const_binop (code, arg1, arg2);
1732 }
1733 
1734 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1735    Return NULL_TREE if computing the constant is not possible.  */
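/* Exposition-only example: const_unop (NEGATE_EXPR, type, c) on the
   INTEGER_CST 5 yields the INTEGER_CST -5 via fold_negate_expr, and
   BIT_NOT_EXPR of an INTEGER_CST folds through fold_not_const.  */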
1736 
1737 tree
1738 const_unop (enum tree_code code, tree type, tree arg0)
1739 {
1740   /* Don't perform the operation, other than NEGATE and ABS, if
1741      flag_signaling_nans is on and the operand is a signaling NaN.  */
1742   if (TREE_CODE (arg0) == REAL_CST
1743       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1744       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1745       && code != NEGATE_EXPR
1746       && code != ABS_EXPR
1747       && code != ABSU_EXPR)
1748     return NULL_TREE;
1749 
1750   switch (code)
1751     {
1752     CASE_CONVERT:
1753     case FLOAT_EXPR:
1754     case FIX_TRUNC_EXPR:
1755     case FIXED_CONVERT_EXPR:
1756       return fold_convert_const (code, type, arg0);
1757 
1758     case ADDR_SPACE_CONVERT_EXPR:
1759       /* If the source address is 0, and the source address space
1760 	 cannot have a valid object at 0, fold to a null pointer of the dest type.  */
1761       if (integer_zerop (arg0)
1762 	  && !(targetm.addr_space.zero_address_valid
1763 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1764 	return fold_convert_const (code, type, arg0);
1765       break;
1766 
1767     case VIEW_CONVERT_EXPR:
1768       return fold_view_convert_expr (type, arg0);
1769 
1770     case NEGATE_EXPR:
1771       {
1772 	/* Can't call fold_negate_const directly here as that doesn't
1773 	   handle all cases and we might not be able to negate some
1774 	   constants.  */
1775 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1776 	if (tem && CONSTANT_CLASS_P (tem))
1777 	  return tem;
1778 	break;
1779       }
1780 
1781     case ABS_EXPR:
1782     case ABSU_EXPR:
1783       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1784 	return fold_abs_const (arg0, type);
1785       break;
1786 
1787     case CONJ_EXPR:
1788       if (TREE_CODE (arg0) == COMPLEX_CST)
1789 	{
1790 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1791 					  TREE_TYPE (type));
1792 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1793 	}
1794       break;
1795 
1796     case BIT_NOT_EXPR:
1797       if (TREE_CODE (arg0) == INTEGER_CST)
1798 	return fold_not_const (arg0, type);
1799       else if (POLY_INT_CST_P (arg0))
1800 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1801       /* Perform BIT_NOT_EXPR on each element individually.  */
1802       else if (TREE_CODE (arg0) == VECTOR_CST)
1803 	{
1804 	  tree elem;
1805 
1806 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1807 	  tree_vector_builder elements;
1808 	  elements.new_unary_operation (type, arg0, true);
1809 	  unsigned int i, count = elements.encoded_nelts ();
1810 	  for (i = 0; i < count; ++i)
1811 	    {
1812 	      elem = VECTOR_CST_ELT (arg0, i);
1813 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1814 	      if (elem == NULL_TREE)
1815 		break;
1816 	      elements.quick_push (elem);
1817 	    }
1818 	  if (i == count)
1819 	    return elements.build ();
1820 	}
1821       break;
1822 
1823     case TRUTH_NOT_EXPR:
1824       if (TREE_CODE (arg0) == INTEGER_CST)
1825 	return constant_boolean_node (integer_zerop (arg0), type);
1826       break;
1827 
1828     case REALPART_EXPR:
1829       if (TREE_CODE (arg0) == COMPLEX_CST)
1830 	return fold_convert (type, TREE_REALPART (arg0));
1831       break;
1832 
1833     case IMAGPART_EXPR:
1834       if (TREE_CODE (arg0) == COMPLEX_CST)
1835 	return fold_convert (type, TREE_IMAGPART (arg0));
1836       break;
1837 
1838     case VEC_UNPACK_LO_EXPR:
1839     case VEC_UNPACK_HI_EXPR:
1840     case VEC_UNPACK_FLOAT_LO_EXPR:
1841     case VEC_UNPACK_FLOAT_HI_EXPR:
1842     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1843     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1844       {
1845 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1846 	enum tree_code subcode;
1847 
1848 	if (TREE_CODE (arg0) != VECTOR_CST)
1849 	  return NULL_TREE;
1850 
1851 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1852 	  return NULL_TREE;
1853 	out_nelts = in_nelts / 2;
1854 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1855 
1856 	unsigned int offset = 0;
1857 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1858 				   || code == VEC_UNPACK_FLOAT_LO_EXPR
1859 				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1860 	  offset = out_nelts;
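	/* Exposition-only note: on a little-endian target the _LO variants
	   thus read elements [0, out_nelts) and the _HI variants read
	   [out_nelts, in_nelts); big-endian targets swap the two halves.  */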
1861 
1862 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1863 	  subcode = NOP_EXPR;
1864 	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1865 		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1866 	  subcode = FLOAT_EXPR;
1867 	else
1868 	  subcode = FIX_TRUNC_EXPR;
1869 
1870 	tree_vector_builder elts (type, out_nelts, 1);
1871 	for (i = 0; i < out_nelts; i++)
1872 	  {
1873 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1874 					   VECTOR_CST_ELT (arg0, i + offset));
1875 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1876 	      return NULL_TREE;
1877 	    elts.quick_push (elt);
1878 	  }
1879 
1880 	return elts.build ();
1881       }
1882 
1883     case VEC_DUPLICATE_EXPR:
1884       if (CONSTANT_CLASS_P (arg0))
1885 	return build_vector_from_val (type, arg0);
1886       return NULL_TREE;
1887 
1888     default:
1889       break;
1890     }
1891 
1892   return NULL_TREE;
1893 }
1894 
1895 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1896    indicates which particular sizetype to create.  */
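/* Exposition-only example: size_int_kind (16, stk_sizetype) builds the
   sizetype constant 16; the size_int, ssize_int, bitsize_int and
   sbitsize_int macros in tree.h are the usual wrappers around this
   function.  */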
1897 
1898 tree
1899 size_int_kind (poly_int64 number, enum size_type_kind kind)
1900 {
1901   return build_int_cst (sizetype_tab[(int) kind], number);
1902 }
1903 
1904 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1905    is a tree code.  The type of the result is taken from the operands.
1906    Both must be equivalent integer types, a la int_binop_types_match_p.
1907    If the operands are constant, so is the result.  */
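/* Exposition-only example: size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) folds directly to the sizetype constant 12 via
   int_const_binop, and adding size_zero_node simply returns the other
   operand unchanged.  */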
1908 
1909 tree
1910 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1911 {
1912   tree type = TREE_TYPE (arg0);
1913 
1914   if (arg0 == error_mark_node || arg1 == error_mark_node)
1915     return error_mark_node;
1916 
1917   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1918                                        TREE_TYPE (arg1)));
1919 
1920   /* Handle the special case of two poly_int constants faster.  */
1921   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1922     {
1923       /* And some specific cases even faster than that.  */
1924       if (code == PLUS_EXPR)
1925 	{
1926 	  if (integer_zerop (arg0)
1927 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1928 	    return arg1;
1929 	  if (integer_zerop (arg1)
1930 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1931 	    return arg0;
1932 	}
1933       else if (code == MINUS_EXPR)
1934 	{
1935 	  if (integer_zerop (arg1)
1936 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1937 	    return arg0;
1938 	}
1939       else if (code == MULT_EXPR)
1940 	{
1941 	  if (integer_onep (arg0)
1942 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1943 	    return arg1;
1944 	}
1945 
1946       /* Handle the general case of two integer constants.  For sizetype
1947          constant calculations we always want to know about overflow,
1948 	 even in the unsigned case.  */
1949       tree res = int_const_binop (code, arg0, arg1, -1);
1950       if (res != NULL_TREE)
1951 	return res;
1952     }
1953 
1954   return fold_build2_loc (loc, code, type, arg0, arg1);
1955 }
1956 
1957 /* Given two values, either both of sizetype or both of bitsizetype,
1958    compute the difference between the two values.  Return the value
1959    in the signed type corresponding to the type of the operands.  */
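/* Exposition-only example: for the sizetype constants 4 and 8,
   size_diffop (size_int (4), size_int (8)) computes
   0 - (ssizetype) (8 - 4) and returns the ssizetype constant -4;
   subtracting in the order that cannot overflow and negating in the
   signed type avoids any unsigned wraparound.  */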
1960 
1961 tree
1962 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1963 {
1964   tree type = TREE_TYPE (arg0);
1965   tree ctype;
1966 
1967   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1968 				       TREE_TYPE (arg1)));
1969 
1970   /* If the type is already signed, just do the simple thing.  */
1971   if (!TYPE_UNSIGNED (type))
1972     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1973 
1974   if (type == sizetype)
1975     ctype = ssizetype;
1976   else if (type == bitsizetype)
1977     ctype = sbitsizetype;
1978   else
1979     ctype = signed_type_for (type);
1980 
1981   /* If either operand is not a constant, do the conversions to the signed
1982      type and subtract.  The hardware will do the right thing with any
1983      overflow in the subtraction.  */
1984   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1985     return size_binop_loc (loc, MINUS_EXPR,
1986 			   fold_convert_loc (loc, ctype, arg0),
1987 			   fold_convert_loc (loc, ctype, arg1));
1988 
1989   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1990      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1991      overflow) and negate (which can't either).  Special-case a result
1992      of zero while we're here.  */
1993   if (tree_int_cst_equal (arg0, arg1))
1994     return build_int_cst (ctype, 0);
1995   else if (tree_int_cst_lt (arg1, arg0))
1996     return fold_convert_loc (loc, ctype,
1997 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1998   else
1999     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2000 			   fold_convert_loc (loc, ctype,
2001 					     size_binop_loc (loc,
2002 							     MINUS_EXPR,
2003 							     arg1, arg0)));
2004 }
2005 
2006 /* A subroutine of fold_convert_const handling conversions of an
2007    INTEGER_CST to another integer type.  */
2008 
2009 static tree
2010 fold_convert_const_int_from_int (tree type, const_tree arg1)
2011 {
2012   /* Given an integer constant, make a new constant with the new type,
2013      appropriately sign-extended or truncated.  Use widest_int
2014      so that any extension is done according to ARG1's type.  */
2015   return force_fit_type (type, wi::to_widest (arg1),
2016 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2017 			 TREE_OVERFLOW (arg1));
2018 }
2019 
2020 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2021    to an integer type.  */
2022 
2023 static tree
2024 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2025 {
2026   bool overflow = false;
2027   tree t;
2028 
2029   /* The following code implements the floating point to integer
2030      conversion rules required by the Java Language Specification:
2031      IEEE NaNs are mapped to zero, and values that overflow the
2032      target precision saturate, i.e. values greater than INT_MAX
2033      are mapped to INT_MAX and values less than INT_MIN are mapped
2034      to INT_MIN.  These semantics are allowed by the C and C++
2035      standards, which simply state that the behavior of FP-to-integer
2036      conversion is unspecified upon overflow.  */
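  /* Exposition-only example: for a 32-bit signed TYPE, FIX_TRUNC_EXPR
     maps 3.9 to 3 and -3.9 to -3 (truncation toward zero), maps 1e30
     to INT_MAX and -1e30 to INT_MIN with TREE_OVERFLOW set, and maps
     NaN to 0, also with TREE_OVERFLOW set.  */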
2037 
2038   wide_int val;
2039   REAL_VALUE_TYPE r;
2040   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2041 
2042   switch (code)
2043     {
2044     case FIX_TRUNC_EXPR:
2045       real_trunc (&r, VOIDmode, &x);
2046       break;
2047 
2048     default:
2049       gcc_unreachable ();
2050     }
2051 
2052   /* If R is NaN, return zero and show we have an overflow.  */
2053   if (REAL_VALUE_ISNAN (r))
2054     {
2055       overflow = true;
2056       val = wi::zero (TYPE_PRECISION (type));
2057     }
2058 
2059   /* See if R is less than the lower bound or greater than the
2060      upper bound.  */
2061 
2062   if (! overflow)
2063     {
2064       tree lt = TYPE_MIN_VALUE (type);
2065       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2066       if (real_less (&r, &l))
2067 	{
2068 	  overflow = true;
2069 	  val = wi::to_wide (lt);
2070 	}
2071     }
2072 
2073   if (! overflow)
2074     {
2075       tree ut = TYPE_MAX_VALUE (type);
2076       if (ut)
2077 	{
2078 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2079 	  if (real_less (&u, &r))
2080 	    {
2081 	      overflow = true;
2082 	      val = wi::to_wide (ut);
2083 	    }
2084 	}
2085     }
2086 
2087   if (! overflow)
2088     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2089 
2090   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2091   return t;
2092 }
2093 
2094 /* A subroutine of fold_convert_const handling conversions of a
2095    FIXED_CST to an integer type.  */
2096 
2097 static tree
2098 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2099 {
2100   tree t;
2101   double_int temp, temp_trunc;
2102   scalar_mode mode;
2103 
2104   /* Right shift the FIXED_CST value into TEMP by FBIT bits.  */
2105   temp = TREE_FIXED_CST (arg1).data;
2106   mode = TREE_FIXED_CST (arg1).mode;
2107   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2108     {
2109       temp = temp.rshift (GET_MODE_FBIT (mode),
2110 			  HOST_BITS_PER_DOUBLE_INT,
2111 			  SIGNED_FIXED_POINT_MODE_P (mode));
2112 
2113       /* Left shift TEMP into TEMP_TRUNC by FBIT bits.  */
2114       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2115 				HOST_BITS_PER_DOUBLE_INT,
2116 				SIGNED_FIXED_POINT_MODE_P (mode));
2117     }
2118   else
2119     {
2120       temp = double_int_zero;
2121       temp_trunc = double_int_zero;
2122     }
2123 
2124   /* If FIXED_CST is negative, we need to round the value toward 0:
2125      if the fractional bits are nonzero, add 1 to TEMP.  */
2126   if (SIGNED_FIXED_POINT_MODE_P (mode)
2127       && temp_trunc.is_negative ()
2128       && TREE_FIXED_CST (arg1).data != temp_trunc)
2129     temp += double_int_one;
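  /* Exposition-only example: with FBIT == 8, the fixed-point value -1.5
     is stored as -384; the arithmetic right shift gives TEMP == -2 and
     TEMP_TRUNC == -512 != -384, so 1 is added and the result is -1,
     i.e. the value rounded toward zero.  */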
2130 
2131   /* Given a fixed-point constant, make a new constant with the new type,
2132      appropriately sign-extended or truncated.  */
2133   t = force_fit_type (type, temp, -1,
2134 		      (temp.is_negative ()
2135 		       && (TYPE_UNSIGNED (type)
2136 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2137 		      | TREE_OVERFLOW (arg1));
2138 
2139   return t;
2140 }
2141 
2142 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2143    to another floating point type.  */
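/* Exposition-only example: narrowing the double REAL_CST 1.5 to float
   yields the exactly representable SFmode constant 1.5, while a
   signaling-NaN operand makes the function bail out with NULL_TREE
   when SNaNs are honored.  */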
2144 
2145 static tree
2146 fold_convert_const_real_from_real (tree type, const_tree arg1)
2147 {
2148   REAL_VALUE_TYPE value;
2149   tree t;
2150 
2151   /* Don't perform the operation if flag_signaling_nans is on
2152      and the operand is a signaling NaN.  */
2153   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2154       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2155     return NULL_TREE;
2156 
2157   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2158   t = build_real (type, value);
2159 
2160   /* If converting an infinity or NAN to a representation that doesn't
2161      have one, set the overflow bit so that we can produce some kind of
2162      error message at the appropriate point if necessary.  It's not the
2163      most user-friendly message, but it's better than nothing.  */
2164   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2165       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2166     TREE_OVERFLOW (t) = 1;
2167   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2168 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2169     TREE_OVERFLOW (t) = 1;
2170   /* Regular overflow, conversion produced an infinity in a mode that
2171      can't represent them.  */
2172   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2173 	   && REAL_VALUE_ISINF (value)
2174 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2175     TREE_OVERFLOW (t) = 1;
2176   else
2177     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2178   return t;
2179 }
2180 
2181 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2182    to a floating point type.  */
2183 
2184 static tree
2185 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2186 {
2187   REAL_VALUE_TYPE value;
2188   tree t;
2189 
2190   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2191 			   &TREE_FIXED_CST (arg1));
2192   t = build_real (type, value);
2193 
2194   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2195   return t;
2196 }
2197 
2198 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2199    to another fixed-point type.  */
2200 
2201 static tree
2202 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2203 {
2204   FIXED_VALUE_TYPE value;
2205   tree t;
2206   bool overflow_p;
2207 
2208   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2209 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2210   t = build_fixed (type, value);
2211 
2212   /* Propagate overflow flags.  */
2213   if (overflow_p | TREE_OVERFLOW (arg1))
2214     TREE_OVERFLOW (t) = 1;
2215   return t;
2216 }
2217 
2218 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2219    to a fixed-point type.  */
2220 
2221 static tree
2222 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2223 {
2224   FIXED_VALUE_TYPE value;
2225   tree t;
2226   bool overflow_p;
2227   double_int di;
2228 
2229   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2230 
2231   di.low = TREE_INT_CST_ELT (arg1, 0);
2232   if (TREE_INT_CST_NUNITS (arg1) == 1)
2233     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2234   else
2235     di.high = TREE_INT_CST_ELT (arg1, 1);
2236 
2237   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2238 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2239 				       TYPE_SATURATING (type));
2240   t = build_fixed (type, value);
2241 
2242   /* Propagate overflow flags.  */
2243   if (overflow_p | TREE_OVERFLOW (arg1))
2244     TREE_OVERFLOW (t) = 1;
2245   return t;
2246 }
2247 
2248 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2249    to a fixed-point type.  */
2250 
2251 static tree
2252 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2253 {
2254   FIXED_VALUE_TYPE value;
2255   tree t;
2256   bool overflow_p;
2257 
2258   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2259 					&TREE_REAL_CST (arg1),
2260 					TYPE_SATURATING (type));
2261   t = build_fixed (type, value);
2262 
2263   /* Propagate overflow flags.  */
2264   if (overflow_p | TREE_OVERFLOW (arg1))
2265     TREE_OVERFLOW (t) = 1;
2266   return t;
2267 }
2268 
2269 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2270    type TYPE.  If no simplification can be done return NULL_TREE.  */
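/* Exposition-only example: fold_convert_const (FIX_TRUNC_EXPR,
   integer_type_node, c) turns the REAL_CST 2.7 into the INTEGER_CST 2,
   while a NOP_EXPR between integer types re-extends or truncates the
   value at the new precision via force_fit_type.  */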
2271 
2272 static tree
2273 fold_convert_const (enum tree_code code, tree type, tree arg1)
2274 {
2275   tree arg_type = TREE_TYPE (arg1);
2276   if (arg_type == type)
2277     return arg1;
2278 
2279   /* We can't widen types, since the runtime value could overflow the
2280      original type before being extended to the new type.  */
2281   if (POLY_INT_CST_P (arg1)
2282       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2283       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2284     return build_poly_int_cst (type,
2285 			       poly_wide_int::from (poly_int_cst_value (arg1),
2286 						    TYPE_PRECISION (type),
2287 						    TYPE_SIGN (arg_type)));
2288 
2289   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2290       || TREE_CODE (type) == OFFSET_TYPE)
2291     {
2292       if (TREE_CODE (arg1) == INTEGER_CST)
2293 	return fold_convert_const_int_from_int (type, arg1);
2294       else if (TREE_CODE (arg1) == REAL_CST)
2295 	return fold_convert_const_int_from_real (code, type, arg1);
2296       else if (TREE_CODE (arg1) == FIXED_CST)
2297 	return fold_convert_const_int_from_fixed (type, arg1);
2298     }
2299   else if (TREE_CODE (type) == REAL_TYPE)
2300     {
2301       if (TREE_CODE (arg1) == INTEGER_CST)
2302 	return build_real_from_int_cst (type, arg1);
2303       else if (TREE_CODE (arg1) == REAL_CST)
2304 	return fold_convert_const_real_from_real (type, arg1);
2305       else if (TREE_CODE (arg1) == FIXED_CST)
2306 	return fold_convert_const_real_from_fixed (type, arg1);
2307     }
2308   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2309     {
2310       if (TREE_CODE (arg1) == FIXED_CST)
2311 	return fold_convert_const_fixed_from_fixed (type, arg1);
2312       else if (TREE_CODE (arg1) == INTEGER_CST)
2313 	return fold_convert_const_fixed_from_int (type, arg1);
2314       else if (TREE_CODE (arg1) == REAL_CST)
2315 	return fold_convert_const_fixed_from_real (type, arg1);
2316     }
2317   else if (TREE_CODE (type) == VECTOR_TYPE)
2318     {
2319       if (TREE_CODE (arg1) == VECTOR_CST
2320 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2321 	{
2322 	  tree elttype = TREE_TYPE (type);
2323 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2324 	  /* We can't handle steps directly when extending, since the
2325 	     values need to wrap at the original precision first.  */
2326 	  bool step_ok_p
2327 	    = (INTEGRAL_TYPE_P (elttype)
2328 	       && INTEGRAL_TYPE_P (arg1_elttype)
2329 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2330 	  tree_vector_builder v;
2331 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2332 	    return NULL_TREE;
2333 	  unsigned int len = v.encoded_nelts ();
2334 	  for (unsigned int i = 0; i < len; ++i)
2335 	    {
2336 	      tree elt = VECTOR_CST_ELT (arg1, i);
2337 	      tree cvt = fold_convert_const (code, elttype, elt);
2338 	      if (cvt == NULL_TREE)
2339 		return NULL_TREE;
2340 	      v.quick_push (cvt);
2341 	    }
2342 	  return v.build ();
2343 	}
2344     }
2345   return NULL_TREE;
2346 }
2347 
2348 /* Construct a vector of zero elements of vector type TYPE.  */
2349 
2350 static tree
2351 build_zero_vector (tree type)
2352 {
2353   tree t;
2354 
2355   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2356   return build_vector_from_val (type, t);
2357 }
2358 
2359 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2360 
2361 bool
2362 fold_convertible_p (const_tree type, const_tree arg)
2363 {
2364   const_tree orig = TREE_TYPE (arg);
2365 
2366   if (type == orig)
2367     return true;
2368 
2369   if (TREE_CODE (arg) == ERROR_MARK
2370       || TREE_CODE (type) == ERROR_MARK
2371       || TREE_CODE (orig) == ERROR_MARK)
2372     return false;
2373 
2374   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2375     return true;
2376 
2377   switch (TREE_CODE (type))
2378     {
2379     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2380     case POINTER_TYPE: case REFERENCE_TYPE:
2381     case OFFSET_TYPE:
2382       return (INTEGRAL_TYPE_P (orig)
2383 	      || (POINTER_TYPE_P (orig)
2384 		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2385 	      || TREE_CODE (orig) == OFFSET_TYPE);
2386 
2387     case REAL_TYPE:
2388     case FIXED_POINT_TYPE:
2389     case VOID_TYPE:
2390       return TREE_CODE (type) == TREE_CODE (orig);
2391 
2392     case VECTOR_TYPE:
2393       return (VECTOR_TYPE_P (orig)
2394 	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
2395 			   TYPE_VECTOR_SUBPARTS (orig))
2396 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2397 
2398     default:
2399       return false;
2400     }
2401 }
2402 
2403 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2404    simple conversions in preference to calling the front-end's convert.  */
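/* Exposition-only example: fold_convert (double_type_node, i) on an
   INTEGER_CST folds to the corresponding REAL_CST, and converting a
   COMPLEX_TYPE value to a scalar type goes through its REALPART_EXPR
   first.  */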
2405 
2406 tree
2407 fold_convert_loc (location_t loc, tree type, tree arg)
2408 {
2409   tree orig = TREE_TYPE (arg);
2410   tree tem;
2411 
2412   if (type == orig)
2413     return arg;
2414 
2415   if (TREE_CODE (arg) == ERROR_MARK
2416       || TREE_CODE (type) == ERROR_MARK
2417       || TREE_CODE (orig) == ERROR_MARK)
2418     return error_mark_node;
2419 
2420   switch (TREE_CODE (type))
2421     {
2422     case POINTER_TYPE:
2423     case REFERENCE_TYPE:
2424       /* Handle conversions between pointers to different address spaces.  */
2425       if (POINTER_TYPE_P (orig)
2426 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2427 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2428 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2429       /* fall through */
2430 
2431     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2432     case OFFSET_TYPE:
2433       if (TREE_CODE (arg) == INTEGER_CST)
2434 	{
2435 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2436 	  if (tem != NULL_TREE)
2437 	    return tem;
2438 	}
2439       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2440 	  || TREE_CODE (orig) == OFFSET_TYPE)
2441 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2442       if (TREE_CODE (orig) == COMPLEX_TYPE)
2443 	return fold_convert_loc (loc, type,
2444 				 fold_build1_loc (loc, REALPART_EXPR,
2445 						  TREE_TYPE (orig), arg));
2446       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2447 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2448       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2449 
2450     case REAL_TYPE:
2451       if (TREE_CODE (arg) == INTEGER_CST)
2452 	{
2453 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2454 	  if (tem != NULL_TREE)
2455 	    return tem;
2456 	}
2457       else if (TREE_CODE (arg) == REAL_CST)
2458 	{
2459 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2460 	  if (tem != NULL_TREE)
2461 	    return tem;
2462 	}
2463       else if (TREE_CODE (arg) == FIXED_CST)
2464 	{
2465 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2466 	  if (tem != NULL_TREE)
2467 	    return tem;
2468 	}
2469 
2470       switch (TREE_CODE (orig))
2471 	{
2472 	case INTEGER_TYPE:
2473 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2474 	case POINTER_TYPE: case REFERENCE_TYPE:
2475 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2476 
2477 	case REAL_TYPE:
2478 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2479 
2480 	case FIXED_POINT_TYPE:
2481 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2482 
2483 	case COMPLEX_TYPE:
2484 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2485 	  return fold_convert_loc (loc, type, tem);
2486 
2487 	default:
2488 	  gcc_unreachable ();
2489 	}
2490 
2491     case FIXED_POINT_TYPE:
2492       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2493 	  || TREE_CODE (arg) == REAL_CST)
2494 	{
2495 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2496 	  if (tem != NULL_TREE)
2497 	    goto fold_convert_exit;
2498 	}
2499 
2500       switch (TREE_CODE (orig))
2501 	{
2502 	case FIXED_POINT_TYPE:
2503 	case INTEGER_TYPE:
2504 	case ENUMERAL_TYPE:
2505 	case BOOLEAN_TYPE:
2506 	case REAL_TYPE:
2507 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2508 
2509 	case COMPLEX_TYPE:
2510 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2511 	  return fold_convert_loc (loc, type, tem);
2512 
2513 	default:
2514 	  gcc_unreachable ();
2515 	}
2516 
2517     case COMPLEX_TYPE:
2518       switch (TREE_CODE (orig))
2519 	{
2520 	case INTEGER_TYPE:
2521 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2522 	case POINTER_TYPE: case REFERENCE_TYPE:
2523 	case REAL_TYPE:
2524 	case FIXED_POINT_TYPE:
2525 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2526 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2527 			      fold_convert_loc (loc, TREE_TYPE (type),
2528 					    integer_zero_node));
2529 	case COMPLEX_TYPE:
2530 	  {
2531 	    tree rpart, ipart;
2532 
2533 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2534 	      {
2535 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2536 				      TREE_OPERAND (arg, 0));
2537 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2538 				      TREE_OPERAND (arg, 1));
2539 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2540 	      }
2541 
2542 	    arg = save_expr (arg);
2543 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2544 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2545 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2546 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2547 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2548 	  }
2549 
2550 	default:
2551 	  gcc_unreachable ();
2552 	}
2553 
2554     case VECTOR_TYPE:
2555       if (integer_zerop (arg))
2556 	return build_zero_vector (type);
2557       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2558       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2559 		  || TREE_CODE (orig) == VECTOR_TYPE);
2560       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2561 
2562     case VOID_TYPE:
2563       tem = fold_ignored_result (arg);
2564       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2565 
2566     default:
2567       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2568 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2569       gcc_unreachable ();
2570     }
2571  fold_convert_exit:
2572   protected_set_expr_location_unshare (tem, loc);
2573   return tem;
2574 }
2575 
2576 /* Return false if expr can be assumed not to be an lvalue, true
2577    otherwise.  */
2578 
2579 static bool
2580 maybe_lvalue_p (const_tree x)
2581 {
2582   /* We only need to wrap lvalue tree codes.  */
2583   switch (TREE_CODE (x))
2584   {
2585   case VAR_DECL:
2586   case PARM_DECL:
2587   case RESULT_DECL:
2588   case LABEL_DECL:
2589   case FUNCTION_DECL:
2590   case SSA_NAME:
2591 
2592   case COMPONENT_REF:
2593   case MEM_REF:
2594   case INDIRECT_REF:
2595   case ARRAY_REF:
2596   case ARRAY_RANGE_REF:
2597   case BIT_FIELD_REF:
2598   case OBJ_TYPE_REF:
2599 
2600   case REALPART_EXPR:
2601   case IMAGPART_EXPR:
2602   case PREINCREMENT_EXPR:
2603   case PREDECREMENT_EXPR:
2604   case SAVE_EXPR:
2605   case TRY_CATCH_EXPR:
2606   case WITH_CLEANUP_EXPR:
2607   case COMPOUND_EXPR:
2608   case MODIFY_EXPR:
2609   case TARGET_EXPR:
2610   case COND_EXPR:
2611   case BIND_EXPR:
2612   case VIEW_CONVERT_EXPR:
2613     break;
2614 
2615   default:
2616     /* Assume the worst for front-end tree codes.  */
2617     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2618       break;
2619     return false;
2620   }
2621 
2622   return true;
2623 }
2624 
2625 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2626 
2627 tree
2628 non_lvalue_loc (location_t loc, tree x)
2629 {
2630   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2631      us.  */
2632   if (in_gimple_form)
2633     return x;
2634 
2635   if (! maybe_lvalue_p (x))
2636     return x;
2637   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2638 }
2639 
2640 /* When pedantic, return an expr equal to X but certainly not valid as a
2641    pedantic lvalue.  Otherwise, return X.  */
2642 
2643 static tree
2644 pedantic_non_lvalue_loc (location_t loc, tree x)
2645 {
2646   return protected_set_expr_location_unshare (x, loc);
2647 }
2648 
2649 /* Given a tree comparison code, return the code that is the logical inverse.
2650    It is generally not safe to do this for floating-point comparisons, except
2651    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2652    ERROR_MARK in this case.  */
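/* Exposition-only example: LT_EXPR inverts to GE_EXPR when NaNs need not
   be honored, but to UNGE_EXPR when they must be, since with a NaN
   operand both x < y and x >= y are false while UNGE is the exact
   complement of <.  */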
2653 
2654 enum tree_code
2655 invert_tree_comparison (enum tree_code code, bool honor_nans)
2656 {
2657   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2658       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2659     return ERROR_MARK;
2660 
2661   switch (code)
2662     {
2663     case EQ_EXPR:
2664       return NE_EXPR;
2665     case NE_EXPR:
2666       return EQ_EXPR;
2667     case GT_EXPR:
2668       return honor_nans ? UNLE_EXPR : LE_EXPR;
2669     case GE_EXPR:
2670       return honor_nans ? UNLT_EXPR : LT_EXPR;
2671     case LT_EXPR:
2672       return honor_nans ? UNGE_EXPR : GE_EXPR;
2673     case LE_EXPR:
2674       return honor_nans ? UNGT_EXPR : GT_EXPR;
2675     case LTGT_EXPR:
2676       return UNEQ_EXPR;
2677     case UNEQ_EXPR:
2678       return LTGT_EXPR;
2679     case UNGT_EXPR:
2680       return LE_EXPR;
2681     case UNGE_EXPR:
2682       return LT_EXPR;
2683     case UNLT_EXPR:
2684       return GE_EXPR;
2685     case UNLE_EXPR:
2686       return GT_EXPR;
2687     case ORDERED_EXPR:
2688       return UNORDERED_EXPR;
2689     case UNORDERED_EXPR:
2690       return ORDERED_EXPR;
2691     default:
2692       gcc_unreachable ();
2693     }
2694 }
2695 
2696 /* Similar, but return the comparison that results if the operands are
2697    swapped.  This is safe for floating-point.  */
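/* Exposition-only example: GT_EXPR swaps to LT_EXPR, since x > y is
   equivalent to y < x; the symmetric codes (EQ_EXPR, NE_EXPR,
   ORDERED_EXPR, UNORDERED_EXPR, LTGT_EXPR and UNEQ_EXPR) map to
   themselves.  */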
2698 
2699 enum tree_code
2700 swap_tree_comparison (enum tree_code code)
2701 {
2702   switch (code)
2703     {
2704     case EQ_EXPR:
2705     case NE_EXPR:
2706     case ORDERED_EXPR:
2707     case UNORDERED_EXPR:
2708     case LTGT_EXPR:
2709     case UNEQ_EXPR:
2710       return code;
2711     case GT_EXPR:
2712       return LT_EXPR;
2713     case GE_EXPR:
2714       return LE_EXPR;
2715     case LT_EXPR:
2716       return GT_EXPR;
2717     case LE_EXPR:
2718       return GE_EXPR;
2719     case UNGT_EXPR:
2720       return UNLT_EXPR;
2721     case UNGE_EXPR:
2722       return UNLE_EXPR;
2723     case UNLT_EXPR:
2724       return UNGT_EXPR;
2725     case UNLE_EXPR:
2726       return UNGE_EXPR;
2727     default:
2728       gcc_unreachable ();
2729     }
2730 }
2731 
2732 
2733 /* Convert a comparison tree code from an enum tree_code representation
2734    into a compcode bit-based encoding.  This function is the inverse of
2735    compcode_to_comparison.  */
2736 
2737 static enum comparison_code
2738 comparison_to_compcode (enum tree_code code)
2739 {
2740   switch (code)
2741     {
2742     case LT_EXPR:
2743       return COMPCODE_LT;
2744     case EQ_EXPR:
2745       return COMPCODE_EQ;
2746     case LE_EXPR:
2747       return COMPCODE_LE;
2748     case GT_EXPR:
2749       return COMPCODE_GT;
2750     case NE_EXPR:
2751       return COMPCODE_NE;
2752     case GE_EXPR:
2753       return COMPCODE_GE;
2754     case ORDERED_EXPR:
2755       return COMPCODE_ORD;
2756     case UNORDERED_EXPR:
2757       return COMPCODE_UNORD;
2758     case UNLT_EXPR:
2759       return COMPCODE_UNLT;
2760     case UNEQ_EXPR:
2761       return COMPCODE_UNEQ;
2762     case UNLE_EXPR:
2763       return COMPCODE_UNLE;
2764     case UNGT_EXPR:
2765       return COMPCODE_UNGT;
2766     case LTGT_EXPR:
2767       return COMPCODE_LTGT;
2768     case UNGE_EXPR:
2769       return COMPCODE_UNGE;
2770     default:
2771       gcc_unreachable ();
2772     }
2773 }
2774 
2775 /* Convert a compcode bit-based encoding of a comparison operator back
2776    to GCC's enum tree_code representation.  This function is the
2777    inverse of comparison_to_compcode.  */
2778 
2779 static enum tree_code
2780 compcode_to_comparison (enum comparison_code code)
2781 {
2782   switch (code)
2783     {
2784     case COMPCODE_LT:
2785       return LT_EXPR;
2786     case COMPCODE_EQ:
2787       return EQ_EXPR;
2788     case COMPCODE_LE:
2789       return LE_EXPR;
2790     case COMPCODE_GT:
2791       return GT_EXPR;
2792     case COMPCODE_NE:
2793       return NE_EXPR;
2794     case COMPCODE_GE:
2795       return GE_EXPR;
2796     case COMPCODE_ORD:
2797       return ORDERED_EXPR;
2798     case COMPCODE_UNORD:
2799       return UNORDERED_EXPR;
2800     case COMPCODE_UNLT:
2801       return UNLT_EXPR;
2802     case COMPCODE_UNEQ:
2803       return UNEQ_EXPR;
2804     case COMPCODE_UNLE:
2805       return UNLE_EXPR;
2806     case COMPCODE_UNGT:
2807       return UNGT_EXPR;
2808     case COMPCODE_LTGT:
2809       return LTGT_EXPR;
2810     case COMPCODE_UNGE:
2811       return UNGE_EXPR;
2812     default:
2813       gcc_unreachable ();
2814     }
2815 }
2816 
2817 /* Return true if COND1 tests the opposite condition of COND2.  */
2818 
2819 bool
2820 inverse_conditions_p (const_tree cond1, const_tree cond2)
2821 {
2822   return (COMPARISON_CLASS_P (cond1)
2823 	  && COMPARISON_CLASS_P (cond2)
2824 	  && (invert_tree_comparison
2825 	      (TREE_CODE (cond1),
2826 	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2827 	  && operand_equal_p (TREE_OPERAND (cond1, 0),
2828 			      TREE_OPERAND (cond2, 0), 0)
2829 	  && operand_equal_p (TREE_OPERAND (cond1, 1),
2830 			      TREE_OPERAND (cond2, 1), 0));
2831 }
2832 
2833 /* Return a tree for the comparison that results from combining with
2834    AND or OR (depending on CODE) the two comparisons LCODE and RCODE
2835    on the identical operands LL_ARG and LR_ARG.  Take into account
2836    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2837    if this makes the transformation invalid.  */
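/* Exposition-only example: COMPCODE_LT | COMPCODE_EQ is COMPCODE_LE, so
   (x < y) || (x == y) folds to x <= y; COMPCODE_LT & COMPCODE_EQ is
   COMPCODE_FALSE, so (x < y) && (x == y) folds to constant false.  */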
2838 
2839 tree
2840 combine_comparisons (location_t loc,
2841 		     enum tree_code code, enum tree_code lcode,
2842 		     enum tree_code rcode, tree truth_type,
2843 		     tree ll_arg, tree lr_arg)
2844 {
2845   bool honor_nans = HONOR_NANS (ll_arg);
2846   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2847   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2848   int compcode;
2849 
2850   switch (code)
2851     {
2852     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2853       compcode = lcompcode & rcompcode;
2854       break;
2855 
2856     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2857       compcode = lcompcode | rcompcode;
2858       break;
2859 
2860     default:
2861       return NULL_TREE;
2862     }
2863 
2864   if (!honor_nans)
2865     {
2866       /* Eliminate unordered comparisons, as well as LTGT and ORD,
2867 	 which are not used unless the mode has NaNs.  */
2868       compcode &= ~COMPCODE_UNORD;
2869       if (compcode == COMPCODE_LTGT)
2870 	compcode = COMPCODE_NE;
2871       else if (compcode == COMPCODE_ORD)
2872 	compcode = COMPCODE_TRUE;
2873     }
2874    else if (flag_trapping_math)
2875      {
2876 	/* Check that the original operation and the optimized ones will trap
2877 	   under the same condition.  */
2878 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2879 		     && (lcompcode != COMPCODE_EQ)
2880 		     && (lcompcode != COMPCODE_ORD);
2881 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2882 		     && (rcompcode != COMPCODE_EQ)
2883 		     && (rcompcode != COMPCODE_ORD);
2884 	bool trap = (compcode & COMPCODE_UNORD) == 0
2885 		    && (compcode != COMPCODE_EQ)
2886 		    && (compcode != COMPCODE_ORD);
2887 
2888         /* In a short-circuited boolean expression the LHS might be
2889 	   such that the RHS, if evaluated, will never trap.  For
2890 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2891 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2892 	   example, the expression above will never trap, hence
2893 	   optimizing it to x < y would be invalid).  */
2894         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2895             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2896           rtrap = false;
2897 
2898         /* If the comparison was short-circuited, and only the RHS
2899 	   trapped, we may now generate a spurious trap.  */
2900 	if (rtrap && !ltrap
2901 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 	  return NULL_TREE;
2903 
2904 	/* If we changed the conditions that cause a trap, we lose.  */
2905 	if ((ltrap || rtrap) != trap)
2906 	  return NULL_TREE;
2907       }
2908 
2909   if (compcode == COMPCODE_TRUE)
2910     return constant_boolean_node (true, truth_type);
2911   else if (compcode == COMPCODE_FALSE)
2912     return constant_boolean_node (false, truth_type);
2913   else
2914     {
2915       enum tree_code tcode;
2916 
2917       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2918       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2919     }
2920 }
2921 
2922 /* Return nonzero if two operands (typically of the same tree node)
2923    are necessarily equal.  FLAGS modifies behavior as follows:
2924 
2925    If OEP_ONLY_CONST is set, only return nonzero for constants.
2926    This function tests whether the operands are indistinguishable;
2927    it does not test whether they are equal using C's == operation.
2928    The distinction is important for IEEE floating point, because
2929    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2930    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2931 
2932    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2933    even though it may hold multiple values during a function.
2934    This is because a GCC tree node guarantees that nothing else is
2935    executed between the evaluation of its "operands" (which may often
2936    be evaluated in arbitrary order).  Hence if the operands themselves
2937    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2938    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2939    unset means assuming isochronic (or instantaneous) tree equivalence.
2940    Unless comparing arbitrary expression trees, such as from different
2941    statements, this flag can usually be left unset.
2942 
2943    If OEP_PURE_SAME is set, then pure functions with identical arguments
2944    are considered the same.  It is used when the caller has other ways
2945    to ensure that global memory is unchanged in between.
2946 
2947    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2948    not values of expressions.
2949 
2950    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2951    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2952 
2953    If OEP_BITWISE is set, then require the values to be bitwise identical
2954    rather than simply numerically equal.  Do not take advantage of things
2955    like math-related flags or undefined behavior; only return true for
2956    values that are provably bitwise identical in all circumstances.
2957 
2958    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2959    any operand with side effects.  This is unnecessarily conservative in
2960    the case where we know that ARG0 and ARG1 are in disjoint code paths
2961    (such as in the ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is
2962    used when comparing addresses with the TREE_CONSTANT flag set, so we
2963    know that &var == &var even if var is volatile.  */
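/* Exposition-only example: the REAL_CSTs 0.0 and -0.0 compare equal here
   when signed zeros are not honored and OEP_BITWISE is unset, while under
   OEP_ONLY_CONST a VAR_DECL does not even compare equal to itself.  */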
2964 
2965 bool
2966 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2967 				  unsigned int flags)
2968 {
2969   bool r;
2970   if (verify_hash_value (arg0, arg1, flags, &r))
2971     return r;
2972 
2973   STRIP_ANY_LOCATION_WRAPPER (arg0);
2974   STRIP_ANY_LOCATION_WRAPPER (arg1);
2975 
2976   /* If either is ERROR_MARK, they aren't equal.  */
2977   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2978       || TREE_TYPE (arg0) == error_mark_node
2979       || TREE_TYPE (arg1) == error_mark_node)
2980     return false;
2981 
2982   /* Similarly, if either does not have a type (like a template id),
2983      they aren't equal.  */
2984   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2985     return false;
2986 
2987   /* Bitwise identity makes no sense if the values have different layouts.  */
2988   if ((flags & OEP_BITWISE)
2989       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2990     return false;
2991 
2992   /* We cannot consider pointers to different address space equal.  */
2993   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2994       && POINTER_TYPE_P (TREE_TYPE (arg1))
2995       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2996 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2997     return false;
2998 
2999   /* Check equality of integer constants before bailing out due to
3000      precision differences.  */
3001   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3002     {
3003       /* Address of INTEGER_CST is not defined; check that we did not forget
3004 	 to drop the OEP_ADDRESS_OF flag.  */
3005       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3006       return tree_int_cst_equal (arg0, arg1);
3007     }
3008 
3009   if (!(flags & OEP_ADDRESS_OF))
3010     {
3011       /* If the types don't have the same signedness, then we can't consider
3012 	 them equal.  We must check this before the STRIP_NOPS calls
3013 	 because they may change the signedness of the arguments.  As pointers
3014 	 strictly don't have a signedness, require either two pointers or
3015 	 two non-pointers as well.  */
3016       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3017 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
3018 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
3019 	return false;
3020 
3021       /* If the types don't have the same precision, then it is not safe
3022 	 to strip NOPs.  */
3023       if (element_precision (TREE_TYPE (arg0))
3024 	  != element_precision (TREE_TYPE (arg1)))
3025 	return false;
3026 
3027       STRIP_NOPS (arg0);
3028       STRIP_NOPS (arg1);
3029     }
3030 #if 0
3031   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3032      sanity check once the issue is solved.  */
3033   else
3034     /* Addresses of conversions and SSA_NAMEs (and many other things)
3035        are not defined.  Check that we did not forget to drop the
3036        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
3037     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3038 			 && TREE_CODE (arg0) != SSA_NAME);
3039 #endif
3040 
3041   /* In case both args are comparisons but with different comparison
3042      code, try to swap the comparison operands of one arg to produce
3043      a match and compare that variant.  */
3044   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3045       && COMPARISON_CLASS_P (arg0)
3046       && COMPARISON_CLASS_P (arg1))
3047     {
3048       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3049 
3050       if (TREE_CODE (arg0) == swap_code)
3051 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3052 			        TREE_OPERAND (arg1, 1), flags)
3053 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3054 				   TREE_OPERAND (arg1, 0), flags);
3055     }
3056 
3057   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3058     {
3059       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3060       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3061 	;
3062       else if (flags & OEP_ADDRESS_OF)
3063 	{
3064 	  /* If we are interested in comparing addresses, ignore
3065 	     MEM_REF wrappings of the base that can appear just for
3066 	     TBAA reasons.  */
3067 	  if (TREE_CODE (arg0) == MEM_REF
3068 	      && DECL_P (arg1)
3069 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3070 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3071 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3072 	    return true;
3073 	  else if (TREE_CODE (arg1) == MEM_REF
3074 		   && DECL_P (arg0)
3075 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3076 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3077 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3078 	    return true;
3079 	  return false;
3080 	}
3081       else
3082 	return false;
3083     }
3084 
3085   /* When not checking addresses, this is needed for conversions and for
3086      COMPONENT_REF.  Might as well play it safe and always test this.  */
3087   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3088       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3089       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3090 	  && !(flags & OEP_ADDRESS_OF)))
3091     return false;
3092 
3093   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3094      We don't care about side effects in that case because the SAVE_EXPR
3095      takes care of that for us. In all other cases, two expressions are
3096      equal if they have no side effects.  If we have two identical
3097      expressions with side effects that should be treated the same due
3098      to the only side effects being identical SAVE_EXPR's, that will
3099      be detected in the recursive calls below.
3100      If we are taking an invariant address of two identical objects
3101      they are necessarily equal as well.  */
3102   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3103       && (TREE_CODE (arg0) == SAVE_EXPR
3104 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3105 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3106     return true;
3107 
3108   /* Next handle constant cases, those for which we can return true even
3109      if ONLY_CONST is set.  */
3110   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3111     switch (TREE_CODE (arg0))
3112       {
3113       case INTEGER_CST:
3114 	return tree_int_cst_equal (arg0, arg1);
3115 
3116       case FIXED_CST:
3117 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3118 				       TREE_FIXED_CST (arg1));
3119 
3120       case REAL_CST:
3121 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3122 	  return true;
3123 
3124 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3125 	  {
3126 	    /* If we do not distinguish between signed and unsigned zero,
3127 	       consider them equal.  */
3128 	    if (real_zerop (arg0) && real_zerop (arg1))
3129 	      return true;
3130 	  }
3131 	return false;
3132 
3133       case VECTOR_CST:
3134 	{
3135 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3136 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3137 	    return false;
3138 
3139 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3140 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3141 	    return false;
3142 
3143 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3144 	  for (unsigned int i = 0; i < count; ++i)
3145 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3146 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3147 	      return false;
3148 	  return true;
3149 	}
3150 
3151       case COMPLEX_CST:
3152 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3153 				 flags)
3154 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3155 				    flags));
3156 
3157       case STRING_CST:
3158 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3159 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3160 			      TREE_STRING_POINTER (arg1),
3161 			      TREE_STRING_LENGTH (arg0)));
3162 
3163       case ADDR_EXPR:
3164 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3165 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3166 				flags | OEP_ADDRESS_OF
3167 				| OEP_MATCH_SIDE_EFFECTS);
3168       case CONSTRUCTOR:
3169 	/* In GIMPLE empty constructors are allowed in initializers of
3170 	   aggregates.  */
3171 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3172       default:
3173 	break;
3174       }
3175 
3176   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3177      two instances of undefined behavior will give identical results.  */
3178   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3179     return false;
3180 
3181 /* Define macros to test an operand from arg0 and arg1 for equality and a
3182    variant that allows null and views null as being different from any
3183    non-null value.  In the latter case, if either is null, both
3184    must be; otherwise, do the normal comparison.  */
3185 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3186 				    TREE_OPERAND (arg1, N), flags)
3187 
3188 #define OP_SAME_WITH_NULL(N)				\
3189   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3190    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3191 
3192   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3193     {
3194     case tcc_unary:
3195       /* Two conversions are equal only if signedness and modes match.  */
3196       switch (TREE_CODE (arg0))
3197         {
3198 	CASE_CONVERT:
3199         case FIX_TRUNC_EXPR:
3200 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3201 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3202 	    return false;
3203 	  break;
3204 	default:
3205 	  break;
3206 	}
3207 
3208       return OP_SAME (0);
3209 
3210 
3211     case tcc_comparison:
3212     case tcc_binary:
3213       if (OP_SAME (0) && OP_SAME (1))
3214 	return true;
3215 
3216       /* For commutative ops, allow the other order.  */
3217       return (commutative_tree_code (TREE_CODE (arg0))
3218 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3219 				  TREE_OPERAND (arg1, 1), flags)
3220 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3221 				  TREE_OPERAND (arg1, 0), flags));
3222 
3223     case tcc_reference:
3224 	 dereferencing contains a side effect, these cannot be equal,
3225 	 dereferencing contain a side effect, these cannot be equal,
3226 	 but their addresses can be.  */
3227       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3228 	  && (TREE_SIDE_EFFECTS (arg0)
3229 	      || TREE_SIDE_EFFECTS (arg1)))
3230 	return false;
3231 
3232       switch (TREE_CODE (arg0))
3233 	{
3234 	case INDIRECT_REF:
3235 	  if (!(flags & OEP_ADDRESS_OF))
3236 	    {
3237 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3238 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3239 		return false;
3240 	      /* Verify that the access types are compatible.  */
3241 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3242 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3243 		return false;
3244 	    }
3245 	  flags &= ~OEP_ADDRESS_OF;
3246 	  return OP_SAME (0);
3247 
3248 	case IMAGPART_EXPR:
3249 	  /* Require the same offset.  */
3250 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3251 				TYPE_SIZE (TREE_TYPE (arg1)),
3252 				flags & ~OEP_ADDRESS_OF))
3253 	    return false;
3254 
3255 	/* Fallthru.  */
3256 	case REALPART_EXPR:
3257 	case VIEW_CONVERT_EXPR:
3258 	  return OP_SAME (0);
3259 
3260 	case TARGET_MEM_REF:
3261 	case MEM_REF:
3262 	  if (!(flags & OEP_ADDRESS_OF))
3263 	    {
3264 	      /* Require equal access sizes.  */
3265 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3266 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3267 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3268 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3269 					   TYPE_SIZE (TREE_TYPE (arg1)),
3270 					   flags)))
3271 		return false;
3272 	      /* Verify that access happens in similar types.  */
3273 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3274 		return false;
3275 	      /* Verify that accesses are TBAA compatible.  */
3276 	      if (!alias_ptr_types_compatible_p
3277 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3278 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3279 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3280 		      != MR_DEPENDENCE_CLIQUE (arg1))
3281 		  || (MR_DEPENDENCE_BASE (arg0)
3282 		      != MR_DEPENDENCE_BASE (arg1)))
3283 		return false;
3284 	     /* Verify that alignment is compatible.  */
3285 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3286 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3287 		return false;
3288 	    }
3289 	  flags &= ~OEP_ADDRESS_OF;
3290 	  return (OP_SAME (0) && OP_SAME (1)
3291 		  /* TARGET_MEM_REFs require equal extra operands.  */
3292 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3293 		      || (OP_SAME_WITH_NULL (2)
3294 			  && OP_SAME_WITH_NULL (3)
3295 			  && OP_SAME_WITH_NULL (4))));
3296 
3297 	case ARRAY_REF:
3298 	case ARRAY_RANGE_REF:
3299 	  if (!OP_SAME (0))
3300 	    return false;
3301 	  flags &= ~OEP_ADDRESS_OF;
3302 	  /* First compare the array index by value if it is constant, as the
3303 	     indexes may have different types but the same value here.  */
3304 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3305 				       TREE_OPERAND (arg1, 1))
3306 		   || OP_SAME (1))
3307 		  && OP_SAME_WITH_NULL (2)
3308 		  && OP_SAME_WITH_NULL (3)
3309 		  /* Compare low bound and element size, as with OEP_ADDRESS_OF
3310 		     we have to account for the offset of the ref.  */
3311 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3312 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3313 		      || (operand_equal_p (array_ref_low_bound
3314 					     (CONST_CAST_TREE (arg0)),
3315 					   array_ref_low_bound
3316 					     (CONST_CAST_TREE (arg1)), flags)
3317 			  && operand_equal_p (array_ref_element_size
3318 					        (CONST_CAST_TREE (arg0)),
3319 					      array_ref_element_size
3320 					        (CONST_CAST_TREE (arg1)),
3321 					      flags))));
3322 
3323 	case COMPONENT_REF:
3324 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3325 	     may be NULL when we're called to compare MEM_EXPRs.  */
3326 	  if (!OP_SAME_WITH_NULL (0)
3327 	      || !OP_SAME (1))
3328 	    return false;
3329 	  flags &= ~OEP_ADDRESS_OF;
3330 	  return OP_SAME_WITH_NULL (2);
3331 
3332 	case BIT_FIELD_REF:
3333 	  if (!OP_SAME (0))
3334 	    return false;
3335 	  flags &= ~OEP_ADDRESS_OF;
3336 	  return OP_SAME (1) && OP_SAME (2);
3337 
3338 	/* Virtual table call.  */
3339 	case OBJ_TYPE_REF:
3340 	  {
3341 	    if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3342 				  OBJ_TYPE_REF_EXPR (arg1), flags))
3343 	      return false;
3344 	    if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3345 		!= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3346 	      return false;
3347 	    if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3348 				  OBJ_TYPE_REF_OBJECT (arg1), flags))
3349 	      return false;
3350 	    if (!types_same_for_odr (obj_type_ref_class (arg0),
3351 				     obj_type_ref_class (arg1)))
3352 	      return false;
3353 	    return true;
3354 	  }
3355 
3356 	default:
3357 	  return false;
3358 	}
3359 
3360     case tcc_expression:
3361       switch (TREE_CODE (arg0))
3362 	{
3363 	case ADDR_EXPR:
3364 	  /* Be sure we pass the right OEP_ADDRESS_OF flag.  */
3365 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3366 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3367 				  TREE_OPERAND (arg1, 0),
3368 				  flags | OEP_ADDRESS_OF);
3369 
3370 	case TRUTH_NOT_EXPR:
3371 	  return OP_SAME (0);
3372 
3373 	case TRUTH_ANDIF_EXPR:
3374 	case TRUTH_ORIF_EXPR:
3375 	  return OP_SAME (0) && OP_SAME (1);
3376 
3377 	case WIDEN_MULT_PLUS_EXPR:
3378 	case WIDEN_MULT_MINUS_EXPR:
3379 	  if (!OP_SAME (2))
3380 	    return false;
3381 	  /* The multiplication operands are commutative.  */
3382 	  /* FALLTHRU */
3383 
3384 	case TRUTH_AND_EXPR:
3385 	case TRUTH_OR_EXPR:
3386 	case TRUTH_XOR_EXPR:
3387 	  if (OP_SAME (0) && OP_SAME (1))
3388 	    return true;
3389 
3390 	  /* Otherwise take into account that this is a commutative operation.  */
3391 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3392 				   TREE_OPERAND (arg1, 1), flags)
3393 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3394 				      TREE_OPERAND (arg1, 0), flags));
3395 
3396 	case COND_EXPR:
3397 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3398 	    return false;
3399 	  flags &= ~OEP_ADDRESS_OF;
3400 	  return OP_SAME (0);
3401 
3402 	case BIT_INSERT_EXPR:
3403 	  /* BIT_INSERT_EXPR has an implicit operand, the type precision
3404 	     of op1.  We need to check that they are the same.  */
3405 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3406 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3407 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3408 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3409 	    return false;
3410 	  /* FALLTHRU */
3411 
3412 	case VEC_COND_EXPR:
3413 	case DOT_PROD_EXPR:
3414 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3415 
3416 	case MODIFY_EXPR:
3417 	case INIT_EXPR:
3418 	case COMPOUND_EXPR:
3419 	case PREDECREMENT_EXPR:
3420 	case PREINCREMENT_EXPR:
3421 	case POSTDECREMENT_EXPR:
3422 	case POSTINCREMENT_EXPR:
3423 	  if (flags & OEP_LEXICOGRAPHIC)
3424 	    return OP_SAME (0) && OP_SAME (1);
3425 	  return false;
3426 
3427 	case CLEANUP_POINT_EXPR:
3428 	case EXPR_STMT:
3429 	case SAVE_EXPR:
3430 	  if (flags & OEP_LEXICOGRAPHIC)
3431 	    return OP_SAME (0);
3432 	  return false;
3433 
3434 	default:
3435 	  return false;
3436 	}
3437 
3438     case tcc_vl_exp:
3439       switch (TREE_CODE (arg0))
3440 	{
3441 	case CALL_EXPR:
3442 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3443 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3444 	    /* If the CALL_EXPRs are not both internal calls or both calls
3445 	       to normal functions, then they are not equal.  */
3446 	    return false;
3447 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3448 	    {
3449 	      /* If the CALL_EXPRs call different internal functions, then they
3450 		 are not equal.  */
3451 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3452 		return false;
3453 	    }
3454 	  else
3455 	    {
3456 	      /* If the CALL_EXPRs call different functions, then they are not
3457 		 equal.  */
3458 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3459 				     flags))
3460 		return false;
3461 	    }
3462 
3463 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3464 	  {
3465 	    unsigned int cef = call_expr_flags (arg0);
3466 	    if (flags & OEP_PURE_SAME)
3467 	      cef &= ECF_CONST | ECF_PURE;
3468 	    else
3469 	      cef &= ECF_CONST;
3470 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3471 	      return false;
3472 	  }
3473 
3474 	  /* Now see if all the arguments are the same.  */
3475 	  {
3476 	    const_call_expr_arg_iterator iter0, iter1;
3477 	    const_tree a0, a1;
3478 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3479 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3480 		 a0 && a1;
3481 		 a0 = next_const_call_expr_arg (&iter0),
3482 		   a1 = next_const_call_expr_arg (&iter1))
3483 	      if (! operand_equal_p (a0, a1, flags))
3484 		return false;
3485 
3486 	    /* If we get here and both argument lists are exhausted
3487 	       then the CALL_EXPRs are equal.  */
3488 	    return ! (a0 || a1);
3489 	  }
3490 	default:
3491 	  return false;
3492 	}
3493 
3494     case tcc_declaration:
3495       /* Consider __builtin_sqrt equal to sqrt.  */
3496       return (TREE_CODE (arg0) == FUNCTION_DECL
3497 	      && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3498 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3499 	      && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3500 		  == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3501 
3502     case tcc_exceptional:
3503       if (TREE_CODE (arg0) == CONSTRUCTOR)
3504 	{
3505 	  if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3506 	    return false;
3507 
3508 	  /* In GIMPLE constructors are used only to build vectors from
3509 	     elements.  Individual elements in the constructor must be
3510 	     indexed in increasing order and form an initial sequence.
3511 
3512 	     We make no effort to compare constructors in GENERIC
3513 	     (see sem_variable::equals in ipa-icf, which can do so for
3514 	      constants).  */
3515 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3516 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3517 	    return false;
3518 
3519 	  /* Be sure that the vectors constructed have the same representation.
3520 	     We have only tested that element precision and modes match.
3521 	     Vectors may be BLKmode, so also check that the number of
3522 	     parts matches.  */
3523 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3524 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3525 	    return false;
3526 
3527 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3528 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3529 	  unsigned int len = vec_safe_length (v0);
3530 
3531 	  if (len != vec_safe_length (v1))
3532 	    return false;
3533 
3534 	  for (unsigned int i = 0; i < len; i++)
3535 	    {
3536 	      constructor_elt *c0 = &(*v0)[i];
3537 	      constructor_elt *c1 = &(*v1)[i];
3538 
3539 	      if (!operand_equal_p (c0->value, c1->value, flags)
3540 		  /* In GIMPLE the indexes can be either NULL or matching i.
3541 		     Double check this so we won't get false
3542 		     positives for GENERIC.  */
3543 		  || (c0->index
3544 		      && (TREE_CODE (c0->index) != INTEGER_CST
3545 			  || compare_tree_int (c0->index, i)))
3546 		  || (c1->index
3547 		      && (TREE_CODE (c1->index) != INTEGER_CST
3548 			  || compare_tree_int (c1->index, i))))
3549 		return false;
3550 	    }
3551 	  return true;
3552 	}
3553       else if (TREE_CODE (arg0) == STATEMENT_LIST
3554 	       && (flags & OEP_LEXICOGRAPHIC))
3555 	{
3556 	  /* Compare the STATEMENT_LISTs.  */
3557 	  tree_stmt_iterator tsi1, tsi2;
3558 	  tree body1 = CONST_CAST_TREE (arg0);
3559 	  tree body2 = CONST_CAST_TREE (arg1);
3560 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3561 	       tsi_next (&tsi1), tsi_next (&tsi2))
3562 	    {
3563 	      /* The lists don't have the same number of statements.  */
3564 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3565 		return false;
3566 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3567 		return true;
3568 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3569 				    flags & (OEP_LEXICOGRAPHIC
3570 					     | OEP_NO_HASH_CHECK)))
3571 		return false;
3572 	    }
3573 	}
3574       return false;
3575 
3576     case tcc_statement:
3577       switch (TREE_CODE (arg0))
3578 	{
3579 	case RETURN_EXPR:
3580 	  if (flags & OEP_LEXICOGRAPHIC)
3581 	    return OP_SAME_WITH_NULL (0);
3582 	  return false;
3583 	case DEBUG_BEGIN_STMT:
3584 	  if (flags & OEP_LEXICOGRAPHIC)
3585 	    return true;
3586 	  return false;
3587 	default:
3588 	  return false;
3589 	 }
3590 
3591     default:
3592       return false;
3593     }
3594 
3595 #undef OP_SAME
3596 #undef OP_SAME_WITH_NULL
3597 }
3598 
3599 /* Generate a hash value for an expression.  This can be used iteratively
3600    by passing a previous result as the HSTATE argument.  */
3601 
3602 void
3603 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3604 			       unsigned int flags)
3605 {
3606   int i;
3607   enum tree_code code;
3608   enum tree_code_class tclass;
3609 
3610   if (t == NULL_TREE || t == error_mark_node)
3611     {
3612       hstate.merge_hash (0);
3613       return;
3614     }
3615 
3616   STRIP_ANY_LOCATION_WRAPPER (t);
3617 
3618   if (!(flags & OEP_ADDRESS_OF))
3619     STRIP_NOPS (t);
3620 
3621   code = TREE_CODE (t);
3622 
3623   switch (code)
3624     {
3625     /* Alas, constants aren't shared, so we can't rely on pointer
3626        identity.  */
3627     case VOID_CST:
3628       hstate.merge_hash (0);
3629       return;
3630     case INTEGER_CST:
3631       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3632       for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3633 	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3634       return;
3635     case REAL_CST:
3636       {
3637 	unsigned int val2;
3638 	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3639 	  val2 = rvc_zero;
3640 	else
3641 	  val2 = real_hash (TREE_REAL_CST_PTR (t));
3642 	hstate.merge_hash (val2);
3643 	return;
3644       }
3645     case FIXED_CST:
3646       {
3647 	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3648 	hstate.merge_hash (val2);
3649 	return;
3650       }
3651     case STRING_CST:
3652       hstate.add ((const void *) TREE_STRING_POINTER (t),
3653 		  TREE_STRING_LENGTH (t));
3654       return;
3655     case COMPLEX_CST:
3656       hash_operand (TREE_REALPART (t), hstate, flags);
3657       hash_operand (TREE_IMAGPART (t), hstate, flags);
3658       return;
3659     case VECTOR_CST:
3660       {
3661 	hstate.add_int (VECTOR_CST_NPATTERNS (t));
3662 	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3663 	unsigned int count = vector_cst_encoded_nelts (t);
3664 	for (unsigned int i = 0; i < count; ++i)
3665 	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3666 	return;
3667       }
3668     case SSA_NAME:
3669       /* We can just compare by pointer.  */
3670       hstate.add_hwi (SSA_NAME_VERSION (t));
3671       return;
3672     case PLACEHOLDER_EXPR:
3673       /* The node itself doesn't matter.  */
3674       return;
3675     case BLOCK:
3676     case OMP_CLAUSE:
3677       /* Ignore.  */
3678       return;
3679     case TREE_LIST:
3680       /* A list of expressions, for a CALL_EXPR or as the elements of a
3681 	 VECTOR_CST.  */
3682       for (; t; t = TREE_CHAIN (t))
3683 	hash_operand (TREE_VALUE (t), hstate, flags);
3684       return;
3685     case CONSTRUCTOR:
3686       {
3687 	unsigned HOST_WIDE_INT idx;
3688 	tree field, value;
3689 	flags &= ~OEP_ADDRESS_OF;
3690 	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3691 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3692 	  {
3693 	    /* In GIMPLE the indexes can be either NULL or matching IDX.  */
3694 	    if (field == NULL_TREE)
3695 	      field = bitsize_int (idx);
3696 	    hash_operand (field, hstate, flags);
3697 	    hash_operand (value, hstate, flags);
3698 	  }
3699 	return;
3700       }
3701     case STATEMENT_LIST:
3702       {
3703 	tree_stmt_iterator i;
3704 	for (i = tsi_start (CONST_CAST_TREE (t));
3705 	     !tsi_end_p (i); tsi_next (&i))
3706 	  hash_operand (tsi_stmt (i), hstate, flags);
3707 	return;
3708       }
3709     case TREE_VEC:
3710       for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3711 	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3712       return;
3713     case IDENTIFIER_NODE:
3714       hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3715       return;
3716     case FUNCTION_DECL:
3717       /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3718 	 Otherwise nodes that compare equal according to operand_equal_p might
3719 	 get different hash codes.  However, don't do this for machine specific
3720 	 or front end builtins, since the function code is overloaded in those
3721 	 cases.  */
3722       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3723 	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3724 	{
3725 	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3726 	  code = TREE_CODE (t);
3727 	}
3728       /* FALL THROUGH */
3729     default:
3730       if (POLY_INT_CST_P (t))
3731 	{
3732 	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3733 	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3734 	  return;
3735 	}
3736       tclass = TREE_CODE_CLASS (code);
3737 
3738       if (tclass == tcc_declaration)
3739 	{
3740 	  /* DECLs have a unique ID.  */
3741 	  hstate.add_hwi (DECL_UID (t));
3742 	}
3743       else if (tclass == tcc_comparison && !commutative_tree_code (code))
3744 	{
3745 	  /* For comparisons that can be swapped, use the lower
3746 	     tree code.  */
3747 	  enum tree_code ccode = swap_tree_comparison (code);
3748 	  if (code < ccode)
3749 	    ccode = code;
3750 	  hstate.add_object (ccode);
3751 	  hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3752 	  hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3753 	}
3754       else if (CONVERT_EXPR_CODE_P (code))
3755 	{
3756 	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
3757 	     operand_equal_p.  */
3758 	  enum tree_code ccode = NOP_EXPR;
3759 	  hstate.add_object (ccode);
3760 
3761 	  /* Don't hash the type, that can lead to having nodes which
3762 	     compare equal according to operand_equal_p, but which
3763 	     have different hash codes.  Make sure to include signedness
3764 	     in the hash computation.  */
3765 	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3766 	  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3767 	}
3768       /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl.  */
3769       else if (code == MEM_REF
3770 	       && (flags & OEP_ADDRESS_OF) != 0
3771 	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3772 	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3773 	       && integer_zerop (TREE_OPERAND (t, 1)))
3774 	hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3775 		      hstate, flags);
3776       /* Don't ICE on FE specific trees, or their arguments etc.
3777 	 during operand_equal_p hash verification.  */
3778       else if (!IS_EXPR_CODE_CLASS (tclass))
3779 	gcc_assert (flags & OEP_HASH_CHECK);
3780       else
3781 	{
3782 	  unsigned int sflags = flags;
3783 
3784 	  hstate.add_object (code);
3785 
3786 	  switch (code)
3787 	    {
3788 	    case ADDR_EXPR:
3789 	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3790 	      flags |= OEP_ADDRESS_OF;
3791 	      sflags = flags;
3792 	      break;
3793 
3794 	    case INDIRECT_REF:
3795 	    case MEM_REF:
3796 	    case TARGET_MEM_REF:
3797 	      flags &= ~OEP_ADDRESS_OF;
3798 	      sflags = flags;
3799 	      break;
3800 
3801 	    case ARRAY_REF:
3802 	    case ARRAY_RANGE_REF:
3803 	    case COMPONENT_REF:
3804 	    case BIT_FIELD_REF:
3805 	      sflags &= ~OEP_ADDRESS_OF;
3806 	      break;
3807 
3808 	    case COND_EXPR:
3809 	      flags &= ~OEP_ADDRESS_OF;
3810 	      break;
3811 
3812 	    case WIDEN_MULT_PLUS_EXPR:
3813 	    case WIDEN_MULT_MINUS_EXPR:
3814 	      {
3815 		/* The multiplication operands are commutative.  */
3816 		inchash::hash one, two;
3817 		hash_operand (TREE_OPERAND (t, 0), one, flags);
3818 		hash_operand (TREE_OPERAND (t, 1), two, flags);
3819 		hstate.add_commutative (one, two);
3820 		hash_operand (TREE_OPERAND (t, 2), two, flags);
3821 		return;
3822 	      }
3823 
3824 	    case CALL_EXPR:
3825 	      if (CALL_EXPR_FN (t) == NULL_TREE)
3826 		hstate.add_int (CALL_EXPR_IFN (t));
3827 	      break;
3828 
3829 	    case TARGET_EXPR:
3830 	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3831 		 Usually different TARGET_EXPRs should just use
3832 		 different temporaries in their slots.  */
3833 	      hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3834 	      return;
3835 
3836 	    /* Virtual table call.  */
3837 	    case OBJ_TYPE_REF:
3838 	      inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3839 	      inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3840 	      inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3841 	      return;
3842 	    default:
3843 	      break;
3844 	    }
3845 
3846 	  /* Don't hash the type, that can lead to having nodes which
3847 	     compare equal according to operand_equal_p, but which
3848 	     have different hash codes.  */
3849 	  if (code == NON_LVALUE_EXPR)
3850 	    {
3851 	      /* Make sure to include signedness in the hash computation.  */
3852 	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3853 	      hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3854 	    }
3855 
3856 	  else if (commutative_tree_code (code))
3857 	    {
3858 	      /* It's a commutative expression.  We want to hash it the same
3859 		 however it appears.  We do this by first hashing both operands
3860 		 and then rehashing based on the order of their independent
3861 		 hashes.  */
3862 	      inchash::hash one, two;
3863 	      hash_operand (TREE_OPERAND (t, 0), one, flags);
3864 	      hash_operand (TREE_OPERAND (t, 1), two, flags);
3865 	      hstate.add_commutative (one, two);
3866 	    }
3867 	  else
3868 	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3869 	      hash_operand (TREE_OPERAND (t, i), hstate,
3870 			    i == 0 ? flags : sflags);
3871 	}
3872       return;
3873     }
3874 }
3875 
3876 bool
3877 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3878 				    unsigned int flags, bool *ret)
3879 {
3880   /* When checking, verify at the outermost operand_equal_p call that
3881      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3882      hash value.  */
3883   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3884     {
3885       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3886 	{
3887 	  if (arg0 != arg1)
3888 	    {
3889 	      inchash::hash hstate0 (0), hstate1 (0);
3890 	      hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3891 	      hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3892 	      hashval_t h0 = hstate0.end ();
3893 	      hashval_t h1 = hstate1.end ();
3894 	      gcc_assert (h0 == h1);
3895 	    }
3896 	  *ret = true;
3897 	}
3898       else
3899 	*ret = false;
3900 
3901       return true;
3902     }
3903 
3904   return false;
3905 }
3906 
3907 
3908 static operand_compare default_compare_instance;
3909 
3910 /* Convenience wrapper around the operand_compare class, because usually
3911    we do not need to play with the valueizer.  */
3912 
3913 bool
3914 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3915 {
3916   return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3917 }
3918 
3919 namespace inchash
3920 {
3921 
3922 /* Generate a hash value for an expression.  This can be used iteratively
3923    by passing a previous result as the HSTATE argument.
3924 
3925    This function is intended to produce the same hash for expressions which
3926    would compare equal using operand_equal_p.  */
3927 void
3928 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3929 {
3930   default_compare_instance.hash_operand (t, hstate, flags);
3931 }
3932 
3933 }
3934 
3935 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3936    with a different signedness or a narrower precision.  */
3937 
3938 static bool
3939 operand_equal_for_comparison_p (tree arg0, tree arg1)
3940 {
3941   if (operand_equal_p (arg0, arg1, 0))
3942     return true;
3943 
3944   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3945       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3946     return false;
3947 
3948   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3949      and see if the inner values are the same.  This removes any
3950      signedness comparison, which doesn't matter here.  */
3951   tree op0 = arg0;
3952   tree op1 = arg1;
3953   STRIP_NOPS (op0);
3954   STRIP_NOPS (op1);
3955   if (operand_equal_p (op0, op1, 0))
3956     return true;
3957 
3958   /* Discard a single widening conversion from ARG1 and see if the inner
3959      value is the same as ARG0.  */
3960   if (CONVERT_EXPR_P (arg1)
3961       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3962       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3963          < TYPE_PRECISION (TREE_TYPE (arg1))
3964       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3965     return true;
3966 
3967   return false;
3968 }
3969 
3970 /* See if ARG is an expression that is either a comparison or is performing
3971    arithmetic on comparisons.  The comparisons must only be comparing
3972    two different values, which will be stored in *CVAL1 and *CVAL2; if
3973    they are nonzero it means that some operands have already been found.
3974    No variables may be used anywhere else in the expression except in the
3975    comparisons.
3976 
3977    If this is true, return true.  Otherwise, return false.  */
3978 
3979 static bool
3980 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3981 {
3982   enum tree_code code = TREE_CODE (arg);
3983   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3984 
3985   /* We can handle some of the tcc_expression cases here.  */
3986   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3987     tclass = tcc_unary;
3988   else if (tclass == tcc_expression
3989 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3990 	       || code == COMPOUND_EXPR))
3991     tclass = tcc_binary;
3992 
3993   switch (tclass)
3994     {
3995     case tcc_unary:
3996       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3997 
3998     case tcc_binary:
3999       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4000 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4001 
4002     case tcc_constant:
4003       return true;
4004 
4005     case tcc_expression:
4006       if (code == COND_EXPR)
4007 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4008 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4009 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4010       return false;
4011 
4012     case tcc_comparison:
4013       /* First see if we can handle the first operand, then the second.  For
4014 	 the second operand, we know *CVAL1 can't be zero.  It must be that
4015 	 one side of the comparison is each of the values; test for the
4016 	 case where this isn't true by failing if the two operands
4017 	 are the same.  */
4018 
4019       if (operand_equal_p (TREE_OPERAND (arg, 0),
4020 			   TREE_OPERAND (arg, 1), 0))
4021 	return false;
4022 
4023       if (*cval1 == 0)
4024 	*cval1 = TREE_OPERAND (arg, 0);
4025       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4026 	;
4027       else if (*cval2 == 0)
4028 	*cval2 = TREE_OPERAND (arg, 0);
4029       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4030 	;
4031       else
4032 	return false;
4033 
4034       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4035 	;
4036       else if (*cval2 == 0)
4037 	*cval2 = TREE_OPERAND (arg, 1);
4038       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4039 	;
4040       else
4041 	return false;
4042 
4043       return true;
4044 
4045     default:
4046       return false;
4047     }
4048 }
4049 
4050 /* ARG is a tree that is known to contain just arithmetic operations and
4051    comparisons.  Evaluate the operations in the tree substituting NEW0 for
4052    any occurrence of OLD0 as an operand of a comparison and likewise for
4053    NEW1 and OLD1.  */
4054 
4055 static tree
4056 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4057 	    tree old1, tree new1)
4058 {
4059   tree type = TREE_TYPE (arg);
4060   enum tree_code code = TREE_CODE (arg);
4061   enum tree_code_class tclass = TREE_CODE_CLASS (code);
4062 
4063   /* We can handle some of the tcc_expression cases here.  */
4064   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4065     tclass = tcc_unary;
4066   else if (tclass == tcc_expression
4067 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4068     tclass = tcc_binary;
4069 
4070   switch (tclass)
4071     {
4072     case tcc_unary:
4073       return fold_build1_loc (loc, code, type,
4074 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4075 				      old0, new0, old1, new1));
4076 
4077     case tcc_binary:
4078       return fold_build2_loc (loc, code, type,
4079 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4080 				      old0, new0, old1, new1),
4081 			  eval_subst (loc, TREE_OPERAND (arg, 1),
4082 				      old0, new0, old1, new1));
4083 
4084     case tcc_expression:
4085       switch (code)
4086 	{
4087 	case SAVE_EXPR:
4088 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4089 			     old1, new1);
4090 
4091 	case COMPOUND_EXPR:
4092 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4093 			     old1, new1);
4094 
4095 	case COND_EXPR:
4096 	  return fold_build3_loc (loc, code, type,
4097 			      eval_subst (loc, TREE_OPERAND (arg, 0),
4098 					  old0, new0, old1, new1),
4099 			      eval_subst (loc, TREE_OPERAND (arg, 1),
4100 					  old0, new0, old1, new1),
4101 			      eval_subst (loc, TREE_OPERAND (arg, 2),
4102 					  old0, new0, old1, new1));
4103 	default:
4104 	  break;
4105 	}
4106       /* Fall through - ???  */
4107 
4108     case tcc_comparison:
4109       {
4110 	tree arg0 = TREE_OPERAND (arg, 0);
4111 	tree arg1 = TREE_OPERAND (arg, 1);
4112 
4113 	/* We need to check both for exact equality and tree equality.  The
4114 	   former will be true if the operand has a side-effect.  In that
4115 	   case, we know the operand occurred exactly once.  */
4116 
4117 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4118 	  arg0 = new0;
4119 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4120 	  arg0 = new1;
4121 
4122 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4123 	  arg1 = new0;
4124 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4125 	  arg1 = new1;
4126 
4127 	return fold_build2_loc (loc, code, type, arg0, arg1);
4128       }
4129 
4130     default:
4131       return arg;
4132     }
4133 }
4134 
4135 /* Return a tree for the case when the result of an expression is RESULT
4136    converted to TYPE and OMITTED was previously an operand of the expression
4137    but is now not needed (e.g., we folded OMITTED * 0).
4138 
4139    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
4140    the conversion of RESULT to TYPE.  */
4141 
4142 tree
4143 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4144 {
4145   tree t = fold_convert_loc (loc, type, result);
4146 
4147   /* If the resulting operand is an empty statement, just return the omitted
4148      statement cast to void.  */
4149   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4150     return build1_loc (loc, NOP_EXPR, void_type_node,
4151 		       fold_ignored_result (omitted));
4152 
4153   if (TREE_SIDE_EFFECTS (omitted))
4154     return build2_loc (loc, COMPOUND_EXPR, type,
4155 		       fold_ignored_result (omitted), t);
4156 
4157   return non_lvalue_loc (loc, t);
4158 }
4159 
4160 /* Return a tree for the case when the result of an expression is RESULT
4161    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4162    of the expression but are now not needed.
4163 
4164    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4165    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4166    evaluated before OMITTED2.  Otherwise, if neither has side effects,
4167    just do the conversion of RESULT to TYPE.  */
4168 
4169 tree
4170 omit_two_operands_loc (location_t loc, tree type, tree result,
4171 		       tree omitted1, tree omitted2)
4172 {
4173   tree t = fold_convert_loc (loc, type, result);
4174 
4175   if (TREE_SIDE_EFFECTS (omitted2))
4176     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4177   if (TREE_SIDE_EFFECTS (omitted1))
4178     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4179 
4180   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4181 }
4182 
4183 
4184 /* Return a simplified tree node for the truth-negation of ARG.  This
4185    never alters ARG itself.  We assume that ARG is an operation that
4186    returns a truth value (0 or 1).
4187 
4188    FIXME: one would think we would fold the result, but it causes
4189    problems with the dominator optimizer.  */
4190 
4191 static tree
4192 fold_truth_not_expr (location_t loc, tree arg)
4193 {
4194   tree type = TREE_TYPE (arg);
4195   enum tree_code code = TREE_CODE (arg);
4196   location_t loc1, loc2;
4197 
4198   /* If this is a comparison, we can simply invert it, except for
4199      floating-point non-equality comparisons, in which case we just
4200      enclose a TRUTH_NOT_EXPR around what we have.  */
4201 
4202   if (TREE_CODE_CLASS (code) == tcc_comparison)
4203     {
4204       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4205       if (FLOAT_TYPE_P (op_type)
4206 	  && flag_trapping_math
4207 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
4208 	  && code != NE_EXPR && code != EQ_EXPR)
4209 	return NULL_TREE;
4210 
4211       code = invert_tree_comparison (code, HONOR_NANS (op_type));
4212       if (code == ERROR_MARK)
4213 	return NULL_TREE;
4214 
4215       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4216 			     TREE_OPERAND (arg, 1));
4217       if (TREE_NO_WARNING (arg))
4218 	TREE_NO_WARNING (ret) = 1;
4219       return ret;
4220     }
4221 
4222   switch (code)
4223     {
4224     case INTEGER_CST:
4225       return constant_boolean_node (integer_zerop (arg), type);
4226 
4227     case TRUTH_AND_EXPR:
4228       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4229       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4230       return build2_loc (loc, TRUTH_OR_EXPR, type,
4231 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4232 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4233 
4234     case TRUTH_OR_EXPR:
4235       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4236       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4237       return build2_loc (loc, TRUTH_AND_EXPR, type,
4238 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4239 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4240 
4241     case TRUTH_XOR_EXPR:
4242       /* Here we can invert either operand.  We invert the first operand
4243 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
4244 	 result is the XOR of the first operand with the inside of the
4245 	 negation of the second operand.  */
4246 
4247       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4248 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4249 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4250       else
4251 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
4252 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4253 			   TREE_OPERAND (arg, 1));
4254 
4255     case TRUTH_ANDIF_EXPR:
4256       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4257       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4258       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4259 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4260 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4261 
4262     case TRUTH_ORIF_EXPR:
4263       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4264       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4265       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4266 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4267 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4268 
4269     case TRUTH_NOT_EXPR:
4270       return TREE_OPERAND (arg, 0);
4271 
4272     case COND_EXPR:
4273       {
4274 	tree arg1 = TREE_OPERAND (arg, 1);
4275 	tree arg2 = TREE_OPERAND (arg, 2);
4276 
4277 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4278 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4279 
4280 	/* A COND_EXPR may have a throw as one operand, which
4281 	   then has void type.  Just leave void operands
4282 	   as they are.  */
4283 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4284 			   VOID_TYPE_P (TREE_TYPE (arg1))
4285 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
4286 			   VOID_TYPE_P (TREE_TYPE (arg2))
4287 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
4288       }
4289 
4290     case COMPOUND_EXPR:
4291       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4292       return build2_loc (loc, COMPOUND_EXPR, type,
4293 			 TREE_OPERAND (arg, 0),
4294 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4295 
4296     case NON_LVALUE_EXPR:
4297       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4298       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4299 
4300     CASE_CONVERT:
4301       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4302 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4303 
4304       /* fall through */
4305 
4306     case FLOAT_EXPR:
4307       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4308       return build1_loc (loc, TREE_CODE (arg), type,
4309 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4310 
4311     case BIT_AND_EXPR:
4312       if (!integer_onep (TREE_OPERAND (arg, 1)))
4313 	return NULL_TREE;
4314       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4315 
4316     case SAVE_EXPR:
4317       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4318 
4319     case CLEANUP_POINT_EXPR:
4320       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4321       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4322 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4323 
4324     default:
4325       return NULL_TREE;
4326     }
4327 }
4328 
4329 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
4330    assume that ARG is an operation that returns a truth value (0 or 1
4331    for scalars, 0 or -1 for vectors).  Return the folded expression if
4332    folding is successful.  Otherwise, return NULL_TREE.  */
4333 
4334 static tree
4335 fold_invert_truthvalue (location_t loc, tree arg)
4336 {
4337   tree type = TREE_TYPE (arg);
4338   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4339 			      ? BIT_NOT_EXPR
4340 			      : TRUTH_NOT_EXPR,
4341 			 type, arg);
4342 }
4343 
4344 /* Return a simplified tree node for the truth-negation of ARG.  This
4345    never alters ARG itself.  We assume that ARG is an operation that
4346    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
4347 
4348 tree
4349 invert_truthvalue_loc (location_t loc, tree arg)
4350 {
4351   if (TREE_CODE (arg) == ERROR_MARK)
4352     return arg;
4353 
4354   tree type = TREE_TYPE (arg);
4355   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4356 			       ? BIT_NOT_EXPR
4357 			       : TRUTH_NOT_EXPR,
4358 			  type, arg);
4359 }
4360 
4361 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4362    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
4363    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
4364    is the original memory reference used to preserve the alias set of
4365    the access.  */
4366 
4367 static tree
4368 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4369 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
4370 		    int unsignedp, int reversep)
4371 {
4372   tree result, bftype;
4373 
4374   /* Attempt not to lose the access path if possible.  */
4375   if (TREE_CODE (orig_inner) == COMPONENT_REF)
4376     {
4377       tree ninner = TREE_OPERAND (orig_inner, 0);
4378       machine_mode nmode;
4379       poly_int64 nbitsize, nbitpos;
4380       tree noffset;
4381       int nunsignedp, nreversep, nvolatilep = 0;
4382       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4383 				       &noffset, &nmode, &nunsignedp,
4384 				       &nreversep, &nvolatilep);
4385       if (base == inner
4386 	  && noffset == NULL_TREE
4387 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4388 	  && !reversep
4389 	  && !nreversep
4390 	  && !nvolatilep)
4391 	{
4392 	  inner = ninner;
4393 	  bitpos -= nbitpos;
4394 	}
4395     }
4396 
4397   alias_set_type iset = get_alias_set (orig_inner);
4398   if (iset == 0 && get_alias_set (inner) != iset)
4399     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4400 			 build_fold_addr_expr (inner),
4401 			 build_int_cst (ptr_type_node, 0));
4402 
4403   if (known_eq (bitpos, 0) && !reversep)
4404     {
4405       tree size = TYPE_SIZE (TREE_TYPE (inner));
4406       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4407 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
4408 	  && tree_fits_shwi_p (size)
4409 	  && tree_to_shwi (size) == bitsize)
4410 	return fold_convert_loc (loc, type, inner);
4411     }
4412 
4413   bftype = type;
4414   if (TYPE_PRECISION (bftype) != bitsize
4415       || TYPE_UNSIGNED (bftype) == !unsignedp)
4416     bftype = build_nonstandard_integer_type (bitsize, 0);
4417 
4418   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4419 		       bitsize_int (bitsize), bitsize_int (bitpos));
4420   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4421 
4422   if (bftype != type)
4423     result = fold_convert_loc (loc, type, result);
4424 
4425   return result;
4426 }
4427 
4428 /* Optimize a bit-field compare.
4429 
4430    There are two cases:  First is a compare against a constant and the
4431    second is a comparison of two items where the fields are at the same
4432    bit position relative to the start of a chunk (byte, halfword, word)
4433    large enough to contain it.  In these cases we can avoid the shift
4434    implicit in bitfield extractions.
4435 
4436    For constants, we emit a compare of the shifted constant with the
4437    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4438    compared.  For two fields at the same position, we do the ANDs with the
4439    similar mask and compare the result of the ANDs.
4440 
4441    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4442    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4443    are the left and right operands of the comparison, respectively.
4444 
4445    If the optimization described above can be done, we return the resulting
4446    tree.  Otherwise we return zero.  */
4447 
4448 static tree
4449 optimize_bit_field_compare (location_t loc, enum tree_code code,
4450 			    tree compare_type, tree lhs, tree rhs)
4451 {
4452   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4453   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4454   tree type = TREE_TYPE (lhs);
4455   tree unsigned_type;
4456   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4457   machine_mode lmode, rmode;
4458   scalar_int_mode nmode;
4459   int lunsignedp, runsignedp;
4460   int lreversep, rreversep;
4461   int lvolatilep = 0, rvolatilep = 0;
4462   tree linner, rinner = NULL_TREE;
4463   tree mask;
4464   tree offset;
4465 
4466   /* Get all the information about the extractions being done.  If the bit size
4467      is the same as the size of the underlying object, we aren't doing an
4468      extraction at all and so can do nothing.  We also don't want to
4469      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4470      then will no longer be able to replace it.  */
4471   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4472 				&lunsignedp, &lreversep, &lvolatilep);
4473   if (linner == lhs
4474       || !known_size_p (plbitsize)
4475       || !plbitsize.is_constant (&lbitsize)
4476       || !plbitpos.is_constant (&lbitpos)
4477       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4478       || offset != 0
4479       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4480       || lvolatilep)
4481     return 0;
4482 
4483   if (const_p)
4484     rreversep = lreversep;
4485   else
4486    {
4487      /* If this is not a constant, we can only do something if bit positions,
4488 	sizes, signedness and storage order are the same.  */
4489      rinner
4490        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4491 			      &runsignedp, &rreversep, &rvolatilep);
4492 
4493      if (rinner == rhs
4494 	 || maybe_ne (lbitpos, rbitpos)
4495 	 || maybe_ne (lbitsize, rbitsize)
4496 	 || lunsignedp != runsignedp
4497 	 || lreversep != rreversep
4498 	 || offset != 0
4499 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4500 	 || rvolatilep)
4501        return 0;
4502    }
4503 
4504   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4505   poly_uint64 bitstart = 0;
4506   poly_uint64 bitend = 0;
4507   if (TREE_CODE (lhs) == COMPONENT_REF)
4508     {
4509       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4510       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4511 	return 0;
4512     }
4513 
4514   /* See if we can find a mode to refer to this field.  We should be able to,
4515      but fail if we can't.  */
4516   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4517 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4518 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4519 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4520 		      BITS_PER_WORD, false, &nmode))
4521     return 0;
4522 
4523   /* Get an unsigned type of the precision of this mode for the
4524      shifts below.  */
4525   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4526 
4527   /* Compute the bit position and size for the new reference and our offset
4528      within it. If the new reference is the same size as the original, we
4529      won't optimize anything, so return zero.  */
4530   nbitsize = GET_MODE_BITSIZE (nmode);
4531   nbitpos = lbitpos & ~ (nbitsize - 1);
4532   lbitpos -= nbitpos;
4533   if (nbitsize == lbitsize)
4534     return 0;
4535 
4536   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4537     lbitpos = nbitsize - lbitsize - lbitpos;
4538 
4539   /* Make the mask to be used against the extracted field.  */
4540   mask = build_int_cst_type (unsigned_type, -1);
4541   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4542   mask = const_binop (RSHIFT_EXPR, mask,
4543 		      size_int (nbitsize - lbitsize - lbitpos));
4544 
4545   if (! const_p)
4546     {
4547       if (nbitpos < 0)
4548 	return 0;
4549 
4550       /* If not comparing with constant, just rework the comparison
4551 	 and return.  */
4552       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4553 				    nbitsize, nbitpos, 1, lreversep);
4554       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4555       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4556 				    nbitsize, nbitpos, 1, rreversep);
4557       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4558       return fold_build2_loc (loc, code, compare_type, t1, t2);
4559     }
4560 
4561   /* Otherwise, we are handling the constant case.  See if the constant is too
4562      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4563      this not only for its own sake, but to avoid having to test for this
4564      error case below.  If we didn't, we might generate wrong code.
4565 
4566      For unsigned fields, the constant shifted right by the field length should
4567      be all zero.  For signed fields, the high-order bits should agree with
4568      the sign bit.  */
4569 
4570   if (lunsignedp)
4571     {
4572       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4573 	{
4574 	  warning (0, "comparison is always %d due to width of bit-field",
4575 		   code == NE_EXPR);
4576 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4577 	}
4578     }
4579   else
4580     {
4581       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4582       if (tem != 0 && tem != -1)
4583 	{
4584 	  warning (0, "comparison is always %d due to width of bit-field",
4585 		   code == NE_EXPR);
4586 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4587 	}
4588     }
4589 
4590   if (nbitpos < 0)
4591     return 0;
4592 
4593   /* Single-bit compares should always be against zero.  */
4594   if (lbitsize == 1 && ! integer_zerop (rhs))
4595     {
4596       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4597       rhs = build_int_cst (type, 0);
4598     }
4599 
4600   /* Make a new bitfield reference, shift the constant over the
4601      appropriate number of bits and mask it with the computed mask
4602      (in case this was a signed field).  If we changed it, make a new one.  */
4603   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4604 			    nbitsize, nbitpos, 1, lreversep);
4605 
4606   rhs = const_binop (BIT_AND_EXPR,
4607 		     const_binop (LSHIFT_EXPR,
4608 				  fold_convert_loc (loc, unsigned_type, rhs),
4609 				  size_int (lbitpos)),
4610 		     mask);
4611 
4612   lhs = build2_loc (loc, code, compare_type,
4613 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4614   return lhs;
4615 }
4616 
4617 /* Subroutine for fold_truth_andor_1: decode a field reference.
4618 
4619    If EXP is a component reference, we return the innermost reference.
4620 
4621    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4622    set to the starting bit number.
4623 
4624    If the innermost field can be completely contained in a mode-sized
4625    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4626 
4627    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4628    otherwise it is not changed.
4629 
4630    *PUNSIGNEDP is set to the signedness of the field.
4631 
4632    *PREVERSEP is set to the storage order of the field.
4633 
4634    *PMASK is set to the mask used.  This is either contained in a
4635    BIT_AND_EXPR or derived from the width of the field.
4636 
4637    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4638 
4639    Return 0 if this is not a component reference or is one that we can't
4640    do anything with.  */
4641 
4642 static tree
4643 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4644 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4645 			int *punsignedp, int *preversep, int *pvolatilep,
4646 			tree *pmask, tree *pand_mask)
4647 {
4648   tree exp = *exp_;
4649   tree outer_type = 0;
4650   tree and_mask = 0;
4651   tree mask, inner, offset;
4652   tree unsigned_type;
4653   unsigned int precision;
4654 
4655   /* All the optimizations using this function assume integer fields.
4656      There are problems with FP fields since the type_for_size call
4657      below can fail for, e.g., XFmode.  */
4658   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4659     return NULL_TREE;
4660 
4661   /* We are interested in the bare arrangement of bits, so strip everything
4662      that doesn't affect the machine mode.  However, record the type of the
4663      outermost expression if it may matter below.  */
4664   if (CONVERT_EXPR_P (exp)
4665       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4666     outer_type = TREE_TYPE (exp);
4667   STRIP_NOPS (exp);
4668 
4669   if (TREE_CODE (exp) == BIT_AND_EXPR)
4670     {
4671       and_mask = TREE_OPERAND (exp, 1);
4672       exp = TREE_OPERAND (exp, 0);
4673       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4674       if (TREE_CODE (and_mask) != INTEGER_CST)
4675 	return NULL_TREE;
4676     }
4677 
4678   poly_int64 poly_bitsize, poly_bitpos;
4679   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4680 			       pmode, punsignedp, preversep, pvolatilep);
4681   if ((inner == exp && and_mask == 0)
4682       || !poly_bitsize.is_constant (pbitsize)
4683       || !poly_bitpos.is_constant (pbitpos)
4684       || *pbitsize < 0
4685       || offset != 0
4686       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4687       /* Reject out-of-bound accesses (PR79731).  */
4688       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4689 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4690 			       *pbitpos + *pbitsize) < 0))
4691     return NULL_TREE;
4692 
4693   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4694   if (unsigned_type == NULL_TREE)
4695     return NULL_TREE;
4696 
4697   *exp_ = exp;
4698 
4699   /* If the number of bits in the reference is the same as the bitsize of
4700      the outer type, then the outer type gives the signedness. Otherwise
4701      (in case of a small bitfield) the signedness is unchanged.  */
4702   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4703     *punsignedp = TYPE_UNSIGNED (outer_type);
4704 
4705   /* Compute the mask to access the bitfield.  */
4706   precision = TYPE_PRECISION (unsigned_type);
4707 
4708   mask = build_int_cst_type (unsigned_type, -1);
4709 
4710   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4711   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4712 
4713   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4714   if (and_mask != 0)
4715     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4716 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4717 
4718   *pmask = mask;
4719   *pand_mask = and_mask;
4720   return inner;
4721 }
4722 
4723 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4724    bit positions and MASK has a signed type.  */
4725 
4726 static bool
4727 all_ones_mask_p (const_tree mask, unsigned int size)
4728 {
4729   tree type = TREE_TYPE (mask);
4730   unsigned int precision = TYPE_PRECISION (type);
4731 
4732   /* If this function returns true when the type of the mask is
4733      UNSIGNED, then there will be errors.  In particular see
4734      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4735      any documentation paper trail as to why this is so.  But the pre
4736      wide-int worked with that restriction and it has been preserved
4737      here.  */
4738   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4739     return false;
4740 
4741   return wi::mask (size, false, precision) == wi::to_wide (mask);
4742 }
4743 
4744 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4745    represents the sign bit of EXP's type.  If EXP represents a sign
4746    or zero extension, also test VAL against the unextended type.
4747    The return value is the (sub)expression whose sign bit is VAL,
4748    or NULL_TREE otherwise.  */
4749 
4750 tree
4751 sign_bit_p (tree exp, const_tree val)
4752 {
4753   int width;
4754   tree t;
4755 
4756   /* Tree EXP must have an integral type.  */
4757   t = TREE_TYPE (exp);
4758   if (! INTEGRAL_TYPE_P (t))
4759     return NULL_TREE;
4760 
4761   /* Tree VAL must be an integer constant.  */
4762   if (TREE_CODE (val) != INTEGER_CST
4763       || TREE_OVERFLOW (val))
4764     return NULL_TREE;
4765 
4766   width = TYPE_PRECISION (t);
4767   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4768     return exp;
4769 
4770   /* Handle extension from a narrower type.  */
4771   if (TREE_CODE (exp) == NOP_EXPR
4772       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4773     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4774 
4775   return NULL_TREE;
4776 }
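
/* Editorial aside, not from the GCC sources: for a WIDTH-bit type the
   constant recognized above is the one with only bit WIDTH - 1 set (for
   32 bits, 0x80000000).  A sketch with an illustrative name, assuming
   0 < width <= 32:  */
#if 0
static int
is_sign_bit_demo (unsigned int val, unsigned int width)
{
  /* VAL is the sign bit of a WIDTH-bit type iff exactly that bit is set.  */
  return val == 1u << (width - 1);
}
#endif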
4777 
4778 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4779    to be evaluated unconditionally.  */
4780 
4781 static bool
4782 simple_operand_p (const_tree exp)
4783 {
4784   /* Strip any conversions that don't change the machine mode.  */
4785   STRIP_NOPS (exp);
4786 
4787   return (CONSTANT_CLASS_P (exp)
4788   	  || TREE_CODE (exp) == SSA_NAME
4789 	  || (DECL_P (exp)
4790 	      && ! TREE_ADDRESSABLE (exp)
4791 	      && ! TREE_THIS_VOLATILE (exp)
4792 	      && ! DECL_NONLOCAL (exp)
4793 	      /* Don't regard global variables as simple.  They may be
4794 		 allocated in ways unknown to the compiler (shared memory,
4795 		 #pragma weak, etc).  */
4796 	      && ! TREE_PUBLIC (exp)
4797 	      && ! DECL_EXTERNAL (exp)
4798 	      /* Weakrefs are not safe to be read, since they can be NULL.
4799  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4800 		 have DECL_WEAK flag set.  */
4801 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4802 	      /* Loading a static variable is unduly expensive, but global
4803 		 registers aren't expensive.  */
4804 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4805 }
4806 
4807 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4808    to be evaluated unconditionally.
4809    In addition to simple_operand_p, we assume that comparisons, conversions,
4810    and logic-not operations are simple, if their operands are simple, too.  */
4811 
4812 static bool
4813 simple_operand_p_2 (tree exp)
4814 {
4815   enum tree_code code;
4816 
4817   if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4818     return false;
4819 
4820   while (CONVERT_EXPR_P (exp))
4821     exp = TREE_OPERAND (exp, 0);
4822 
4823   code = TREE_CODE (exp);
4824 
4825   if (TREE_CODE_CLASS (code) == tcc_comparison)
4826     return (simple_operand_p (TREE_OPERAND (exp, 0))
4827 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4828 
4829   if (code == TRUTH_NOT_EXPR)
4830       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4831 
4832   return simple_operand_p (exp);
4833 }
4834 
4835 
4836 /* The following functions are subroutines to fold_range_test and allow it to
4837    try to change a logical combination of comparisons into a range test.
4838 
4839    For example, both
4840 	X == 2 || X == 3 || X == 4 || X == 5
4841    and
4842 	X >= 2 && X <= 5
4843    are converted to
4844 	(unsigned) (X - 2) <= 3
4845 
4846    We describe each set of comparisons as being either inside or outside
4847    a range, using a variable named like IN_P, and then describe the
4848    range with a lower and upper bound.  If one of the bounds is omitted,
4849    it represents either the highest or lowest value of the type.
4850 
4851    In the comments below, we represent a range by two numbers in brackets
4852    preceded by a "+" to designate being inside that range, or a "-" to
4853    designate being outside that range, so the condition can be inverted by
4854    flipping the prefix.  An omitted bound is represented by a "-".  For
4855    example, "- [-, 10]" means being outside the range starting at the lowest
4856    possible value and ending at 10, in other words, being greater than 10.
4857    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4858    always false.
4859 
4860    We set up things so that the missing bounds are handled in a consistent
4861    manner so neither a missing bound nor "true" and "false" need to be
4862    handled using a special case.  */
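
/* Editorial aside, not from the GCC sources: the equivalence quoted in the
   comment above can be checked in plain C.  The conversion is done before
   the subtraction here to avoid signed overflow in the demo; both forms
   reject everything outside [2, 5], including negatives, because the
   subtraction wraps to a large unsigned value.  */
#if 0
static int
range_test_demo (int x)
{
  int chain = x == 2 || x == 3 || x == 4 || x == 5;
  int range = (unsigned) x - 2u <= 3u;
  return chain == range;	/* Always 1.  */
}
#endif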
4863 
4864 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4865    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4866    and UPPER1_P are nonzero if the respective argument is an upper bound
4867    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4868    must be specified for a comparison.  ARG1 will be converted to ARG0's
4869    type if both are specified.  */
4870 
4871 static tree
4872 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4873 	     tree arg1, int upper1_p)
4874 {
4875   tree tem;
4876   int result;
4877   int sgn0, sgn1;
4878 
4879   /* If neither arg represents infinity, do the normal operation.
4880      Else, if not a comparison, return infinity.  Else handle the special
4881      comparison rules. Note that most of the cases below won't occur, but
4882      are handled for consistency.  */
4883 
4884   if (arg0 != 0 && arg1 != 0)
4885     {
4886       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4887 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4888       STRIP_NOPS (tem);
4889       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4890     }
4891 
4892   if (TREE_CODE_CLASS (code) != tcc_comparison)
4893     return 0;
4894 
4895   /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
4896      omitted upper bound, and 0 if the bound is present.  In real maths,
4897      we cannot assume open-ended ranges are the same.  But this is
4898      computer arithmetic, where numbers are finite, so we can stand in
4899      for any missing bound with a value Z greater in magnitude than any
4900      representable number.  This permits us to treat unbounded ranges as equal.  */
4901   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4902   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4903   switch (code)
4904     {
4905     case EQ_EXPR:
4906       result = sgn0 == sgn1;
4907       break;
4908     case NE_EXPR:
4909       result = sgn0 != sgn1;
4910       break;
4911     case LT_EXPR:
4912       result = sgn0 < sgn1;
4913       break;
4914     case LE_EXPR:
4915       result = sgn0 <= sgn1;
4916       break;
4917     case GT_EXPR:
4918       result = sgn0 > sgn1;
4919       break;
4920     case GE_EXPR:
4921       result = sgn0 >= sgn1;
4922       break;
4923     default:
4924       gcc_unreachable ();
4925     }
4926 
4927   return constant_boolean_node (result, type);
4928 }
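
/* Editorial aside, not from the GCC sources: a sketch of the encoding used
   above when a bound is omitted.  -1 and 1 stand for minus and plus
   infinity, 0 for any finite value, so omitted bounds compare consistently
   against present ones.  Names are illustrative only.  */
#if 0
static int
infinite_bound_lt_demo (int arg0_present, int upper0_p,
			int arg1_present, int upper1_p)
{
  int sgn0 = arg0_present ? 0 : (upper0_p ? 1 : -1);
  int sgn1 = arg1_present ? 0 : (upper1_p ? 1 : -1);
  return sgn0 < sgn1;	/* The LT_EXPR case.  */
}
#endif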
4929 
4930 /* Helper routine for make_range.  Perform one step for it, return
4931    new expression if the loop should continue or NULL_TREE if it should
4932    stop.  */
4933 
4934 tree
4935 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4936 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4937 		 bool *strict_overflow_p)
4938 {
4939   tree arg0_type = TREE_TYPE (arg0);
4940   tree n_low, n_high, low = *p_low, high = *p_high;
4941   int in_p = *p_in_p, n_in_p;
4942 
4943   switch (code)
4944     {
4945     case TRUTH_NOT_EXPR:
4946       /* We can only do something if the range is testing for zero.  */
4947       if (low == NULL_TREE || high == NULL_TREE
4948 	  || ! integer_zerop (low) || ! integer_zerop (high))
4949 	return NULL_TREE;
4950       *p_in_p = ! in_p;
4951       return arg0;
4952 
4953     case EQ_EXPR: case NE_EXPR:
4954     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4955       /* We can only do something if the range is testing for zero
4956 	 and if the second operand is an integer constant.  Note that
4957 	 saying something is "in" the range we make is done by
4958 	 complementing IN_P, since it is initially set for the case of
4959 	 being not equal to zero; "out" is leaving it alone.  */
4960       if (low == NULL_TREE || high == NULL_TREE
4961 	  || ! integer_zerop (low) || ! integer_zerop (high)
4962 	  || TREE_CODE (arg1) != INTEGER_CST)
4963 	return NULL_TREE;
4964 
4965       switch (code)
4966 	{
4967 	case NE_EXPR:  /* - [c, c]  */
4968 	  low = high = arg1;
4969 	  break;
4970 	case EQ_EXPR:  /* + [c, c]  */
4971 	  in_p = ! in_p, low = high = arg1;
4972 	  break;
4973 	case GT_EXPR:  /* - [-, c] */
4974 	  low = 0, high = arg1;
4975 	  break;
4976 	case GE_EXPR:  /* + [c, -] */
4977 	  in_p = ! in_p, low = arg1, high = 0;
4978 	  break;
4979 	case LT_EXPR:  /* - [c, -] */
4980 	  low = arg1, high = 0;
4981 	  break;
4982 	case LE_EXPR:  /* + [-, c] */
4983 	  in_p = ! in_p, low = 0, high = arg1;
4984 	  break;
4985 	default:
4986 	  gcc_unreachable ();
4987 	}
4988 
4989       /* If this is an unsigned comparison, we also know that EXP is
4990 	 greater than or equal to zero.  We base the range tests we make
4991 	 on that fact, so we record it here so we can parse existing
4992 	 range tests.  We test arg0_type since often the return type
4993 	 of, e.g. EQ_EXPR, is boolean.  */
4994       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4995 	{
4996 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4997 			      in_p, low, high, 1,
4998 			      build_int_cst (arg0_type, 0),
4999 			      NULL_TREE))
5000 	    return NULL_TREE;
5001 
5002 	  in_p = n_in_p, low = n_low, high = n_high;
5003 
5004 	  /* If the high bound is missing, but we have a nonzero low
5005 	     bound, reverse the range so it goes from zero to the low bound
5006 	     minus 1.  */
5007 	  if (high == 0 && low && ! integer_zerop (low))
5008 	    {
5009 	      in_p = ! in_p;
5010 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5011 				  build_int_cst (TREE_TYPE (low), 1), 0);
5012 	      low = build_int_cst (arg0_type, 0);
5013 	    }
5014 	}
5015 
5016       *p_low = low;
5017       *p_high = high;
5018       *p_in_p = in_p;
5019       return arg0;
5020 
5021     case NEGATE_EXPR:
5022       /* If flag_wrapv and ARG0_TYPE is signed, make sure
5023 	 low and high are non-NULL, then normalize will DTRT.  */
5024       if (!TYPE_UNSIGNED (arg0_type)
5025 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5026 	{
5027 	  if (low == NULL_TREE)
5028 	    low = TYPE_MIN_VALUE (arg0_type);
5029 	  if (high == NULL_TREE)
5030 	    high = TYPE_MAX_VALUE (arg0_type);
5031 	}
5032 
5033       /* (-x) IN [a,b] -> x in [-b, -a]  */
5034       n_low = range_binop (MINUS_EXPR, exp_type,
5035 			   build_int_cst (exp_type, 0),
5036 			   0, high, 1);
5037       n_high = range_binop (MINUS_EXPR, exp_type,
5038 			    build_int_cst (exp_type, 0),
5039 			    0, low, 0);
5040       if (n_high != 0 && TREE_OVERFLOW (n_high))
5041 	return NULL_TREE;
5042       goto normalize;
5043 
5044     case BIT_NOT_EXPR:
5045       /* ~ X -> -X - 1  */
5046       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5047 			 build_int_cst (exp_type, 1));
5048 
5049     case PLUS_EXPR:
5050     case MINUS_EXPR:
5051       if (TREE_CODE (arg1) != INTEGER_CST)
5052 	return NULL_TREE;
5053 
5054       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5055 	 move a constant to the other side.  */
5056       if (!TYPE_UNSIGNED (arg0_type)
5057 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5058 	return NULL_TREE;
5059 
5060       /* If EXP is signed, any overflow in the computation is undefined,
5061 	 so we don't worry about it so long as our computations on
5062 	 the bounds don't overflow.  For unsigned, overflow is defined
5063 	 and this is exactly the right thing.  */
5064       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5065 			   arg0_type, low, 0, arg1, 0);
5066       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5067 			    arg0_type, high, 1, arg1, 0);
5068       if ((n_low != 0 && TREE_OVERFLOW (n_low))
5069 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
5070 	return NULL_TREE;
5071 
5072       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5073 	*strict_overflow_p = true;
5074 
5075       normalize:
5076 	/* Check for an unsigned range which has wrapped around the maximum
5077 	   value thus making n_high < n_low, and normalize it.  */
5078 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5079 	  {
5080 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5081 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
5082 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5083 				build_int_cst (TREE_TYPE (n_low), 1), 0);
5084 
5085 	    /* If the range is of the form +/- [ x+1, x ], we won't
5086 	       be able to normalize it.  But then, it represents the
5087 	       whole range or the empty set, so make it
5088 	       +/- [ -, - ].  */
5089 	    if (tree_int_cst_equal (n_low, low)
5090 		&& tree_int_cst_equal (n_high, high))
5091 	      low = high = 0;
5092 	    else
5093 	      in_p = ! in_p;
5094 	  }
5095 	else
5096 	  low = n_low, high = n_high;
5097 
5098 	*p_low = low;
5099 	*p_high = high;
5100 	*p_in_p = in_p;
5101 	return arg0;
5102 
5103     CASE_CONVERT:
5104     case NON_LVALUE_EXPR:
5105       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5106 	return NULL_TREE;
5107 
5108       if (! INTEGRAL_TYPE_P (arg0_type)
5109 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
5110 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5111 	return NULL_TREE;
5112 
5113       n_low = low, n_high = high;
5114 
5115       if (n_low != 0)
5116 	n_low = fold_convert_loc (loc, arg0_type, n_low);
5117 
5118       if (n_high != 0)
5119 	n_high = fold_convert_loc (loc, arg0_type, n_high);
5120 
5121       /* If we're converting arg0 from an unsigned type, to exp,
5122 	 a signed type, we will be doing the comparison as unsigned.
5123 	 The tests above have already verified that LOW and HIGH
5124 	 are both positive.
5125 
5126 	 So we have to ensure that we will handle large unsigned
5127 	 values the same way that the current signed bounds treat
5128 	 negative values.  */
5129 
5130       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5131 	{
5132 	  tree high_positive;
5133 	  tree equiv_type;
5134 	  /* For fixed-point modes, we need to pass the saturating flag
5135 	     as the 2nd parameter.  */
5136 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5137 	    equiv_type
5138 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5139 						TYPE_SATURATING (arg0_type));
5140 	  else
5141 	    equiv_type
5142 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5143 
5144 	  /* A range without an upper bound is, naturally, unbounded.
5145 	     Since convert would have cropped a very large value, use
5146 	     the max value for the destination type.  */
5147 	  high_positive
5148 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5149 	      : TYPE_MAX_VALUE (arg0_type);
5150 
5151 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5152 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5153 					     fold_convert_loc (loc, arg0_type,
5154 							       high_positive),
5155 					     build_int_cst (arg0_type, 1));
5156 
5157 	  /* If the low bound is specified, "and" the range with the
5158 	     range for which the original unsigned value will be
5159 	     positive.  */
5160 	  if (low != 0)
5161 	    {
5162 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5163 				  1, fold_convert_loc (loc, arg0_type,
5164 						       integer_zero_node),
5165 				  high_positive))
5166 		return NULL_TREE;
5167 
5168 	      in_p = (n_in_p == in_p);
5169 	    }
5170 	  else
5171 	    {
5172 	      /* Otherwise, "or" the range with the range of the input
5173 		 that will be interpreted as negative.  */
5174 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5175 				  1, fold_convert_loc (loc, arg0_type,
5176 						       integer_zero_node),
5177 				  high_positive))
5178 		return NULL_TREE;
5179 
5180 	      in_p = (in_p != n_in_p);
5181 	    }
5182 	}
5183 
5184       /* Otherwise, if we are converting arg0 from signed type, to exp,
5185 	 an unsigned type, we will do the comparison as signed.  If
5186 	 high is non-NULL, we punt above if it doesn't fit in the signed
5187 	 type, so if we get through here, +[-, high] or +[low, high] are
5188 	 equivalent to +[-, n_high] or +[n_low, n_high].  Similarly,
5189 	 +[-, -] or -[-, -] are equivalent too.  But if low is specified and
5190 	 high is not, the +[low, -] range is equivalent to union of
5191 	 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5192 	 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5193 	 low being 0, which should be treated as [-, -].  */
5194       else if (TYPE_UNSIGNED (exp_type)
5195 	       && !TYPE_UNSIGNED (arg0_type)
5196 	       && low
5197 	       && !high)
5198 	{
5199 	  if (integer_zerop (low))
5200 	    n_low = NULL_TREE;
5201 	  else
5202 	    {
5203 	      n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5204 					n_low, build_int_cst (arg0_type, -1));
5205 	      n_low = build_zero_cst (arg0_type);
5206 	      in_p = !in_p;
5207 	    }
5208 	}
5209 
5210       *p_low = n_low;
5211       *p_high = n_high;
5212       *p_in_p = in_p;
5213       return arg0;
5214 
5215     default:
5216       return NULL_TREE;
5217     }
5218 }
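
/* Editorial aside, not from the GCC sources: a worked instance of the
   PLUS_EXPR case and the "normalize" label above, on a 32-bit unsigned X.
   Testing X + 10 in [5, 20] subtracts 10 from both bounds, producing the
   wrapped range [0xFFFFFFFB, 10]; since the new high is below the new low,
   normalization flips IN_P and yields "X not in [11, 0xFFFFFFFA]".  */
#if 0
static int
plus_step_demo (unsigned int x)
{
  int direct = x + 10u >= 5u && x + 10u <= 20u;
  int normalized = !(x >= 11u && x <= 0xFFFFFFFAu);
  return direct == normalized;	/* Always 1.  */
}
#endif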
5219 
5220 /* Given EXP, a logical expression, set the range it is testing into
5221    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
5222    actually being tested.  *PLOW and *PHIGH will be made of the same
5223    type as the returned expression.  If EXP is not a comparison, we
5224    will most likely not be returning a useful value and range.  Set
5225    *STRICT_OVERFLOW_P to true if the return value is only valid
5226    because signed overflow is undefined; otherwise, do not change
5227    *STRICT_OVERFLOW_P.  */
5228 
5229 tree
5230 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5231 	    bool *strict_overflow_p)
5232 {
5233   enum tree_code code;
5234   tree arg0, arg1 = NULL_TREE;
5235   tree exp_type, nexp;
5236   int in_p;
5237   tree low, high;
5238   location_t loc = EXPR_LOCATION (exp);
5239 
5240   /* Start with simply saying "EXP != 0" and then look at the code of EXP
5241      and see if we can refine the range.  Some of the cases below may not
5242      happen, but it doesn't seem worth worrying about this.  We "continue"
5243      the outer loop when we've changed something; otherwise we "break"
5244      the switch, which will "break" the while.  */
5245 
5246   in_p = 0;
5247   low = high = build_int_cst (TREE_TYPE (exp), 0);
5248 
5249   while (1)
5250     {
5251       code = TREE_CODE (exp);
5252       exp_type = TREE_TYPE (exp);
5253       arg0 = NULL_TREE;
5254 
5255       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5256 	{
5257 	  if (TREE_OPERAND_LENGTH (exp) > 0)
5258 	    arg0 = TREE_OPERAND (exp, 0);
5259 	  if (TREE_CODE_CLASS (code) == tcc_binary
5260 	      || TREE_CODE_CLASS (code) == tcc_comparison
5261 	      || (TREE_CODE_CLASS (code) == tcc_expression
5262 		  && TREE_OPERAND_LENGTH (exp) > 1))
5263 	    arg1 = TREE_OPERAND (exp, 1);
5264 	}
5265       if (arg0 == NULL_TREE)
5266 	break;
5267 
5268       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5269 			      &high, &in_p, strict_overflow_p);
5270       if (nexp == NULL_TREE)
5271 	break;
5272       exp = nexp;
5273     }
5274 
5275   /* If EXP is a constant, we can evaluate whether this is true or false.  */
5276   if (TREE_CODE (exp) == INTEGER_CST)
5277     {
5278       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5279 						 exp, 0, low, 0))
5280 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
5281 						    exp, 1, high, 1)));
5282       low = high = 0;
5283       exp = 0;
5284     }
5285 
5286   *pin_p = in_p, *plow = low, *phigh = high;
5287   return exp;
5288 }
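
/* Editorial aside, not from the GCC sources: one full make_range walk.
   For !(x > 5) on unsigned X, the TRUTH_NOT_EXPR step flips IN_P, the
   GT_EXPR step records the bound 5, and the unsignedness of X supplies
   the missing lower bound, giving + [0, 5]:  */
#if 0
static int
make_range_demo (unsigned int x)
{
  return !(x > 5u) == (x <= 5u);	/* Always 1.  */
}
#endif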
5289 
5290 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5291    a bitwise check i.e. when
5292      LOW  == 0xXX...X00...0
5293      HIGH == 0xXX...X11...1
5294    Return corresponding mask in MASK and stem in VALUE.  */
5295 
5296 static bool
5297 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5298 		  tree *value)
5299 {
5300   if (TREE_CODE (low) != INTEGER_CST
5301       || TREE_CODE (high) != INTEGER_CST)
5302     return false;
5303 
5304   unsigned prec = TYPE_PRECISION (type);
5305   wide_int lo = wi::to_wide (low, prec);
5306   wide_int hi = wi::to_wide (high, prec);
5307 
5308   wide_int end_mask = lo ^ hi;
5309   if ((end_mask & (end_mask + 1)) != 0
5310       || (lo & end_mask) != 0)
5311     return false;
5312 
5313   wide_int stem_mask = ~end_mask;
5314   wide_int stem = lo & stem_mask;
5315   if (stem != (hi & stem_mask))
5316     return false;
5317 
5318   *mask = wide_int_to_tree (type, stem_mask);
5319   *value = wide_int_to_tree (type, stem);
5320 
5321   return true;
5322 }
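
/* Editorial aside, not from the GCC sources: the range [0x20, 0x3f]
   satisfies the conditions above (LOW and HIGH differ only in a block of
   trailing ones, and LOW's trailing bits are zero), so the range check
   collapses to a single mask-and-compare:  */
#if 0
static int
maskable_range_demo (unsigned int c)
{
  int range = c >= 0x20u && c <= 0x3fu;
  int masked = (c & ~0x1fu) == 0x20u;	/* stem_mask and stem.  */
  return range == masked;		/* Always 1.  */
}
#endif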
5323 
5324 /* Helper routine for build_range_check and match.pd.  Return the type to
5325    perform the check or NULL if it shouldn't be optimized.  */
5326 
5327 tree
5328 range_check_type (tree etype)
5329 {
5330   /* First make sure that arithmetic in this type is valid, then make sure
5331      that it wraps around.  */
5332   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5333     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5334 
5335   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5336     {
5337       tree utype, minv, maxv;
5338 
5339       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5340 	 for the type in question, as we rely on this here.  */
5341       utype = unsigned_type_for (etype);
5342       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5343       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5344 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
5345       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5346 
5347       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5348 				      minv, 1, maxv, 1)))
5349 	etype = utype;
5350       else
5351 	return NULL_TREE;
5352     }
5353   else if (POINTER_TYPE_P (etype))
5354     etype = unsigned_type_for (etype);
5355   return etype;
5356 }
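
/* Editorial aside, not from the GCC sources: the wrap-around property the
   code above insists on, checked directly for a 32-bit int on a two's
   complement target:  */
#if 0
#include <limits.h>
static int
wraparound_property_demo (void)
{
  return (unsigned) INT_MAX + 1u == (unsigned) INT_MIN;	/* 1 */
}
#endif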
5357 
5358 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5359    type, TYPE, return an expression to test if EXP is in (or out of, depending
5360    on IN_P) the range.  Return 0 if the test couldn't be created.  */
5361 
5362 tree
5363 build_range_check (location_t loc, tree type, tree exp, int in_p,
5364 		   tree low, tree high)
5365 {
5366   tree etype = TREE_TYPE (exp), mask, value;
5367 
5368   /* Disable this optimization for function pointer expressions
5369      on targets that require function pointer canonicalization.  */
5370   if (targetm.have_canonicalize_funcptr_for_compare ()
5371       && POINTER_TYPE_P (etype)
5372       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5373     return NULL_TREE;
5374 
5375   if (! in_p)
5376     {
5377       value = build_range_check (loc, type, exp, 1, low, high);
5378       if (value != 0)
5379         return invert_truthvalue_loc (loc, value);
5380 
5381       return 0;
5382     }
5383 
5384   if (low == 0 && high == 0)
5385     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5386 
5387   if (low == 0)
5388     return fold_build2_loc (loc, LE_EXPR, type, exp,
5389 			    fold_convert_loc (loc, etype, high));
5390 
5391   if (high == 0)
5392     return fold_build2_loc (loc, GE_EXPR, type, exp,
5393 			    fold_convert_loc (loc, etype, low));
5394 
5395   if (operand_equal_p (low, high, 0))
5396     return fold_build2_loc (loc, EQ_EXPR, type, exp,
5397 			    fold_convert_loc (loc, etype, low));
5398 
5399   if (TREE_CODE (exp) == BIT_AND_EXPR
5400       && maskable_range_p (low, high, etype, &mask, &value))
5401     return fold_build2_loc (loc, EQ_EXPR, type,
5402 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
5403 					     exp, mask),
5404 			    value);
5405 
5406   if (integer_zerop (low))
5407     {
5408       if (! TYPE_UNSIGNED (etype))
5409 	{
5410 	  etype = unsigned_type_for (etype);
5411 	  high = fold_convert_loc (loc, etype, high);
5412 	  exp = fold_convert_loc (loc, etype, exp);
5413 	}
5414       return build_range_check (loc, type, exp, 1, 0, high);
5415     }
5416 
5417   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
5418   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5419     {
5420       int prec = TYPE_PRECISION (etype);
5421 
5422       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5423 	{
5424 	  if (TYPE_UNSIGNED (etype))
5425 	    {
5426 	      tree signed_etype = signed_type_for (etype);
5427 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5428 		etype
5429 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5430 	      else
5431 		etype = signed_etype;
5432 	      exp = fold_convert_loc (loc, etype, exp);
5433 	    }
5434 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5435 				  build_int_cst (etype, 0));
5436 	}
5437     }
5438 
5439   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5440      This requires wrap-around arithmetic for the type of the expression.  */
5441   etype = range_check_type (etype);
5442   if (etype == NULL_TREE)
5443     return NULL_TREE;
5444 
5445   high = fold_convert_loc (loc, etype, high);
5446   low = fold_convert_loc (loc, etype, low);
5447   exp = fold_convert_loc (loc, etype, exp);
5448 
5449   value = const_binop (MINUS_EXPR, high, low);
5450 
5451   if (value != 0 && !TREE_OVERFLOW (value))
5452     return build_range_check (loc, type,
5453 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5454 			      1, build_int_cst (etype, 0), value);
5455 
5456   return 0;
5457 }
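
/* Editorial aside, not from the GCC sources: the final transformation
   above turns (c >= low && c <= high) into one unsigned comparison of
   c - low against high - low, sketched for the range ['a', 'z']:  */
#if 0
static int
build_range_check_demo (int c)
{
  int direct = c >= 'a' && c <= 'z';
  int folded = (unsigned) c - 'a' <= (unsigned) ('z' - 'a');
  return direct == folded;	/* Always 1.  */
}
#endif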
5458 
5459 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5460 
5461 static tree
5462 range_predecessor (tree val)
5463 {
5464   tree type = TREE_TYPE (val);
5465 
5466   if (INTEGRAL_TYPE_P (type)
5467       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5468     return 0;
5469   else
5470     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5471 			build_int_cst (TREE_TYPE (val), 1), 0);
5472 }
5473 
5474 /* Return the successor of VAL in its type, handling the infinite case.  */
5475 
5476 static tree
5477 range_successor (tree val)
5478 {
5479   tree type = TREE_TYPE (val);
5480 
5481   if (INTEGRAL_TYPE_P (type)
5482       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5483     return 0;
5484   else
5485     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5486 			build_int_cst (TREE_TYPE (val), 1), 0);
5487 }
5488 
5489 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5490    can, 0 if we can't.  Set the output range into the specified parameters.  */
5491 
5492 bool
5493 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5494 	      tree high0, int in1_p, tree low1, tree high1)
5495 {
5496   int no_overlap;
5497   int subset;
5498   int temp;
5499   tree tem;
5500   int in_p;
5501   tree low, high;
5502   int lowequal = ((low0 == 0 && low1 == 0)
5503 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5504 						low0, 0, low1, 0)));
5505   int highequal = ((high0 == 0 && high1 == 0)
5506 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5507 						 high0, 1, high1, 1)));
5508 
5509   /* Make range 0 be the range that starts first, or ends last if they
5510      start at the same value.  Swap them if it isn't.  */
5511   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5512 				 low0, 0, low1, 0))
5513       || (lowequal
5514 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5515 					high1, 1, high0, 1))))
5516     {
5517       temp = in0_p, in0_p = in1_p, in1_p = temp;
5518       tem = low0, low0 = low1, low1 = tem;
5519       tem = high0, high0 = high1, high1 = tem;
5520     }
5521 
5522   /* If the second range is != high1 where high1 is the type maximum of
5523      the type, try first merging with < high1 range.  */
5524   if (low1
5525       && high1
5526       && TREE_CODE (low1) == INTEGER_CST
5527       && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5528 	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5529 	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5530 			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5531       && operand_equal_p (low1, high1, 0))
5532     {
5533       if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5534 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5535 			   !in1_p, NULL_TREE, range_predecessor (low1)))
5536 	return true;
5537       /* Similarly for the second range != low1 where low1 is the type minimum
5538 	 of the type, try first merging with > low1 range.  */
5539       if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5540 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5541 			   !in1_p, range_successor (low1), NULL_TREE))
5542 	return true;
5543     }
5544 
5545   /* Now flag two cases, whether the ranges are disjoint or whether the
5546      second range is totally subsumed in the first.  Note that the tests
5547      below are simplified by the ones above.  */
5548   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5549 					  high0, 1, low1, 0));
5550   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5551 				      high1, 1, high0, 1));
5552 
5553   /* We now have four cases, depending on whether we are including or
5554      excluding the two ranges.  */
5555   if (in0_p && in1_p)
5556     {
5557       /* If they don't overlap, the result is false.  If the second range
5558 	 is a subset it is the result.  Otherwise, the range is from the start
5559 	 of the second to the end of the first.  */
5560       if (no_overlap)
5561 	in_p = 0, low = high = 0;
5562       else if (subset)
5563 	in_p = 1, low = low1, high = high1;
5564       else
5565 	in_p = 1, low = low1, high = high0;
5566     }
5567 
5568   else if (in0_p && ! in1_p)
5569     {
5570       /* If they don't overlap, the result is the first range.  If they are
5571 	 equal, the result is false.  If the second range is a subset of the
5572 	 first, and the ranges begin at the same place, we go from just after
5573 	 the end of the second range to the end of the first.  If the second
5574 	 range is not a subset of the first, or if it is a subset and both
5575 	 ranges end at the same place, the range starts at the start of the
5576 	 first range and ends just before the second range.
5577 	 Otherwise, we can't describe this as a single range.  */
5578       if (no_overlap)
5579 	in_p = 1, low = low0, high = high0;
5580       else if (lowequal && highequal)
5581 	in_p = 0, low = high = 0;
5582       else if (subset && lowequal)
5583 	{
5584 	  low = range_successor (high1);
5585 	  high = high0;
5586 	  in_p = 1;
5587 	  if (low == 0)
5588 	    {
5589 	      /* We are in the weird situation where high0 > high1 but
5590 		 high1 has no successor.  Punt.  */
5591 	      return 0;
5592 	    }
5593 	}
5594       else if (! subset || highequal)
5595 	{
5596 	  low = low0;
5597 	  high = range_predecessor (low1);
5598 	  in_p = 1;
5599 	  if (high == 0)
5600 	    {
5601 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5602 	      return 0;
5603 	    }
5604 	}
5605       else
5606 	return 0;
5607     }
5608 
5609   else if (! in0_p && in1_p)
5610     {
5611       /* If they don't overlap, the result is the second range.  If the second
5612 	 is a subset of the first, the result is false.  Otherwise,
5613 	 the range starts just after the first range and ends at the
5614 	 end of the second.  */
5615       if (no_overlap)
5616 	in_p = 1, low = low1, high = high1;
5617       else if (subset || highequal)
5618 	in_p = 0, low = high = 0;
5619       else
5620 	{
5621 	  low = range_successor (high0);
5622 	  high = high1;
5623 	  in_p = 1;
5624 	  if (low == 0)
5625 	    {
5626 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5627 	      return 0;
5628 	    }
5629 	}
5630     }
5631 
5632   else
5633     {
5634       /* The case where we are excluding both ranges.  Here the complex case
5635 	 is if they don't overlap.  In that case, the only time we have a
5636 	 range is if they are adjacent.  If the second is a subset of the
5637 	 first, the result is the first.  Otherwise, the range to exclude
5638 	 starts at the beginning of the first range and ends at the end of the
5639 	 second.  */
5640       if (no_overlap)
5641 	{
5642 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5643 					 range_successor (high0),
5644 					 1, low1, 0)))
5645 	    in_p = 0, low = low0, high = high1;
5646 	  else
5647 	    {
5648 	      /* Canonicalize - [min, x] into - [-, x].  */
5649 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5650 		switch (TREE_CODE (TREE_TYPE (low0)))
5651 		  {
5652 		  case ENUMERAL_TYPE:
5653 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5654 				  GET_MODE_BITSIZE
5655 				    (TYPE_MODE (TREE_TYPE (low0)))))
5656 		      break;
5657 		    /* FALLTHROUGH */
5658 		  case INTEGER_TYPE:
5659 		    if (tree_int_cst_equal (low0,
5660 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5661 		      low0 = 0;
5662 		    break;
5663 		  case POINTER_TYPE:
5664 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5665 			&& integer_zerop (low0))
5666 		      low0 = 0;
5667 		    break;
5668 		  default:
5669 		    break;
5670 		  }
5671 
5672 	      /* Canonicalize - [x, max] into - [x, -].  */
5673 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5674 		switch (TREE_CODE (TREE_TYPE (high1)))
5675 		  {
5676 		  case ENUMERAL_TYPE:
5677 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5678 				  GET_MODE_BITSIZE
5679 				    (TYPE_MODE (TREE_TYPE (high1)))))
5680 		      break;
5681 		    /* FALLTHROUGH */
5682 		  case INTEGER_TYPE:
5683 		    if (tree_int_cst_equal (high1,
5684 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5685 		      high1 = 0;
5686 		    break;
5687 		  case POINTER_TYPE:
5688 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5689 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5690 						       high1, 1,
5691 						       build_int_cst (TREE_TYPE (high1), 1),
5692 						       1)))
5693 		      high1 = 0;
5694 		    break;
5695 		  default:
5696 		    break;
5697 		  }
5698 
5699 	      /* The ranges might be also adjacent between the maximum and
5700 	         minimum values of the given type.  For
5701 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5702 	         return + [x + 1, y - 1].  */
5703 	      if (low0 == 0 && high1 == 0)
5704 	        {
5705 		  low = range_successor (high0);
5706 		  high = range_predecessor (low1);
5707 		  if (low == 0 || high == 0)
5708 		    return 0;
5709 
5710 		  in_p = 1;
5711 		}
5712 	      else
5713 		return 0;
5714 	    }
5715 	}
5716       else if (subset)
5717 	in_p = 0, low = low0, high = high0;
5718       else
5719 	in_p = 0, low = low0, high = high1;
5720     }
5721 
5722   *pin_p = in_p, *plow = low, *phigh = high;
5723   return 1;
5724 }
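
/* Editorial aside, not from the GCC sources: the in0_p && in1_p branch
   above intersects overlapping ranges, taking the later start and the
   earlier end.  For + [0, 8] and + [5, 20]:  */
#if 0
static int
merge_ranges_demo (unsigned int x)
{
  int separate = x <= 8u && (x >= 5u && x <= 20u);
  int merged = x >= 5u && x <= 8u;	/* [low1, high0].  */
  return separate == merged;		/* Always 1.  */
}
#endif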
5725 
5726 
5727 /* Subroutine of fold, looking inside expressions of the form
5728    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5729    of the COND_EXPR.  This function is being used also to optimize
5730    A op B ? C : A, by reversing the comparison first.
5731 
5732    Return a folded expression whose code is not a COND_EXPR
5733    anymore, or NULL_TREE if no folding opportunity is found.  */
5734 
5735 static tree
5736 fold_cond_expr_with_comparison (location_t loc, tree type,
5737 				tree arg0, tree arg1, tree arg2)
5738 {
5739   enum tree_code comp_code = TREE_CODE (arg0);
5740   tree arg00 = TREE_OPERAND (arg0, 0);
5741   tree arg01 = TREE_OPERAND (arg0, 1);
5742   tree arg1_type = TREE_TYPE (arg1);
5743   tree tem;
5744 
5745   STRIP_NOPS (arg1);
5746   STRIP_NOPS (arg2);
5747 
5748   /* If we have A op 0 ? A : -A, consider applying the following
5749      transformations:
5750 
5751      A == 0? A : -A    same as -A
5752      A != 0? A : -A    same as A
5753      A >= 0? A : -A    same as abs (A)
5754      A > 0?  A : -A    same as abs (A)
5755      A <= 0? A : -A    same as -abs (A)
5756      A < 0?  A : -A    same as -abs (A)
5757 
5758      None of these transformations work for modes with signed
5759      zeros.  If A is +/-0, the first two transformations will
5760      change the sign of the result (from +0 to -0, or vice
5761      versa).  The last four will fix the sign of the result,
5762      even though the original expressions could be positive or
5763      negative, depending on the sign of A.
5764 
5765      Note that all these transformations are correct if A is
5766      NaN, since the two alternatives (A and -A) are also NaNs.  */
5767   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5768       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5769 	  ? real_zerop (arg01)
5770 	  : integer_zerop (arg01))
5771       && ((TREE_CODE (arg2) == NEGATE_EXPR
5772 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5773 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5774 	        have already been folded to Y-X, check for that. */
5775 	  || (TREE_CODE (arg1) == MINUS_EXPR
5776 	      && TREE_CODE (arg2) == MINUS_EXPR
5777 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5778 				  TREE_OPERAND (arg2, 1), 0)
5779 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5780 				  TREE_OPERAND (arg2, 0), 0))))
5781     switch (comp_code)
5782       {
5783       case EQ_EXPR:
5784       case UNEQ_EXPR:
5785 	tem = fold_convert_loc (loc, arg1_type, arg1);
5786 	return fold_convert_loc (loc, type, negate_expr (tem));
5787       case NE_EXPR:
5788       case LTGT_EXPR:
5789 	return fold_convert_loc (loc, type, arg1);
5790       case UNGE_EXPR:
5791       case UNGT_EXPR:
5792 	if (flag_trapping_math)
5793 	  break;
5794 	/* Fall through.  */
5795       case GE_EXPR:
5796       case GT_EXPR:
5797 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5798 	  break;
5799 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5800 	return fold_convert_loc (loc, type, tem);
5801       case UNLE_EXPR:
5802       case UNLT_EXPR:
5803 	if (flag_trapping_math)
5804 	  break;
5805 	/* FALLTHRU */
5806       case LE_EXPR:
5807       case LT_EXPR:
5808 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5809 	  break;
5810 	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5811 	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5812 	  {
5813 	    /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5814 	       is not; it invokes UB both in abs and in the negation of it.
5815 	       So, use ABSU_EXPR instead.  */
5816 	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
5817 	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5818 	    tem = negate_expr (tem);
5819 	    return fold_convert_loc (loc, type, tem);
5820 	  }
5821 	else
5822 	  {
5823 	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5824 	    return negate_expr (fold_convert_loc (loc, type, tem));
5825 	  }
5826       default:
5827 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5828 	break;
5829       }
5830 
5831   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5832      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5833      both transformations are correct when A is NaN: A != 0
5834      is then true, and A == 0 is false.  */
5835 
5836   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5837       && integer_zerop (arg01) && integer_zerop (arg2))
5838     {
5839       if (comp_code == NE_EXPR)
5840 	return fold_convert_loc (loc, type, arg1);
5841       else if (comp_code == EQ_EXPR)
5842 	return build_zero_cst (type);
5843     }
5844 
5845   /* Try some transformations of A op B ? A : B.
5846 
5847      A == B? A : B    same as B
5848      A != B? A : B    same as A
5849      A >= B? A : B    same as max (A, B)
5850      A > B?  A : B    same as max (B, A)
5851      A <= B? A : B    same as min (A, B)
5852      A < B?  A : B    same as min (B, A)
5853 
5854      As above, these transformations don't work in the presence
5855      of signed zeros.  For example, if A and B are zeros of
5856      opposite sign, the first two transformations will change
5857      the sign of the result.  In the last four, the original
5858      expressions give different results for (A=+0, B=-0) and
5859      (A=-0, B=+0), but the transformed expressions do not.
5860 
5861      The first two transformations are correct if either A or B
5862      is a NaN.  In the first transformation, the condition will
5863      be false, and B will indeed be chosen.  In the case of the
5864      second transformation, the condition A != B will be true,
5865      and A will be chosen.
5866 
5867      The conversions to max() and min() are not correct if B is
5868      a number and A is not.  The conditions in the original
5869      expressions will be false, so all four give B.  The min()
5870      and max() versions would give a NaN instead.  */
5871   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5872       && operand_equal_for_comparison_p (arg01, arg2)
5873       /* Avoid these transformations if the COND_EXPR may be used
5874 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5875       && (in_gimple_form
5876 	  || VECTOR_TYPE_P (type)
5877 	  || (! lang_GNU_CXX ()
5878 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5879 	  || ! maybe_lvalue_p (arg1)
5880 	  || ! maybe_lvalue_p (arg2)))
5881     {
5882       tree comp_op0 = arg00;
5883       tree comp_op1 = arg01;
5884       tree comp_type = TREE_TYPE (comp_op0);
5885 
5886       switch (comp_code)
5887 	{
5888 	case EQ_EXPR:
5889 	  return fold_convert_loc (loc, type, arg2);
5890 	case NE_EXPR:
5891 	  return fold_convert_loc (loc, type, arg1);
5892 	case LE_EXPR:
5893 	case LT_EXPR:
5894 	case UNLE_EXPR:
5895 	case UNLT_EXPR:
5896 	  /* In C++ a ?: expression can be an lvalue, so put the
5897 	     operand which will be used if they are equal first
5898 	     so that we can convert this back to the
5899 	     corresponding COND_EXPR.  */
5900 	  if (!HONOR_NANS (arg1))
5901 	    {
5902 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5903 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5904 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5905 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5906 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5907 				   comp_op1, comp_op0);
5908 	      return fold_convert_loc (loc, type, tem);
5909 	    }
5910 	  break;
5911 	case GE_EXPR:
5912 	case GT_EXPR:
5913 	case UNGE_EXPR:
5914 	case UNGT_EXPR:
5915 	  if (!HONOR_NANS (arg1))
5916 	    {
5917 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5918 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5919 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5920 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5921 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5922 				   comp_op1, comp_op0);
5923 	      return fold_convert_loc (loc, type, tem);
5924 	    }
5925 	  break;
5926 	case UNEQ_EXPR:
5927 	  if (!HONOR_NANS (arg1))
5928 	    return fold_convert_loc (loc, type, arg2);
5929 	  break;
5930 	case LTGT_EXPR:
5931 	  if (!HONOR_NANS (arg1))
5932 	    return fold_convert_loc (loc, type, arg1);
5933 	  break;
5934 	default:
5935 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5936 	  break;
5937 	}
5938     }
5939 
5940   return NULL_TREE;
5941 }
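
/* Editorial aside, not from the GCC sources: two rows of the tables above,
   written out on ordinary ints, where signed zeros and NaNs do not arise
   and the transformations hold unconditionally:  */
#if 0
static int cond_abs_demo (int a)	/* A >= 0 ? A : -A  =>  abs (A)  */
{
  return a >= 0 ? a : -a;
}
static int cond_max_demo (int a, int b)	/* A >= B ? A : B  =>  max (A, B)  */
{
  return a >= b ? a : b;
}
#endif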
5942 
5943 
5944 
5945 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5946 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5947   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5948 		false) >= 2)
5949 #endif
5950 
5951 /* EXP is some logical combination of boolean tests.  See if we can
5952    merge it into some range test.  Return the new tree if so.  */
5953 
5954 static tree
5955 fold_range_test (location_t loc, enum tree_code code, tree type,
5956 		 tree op0, tree op1)
5957 {
5958   int or_op = (code == TRUTH_ORIF_EXPR
5959 	       || code == TRUTH_OR_EXPR);
5960   int in0_p, in1_p, in_p;
5961   tree low0, low1, low, high0, high1, high;
5962   bool strict_overflow_p = false;
5963   tree tem, lhs, rhs;
5964   const char * const warnmsg = G_("assuming signed overflow does not occur "
5965 				  "when simplifying range test");
5966 
5967   if (!INTEGRAL_TYPE_P (type))
5968     return 0;
5969 
5970   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5971   /* If op0 is known true or false and this is a short-circuiting
5972      operation we must not merge with op1 since that makes side-effects
5973      unconditional.  So special-case this.  */
5974   if (!lhs
5975       && ((code == TRUTH_ORIF_EXPR && in0_p)
5976 	  || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5977     return op0;
5978   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5979 
5980   /* If this is an OR operation, invert both sides; we will invert
5981      again at the end.  */
5982   if (or_op)
5983     in0_p = ! in0_p, in1_p = ! in1_p;
5984 
5985   /* If both expressions are the same, if we can merge the ranges, and we
5986      can build the range test, return it or it inverted.  If one of the
5987      ranges is always true or always false, consider it to be the same
5988      expression as the other.  */
5989   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5990       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5991 		       in1_p, low1, high1)
5992       && (tem = (build_range_check (loc, type,
5993 				    lhs != 0 ? lhs
5994 				    : rhs != 0 ? rhs : integer_zero_node,
5995 				    in_p, low, high))) != 0)
5996     {
5997       if (strict_overflow_p)
5998 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5999       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6000     }
6001 
6002   /* On machines where the branch cost is expensive, if this is a
6003      short-circuited branch and the underlying object on both sides
6004      is the same, make a non-short-circuit operation.  */
6005   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6006   if (param_logical_op_non_short_circuit != -1)
6007     logical_op_non_short_circuit
6008       = param_logical_op_non_short_circuit;
6009   if (logical_op_non_short_circuit
6010       && !flag_sanitize_coverage
6011       && lhs != 0 && rhs != 0
6012       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6013       && operand_equal_p (lhs, rhs, 0))
6014     {
6015       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
6016 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6017 	 which cases we can't do this.  */
6018       if (simple_operand_p (lhs))
6019 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6020 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6021 			   type, op0, op1);
6022 
6023       else if (!lang_hooks.decls.global_bindings_p ()
6024 	       && !CONTAINS_PLACEHOLDER_P (lhs))
6025 	{
6026 	  tree common = save_expr (lhs);
6027 
6028 	  if ((lhs = build_range_check (loc, type, common,
6029 					or_op ? ! in0_p : in0_p,
6030 					low0, high0)) != 0
6031 	      && (rhs = build_range_check (loc, type, common,
6032 					   or_op ? ! in1_p : in1_p,
6033 					   low1, high1)) != 0)
6034 	    {
6035 	      if (strict_overflow_p)
6036 		fold_overflow_warning (warnmsg,
6037 				       WARN_STRICT_OVERFLOW_COMPARISON);
6038 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6039 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6040 				 type, lhs, rhs);
6041 	    }
6042 	}
6043     }
6044 
6045   return 0;
6046 }
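
/* Editorial aside, not from the GCC sources: the classic candidate for
   this function is the digit test, where two short-circuited comparisons
   merge into one branch:  */
#if 0
static int
fold_range_test_demo (int ch)
{
  int shortcircuit = ch >= '0' && ch <= '9';
  int merged = (unsigned) ch - '0' <= 9u;	/* One comparison.  */
  return shortcircuit == merged;		/* Always 1.  */
}
#endif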
6047 
6048 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6049    bit value.  Arrange things so the extra bits will be set to zero if and
6050    only if C is signed-extended to its full width.  If MASK is nonzero,
6051    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
6052 
6053 static tree
6054 unextend (tree c, int p, int unsignedp, tree mask)
6055 {
6056   tree type = TREE_TYPE (c);
6057   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6058   tree temp;
6059 
6060   if (p == modesize || unsignedp)
6061     return c;
6062 
6063   /* We work by getting just the sign bit into the low-order bit, then
6064      into the high-order bit, then sign-extend.  We then XOR that value
6065      with C.  */
6066   temp = build_int_cst (TREE_TYPE (c),
6067 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6068 
6069   /* We must use a signed type in order to get an arithmetic right shift.
6070      However, we must also avoid introducing accidental overflows, so that
6071      a subsequent call to integer_zerop will work.  Hence we must
6072      do the type conversion here.  At this point, the constant is either
6073      zero or one, and the conversion to a signed type can never overflow.
6074      We could get an overflow if this conversion is done anywhere else.  */
6075   if (TYPE_UNSIGNED (type))
6076     temp = fold_convert (signed_type_for (type), temp);
6077 
6078   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6079   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6080   if (mask != 0)
6081     temp = const_binop (BIT_AND_EXPR, temp,
6082 			fold_convert (TREE_TYPE (c), mask));
6083   /* If necessary, convert the type back to match the type of C.  */
6084   if (TYPE_UNSIGNED (type))
6085     temp = fold_convert (type, temp);
6086 
6087   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6088 }
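
/* Editorial aside, not from the GCC sources: a sketch of the XOR trick
   above on a 32-bit word, assuming 0 < p < 32.  The sign bit of the P-bit
   value is replicated across bits P..31 and XORed in, so the high bits of
   the result are zero iff C was already sign-extended.  */
#if 0
static unsigned int
unextend_demo (unsigned int c, int p)
{
  unsigned int s = (c >> (p - 1)) & 1u;	/* Sign bit of the P-bit value.  */
  unsigned int ext = s ? ~0u << p : 0u;	/* That bit copied to bits P..31.  */
  return c ^ ext;
}
#endif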
6089 
6090 /* For an expression that has the form
6091      (A && B) || ~B
6092    or
6093      (A || B) && ~B,
6094    we can drop one of the inner expressions and simplify to
6095      A || ~B
6096    or
6097      A && ~B
6098    LOC is the location of the resulting expression.  OP is the inner
6099    logical operation; the left-hand side in the examples above, while CMPOP
6100    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
6101    removing a condition that guards another, as in
6102      (A != NULL && A->...) || A == NULL
6103    which we must not transform.  If RHS_ONLY is true, only eliminate the
6104    right-most operand of the inner logical operation.  */
6105 
6106 static tree
6107 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6108 				 bool rhs_only)
6109 {
6110   tree type = TREE_TYPE (cmpop);
6111   enum tree_code code = TREE_CODE (cmpop);
6112   enum tree_code truthop_code = TREE_CODE (op);
6113   tree lhs = TREE_OPERAND (op, 0);
6114   tree rhs = TREE_OPERAND (op, 1);
6115   tree orig_lhs = lhs, orig_rhs = rhs;
6116   enum tree_code rhs_code = TREE_CODE (rhs);
6117   enum tree_code lhs_code = TREE_CODE (lhs);
6118   enum tree_code inv_code;
6119 
6120   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6121     return NULL_TREE;
6122 
6123   if (TREE_CODE_CLASS (code) != tcc_comparison)
6124     return NULL_TREE;
6125 
6126   if (rhs_code == truthop_code)
6127     {
6128       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6129       if (newrhs != NULL_TREE)
6130 	{
6131 	  rhs = newrhs;
6132 	  rhs_code = TREE_CODE (rhs);
6133 	}
6134     }
6135   if (lhs_code == truthop_code && !rhs_only)
6136     {
6137       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6138       if (newlhs != NULL_TREE)
6139 	{
6140 	  lhs = newlhs;
6141 	  lhs_code = TREE_CODE (lhs);
6142 	}
6143     }
6144 
6145   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6146   if (inv_code == rhs_code
6147       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6148       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6149     return lhs;
6150   if (!rhs_only && inv_code == lhs_code
6151       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6152       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6153     return rhs;
6154   if (rhs != orig_rhs || lhs != orig_lhs)
6155     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6156 			    lhs, rhs);
6157   return NULL_TREE;
6158 }
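
/* Editorial aside, not from the GCC sources: the basic simplification on
   plain ints.  When B holds, the left arm reduces to A; when B fails, the
   right arm is already true.  The RHS_ONLY guard exists because dropping
   the left-most condition in (A != NULL && A->f) || A == NULL would let
   A->f be evaluated on a null pointer.  */
#if 0
static int
opposite_arm_demo (int a, int b)
{
  return ((a && b) || !b) == (a || !b);	/* Always 1.  */
}
#endif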
6159 
6160 /* Find ways of folding logical expressions of LHS and RHS:
6161    Try to merge two comparisons to the same innermost item.
6162    Look for range tests like "ch >= '0' && ch <= '9'".
6163    Look for combinations of simple terms on machines with expensive branches
6164    and evaluate the RHS unconditionally.
6165 
6166    For example, if we have p->a == 2 && p->b == 4 and we can make an
6167    object large enough to span both A and B, we can do this with a comparison
6168    against the object ANDed with the a mask.
6169 
6170    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6171    operations to do this with one comparison.
6172 
6173    We check for both normal comparisons and the BIT_AND_EXPRs made by this
6174    function and the one above.
6175 
6176    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
6177    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6178 
6179    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6180    two operands.
6181 
6182    We return the simplified tree or 0 if no optimization is possible.  */
6183 
6184 static tree
6185 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6186 		    tree lhs, tree rhs)
6187 {
6188   /* If this is the "or" of two comparisons, we can do something if
6189      the comparisons are NE_EXPR.  If this is the "and", we can do something
6190      if the comparisons are EQ_EXPR.  I.e.,
6191 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
6192 
6193      WANTED_CODE is this operation code.  For single bit fields, we can
6194      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6195      comparison for one-bit fields.  */
6196 
6197   enum tree_code wanted_code;
6198   enum tree_code lcode, rcode;
6199   tree ll_arg, lr_arg, rl_arg, rr_arg;
6200   tree ll_inner, lr_inner, rl_inner, rr_inner;
6201   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6202   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6203   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6204   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6205   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6206   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6207   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6208   scalar_int_mode lnmode, rnmode;
6209   tree ll_mask, lr_mask, rl_mask, rr_mask;
6210   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6211   tree l_const, r_const;
6212   tree lntype, rntype, result;
6213   HOST_WIDE_INT first_bit, end_bit;
6214   int volatilep;
6215 
6216   /* Start by getting the comparison codes.  Fail if anything is volatile.
6217      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6218      it were surrounded with a NE_EXPR.  */
6219 
6220   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6221     return 0;
6222 
6223   lcode = TREE_CODE (lhs);
6224   rcode = TREE_CODE (rhs);
6225 
6226   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6227     {
6228       lhs = build2 (NE_EXPR, truth_type, lhs,
6229 		    build_int_cst (TREE_TYPE (lhs), 0));
6230       lcode = NE_EXPR;
6231     }
6232 
6233   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6234     {
6235       rhs = build2 (NE_EXPR, truth_type, rhs,
6236 		    build_int_cst (TREE_TYPE (rhs), 0));
6237       rcode = NE_EXPR;
6238     }
6239 
6240   if (TREE_CODE_CLASS (lcode) != tcc_comparison
6241       || TREE_CODE_CLASS (rcode) != tcc_comparison)
6242     return 0;
6243 
6244   ll_arg = TREE_OPERAND (lhs, 0);
6245   lr_arg = TREE_OPERAND (lhs, 1);
6246   rl_arg = TREE_OPERAND (rhs, 0);
6247   rr_arg = TREE_OPERAND (rhs, 1);
6248 
6249   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
6250   if (simple_operand_p (ll_arg)
6251       && simple_operand_p (lr_arg))
6252     {
6253       if (operand_equal_p (ll_arg, rl_arg, 0)
6254           && operand_equal_p (lr_arg, rr_arg, 0))
6255 	{
6256           result = combine_comparisons (loc, code, lcode, rcode,
6257 					truth_type, ll_arg, lr_arg);
6258 	  if (result)
6259 	    return result;
6260 	}
6261       else if (operand_equal_p (ll_arg, rr_arg, 0)
6262                && operand_equal_p (lr_arg, rl_arg, 0))
6263 	{
6264           result = combine_comparisons (loc, code, lcode,
6265 					swap_tree_comparison (rcode),
6266 					truth_type, ll_arg, lr_arg);
6267 	  if (result)
6268 	    return result;
6269 	}
6270     }
6271 
6272   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6273 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6274 
6275   /* If the RHS can be evaluated unconditionally and its operands are
6276      simple, it wins to evaluate the RHS unconditionally on machines
6277      with expensive branches.  In this case, this isn't a comparison
6278      that can be merged.  */
6279 
6280   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6281 		   false) >= 2
6282       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6283       && simple_operand_p (rl_arg)
6284       && simple_operand_p (rr_arg))
6285     {
6286       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
6287       if (code == TRUTH_OR_EXPR
6288 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
6289 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
6290 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6291 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6292 	return build2_loc (loc, NE_EXPR, truth_type,
6293 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6294 				   ll_arg, rl_arg),
6295 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6296 
6297       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
6298       if (code == TRUTH_AND_EXPR
6299 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
6300 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
6301 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6302 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6303 	return build2_loc (loc, EQ_EXPR, truth_type,
6304 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6305 				   ll_arg, rl_arg),
6306 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6307     }
6308 
6309   /* See if the comparisons can be merged.  Then get all the parameters for
6310      each side.  */
6311 
6312   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6313       || (rcode != EQ_EXPR && rcode != NE_EXPR))
6314     return 0;
6315 
6316   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6317   volatilep = 0;
6318   ll_inner = decode_field_reference (loc, &ll_arg,
6319 				     &ll_bitsize, &ll_bitpos, &ll_mode,
6320 				     &ll_unsignedp, &ll_reversep, &volatilep,
6321 				     &ll_mask, &ll_and_mask);
6322   lr_inner = decode_field_reference (loc, &lr_arg,
6323 				     &lr_bitsize, &lr_bitpos, &lr_mode,
6324 				     &lr_unsignedp, &lr_reversep, &volatilep,
6325 				     &lr_mask, &lr_and_mask);
6326   rl_inner = decode_field_reference (loc, &rl_arg,
6327 				     &rl_bitsize, &rl_bitpos, &rl_mode,
6328 				     &rl_unsignedp, &rl_reversep, &volatilep,
6329 				     &rl_mask, &rl_and_mask);
6330   rr_inner = decode_field_reference (loc, &rr_arg,
6331 				     &rr_bitsize, &rr_bitpos, &rr_mode,
6332 				     &rr_unsignedp, &rr_reversep, &volatilep,
6333 				     &rr_mask, &rr_and_mask);
6334 
6335   /* The inner operation on the lhs of each comparison must be the
6336      same if we are to be able to do anything.
6337      Then see if we have constants.  If not, the same must be true for
6338      the rhs's.  */
6339   if (volatilep
6340       || ll_reversep != rl_reversep
6341       || ll_inner == 0 || rl_inner == 0
6342       || ! operand_equal_p (ll_inner, rl_inner, 0))
6343     return 0;
6344 
6345   if (TREE_CODE (lr_arg) == INTEGER_CST
6346       && TREE_CODE (rr_arg) == INTEGER_CST)
6347     {
6348       l_const = lr_arg, r_const = rr_arg;
6349       lr_reversep = ll_reversep;
6350     }
6351   else if (lr_reversep != rr_reversep
6352 	   || lr_inner == 0 || rr_inner == 0
6353 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
6354     return 0;
6355   else
6356     l_const = r_const = 0;
6357 
6358   /* If either comparison code is not correct for our logical operation,
6359      fail.  However, we can convert a one-bit comparison against zero into
6360      the opposite comparison against that bit being set in the field.  */
6361 
6362   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6363   if (lcode != wanted_code)
6364     {
6365       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6366 	{
6367 	  /* Make the left operand unsigned, since we are only interested
6368 	     in the value of one bit.  Otherwise we are doing the wrong
6369 	     thing below.  */
6370 	  ll_unsignedp = 1;
6371 	  l_const = ll_mask;
6372 	}
6373       else
6374 	return 0;
6375     }
6376 
6377   /* This is analogous to the code for l_const above.  */
6378   if (rcode != wanted_code)
6379     {
6380       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6381 	{
6382 	  rl_unsignedp = 1;
6383 	  r_const = rl_mask;
6384 	}
6385       else
6386 	return 0;
6387     }
6388 
6389   /* See if we can find a mode that contains both fields being compared on
6390      the left.  If we can't, fail.  Otherwise, update all constants and masks
6391      to be relative to a field of that size.  */
6392   first_bit = MIN (ll_bitpos, rl_bitpos);
6393   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6394   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6395 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6396 		      volatilep, &lnmode))
6397     return 0;
6398 
6399   lnbitsize = GET_MODE_BITSIZE (lnmode);
6400   lnbitpos = first_bit & ~ (lnbitsize - 1);
6401   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6402   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6403 
6404   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6405     {
6406       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6407       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6408     }
6409 
6410   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6411 			 size_int (xll_bitpos));
6412   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6413 			 size_int (xrl_bitpos));
6414 
6415   if (l_const)
6416     {
6417       l_const = fold_convert_loc (loc, lntype, l_const);
6418       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6419       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6420       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6421 					fold_build1_loc (loc, BIT_NOT_EXPR,
6422 						     lntype, ll_mask))))
6423 	{
6424 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6425 
6426 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6427 	}
6428     }
6429   if (r_const)
6430     {
6431       r_const = fold_convert_loc (loc, lntype, r_const);
6432       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6433       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6434       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6435 					fold_build1_loc (loc, BIT_NOT_EXPR,
6436 						     lntype, rl_mask))))
6437 	{
6438 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6439 
6440 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6441 	}
6442     }
6443 
6444   /* If the right sides are not constant, do the same for them.  Also,
6445      disallow this optimization if a size, signedness or storage order
6446      mismatch occurs between the left and right sides.  */
6447   if (l_const == 0)
6448     {
6449       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6450 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6451 	  || ll_reversep != lr_reversep
6452 	  /* Make sure the two fields on the right
6453 	     correspond to the left without being swapped.  */
6454 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6455 	return 0;
6456 
6457       first_bit = MIN (lr_bitpos, rr_bitpos);
6458       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6459       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6460 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6461 			  volatilep, &rnmode))
6462 	return 0;
6463 
6464       rnbitsize = GET_MODE_BITSIZE (rnmode);
6465       rnbitpos = first_bit & ~ (rnbitsize - 1);
6466       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6467       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6468 
6469       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6470 	{
6471 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6472 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6473 	}
6474 
6475       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6476 							    rntype, lr_mask),
6477 			     size_int (xlr_bitpos));
6478       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6479 							    rntype, rr_mask),
6480 			     size_int (xrr_bitpos));
6481 
6482       /* Make a mask that corresponds to both fields being compared.
6483 	 Do this for both items being compared.  If the operands are the
6484 	 same size and the bits being compared are in the same position
6485 	 then we can do this by masking both and comparing the masked
6486 	 results.  */
6487       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6488       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6489       if (lnbitsize == rnbitsize
6490 	  && xll_bitpos == xlr_bitpos
6491 	  && lnbitpos >= 0
6492 	  && rnbitpos >= 0)
6493 	{
6494 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6495 				    lntype, lnbitsize, lnbitpos,
6496 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6497 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6498 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6499 
6500 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6501 				    rntype, rnbitsize, rnbitpos,
6502 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6503 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6504 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6505 
6506 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6507 	}
6508 
6509       /* There is still another way we can do something:  If both pairs of
6510 	 fields being compared are adjacent, we may be able to make a wider
6511 	 field containing them both.
6512 
6513 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6514 	 the mask must be shifted to account for the shift done by
6515 	 make_bit_field_ref.  */
6516       if (((ll_bitsize + ll_bitpos == rl_bitpos
6517 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6518 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6519 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6520 	  && ll_bitpos >= 0
6521 	  && rl_bitpos >= 0
6522 	  && lr_bitpos >= 0
6523 	  && rr_bitpos >= 0)
6524 	{
6525 	  tree type;
6526 
6527 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6528 				    ll_bitsize + rl_bitsize,
6529 				    MIN (ll_bitpos, rl_bitpos),
6530 				    ll_unsignedp, ll_reversep);
6531 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6532 				    lr_bitsize + rr_bitsize,
6533 				    MIN (lr_bitpos, rr_bitpos),
6534 				    lr_unsignedp, lr_reversep);
6535 
6536 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6537 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6538 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6539 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6540 
6541 	  /* Convert to the smaller type before masking out unwanted bits.  */
6542 	  type = lntype;
6543 	  if (lntype != rntype)
6544 	    {
6545 	      if (lnbitsize > rnbitsize)
6546 		{
6547 		  lhs = fold_convert_loc (loc, rntype, lhs);
6548 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6549 		  type = rntype;
6550 		}
6551 	      else if (lnbitsize < rnbitsize)
6552 		{
6553 		  rhs = fold_convert_loc (loc, lntype, rhs);
6554 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6555 		  type = lntype;
6556 		}
6557 	    }
6558 
6559 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6560 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6561 
6562 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6563 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6564 
6565 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6566 	}
6567 
6568       return 0;
6569     }
6570 
6571   /* Handle the case of comparisons with constants.  If there is something in
6572      common between the masks, those bits of the constants must be the same.
6573      If not, the condition is always false.  Test for this to avoid generating
6574      incorrect code below.  */
6575   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6576   if (! integer_zerop (result)
6577       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6578 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6579     {
6580       if (wanted_code == NE_EXPR)
6581 	{
6582 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6583 	  return constant_boolean_node (true, truth_type);
6584 	}
6585       else
6586 	{
6587 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6588 	  return constant_boolean_node (false, truth_type);
6589 	}
6590     }
6591 
6592   if (lnbitpos < 0)
6593     return 0;
6594 
6595   /* Construct the expression we will return.  First get the component
6596      reference we will make.  Unless the mask is all ones the width of
6597      that field, perform the mask operation.  Then compare with the
6598      merged constant.  */
6599   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6600 			       lntype, lnbitsize, lnbitpos,
6601 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6602 
6603   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6604   if (! all_ones_mask_p (ll_mask, lnbitsize))
6605     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6606 
6607   return build2_loc (loc, wanted_code, truth_type, result,
6608 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6609 }
6610 
6611 /* T is an integer expression that is being multiplied, divided, or taken a
6612    modulus (CODE says which and what kind of divide or modulus) by a
6613    constant C.  See if we can eliminate that operation by folding it with
6614    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6615    should be used for the computation if wider than our type.
6616 
6617    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6618    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6619    expression would not overflow or that overflow is undefined for the type
6620    in the language in question.
6621 
6622    If we return a non-null expression, it is an equivalent form of the
6623    original computation, but need not be in the original type.
6624 
6625    We set *STRICT_OVERFLOW_P to true if the return value depends on
6626    signed overflow being undefined.  Otherwise we do not change
6627    *STRICT_OVERFLOW_P.  */
6628 
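/* Hedged numeric sketch of the overflow caveat above, assuming 32-bit
   int with wrapping semantics: for X == 0x10000000, X * 8 wraps to
   0x80000000 (INT_MIN), so (X * 8) / 4 yields -536870912 while X * 2
   yields +536870912.  The rewrite is therefore applied only when the
   original expression cannot overflow or when signed overflow is
   undefined in the language in question.  */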
6629 static tree
6630 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6631 		bool *strict_overflow_p)
6632 {
6633   /* To avoid exponential search depth, refuse to allow recursion past
6634      three levels.  Beyond that (1) it's highly unlikely that we'll find
6635      something interesting and (2) we've probably processed it before
6636      when we built the inner expression.  */
6637 
6638   static int depth;
6639   tree ret;
6640 
6641   if (depth > 3)
6642     return NULL;
6643 
6644   depth++;
6645   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6646   depth--;
6647 
6648   return ret;
6649 }
6650 
6651 static tree
6652 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6653 		  bool *strict_overflow_p)
6654 {
6655   tree type = TREE_TYPE (t);
6656   enum tree_code tcode = TREE_CODE (t);
6657   tree ctype = (wide_type != 0
6658 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6659 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6660 		? wide_type : type);
6661   tree t1, t2;
6662   int same_p = tcode == code;
6663   tree op0 = NULL_TREE, op1 = NULL_TREE;
6664   bool sub_strict_overflow_p;
6665 
6666   /* Don't deal with constants of zero here; they confuse the code below.  */
6667   if (integer_zerop (c))
6668     return NULL_TREE;
6669 
6670   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6671     op0 = TREE_OPERAND (t, 0);
6672 
6673   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6674     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6675 
6676   /* Note that we need not handle conditional operations here since fold
6677      already handles those cases.  So just do arithmetic here.  */
6678   switch (tcode)
6679     {
6680     case INTEGER_CST:
6681       /* For a constant, we can always simplify if we are a multiply
6682 	 or (for divide and modulus) if it is a multiple of our constant.  */
6683       if (code == MULT_EXPR
6684 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6685 				TYPE_SIGN (type)))
6686 	{
6687 	  tree tem = const_binop (code, fold_convert (ctype, t),
6688 				  fold_convert (ctype, c));
6689 	  /* If the multiplication overflowed, we lost information on it.
6690 	     See PR68142 and PR69845.  */
6691 	  if (TREE_OVERFLOW (tem))
6692 	    return NULL_TREE;
6693 	  return tem;
6694 	}
6695       break;
6696 
6697     CASE_CONVERT: case NON_LVALUE_EXPR:
6698       if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6699 	break;
6700       /* If op0 is an expression ...  */
6701       if ((COMPARISON_CLASS_P (op0)
6702 	   || UNARY_CLASS_P (op0)
6703 	   || BINARY_CLASS_P (op0)
6704 	   || VL_EXP_CLASS_P (op0)
6705 	   || EXPRESSION_CLASS_P (op0))
6706 	  /* ... and has wrapping overflow, and its type is smaller
6707 	     than ctype, then we cannot pass through as widening.  */
6708 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6709 	       && (TYPE_PRECISION (ctype)
6710 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6711 	      /* ... or this is a truncation (t is narrower than op0),
6712 		 then we cannot pass through this narrowing.  */
6713 	      || (TYPE_PRECISION (type)
6714 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6715 	      /* ... or signedness changes for division or modulus,
6716 		 then we cannot pass through this conversion.  */
6717 	      || (code != MULT_EXPR
6718 		  && (TYPE_UNSIGNED (ctype)
6719 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6720 	      /* ... or has undefined overflow while the converted to
6721 		 type has not, we cannot do the operation in the inner type
6722 		 as that would introduce undefined overflow.  */
6723 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6724 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6725 	break;
6726 
6727       /* Pass the constant down and see if we can make a simplification.  If
6728 	 we can, replace this expression with the inner simplification for
6729 	 possible later conversion to our or some other type.  */
6730       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6731 	  && TREE_CODE (t2) == INTEGER_CST
6732 	  && !TREE_OVERFLOW (t2)
6733 	  && (t1 = extract_muldiv (op0, t2, code,
6734 				   code == MULT_EXPR ? ctype : NULL_TREE,
6735 				   strict_overflow_p)) != 0)
6736 	return t1;
6737       break;
6738 
6739     case ABS_EXPR:
6740       /* If widening the type changes it from signed to unsigned, then we
6741          must avoid building ABS_EXPR itself as unsigned.  */
6742       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6743         {
6744           tree cstype = (*signed_type_for) (ctype);
6745           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6746 	      != 0)
6747             {
6748               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6749               return fold_convert (ctype, t1);
6750             }
6751           break;
6752         }
6753       /* If the constant is negative, we cannot simplify this.  */
6754       if (tree_int_cst_sgn (c) == -1)
6755         break;
6756       /* FALLTHROUGH */
6757     case NEGATE_EXPR:
6758       /* For division and modulus, type can't be unsigned, as e.g.
6759 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6760 	 For signed types, even with wrapping overflow, this is fine.  */
6761       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6762 	break;
6763       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6764 	  != 0)
6765 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6766       break;
6767 
6768     case MIN_EXPR:  case MAX_EXPR:
6769       /* If widening the type changes the signedness, then we can't perform
6770 	 this optimization as that changes the result.  */
6771       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6772 	break;
6773 
6774       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6775       sub_strict_overflow_p = false;
6776       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6777 				&sub_strict_overflow_p)) != 0
6778 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6779 				   &sub_strict_overflow_p)) != 0)
6780 	{
6781 	  if (tree_int_cst_sgn (c) < 0)
6782 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6783 	  if (sub_strict_overflow_p)
6784 	    *strict_overflow_p = true;
6785 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6786 			      fold_convert (ctype, t2));
6787 	}
6788       break;
6789 
6790     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6791       /* If the second operand is constant, this is a multiplication
6792 	 or floor division, by a power of two, so we can treat it that
6793 	 way unless the multiplier or divisor overflows.  Signed
6794 	 left-shift overflow is implementation-defined rather than
6795 	 undefined in C90, so do not convert signed left shift into
6796 	 multiplication.  */
6797       if (TREE_CODE (op1) == INTEGER_CST
6798 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6799 	  /* const_binop may not detect overflow correctly,
6800 	     so check for it explicitly here.  */
6801 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6802 			wi::to_wide (op1))
6803 	  && (t1 = fold_convert (ctype,
6804 				 const_binop (LSHIFT_EXPR, size_one_node,
6805 					      op1))) != 0
6806 	  && !TREE_OVERFLOW (t1))
6807 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6808 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6809 				       ctype,
6810 				       fold_convert (ctype, op0),
6811 				       t1),
6812 			       c, code, wide_type, strict_overflow_p);
6813       break;
6814 
6815     case PLUS_EXPR:  case MINUS_EXPR:
6816       /* See if we can eliminate the operation on both sides.  If we can, we
6817 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6818 	 cases where we can do anything are if the second operand is a
6819 	 constant.  */
6820       sub_strict_overflow_p = false;
6821       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6822       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6823       if (t1 != 0 && t2 != 0
6824 	  && TYPE_OVERFLOW_WRAPS (ctype)
6825 	  && (code == MULT_EXPR
6826 	      /* If not multiplication, we can only do this if both operands
6827 		 are divisible by c.  */
6828 	      || (multiple_of_p (ctype, op0, c)
6829 	          && multiple_of_p (ctype, op1, c))))
6830 	{
6831 	  if (sub_strict_overflow_p)
6832 	    *strict_overflow_p = true;
6833 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6834 			      fold_convert (ctype, t2));
6835 	}
6836 
6837       /* If this was a subtraction, negate OP1 and set it to be an addition.
6838 	 This simplifies the logic below.  */
6839       if (tcode == MINUS_EXPR)
6840 	{
6841 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6842 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6843 	  if (TREE_CODE (op0) == INTEGER_CST)
6844 	    {
6845 	      std::swap (op0, op1);
6846 	      std::swap (t1, t2);
6847 	    }
6848 	}
6849 
6850       if (TREE_CODE (op1) != INTEGER_CST)
6851 	break;
6852 
6853       /* If either OP1 or C are negative, this optimization is not safe for
6854 	 some of the division and remainder types while for others we need
6855 	 to change the code.  */
6856       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6857 	{
6858 	  if (code == CEIL_DIV_EXPR)
6859 	    code = FLOOR_DIV_EXPR;
6860 	  else if (code == FLOOR_DIV_EXPR)
6861 	    code = CEIL_DIV_EXPR;
6862 	  else if (code != MULT_EXPR
6863 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6864 	    break;
6865 	}
6866 
6867       /* If it's a multiply or a division/modulus operation of a multiple
6868          of our constant, do the operation and verify it doesn't overflow.  */
6869       if (code == MULT_EXPR
6870 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 				TYPE_SIGN (type)))
6872 	{
6873 	  op1 = const_binop (code, fold_convert (ctype, op1),
6874 			     fold_convert (ctype, c));
6875 	  /* We allow the constant to overflow with wrapping semantics.  */
6876 	  if (op1 == 0
6877 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6878 	    break;
6879 	}
6880       else
6881 	break;
6882 
6883       /* If we have an unsigned type, we cannot widen the operation since it
6884 	 will change the result if the original computation overflowed.  */
6885       if (TYPE_UNSIGNED (ctype) && ctype != type)
6886 	break;
6887 
6888       /* The last case is if we are a multiply.  In that case, we can
6889 	 apply the distributive law to commute the multiply and addition
6890 	 if the multiplication of the constants doesn't overflow
6891 	 and overflow is defined.  With undefined overflow
6892 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6893 	 But fold_plusminus_mult_expr would factor back any power-of-two
6894 	 value so do not distribute in the first place in this case.  */
6895       if (code == MULT_EXPR
6896 	  && TYPE_OVERFLOW_WRAPS (ctype)
6897 	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6898 	return fold_build2 (tcode, ctype,
6899 			    fold_build2 (code, ctype,
6900 					 fold_convert (ctype, op0),
6901 					 fold_convert (ctype, c)),
6902 			    op1);
6903 
6904       break;
6905 
6906     case MULT_EXPR:
6907       /* We have a special case here if we are doing something like
6908 	 (C * 8) % 4 since we know that's zero.  */
6909       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6910 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6911 	  /* If the multiplication can overflow we cannot optimize this.  */
6912 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6913 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6914 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6915 				TYPE_SIGN (type)))
6916 	{
6917 	  *strict_overflow_p = true;
6918 	  return omit_one_operand (type, integer_zero_node, op0);
6919 	}
6920 
6921       /* ... fall through ...  */
6922 
6923     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6924     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6925       /* If we can extract our operation from the LHS, do so and return a
6926 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6927 	 do something only if the second operand is a constant.  */
6928       if (same_p
6929 	  && TYPE_OVERFLOW_WRAPS (ctype)
6930 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6931 				   strict_overflow_p)) != 0)
6932 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6933 			    fold_convert (ctype, op1));
6934       else if (tcode == MULT_EXPR && code == MULT_EXPR
6935 	       && TYPE_OVERFLOW_WRAPS (ctype)
6936 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6937 					strict_overflow_p)) != 0)
6938 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 			    fold_convert (ctype, t1));
6940       else if (TREE_CODE (op1) != INTEGER_CST)
6941 	return 0;
6942 
6943       /* If these are the same operation types, we can associate them
6944 	 assuming no overflow.  */
6945       if (tcode == code)
6946 	{
6947 	  bool overflow_p = false;
6948 	  wi::overflow_type overflow_mul;
6949 	  signop sign = TYPE_SIGN (ctype);
6950 	  unsigned prec = TYPE_PRECISION (ctype);
6951 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6952 				  wi::to_wide (c, prec),
6953 				  sign, &overflow_mul);
6954 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6955 	  if (overflow_mul
6956 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6957 	    overflow_p = true;
6958 	  if (!overflow_p)
6959 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6960 				wide_int_to_tree (ctype, mul));
6961 	}
6962 
6963       /* If these operations "cancel" each other, we have the main
6964 	 optimizations of this pass, which occur when either constant is a
6965 	 multiple of the other, in which case we replace this with either an
6966	 operation of CODE or TCODE.
6967 
6968 	 If we have an unsigned type, we cannot do this since it will change
6969 	 the result if the original computation overflowed.  */
6970       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6971 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6972 	      || (tcode == MULT_EXPR
6973 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6974 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6975 		  && code != MULT_EXPR)))
6976 	{
6977 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6978 				 TYPE_SIGN (type)))
6979 	    {
6980 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6981 		*strict_overflow_p = true;
6982 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6983 				  fold_convert (ctype,
6984 						const_binop (TRUNC_DIV_EXPR,
6985 							     op1, c)));
6986 	    }
6987 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6988 				      TYPE_SIGN (type)))
6989 	    {
6990 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6991 		*strict_overflow_p = true;
6992 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6993 				  fold_convert (ctype,
6994 						const_binop (TRUNC_DIV_EXPR,
6995 							     c, op1)));
6996 	    }
6997 	}
6998       break;
6999 
7000     default:
7001       break;
7002     }
7003 
7004   return 0;
7005 }
7006 
7007 /* Return a node which has the indicated constant VALUE (either 0 or
7008    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7009    and is of the indicated TYPE.  */
7010 
7011 tree
7012 constant_boolean_node (bool value, tree type)
7013 {
7014   if (type == integer_type_node)
7015     return value ? integer_one_node : integer_zero_node;
7016   else if (type == boolean_type_node)
7017     return value ? boolean_true_node : boolean_false_node;
7018   else if (TREE_CODE (type) == VECTOR_TYPE)
7019     return build_vector_from_val (type,
7020 				  build_int_cst (TREE_TYPE (type),
7021 						 value ? -1 : 0));
7022   else
7023     return fold_convert (type, value ? integer_one_node : integer_zero_node);
7024 }
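/* Usage sketch (hedged): constant_boolean_node (true, boolean_type_node)
   yields boolean_true_node, while for a vector type it builds a vector
   with all elements -1 (all bits set), matching the "true" encoding
   produced by vector comparisons.  */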
7025 
7026 
7027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7028    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
7029    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7030    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
7031    COND is the first argument to CODE; otherwise (as in the example
7032    given here), it is the second argument.  TYPE is the type of the
7033    original expression.  Return NULL_TREE if no simplification is
7034    possible.  */
7035 
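/* Hedged source-level sketch of the transformation described above:

     4 + (b ? 1 : 3)    becomes    b ? 5 : 7
     a + (x < y)        becomes    (x < y) ? (a + 1) : (a + 0)

   The result is kept only when at least one branch actually simplifies,
   so that pushing the operation inside the conditional is a win.  */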
7036 static tree
7037 fold_binary_op_with_conditional_arg (location_t loc,
7038 				     enum tree_code code,
7039 				     tree type, tree op0, tree op1,
7040 				     tree cond, tree arg, int cond_first_p)
7041 {
7042   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7043   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7044   tree test, true_value, false_value;
7045   tree lhs = NULL_TREE;
7046   tree rhs = NULL_TREE;
7047   enum tree_code cond_code = COND_EXPR;
7048 
7049   /* Do not move possibly trapping operations into the conditional as this
7050      pessimizes code and causes gimplification issues when applied late.  */
7051   if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7052 			      ANY_INTEGRAL_TYPE_P (type)
7053 			      && TYPE_OVERFLOW_TRAPS (type), op1))
7054     return NULL_TREE;
7055 
7056   if (TREE_CODE (cond) == COND_EXPR
7057       || TREE_CODE (cond) == VEC_COND_EXPR)
7058     {
7059       test = TREE_OPERAND (cond, 0);
7060       true_value = TREE_OPERAND (cond, 1);
7061       false_value = TREE_OPERAND (cond, 2);
7062       /* If this operand throws an exception, then it does not make
7063 	 sense to try to perform a logical or arithmetic operation
7064 	 involving it.  */
7065       if (VOID_TYPE_P (TREE_TYPE (true_value)))
7066 	lhs = true_value;
7067       if (VOID_TYPE_P (TREE_TYPE (false_value)))
7068 	rhs = false_value;
7069     }
7070   else if (!(TREE_CODE (type) != VECTOR_TYPE
7071 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7072     {
7073       tree testtype = TREE_TYPE (cond);
7074       test = cond;
7075       true_value = constant_boolean_node (true, testtype);
7076       false_value = constant_boolean_node (false, testtype);
7077     }
7078   else
7079     /* Detect the case of mixing vector and scalar types - bail out.  */
7080     return NULL_TREE;
7081 
7082   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7083     cond_code = VEC_COND_EXPR;
7084 
7085   /* This transformation is only worthwhile if we don't have to wrap ARG
7086      in a SAVE_EXPR and the operation can be simplified without recursing
7087      on at least one of the branches once its pushed inside the COND_EXPR.  */
7088   if (!TREE_CONSTANT (arg)
7089       && (TREE_SIDE_EFFECTS (arg)
7090 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7091 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7092     return NULL_TREE;
7093 
7094   arg = fold_convert_loc (loc, arg_type, arg);
7095   if (lhs == 0)
7096     {
7097       true_value = fold_convert_loc (loc, cond_type, true_value);
7098       if (cond_first_p)
7099 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
7100       else
7101 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
7102     }
7103   if (rhs == 0)
7104     {
7105       false_value = fold_convert_loc (loc, cond_type, false_value);
7106       if (cond_first_p)
7107 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
7108       else
7109 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
7110     }
7111 
7112   /* Check that we have simplified at least one of the branches.  */
7113   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7114     return NULL_TREE;
7115 
7116   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7117 }
7118 
7119 
7120 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7121 
7122    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7123    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
7124    ADDEND is the same as X.
7125 
7126    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7127    and finite.  The problematic cases are when X is zero, and its mode
7128    has signed zeros.  In the case of rounding towards -infinity,
7129    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
7130    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
7131 
7132 bool
7133 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7134 {
7135   if (!real_zerop (addend))
7136     return false;
7137 
7138   /* Don't allow the fold with -fsignaling-nans.  */
7139   if (HONOR_SNANS (type))
7140     return false;
7141 
7142   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
7143   if (!HONOR_SIGNED_ZEROS (type))
7144     return true;
7145 
7146   /* There is no case that is safe for all rounding modes.  */
7147   if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7148     return false;
7149 
7150   /* In a vector or complex, we would need to check the sign of all zeros.  */
7151   if (TREE_CODE (addend) == VECTOR_CST)
7152     addend = uniform_vector_p (addend);
7153   if (!addend || TREE_CODE (addend) != REAL_CST)
7154     return false;
7155 
7156   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
7157   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7158     negate = !negate;
7159 
7160   /* The mode has signed zeros, and we have to honor their sign.
7161      In this situation, there is only one case we can return true for.
7162      X - 0 is the same as X with default rounding.  */
7163   return negate;
7164 }
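/* Hedged IEEE-754 sketch of the signed-zero hazard above: with
   X == -0.0 and default rounding, X + 0.0 evaluates to +0.0, not X,
   so the addition cannot be dropped when signed zeros are honored.
   X - 0.0 does evaluate to -0.0, which is why returning NEGATE at the
   end is the one safe case.  */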
7165 
7166 /* Subroutine of match.pd that optimizes comparisons of a division by
7167    a nonzero integer constant against an integer constant, i.e.
7168    X/C1 op C2.
7169 
7170    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7171    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
7172 
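/* Hedged worked example for the unsigned case: folding X / 4 == 3 with
   unsigned X gives prod == 12 and tmp == 3, so the comparison becomes
   the range test 12 <= X && X <= 15, i.e. *LO == 12 and *HI == 15.  */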
7173 enum tree_code
7174 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7175 		  tree *hi, bool *neg_overflow)
7176 {
7177   tree prod, tmp, type = TREE_TYPE (c1);
7178   signop sign = TYPE_SIGN (type);
7179   wi::overflow_type overflow;
7180 
7181   /* We have to do this the hard way to detect unsigned overflow.
7182      prod = int_const_binop (MULT_EXPR, c1, c2);  */
7183   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7184   prod = force_fit_type (type, val, -1, overflow);
7185   *neg_overflow = false;
7186 
7187   if (sign == UNSIGNED)
7188     {
7189       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7190       *lo = prod;
7191 
7192       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
7193       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7194       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7195     }
7196   else if (tree_int_cst_sgn (c1) >= 0)
7197     {
7198       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7199       switch (tree_int_cst_sgn (c2))
7200 	{
7201 	case -1:
7202 	  *neg_overflow = true;
7203 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7204 	  *hi = prod;
7205 	  break;
7206 
7207 	case 0:
7208 	  *lo = fold_negate_const (tmp, type);
7209 	  *hi = tmp;
7210 	  break;
7211 
7212 	case 1:
7213 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7214 	  *lo = prod;
7215 	  break;
7216 
7217 	default:
7218 	  gcc_unreachable ();
7219 	}
7220     }
7221   else
7222     {
7223       /* A negative divisor reverses the relational operators.  */
7224       code = swap_tree_comparison (code);
7225 
7226       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7227       switch (tree_int_cst_sgn (c2))
7228 	{
7229 	case -1:
7230 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7231 	  *lo = prod;
7232 	  break;
7233 
7234 	case 0:
7235 	  *hi = fold_negate_const (tmp, type);
7236 	  *lo = tmp;
7237 	  break;
7238 
7239 	case 1:
7240 	  *neg_overflow = true;
7241 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7242 	  *hi = prod;
7243 	  break;
7244 
7245 	default:
7246 	  gcc_unreachable ();
7247 	}
7248     }
7249 
7250   if (code != EQ_EXPR && code != NE_EXPR)
7251     return code;
7252 
7253   if (TREE_OVERFLOW (*lo)
7254       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7255     *lo = NULL_TREE;
7256   if (TREE_OVERFLOW (*hi)
7257       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7258     *hi = NULL_TREE;
7259 
7260   return code;
7261 }
7262 
7263 
7264 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7265    equality/inequality test, then return a simplified form of the test
7266    using a sign testing.  Otherwise return NULL.  TYPE is the desired
7267    using a sign test.  Otherwise return NULL.  TYPE is the desired
7268 
7269 static tree
7270 fold_single_bit_test_into_sign_test (location_t loc,
7271 				     enum tree_code code, tree arg0, tree arg1,
7272 				     tree result_type)
7273 {
7274   /* If this is testing a single bit, we can optimize the test.  */
7275   if ((code == NE_EXPR || code == EQ_EXPR)
7276       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7277       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7278     {
7279       /* If we have (A & C) != 0 where C is the sign bit of A, convert
7280 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
7281       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7282 
7283       if (arg00 != NULL_TREE
7284 	  /* This is only a win if casting to a signed type is cheap,
7285 	     i.e. when arg00's type is not a partial mode.  */
7286 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
7287 	{
7288 	  tree stype = signed_type_for (TREE_TYPE (arg00));
7289 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7290 			      result_type,
7291 			      fold_convert_loc (loc, stype, arg00),
7292 			      build_int_cst (stype, 0));
7293 	}
7294     }
7295 
7296   return NULL_TREE;
7297 }
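/* Hedged sketch, assuming 32-bit int: (x & 0x80000000) != 0 tests
   exactly the sign bit of x, so it folds here to (int) x < 0, and
   (x & 0x80000000) == 0 folds to (int) x >= 0.  */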
7298 
7299 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7300    equality/inequality test, then return a simplified form of
7301    the test using shifts and logical operations.  Otherwise return
7302    NULL.  TYPE is the desired result type.  */
7303 
7304 tree
7305 fold_single_bit_test (location_t loc, enum tree_code code,
7306 		      tree arg0, tree arg1, tree result_type)
7307 {
7308   /* If this is testing a single bit, we can optimize the test.  */
7309   if ((code == NE_EXPR || code == EQ_EXPR)
7310       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7311       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7312     {
7313       tree inner = TREE_OPERAND (arg0, 0);
7314       tree type = TREE_TYPE (arg0);
7315       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7316       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7317       int ops_unsigned;
7318       tree signed_type, unsigned_type, intermediate_type;
7319       tree tem, one;
7320 
7321       /* First, see if we can fold the single bit test into a sign-bit
7322 	 test.  */
7323       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7324 						 result_type);
7325       if (tem)
7326 	return tem;
7327 
7328       /* Otherwise we have (A & C) != 0 where C is a single bit,
7329 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
7330 	 Similarly for (A & C) == 0.  */
7331 
7332       /* If INNER is a right shift of a constant and it plus BITNUM does
7333 	 not overflow, adjust BITNUM and INNER.  */
7334       if (TREE_CODE (inner) == RSHIFT_EXPR
7335 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7336 	  && bitnum < TYPE_PRECISION (type)
7337 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7338 			TYPE_PRECISION (type) - bitnum))
7339 	{
7340 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7341 	  inner = TREE_OPERAND (inner, 0);
7342 	}
7343 
7344       /* If we are going to be able to omit the AND below, we must do our
7345 	 operations as unsigned.  If we must use the AND, we have a choice.
7346 	 Normally unsigned is faster, but for some machines signed is.  */
7347       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7348 		      && !flag_syntax_only) ? 0 : 1;
7349 
7350       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7351       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7352       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7353       inner = fold_convert_loc (loc, intermediate_type, inner);
7354 
7355       if (bitnum != 0)
7356 	inner = build2 (RSHIFT_EXPR, intermediate_type,
7357 			inner, size_int (bitnum));
7358 
7359       one = build_int_cst (intermediate_type, 1);
7360 
7361       if (code == EQ_EXPR)
7362 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7363 
7364       /* Put the AND last so it can combine with more things.  */
7365       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7366 
7367       /* Make sure to return the proper type.  */
7368       inner = fold_convert_loc (loc, result_type, inner);
7369 
7370       return inner;
7371     }
7372   return NULL_TREE;
7373 }
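/* Hedged sketch of the shift form built above, for C == 8 (bit 3):

     (x & 8) != 0    becomes    (x >> 3) & 1
     (x & 8) == 0    becomes    ((x >> 3) ^ 1) & 1

   with the AND emitted last so later folds can combine with it.  */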
7374 
7375 /* Test whether it is preferable to swap two operands, ARG0 and
7376    ARG1, for example because ARG0 is an integer constant and ARG1
7377    isn't.  */
7378 
7379 bool
7380 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7381 {
7382   if (CONSTANT_CLASS_P (arg1))
7383     return 0;
7384   if (CONSTANT_CLASS_P (arg0))
7385     return 1;
7386 
7387   STRIP_NOPS (arg0);
7388   STRIP_NOPS (arg1);
7389 
7390   if (TREE_CONSTANT (arg1))
7391     return 0;
7392   if (TREE_CONSTANT (arg0))
7393     return 1;
7394 
7395   /* It is preferable to swap two SSA_NAME to ensure a canonical form
7396      for commutative and comparison operators.  Ensuring a canonical
7397      form allows the optimizers to find additional redundancies without
7398      having to explicitly check for both orderings.  */
7399   if (TREE_CODE (arg0) == SSA_NAME
7400       && TREE_CODE (arg1) == SSA_NAME
7401       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7402     return 1;
7403 
7404   /* Put SSA_NAMEs last.  */
7405   if (TREE_CODE (arg1) == SSA_NAME)
7406     return 0;
7407   if (TREE_CODE (arg0) == SSA_NAME)
7408     return 1;
7409 
7410   /* Put variables last.  */
7411   if (DECL_P (arg1))
7412     return 0;
7413   if (DECL_P (arg0))
7414     return 1;
7415 
7416   return 0;
7417 }
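/* Hedged usage note: callers use this predicate to canonicalize
   commutative operands, e.g. 1 + x is rewritten as x + 1 because the
   constant belongs last, and SSA_NAMEs are ordered by version number
   so x_7 + x_3 and x_3 + x_7 reach the same canonical tree.  */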
7418 
7419 
7420 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7421    means A >= Y && A != MAX, but in this case we know that
7422    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7423 
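/* Hedged sketch of why the bound matters, for unsigned A: A + 1 > Y by
   itself is not equivalent to A >= Y, because A == UINT_MAX makes
   A + 1 wrap to 0.  The companion test A < X rules that value out
   (A < X <= UINT_MAX), making the rewrite to A >= Y safe.  */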
7424 static tree
7425 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7426 {
7427   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7428 
7429   if (TREE_CODE (bound) == LT_EXPR)
7430     a = TREE_OPERAND (bound, 0);
7431   else if (TREE_CODE (bound) == GT_EXPR)
7432     a = TREE_OPERAND (bound, 1);
7433   else
7434     return NULL_TREE;
7435 
7436   typea = TREE_TYPE (a);
7437   if (!INTEGRAL_TYPE_P (typea)
7438       && !POINTER_TYPE_P (typea))
7439     return NULL_TREE;
7440 
7441   if (TREE_CODE (ineq) == LT_EXPR)
7442     {
7443       a1 = TREE_OPERAND (ineq, 1);
7444       y = TREE_OPERAND (ineq, 0);
7445     }
7446   else if (TREE_CODE (ineq) == GT_EXPR)
7447     {
7448       a1 = TREE_OPERAND (ineq, 0);
7449       y = TREE_OPERAND (ineq, 1);
7450     }
7451   else
7452     return NULL_TREE;
7453 
7454   if (TREE_TYPE (a1) != typea)
7455     return NULL_TREE;
7456 
7457   if (POINTER_TYPE_P (typea))
7458     {
7459       /* Convert the pointer types into integers before taking the difference.  */
7460       tree ta = fold_convert_loc (loc, ssizetype, a);
7461       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7462       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7463     }
7464   else
7465     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7466 
7467   if (!diff || !integer_onep (diff))
7468    return NULL_TREE;
7469 
7470   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7471 }
7472 
7473 /* Fold a sum or difference of at least one multiplication.
7474    Returns the folded tree or NULL if no simplification could be made.  */
7475 
7476 static tree
7477 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7478 			  tree arg0, tree arg1)
7479 {
7480   tree arg00, arg01, arg10, arg11;
7481   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7482 
7483   /* (A * C) +- (B * C) -> (A+-B) * C.
7484      (A * C) +- A -> A * (C+-1).
7485      We are most concerned about the case where C is a constant,
7486      but other combinations show up during loop reduction.  Since
7487      it is not difficult, try all four possibilities.  */
7488 
7489   if (TREE_CODE (arg0) == MULT_EXPR)
7490     {
7491       arg00 = TREE_OPERAND (arg0, 0);
7492       arg01 = TREE_OPERAND (arg0, 1);
7493     }
7494   else if (TREE_CODE (arg0) == INTEGER_CST)
7495     {
7496       arg00 = build_one_cst (type);
7497       arg01 = arg0;
7498     }
7499   else
7500     {
7501       /* We cannot generate constant 1 for fract.  */
7502       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7503 	return NULL_TREE;
7504       arg00 = arg0;
7505       arg01 = build_one_cst (type);
7506     }
7507   if (TREE_CODE (arg1) == MULT_EXPR)
7508     {
7509       arg10 = TREE_OPERAND (arg1, 0);
7510       arg11 = TREE_OPERAND (arg1, 1);
7511     }
7512   else if (TREE_CODE (arg1) == INTEGER_CST)
7513     {
7514       arg10 = build_one_cst (type);
7515       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7516 	 the purpose of this canonicalization.  */
7517       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7518 	  && negate_expr_p (arg1)
7519 	  && code == PLUS_EXPR)
7520 	{
7521 	  arg11 = negate_expr (arg1);
7522 	  code = MINUS_EXPR;
7523 	}
7524       else
7525 	arg11 = arg1;
7526     }
7527   else
7528     {
7529       /* We cannot generate constant 1 for fract.  */
7530       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7531 	return NULL_TREE;
7532       arg10 = arg1;
7533       arg11 = build_one_cst (type);
7534     }
7535   same = NULL_TREE;
7536 
7537   /* Prefer factoring a common non-constant.  */
7538   if (operand_equal_p (arg00, arg10, 0))
7539     same = arg00, alt0 = arg01, alt1 = arg11;
7540   else if (operand_equal_p (arg01, arg11, 0))
7541     same = arg01, alt0 = arg00, alt1 = arg10;
7542   else if (operand_equal_p (arg00, arg11, 0))
7543     same = arg00, alt0 = arg01, alt1 = arg10;
7544   else if (operand_equal_p (arg01, arg10, 0))
7545     same = arg01, alt0 = arg00, alt1 = arg11;
7546 
7547   /* No identical multiplicands; see if we can find a common
7548      power-of-two factor in non-power-of-two multiplies.  This
7549      can help in multi-dimensional array access.  */
7550   else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7551     {
7552       HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7553       HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7554       HOST_WIDE_INT tmp;
7555       bool swap = false;
7556       tree maybe_same;
7557 
7558       /* Move min of absolute values to int11.  */
7559       if (absu_hwi (int01) < absu_hwi (int11))
7560         {
7561 	  tmp = int01, int01 = int11, int11 = tmp;
7562 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7563 	  maybe_same = arg01;
7564 	  swap = true;
7565 	}
7566       else
7567 	maybe_same = arg11;
7568 
7569       const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7570       if (factor > 1
7571 	  && pow2p_hwi (factor)
7572 	  && (int01 & (factor - 1)) == 0
7573 	  /* The remainder should not be a constant, otherwise we
7574 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7575 	     increased the number of multiplications necessary.  */
7576 	  && TREE_CODE (arg10) != INTEGER_CST)
7577         {
7578 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7579 			      build_int_cst (TREE_TYPE (arg00),
7580 					     int01 / int11));
7581 	  alt1 = arg10;
7582 	  same = maybe_same;
7583 	  if (swap)
7584 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7585 	}
7586     }
7587 
7588   if (!same)
7589     return NULL_TREE;
7590 
7591   if (! ANY_INTEGRAL_TYPE_P (type)
7592       || TYPE_OVERFLOW_WRAPS (type)
7593       /* We are neither factoring zero nor minus one.  */
7594       || TREE_CODE (same) == INTEGER_CST)
7595     return fold_build2_loc (loc, MULT_EXPR, type,
7596 			fold_build2_loc (loc, code, type,
7597 				     fold_convert_loc (loc, type, alt0),
7598 				     fold_convert_loc (loc, type, alt1)),
7599 			fold_convert_loc (loc, type, same));
7600 
7601   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7602      same may be minus one and thus the multiplication may overflow.  Perform
7603      the sum operation in an unsigned type.  */
7604   tree utype = unsigned_type_for (type);
7605   tree tem = fold_build2_loc (loc, code, utype,
7606 			      fold_convert_loc (loc, utype, alt0),
7607 			      fold_convert_loc (loc, utype, alt1));
7608   /* If the sum evaluated to a constant that is not -INF the multiplication
7609      cannot overflow.  */
7610   if (TREE_CODE (tem) == INTEGER_CST
7611       && (wi::to_wide (tem)
7612 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7613     return fold_build2_loc (loc, MULT_EXPR, type,
7614 			    fold_convert (type, tem), same);
7615 
7616   /* Do not resort to unsigned multiplication because
7617      we lose the no-overflow property of the expression.  */
7618   return NULL_TREE;
7619 }
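/* Hedged sketches of the factorings above; the constants are assumed:

     a * 12 + b * 4    becomes    (a * 3 + b) * 4
     x * 5 + x         becomes    x * 6

   For integral types where overflow is undefined and the common factor
   is not a constant, the sum is first computed in the corresponding
   unsigned type, as done at the end of the function.  */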
7620 
7621 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7622    specified by EXPR into the buffer PTR of length LEN bytes.
7623    Return the number of bytes placed in the buffer, or zero
7624    upon failure.  */
7625 
7626 static int
7627 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7628 {
7629   tree type = TREE_TYPE (expr);
7630   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7631   int byte, offset, word, words;
7632   unsigned char value;
7633 
7634   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7635     return 0;
7636   if (off == -1)
7637     off = 0;
7638 
7639   if (ptr == NULL)
7640     /* Dry run.  */
7641     return MIN (len, total_bytes - off);
7642 
7643   words = total_bytes / UNITS_PER_WORD;
7644 
7645   for (byte = 0; byte < total_bytes; byte++)
7646     {
7647       int bitpos = byte * BITS_PER_UNIT;
7648       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7649 	 number of bytes.  */
7650       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7651 
7652       if (total_bytes > UNITS_PER_WORD)
7653 	{
7654 	  word = byte / UNITS_PER_WORD;
7655 	  if (WORDS_BIG_ENDIAN)
7656 	    word = (words - 1) - word;
7657 	  offset = word * UNITS_PER_WORD;
7658 	  if (BYTES_BIG_ENDIAN)
7659 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7660 	  else
7661 	    offset += byte % UNITS_PER_WORD;
7662 	}
7663       else
7664 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7665       if (offset >= off && offset - off < len)
7666 	ptr[offset - off] = value;
7667     }
7668   return MIN (len, total_bytes - off);
7669 }
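/* Hedged layout sketch, assuming 32-bit int and LEN >= 4: encoding the
   INTEGER_CST 0x11223344 with OFF == -1 stores the bytes 44 33 22 11
   into PTR on a little-endian target, 11 22 33 44 on a big-endian one,
   and returns 4.  */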
7670 
7671 
7672 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7673    specified by EXPR into the buffer PTR of length LEN bytes.
7674    Return the number of bytes placed in the buffer, or zero
7675    upon failure.  */
7676 
7677 static int
7678 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7679 {
7680   tree type = TREE_TYPE (expr);
7681   scalar_mode mode = SCALAR_TYPE_MODE (type);
7682   int total_bytes = GET_MODE_SIZE (mode);
7683   FIXED_VALUE_TYPE value;
7684   tree i_value, i_type;
7685 
7686   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7687     return 0;
7688 
7689   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7690 
7691   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7692     return 0;
7693 
7694   value = TREE_FIXED_CST (expr);
7695   i_value = double_int_to_tree (i_type, value.data);
7696 
7697   return native_encode_int (i_value, ptr, len, off);
7698 }
7699 
7700 
7701 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7702    specified by EXPR into the buffer PTR of length LEN bytes.
7703    Return the number of bytes placed in the buffer, or zero
7704    upon failure.  */
7705 
7706 static int
7707 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7708 {
7709   tree type = TREE_TYPE (expr);
7710   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7711   int byte, offset, word, words, bitpos;
7712   unsigned char value;
7713 
7714   /* There are always 32 bits in each long, no matter the size of
7715      the host's long.  We handle floating point representations with
7716      up to 192 bits.  */
7717   long tmp[6];
7718 
7719   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7720     return 0;
7721   if (off == -1)
7722     off = 0;
7723 
7724   if (ptr == NULL)
7725     /* Dry run.  */
7726     return MIN (len, total_bytes - off);
7727 
7728   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7729 
7730   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7731 
7732   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7733        bitpos += BITS_PER_UNIT)
7734     {
7735       byte = (bitpos / BITS_PER_UNIT) & 3;
7736       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7737 
7738       if (UNITS_PER_WORD < 4)
7739 	{
7740 	  word = byte / UNITS_PER_WORD;
7741 	  if (WORDS_BIG_ENDIAN)
7742 	    word = (words - 1) - word;
7743 	  offset = word * UNITS_PER_WORD;
7744 	  if (BYTES_BIG_ENDIAN)
7745 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7746 	  else
7747 	    offset += byte % UNITS_PER_WORD;
7748 	}
7749       else
7750 	{
7751 	  offset = byte;
7752 	  if (BYTES_BIG_ENDIAN)
7753 	    {
7754 	      /* Reverse bytes within each long, or within the entire float
7755 		 if it's smaller than a long (for HFmode).  */
7756 	      offset = MIN (3, total_bytes - 1) - offset;
7757 	      gcc_assert (offset >= 0);
7758 	    }
7759 	}
7760       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7761       if (offset >= off
7762 	  && offset - off < len)
7763 	ptr[offset - off] = value;
7764     }
7765   return MIN (len, total_bytes - off);
7766 }
7767 
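/* The subtle part above is the offset permutation.  An illustrative
   sketch for an 8-byte value on a big-endian target with
   UNITS_PER_WORD >= 4: bytes are reversed within each 32-bit long
   while the longs themselves stay in order.  */
#if 0
static int
real_byte_offset_be (int byte_index)	/* 0 .. 7 */
{
  int byte = byte_index & 3;		/* index within the 32-bit long */
  int offset = 3 - byte;		/* reverse within the long */
  return offset + (byte_index & ~3);	/* put the long back in place */
}
/* Maps 0,1,2,3,4,5,6,7 to 3,2,1,0,7,6,5,4.  */
#endif
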
7768 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7769    specified by EXPR into the buffer PTR of length LEN bytes.
7770    Return the number of bytes placed in the buffer, or zero
7771    upon failure.  */
7772 
7773 static int
7774 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7775 {
7776   int rsize, isize;
7777   tree part;
7778 
7779   part = TREE_REALPART (expr);
7780   rsize = native_encode_expr (part, ptr, len, off);
7781   if (off == -1 && rsize == 0)
7782     return 0;
7783   part = TREE_IMAGPART (expr);
7784   if (off != -1)
7785     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7786   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7787 			      len - rsize, off);
7788   if (off == -1 && isize != rsize)
7789     return 0;
7790   return rsize + isize;
7791 }
7792 
7793 /* Like native_encode_vector, but only encode the first COUNT elements.
7794    The other arguments are as for native_encode_vector.  */
7795 
7796 static int
7797 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7798 			   int off, unsigned HOST_WIDE_INT count)
7799 {
7800   tree itype = TREE_TYPE (TREE_TYPE (expr));
7801   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7802       && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7803     {
7804       /* This is the only case in which elements can be smaller than a byte.
7805 	 Element 0 is always in the lsb of the containing byte.  */
7806       unsigned int elt_bits = TYPE_PRECISION (itype);
7807       int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7808       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7809 	return 0;
7810 
7811       if (off == -1)
7812 	off = 0;
7813 
7814       /* Zero the buffer and then set bits later where necessary.  */
7815       int extract_bytes = MIN (len, total_bytes - off);
7816       if (ptr)
7817 	memset (ptr, 0, extract_bytes);
7818 
7819       unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7820       unsigned int first_elt = off * elts_per_byte;
7821       unsigned int extract_elts = extract_bytes * elts_per_byte;
7822       for (unsigned int i = 0; i < extract_elts; ++i)
7823 	{
7824 	  tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7825 	  if (TREE_CODE (elt) != INTEGER_CST)
7826 	    return 0;
7827 
7828 	  if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7829 	    {
7830 	      unsigned int bit = i * elt_bits;
7831 	      ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7832 	    }
7833 	}
7834       return extract_bytes;
7835     }
7836 
7837   int offset = 0;
7838   int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7839   for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7840     {
7841       if (off >= size)
7842 	{
7843 	  off -= size;
7844 	  continue;
7845 	}
7846       tree elem = VECTOR_CST_ELT (expr, i);
7847       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7848 				    len - offset, off);
7849       if ((off == -1 && res != size) || res == 0)
7850 	return 0;
7851       offset += res;
7852       if (offset >= len)
7853 	return (off == -1 && i < count - 1) ? 0 : offset;
7854       if (off != -1)
7855 	off = 0;
7856     }
7857   return offset;
7858 }
7859 
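/* A minimal sketch of the sub-byte boolean packing above: 1-bit
   elements, element 0 in the lsb of byte 0, with the buffer zeroed up
   front just as the memset above does (illustrative only).  */
#if 0
static void
pack_bool_bits (const int *elts, unsigned int n, unsigned char *out)
{
  /* OUT must already be zero-initialized.  */
  for (unsigned int i = 0; i < n; i++)
    if (elts[i] & 1)
      out[i / 8] |= 1u << (i % 8);
}
/* pack_bool_bits ((int[]) { 1, 0, 1, 1 }, 4, buf) sets buf[0] to 0x0d.  */
#endif
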
7860 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7861    specified by EXPR into the buffer PTR of length LEN bytes.
7862    Return the number of bytes placed in the buffer, or zero
7863    upon failure.  */
7864 
7865 static int
7866 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7867 {
7868   unsigned HOST_WIDE_INT count;
7869   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7870     return 0;
7871   return native_encode_vector_part (expr, ptr, len, off, count);
7872 }
7873 
7874 
7875 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7876    specified by EXPR into the buffer PTR of length LEN bytes.
7877    Return the number of bytes placed in the buffer, or zero
7878    upon failure.  */
7879 
7880 static int
7881 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7882 {
7883   tree type = TREE_TYPE (expr);
7884 
7885   /* Wide-char strings are encoded in target byte-order so encoding
7886      them natively is trivial.  */
7887   if (BITS_PER_UNIT != CHAR_BIT
7888       || TREE_CODE (type) != ARRAY_TYPE
7889       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7890       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7891     return 0;
7892 
7893   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7894   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7895     return 0;
7896   if (off == -1)
7897     off = 0;
7898   len = MIN (total_bytes - off, len);
7899   if (ptr == NULL)
7900     /* Dry run.  */;
7901   else
7902     {
7903       int written = 0;
7904       if (off < TREE_STRING_LENGTH (expr))
7905 	{
7906 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7907 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7908 	}
7909       memset (ptr + written, 0, len - written);
7910     }
7911   return len;
7912 }
7913 
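/* A sketch of the tail-padding behavior above, with made-up sizes
   (illustrative only): a STRING_CST shorter than its array type is
   copied and the remainder of the requested window is zero filled.  */
#if 0
#include <string.h>
static void
encode_padded_string (unsigned char out[8])
{
  const char str[4] = "abc";	/* TREE_STRING_LENGTH would be 4 */
  int written = 4;		/* MIN (len, TREE_STRING_LENGTH - off) */
  memcpy (out, str, written);
  memset (out + written, 0, 8 - written);
}
#endif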
7914 
7915 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7916    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7917    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7918    anything, just do a dry run.  If OFF is not -1 then start
7919    the encoding at byte offset OFF and encode at most LEN bytes.
7920    Return the number of bytes placed in the buffer, or zero upon failure.  */
7921 
7922 int
7923 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7924 {
7925   /* We don't support starting at negative offset and -1 is special.  */
7926   if (off < -1)
7927     return 0;
7928 
7929   switch (TREE_CODE (expr))
7930     {
7931     case INTEGER_CST:
7932       return native_encode_int (expr, ptr, len, off);
7933 
7934     case REAL_CST:
7935       return native_encode_real (expr, ptr, len, off);
7936 
7937     case FIXED_CST:
7938       return native_encode_fixed (expr, ptr, len, off);
7939 
7940     case COMPLEX_CST:
7941       return native_encode_complex (expr, ptr, len, off);
7942 
7943     case VECTOR_CST:
7944       return native_encode_vector (expr, ptr, len, off);
7945 
7946     case STRING_CST:
7947       return native_encode_string (expr, ptr, len, off);
7948 
7949     default:
7950       return 0;
7951     }
7952 }
7953 
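/* A minimal round-trip sketch using the two entry points in this file;
   REINTERPRET_AS is a hypothetical helper, not an existing GCC
   function.  It mirrors what fold_view_convert_expr does below.  */
#if 0
static tree
reinterpret_as (tree type, tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  return len != 0 ? native_interpret_expr (type, buf, len) : NULL_TREE;
}
#endif
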
7954 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7955    NON_LVALUE_EXPRs and nops.  */
7956 
7957 int
7958 native_encode_initializer (tree init, unsigned char *ptr, int len,
7959 			   int off)
7960 {
7961   /* We don't support starting at negative offset and -1 is special.  */
7962   if (off < -1 || init == NULL_TREE)
7963     return 0;
7964 
7965   STRIP_NOPS (init);
7966   switch (TREE_CODE (init))
7967     {
7968     case VIEW_CONVERT_EXPR:
7969     case NON_LVALUE_EXPR:
7970       return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7971     default:
7972       return native_encode_expr (init, ptr, len, off);
7973     case CONSTRUCTOR:
7974       tree type = TREE_TYPE (init);
7975       HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7976       if (total_bytes < 0)
7977 	return 0;
7978       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7979 	return 0;
7980       int o = off == -1 ? 0 : off;
7981       if (TREE_CODE (type) == ARRAY_TYPE)
7982 	{
7983 	  tree min_index;
7984 	  unsigned HOST_WIDE_INT cnt;
7985 	  HOST_WIDE_INT curpos = 0, fieldsize;
7986 	  constructor_elt *ce;
7987 
7988 	  if (!TYPE_DOMAIN (type)
7989 	      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
7990 	    return 0;
7991 
7992 	  fieldsize = int_size_in_bytes (TREE_TYPE (type));
7993 	  if (fieldsize <= 0)
7994 	    return 0;
7995 
7996 	  min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7997 	  if (ptr)
7998 	    memset (ptr, '\0', MIN (total_bytes - o, len));
7999 
8000 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8001 	    {
8002 	      tree val = ce->value;
8003 	      tree index = ce->index;
8004 	      HOST_WIDE_INT pos = curpos, count = 0;
8005 	      bool full = false;
8006 	      if (index && TREE_CODE (index) == RANGE_EXPR)
8007 		{
8008 		  if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8009 		      || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8010 		    return 0;
8011 		  offset_int wpos
8012 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8013 				- wi::to_offset (min_index),
8014 				TYPE_PRECISION (sizetype));
8015 		  wpos *= fieldsize;
8016 		  if (!wi::fits_shwi_p (wpos))
8017 		    return 0;
8018 		  pos = wpos.to_shwi ();
8019 		  offset_int wcount
8020 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8021 				- wi::to_offset (TREE_OPERAND (index, 0)),
8022 				TYPE_PRECISION (sizetype));
8023 		  if (!wi::fits_shwi_p (wcount))
8024 		    return 0;
8025 		  count = wcount.to_shwi ();
8026 		}
8027 	      else if (index)
8028 		{
8029 		  if (TREE_CODE (index) != INTEGER_CST)
8030 		    return 0;
8031 		  offset_int wpos
8032 		    = wi::sext (wi::to_offset (index)
8033 				- wi::to_offset (min_index),
8034 				TYPE_PRECISION (sizetype));
8035 		  wpos *= fieldsize;
8036 		  if (!wi::fits_shwi_p (wpos))
8037 		    return 0;
8038 		  pos = wpos.to_shwi ();
8039 		}
8040 
8041 	      curpos = pos;
8042 	      if (val)
8043 		do
8044 		  {
8045 		    if (off == -1
8046 			|| (curpos >= off
8047 			    && (curpos + fieldsize
8048 				<= (HOST_WIDE_INT) off + len)))
8049 		      {
8050 			if (full)
8051 			  {
8052 			    if (ptr)
8053 			      memcpy (ptr + (curpos - o), ptr + (pos - o),
8054 				      fieldsize);
8055 			  }
8056 			else if (!native_encode_initializer (val,
8057 							     ptr
8058 							     ? ptr + curpos - o
8059 							     : NULL,
8060 							     fieldsize,
8061 							     off == -1 ? -1
8062 								       : 0))
8063 			  return 0;
8064 			else
8065 			  {
8066 			    full = true;
8067 			    pos = curpos;
8068 			  }
8069 		      }
8070 		    else if (curpos + fieldsize > off
8071 			     && curpos < (HOST_WIDE_INT) off + len)
8072 		      {
8073 			/* Partial overlap.  */
8074 			unsigned char *p = NULL;
8075 			int no = 0;
8076 			int l;
8077 			if (curpos >= off)
8078 			  {
8079 			    if (ptr)
8080 			      p = ptr + curpos - off;
8081 			    l = MIN ((HOST_WIDE_INT) off + len - curpos,
8082 				     fieldsize);
8083 			  }
8084 			else
8085 			  {
8086 			    p = ptr;
8087 			    no = off - curpos;
8088 			    l = len;
8089 			  }
8090 			if (!native_encode_initializer (val, p, l, no))
8091 			  return 0;
8092 		      }
8093 		    curpos += fieldsize;
8094 		  }
8095 		while (count-- != 0);
8096 	    }
8097 	  return MIN (total_bytes - o, len);
8098 	}
8099       else if (TREE_CODE (type) == RECORD_TYPE
8100 	       || TREE_CODE (type) == UNION_TYPE)
8101 	{
8102 	  unsigned HOST_WIDE_INT cnt;
8103 	  constructor_elt *ce;
8104 
8105 	  if (ptr != NULL)
8106 	    memset (ptr, '\0', MIN (total_bytes - o, len));
8107 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8108 	    {
8109 	      tree field = ce->index;
8110 	      tree val = ce->value;
8111 	      HOST_WIDE_INT pos, fieldsize;
8112 
8113 	      if (field == NULL_TREE)
8114 		return 0;
8115 
8116 	      pos = int_byte_position (field);
8117 	      if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8118 		continue;
8119 
8120 	      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8121 		  && TYPE_DOMAIN (TREE_TYPE (field))
8122 		  && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8123 		return 0;
8124 	      if (DECL_SIZE_UNIT (field) == NULL_TREE
8125 		  || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8126 		return 0;
8127 	      fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8128 	      if (fieldsize == 0)
8129 		continue;
8130 
8131 	      if (off != -1 && pos + fieldsize <= off)
8132 		continue;
8133 
8134 	      if (DECL_BIT_FIELD (field))
8135 		return 0;
8136 
8137 	      if (val == NULL_TREE)
8138 		continue;
8139 
8140 	      if (off == -1
8141 		  || (pos >= off
8142 		      && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8143 		{
8144 		  if (!native_encode_initializer (val, ptr ? ptr + pos - o
8145 							   : NULL,
8146 						  fieldsize,
8147 						  off == -1 ? -1 : 0))
8148 		    return 0;
8149 		}
8150 	      else
8151 		{
8152 		  /* Partial overlap.  */
8153 		  unsigned char *p = NULL;
8154 		  int no = 0;
8155 		  int l;
8156 		  if (pos >= off)
8157 		    {
8158 		      if (ptr)
8159 			p = ptr + pos - off;
8160 		      l = MIN ((HOST_WIDE_INT) off + len - pos,
8161 				fieldsize);
8162 		    }
8163 		  else
8164 		    {
8165 		      p = ptr;
8166 		      no = off - pos;
8167 		      l = len;
8168 		    }
8169 		  if (!native_encode_initializer (val, p, l, no))
8170 		    return 0;
8171 		}
8172 	    }
8173 	  return MIN (total_bytes - o, len);
8174 	}
8175       return 0;
8176     }
8177 }
8178 
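/* A worked instance of the RANGE_EXPR bookkeeping above, with made-up
   numbers: for an initializer entry [2 ... 4] = V in an int a[6] with
   minimum index 0 and fieldsize 4, the first copy lands at byte
   (2 - 0) * 4 == 8, COUNT is 4 - 2 == 2, and the do-while loop runs
   COUNT + 1 == 3 times, encoding V (or memcpy-ing the first encoding)
   at bytes 8, 12 and 16.  */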
8179 
8180 /* Subroutine of native_interpret_expr.  Interpret the contents of
8181    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8182    If the buffer cannot be interpreted, return NULL_TREE.  */
8183 
8184 static tree
8185 native_interpret_int (tree type, const unsigned char *ptr, int len)
8186 {
8187   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8188 
8189   if (total_bytes > len
8190       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8191     return NULL_TREE;
8192 
8193   wide_int result = wi::from_buffer (ptr, total_bytes);
8194 
8195   return wide_int_to_tree (type, result);
8196 }
8197 
8198 
8199 /* Subroutine of native_interpret_expr.  Interpret the contents of
8200    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8201    If the buffer cannot be interpreted, return NULL_TREE.  */
8202 
8203 static tree
8204 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8205 {
8206   scalar_mode mode = SCALAR_TYPE_MODE (type);
8207   int total_bytes = GET_MODE_SIZE (mode);
8208   double_int result;
8209   FIXED_VALUE_TYPE fixed_value;
8210 
8211   if (total_bytes > len
8212       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8213     return NULL_TREE;
8214 
8215   result = double_int::from_buffer (ptr, total_bytes);
8216   fixed_value = fixed_from_double_int (result, mode);
8217 
8218   return build_fixed (type, fixed_value);
8219 }
8220 
8221 
8222 /* Subroutine of native_interpret_expr.  Interpret the contents of
8223    the buffer PTR of length LEN as a REAL_CST of type TYPE.
8224    If the buffer cannot be interpreted, return NULL_TREE.  */
8225 
8226 static tree
8227 native_interpret_real (tree type, const unsigned char *ptr, int len)
8228 {
8229   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8230   int total_bytes = GET_MODE_SIZE (mode);
8231   unsigned char value;
8232   /* There are always 32 bits in each long, no matter the size of
8233      the host's long.  We handle floating point representations with
8234      up to 192 bits.  */
8235   REAL_VALUE_TYPE r;
8236   long tmp[6];
8237 
8238   if (total_bytes > len || total_bytes > 24)
8239     return NULL_TREE;
8240   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8241 
8242   memset (tmp, 0, sizeof (tmp));
8243   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8244        bitpos += BITS_PER_UNIT)
8245     {
8246       /* Both OFFSET and BYTE index within a long;
8247 	 bitpos indexes the whole float.  */
8248       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8249       if (UNITS_PER_WORD < 4)
8250 	{
8251 	  int word = byte / UNITS_PER_WORD;
8252 	  if (WORDS_BIG_ENDIAN)
8253 	    word = (words - 1) - word;
8254 	  offset = word * UNITS_PER_WORD;
8255 	  if (BYTES_BIG_ENDIAN)
8256 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8257 	  else
8258 	    offset += byte % UNITS_PER_WORD;
8259 	}
8260       else
8261 	{
8262 	  offset = byte;
8263 	  if (BYTES_BIG_ENDIAN)
8264 	    {
8265 	      /* Reverse bytes within each long, or within the entire float
8266 		 if it's smaller than a long (for HFmode).  */
8267 	      offset = MIN (3, total_bytes - 1) - offset;
8268 	      gcc_assert (offset >= 0);
8269 	    }
8270 	}
8271       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8272 
8273       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8274     }
8275 
8276   real_from_target (&r, tmp, mode);
8277   tree ret = build_real (type, r);
8278   if (MODE_COMPOSITE_P (mode))
8279     {
8280       /* For floating point values in composite modes, punt if this folding
8281 	 doesn't preserve bit representation.  As the mode doesn't have fixed
8282 	 precision while GCC pretends it does, there could be valid values that
8283 	 GCC can't really represent accurately.  See PR95450.  */
8284       unsigned char buf[24];
8285       if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8286 	  || memcmp (ptr, buf, total_bytes) != 0)
8287 	ret = NULL_TREE;
8288     }
8289   return ret;
8290 }
8291 
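/* The MODE_COMPOSITE_P guard above is a verify-by-round-trip pattern:
   accept the decoded REAL_CST only if re-encoding it reproduces the
   input bytes bit-for-bit.  For a composite mode such as IBM
   double-double, REAL_VALUE_TYPE cannot represent every target value
   exactly, so decoding followed by encoding need not be the identity;
   in sketch form: value = DECODE (bytes), then punt unless
   ENCODE (value) == bytes, where DECODE and ENCODE stand in for
   real_from_target and real_to_target plus the byte shuffling.  */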
8292 
8293 /* Subroutine of native_interpret_expr.  Interpret the contents of
8294    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8295    If the buffer cannot be interpreted, return NULL_TREE.  */
8296 
8297 static tree
8298 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8299 {
8300   tree etype, rpart, ipart;
8301   int size;
8302 
8303   etype = TREE_TYPE (type);
8304   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8305   if (size * 2 > len)
8306     return NULL_TREE;
8307   rpart = native_interpret_expr (etype, ptr, size);
8308   if (!rpart)
8309     return NULL_TREE;
8310   ipart = native_interpret_expr (etype, ptr+size, size);
8311   if (!ipart)
8312     return NULL_TREE;
8313   return build_complex (type, rpart, ipart);
8314 }
8315 
8316 /* Read a vector of type TYPE from the target memory image given by BYTES,
8317    which contains LEN bytes.  The vector is known to be encodable using
8318    NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8319 
8320    Return the vector on success, otherwise return null.  */
8321 
8322 static tree
8323 native_interpret_vector_part (tree type, const unsigned char *bytes,
8324 			      unsigned int len, unsigned int npatterns,
8325 			      unsigned int nelts_per_pattern)
8326 {
8327   tree elt_type = TREE_TYPE (type);
8328   if (VECTOR_BOOLEAN_TYPE_P (type)
8329       && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8330     {
8331       /* This is the only case in which elements can be smaller than a byte.
8332 	 Element 0 is always in the lsb of the containing byte.  */
8333       unsigned int elt_bits = TYPE_PRECISION (elt_type);
8334       if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8335 	return NULL_TREE;
8336 
8337       tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8338       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8339 	{
8340 	  unsigned int bit_index = i * elt_bits;
8341 	  unsigned int byte_index = bit_index / BITS_PER_UNIT;
8342 	  unsigned int lsb = bit_index % BITS_PER_UNIT;
8343 	  builder.quick_push (bytes[byte_index] & (1 << lsb)
8344 			      ? build_all_ones_cst (elt_type)
8345 			      : build_zero_cst (elt_type));
8346 	}
8347       return builder.build ();
8348     }
8349 
8350   unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8351   if (elt_bytes * npatterns * nelts_per_pattern > len)
8352     return NULL_TREE;
8353 
8354   tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8355   for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8356     {
8357       tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8358       if (!elt)
8359 	return NULL_TREE;
8360       builder.quick_push (elt);
8361       bytes += elt_bytes;
8362     }
8363   return builder.build ();
8364 }
8365 
8366 /* Subroutine of native_interpret_expr.  Interpret the contents of
8367    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8368    If the buffer cannot be interpreted, return NULL_TREE.  */
8369 
8370 static tree
8371 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8372 {
8373   tree etype;
8374   unsigned int size;
8375   unsigned HOST_WIDE_INT count;
8376 
8377   etype = TREE_TYPE (type);
8378   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8379   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8380       || size * count > len)
8381     return NULL_TREE;
8382 
8383   return native_interpret_vector_part (type, ptr, len, count, 1);
8384 }
8385 
8386 
8387 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
8388    the buffer PTR of length LEN as a constant of type TYPE.  For
8389    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8390    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8391    return NULL_TREE.  */
8392 
8393 tree
8394 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8395 {
8396   switch (TREE_CODE (type))
8397     {
8398     case INTEGER_TYPE:
8399     case ENUMERAL_TYPE:
8400     case BOOLEAN_TYPE:
8401     case POINTER_TYPE:
8402     case REFERENCE_TYPE:
8403       return native_interpret_int (type, ptr, len);
8404 
8405     case REAL_TYPE:
8406       return native_interpret_real (type, ptr, len);
8407 
8408     case FIXED_POINT_TYPE:
8409       return native_interpret_fixed (type, ptr, len);
8410 
8411     case COMPLEX_TYPE:
8412       return native_interpret_complex (type, ptr, len);
8413 
8414     case VECTOR_TYPE:
8415       return native_interpret_vector (type, ptr, len);
8416 
8417     default:
8418       return NULL_TREE;
8419     }
8420 }
8421 
8422 /* Returns true if we can interpret the contents of a native encoding
8423    as TYPE.  */
8424 
8425 bool
8426 can_native_interpret_type_p (tree type)
8427 {
8428   switch (TREE_CODE (type))
8429     {
8430     case INTEGER_TYPE:
8431     case ENUMERAL_TYPE:
8432     case BOOLEAN_TYPE:
8433     case POINTER_TYPE:
8434     case REFERENCE_TYPE:
8435     case FIXED_POINT_TYPE:
8436     case REAL_TYPE:
8437     case COMPLEX_TYPE:
8438     case VECTOR_TYPE:
8439       return true;
8440     default:
8441       return false;
8442     }
8443 }
8444 
8445 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8446    or extracted constant positions and/or sizes aren't byte aligned.  */
8447 
8448 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8449    bits between adjacent elements.  AMNT should be within
8450    [0, BITS_PER_UNIT).
8451    Example, AMNT = 2:
8452    00011111|11100000 << 2 = 01111111|10000000
8453    PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */
8454 
8455 void
8456 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8457 			   unsigned int amnt)
8458 {
8459   if (amnt == 0)
8460     return;
8461 
8462   unsigned char carry_over = 0U;
8463   unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8464   unsigned char clear_mask = (~0U) << amnt;
8465 
8466   for (unsigned int i = 0; i < sz; i++)
8467     {
8468       unsigned prev_carry_over = carry_over;
8469       carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8470 
8471       ptr[i] <<= amnt;
8472       if (i != 0)
8473 	{
8474 	  ptr[i] &= clear_mask;
8475 	  ptr[i] |= prev_carry_over;
8476 	}
8477     }
8478 }
8479 
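/* A standalone check of the documented example, re-implemented for
   8-bit units with AMNT == 2 (illustrative only).  */
#if 0
#include <assert.h>
static void
demo_shift_bytes_left (void)
{
  /* PTR[1]|PTR[0] = 00011111|11100000.  */
  unsigned char p[2] = { 0xe0, 0x1f };
  unsigned int carry = 0;
  for (int i = 0; i < 2; i++)
    {
      unsigned int next = (p[i] & 0xc0u) >> 6;	/* the top AMNT bits */
      p[i] = (unsigned char) ((p[i] << 2) | carry);
      carry = next;
    }
  /* PTR[1]|PTR[0] = 01111111|10000000.  */
  assert (p[0] == 0x80 && p[1] == 0x7f);
}
#endif
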
8480 /* Like shift_bytes_in_array_left but for big-endian.
8481    Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8482    bits between adjacent elements.  AMNT should be within
8483    [0, BITS_PER_UNIT).
8484    Example, AMNT = 2:
8485    00011111|11100000 >> 2 = 00000111|11111000
8486    PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */
8487 
8488 void
8489 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8490 			    unsigned int amnt)
8491 {
8492   if (amnt == 0)
8493     return;
8494 
8495   unsigned char carry_over = 0U;
8496   unsigned char carry_mask = ~(~0U << amnt);
8497 
8498   for (unsigned int i = 0; i < sz; i++)
8499     {
8500       unsigned prev_carry_over = carry_over;
8501       carry_over = ptr[i] & carry_mask;
8502 
8503       carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8504       ptr[i] >>= amnt;
8505       ptr[i] |= prev_carry_over;
8506     }
8507 }
8508 
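/* The big-endian counterpart, again checking the documented example
   with AMNT == 2 (illustrative only).  */
#if 0
#include <assert.h>
static void
demo_shift_bytes_right (void)
{
  /* PTR[0]|PTR[1] = 00011111|11100000.  */
  unsigned char p[2] = { 0x1f, 0xe0 };
  unsigned int carry = 0;
  for (int i = 0; i < 2; i++)
    {
      unsigned int next = (p[i] & 0x03u) << 6;	/* the bottom AMNT bits */
      p[i] = (unsigned char) ((p[i] >> 2) | carry);
      carry = next;
    }
  /* PTR[0]|PTR[1] = 00000111|11111000.  */
  assert (p[0] == 0x07 && p[1] == 0xf8);
}
#endif
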
8509 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8510    directly on the VECTOR_CST encoding, in a way that works for variable-
8511    length vectors.  Return the resulting VECTOR_CST on success or null
8512    on failure.  */
8513 
8514 static tree
8515 fold_view_convert_vector_encoding (tree type, tree expr)
8516 {
8517   tree expr_type = TREE_TYPE (expr);
8518   poly_uint64 type_bits, expr_bits;
8519   if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8520       || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8521     return NULL_TREE;
8522 
8523   poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8524   poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8525   unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8526   unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8527 
8528   /* We can only preserve the semantics of a stepped pattern if the new
8529      vector element is an integer of the same size.  */
8530   if (VECTOR_CST_STEPPED_P (expr)
8531       && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8532     return NULL_TREE;
8533 
8534   /* The number of bits needed to encode one element from every pattern
8535      of the original vector.  */
8536   unsigned int expr_sequence_bits
8537     = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8538 
8539   /* The number of bits needed to encode one element from every pattern
8540      of the result.  */
8541   unsigned int type_sequence_bits
8542     = least_common_multiple (expr_sequence_bits, type_elt_bits);
8543 
8544   /* Don't try to read more bytes than are available, which can happen
8545      for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8546      The general VIEW_CONVERT handling can cope with that case, so there's
8547      no point complicating things here.  */
8548   unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8549   unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8550 				    BITS_PER_UNIT);
8551   unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8552   if (known_gt (buffer_bits, expr_bits))
8553     return NULL_TREE;
8554 
8555   /* Get enough bytes of EXPR to form the new encoding.  */
8556   auto_vec<unsigned char, 128> buffer (buffer_bytes);
8557   buffer.quick_grow (buffer_bytes);
8558   if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8559 				 buffer_bits / expr_elt_bits)
8560       != (int) buffer_bytes)
8561     return NULL_TREE;
8562 
8563   /* Reencode the bytes as TYPE.  */
8564   unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8565   return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8566 				       type_npatterns, nelts_per_pattern);
8567 }
8568 
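/* A worked instance of the arithmetic above, with made-up modes: for
   EXPR a duplicated (non-stepped) V8HI encoded with npatterns == 2 and
   nelts_per_pattern == 3 and TYPE a V4SI, expr_sequence_bits is
   2 * 16 == 32, type_sequence_bits is lcm (32, 32) == 32, buffer_bytes
   is CEIL (3 * 32, 8) == 12, and the result is rebuilt from those 12
   bytes with type_npatterns == 32 / 32 == 1 pattern of 3 elements.  */
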
8569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8570    TYPE at compile-time.  If we're unable to perform the conversion
8571    return NULL_TREE.  */
8572 
8573 static tree
8574 fold_view_convert_expr (tree type, tree expr)
8575 {
8576   /* We support up to 512-bit values (for V8DFmode).  */
8577   unsigned char buffer[64];
8578   int len;
8579 
8580   /* Check that the host and target are sane.  */
8581   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8582     return NULL_TREE;
8583 
8584   if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8585     if (tree res = fold_view_convert_vector_encoding (type, expr))
8586       return res;
8587 
8588   len = native_encode_expr (expr, buffer, sizeof (buffer));
8589   if (len == 0)
8590     return NULL_TREE;
8591 
8592   return native_interpret_expr (type, buffer, len);
8593 }
8594 
8595 /* Build an expression for the address of T.  Folds away INDIRECT_REF
8596    to avoid confusing the gimplify process.  */
8597 
8598 tree
8599 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8600 {
8601   /* The size of the object is not relevant when talking about its address.  */
8602   if (TREE_CODE (t) == WITH_SIZE_EXPR)
8603     t = TREE_OPERAND (t, 0);
8604 
8605   if (TREE_CODE (t) == INDIRECT_REF)
8606     {
8607       t = TREE_OPERAND (t, 0);
8608 
8609       if (TREE_TYPE (t) != ptrtype)
8610 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8611     }
8612   else if (TREE_CODE (t) == MEM_REF
8613 	   && integer_zerop (TREE_OPERAND (t, 1)))
8614     {
8615       t = TREE_OPERAND (t, 0);
8616 
8617       if (TREE_TYPE (t) != ptrtype)
8618 	t = fold_convert_loc (loc, ptrtype, t);
8619     }
8620   else if (TREE_CODE (t) == MEM_REF
8621 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8622     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8623 			TREE_OPERAND (t, 0),
8624 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8625   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8626     {
8627       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8628 
8629       if (TREE_TYPE (t) != ptrtype)
8630 	t = fold_convert_loc (loc, ptrtype, t);
8631     }
8632   else
8633     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8634 
8635   return t;
8636 }
8637 
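/* A small usage sketch, assuming GENERIC built by hand; ADDR_OF_DEREF
   is a hypothetical helper, not an existing GCC function.  */
#if 0
static tree
addr_of_deref (location_t loc, tree ptr)
{
  tree deref = build_fold_indirect_ref_loc (loc, ptr);
  /* The INDIRECT_REF case above folds this back to PTR, adding a
     conversion if the pointer types differ.  */
  return build_fold_addr_expr_loc (loc, deref);
}
#endif
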
8638 /* Build an expression for the address of T.  */
8639 
8640 tree
8641 build_fold_addr_expr_loc (location_t loc, tree t)
8642 {
8643   tree ptrtype = build_pointer_type (TREE_TYPE (t));
8644 
8645   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8646 }
8647 
8648 /* Fold a unary expression of code CODE and type TYPE with operand
8649    OP0.  Return the folded expression if folding is successful.
8650    Otherwise, return NULL_TREE.  */
8651 
8652 tree
8653 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8654 {
8655   tree tem;
8656   tree arg0;
8657   enum tree_code_class kind = TREE_CODE_CLASS (code);
8658 
8659   gcc_assert (IS_EXPR_CODE_CLASS (kind)
8660 	      && TREE_CODE_LENGTH (code) == 1);
8661 
8662   arg0 = op0;
8663   if (arg0)
8664     {
8665       if (CONVERT_EXPR_CODE_P (code)
8666 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8667 	{
8668 	  /* Don't use STRIP_NOPS, because signedness of argument type
8669 	     matters.  */
8670 	  STRIP_SIGN_NOPS (arg0);
8671 	}
8672       else
8673 	{
8674 	  /* Strip any conversions that don't change the mode.  This
8675 	     is safe for every expression, except for a comparison
8676 	     expression because its signedness is derived from its
8677 	     operands.
8678 
8679 	     Note that this is done as an internal manipulation within
8680 	     the constant folder, in order to find the simplest
8681 	     representation of the arguments so that their form can be
8682 	     studied.  In any cases, the appropriate type conversions
8683 	     should be put back in the tree that will get out of the
8684 	     constant folder.  */
8685 	  STRIP_NOPS (arg0);
8686 	}
8687 
8688       if (CONSTANT_CLASS_P (arg0))
8689 	{
8690 	  tree tem = const_unop (code, type, arg0);
8691 	  if (tem)
8692 	    {
8693 	      if (TREE_TYPE (tem) != type)
8694 		tem = fold_convert_loc (loc, type, tem);
8695 	      return tem;
8696 	    }
8697 	}
8698     }
8699 
8700   tem = generic_simplify (loc, code, type, op0);
8701   if (tem)
8702     return tem;
8703 
8704   if (TREE_CODE_CLASS (code) == tcc_unary)
8705     {
8706       if (TREE_CODE (arg0) == COMPOUND_EXPR)
8707 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8708 		       fold_build1_loc (loc, code, type,
8709 				    fold_convert_loc (loc, TREE_TYPE (op0),
8710 						      TREE_OPERAND (arg0, 1))));
8711       else if (TREE_CODE (arg0) == COND_EXPR)
8712 	{
8713 	  tree arg01 = TREE_OPERAND (arg0, 1);
8714 	  tree arg02 = TREE_OPERAND (arg0, 2);
8715 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8716 	    arg01 = fold_build1_loc (loc, code, type,
8717 				 fold_convert_loc (loc,
8718 						   TREE_TYPE (op0), arg01));
8719 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8720 	    arg02 = fold_build1_loc (loc, code, type,
8721 				 fold_convert_loc (loc,
8722 						   TREE_TYPE (op0), arg02));
8723 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8724 			     arg01, arg02);
8725 
8726 	  /* If this was a conversion, and all we did was to move it
8727 	     inside the COND_EXPR, bring it back out.  But leave it if
8728 	     it is a conversion from integer to integer and the
8729 	     result precision is no wider than a word since such a
8730 	     conversion is cheap and may be optimized away by combine,
8731 	     while it couldn't if it were outside the COND_EXPR.  Then return
8732 	     so we don't get into an infinite recursion loop taking the
8733 	     conversion out and then back in.  */
8734 
8735 	  if ((CONVERT_EXPR_CODE_P (code)
8736 	       || code == NON_LVALUE_EXPR)
8737 	      && TREE_CODE (tem) == COND_EXPR
8738 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8739 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8740 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8741 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8742 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8743 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8744 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8745 		     && (INTEGRAL_TYPE_P
8746 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8747 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8748 		  || flag_syntax_only))
8749 	    tem = build1_loc (loc, code, type,
8750 			      build3 (COND_EXPR,
8751 				      TREE_TYPE (TREE_OPERAND
8752 						 (TREE_OPERAND (tem, 1), 0)),
8753 				      TREE_OPERAND (tem, 0),
8754 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8755 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
8756 						    0)));
8757 	  return tem;
8758 	}
8759    }
8760 
8761   switch (code)
8762     {
8763     case NON_LVALUE_EXPR:
8764       if (!maybe_lvalue_p (op0))
8765 	return fold_convert_loc (loc, type, op0);
8766       return NULL_TREE;
8767 
8768     CASE_CONVERT:
8769     case FLOAT_EXPR:
8770     case FIX_TRUNC_EXPR:
8771       if (COMPARISON_CLASS_P (op0))
8772 	{
8773 	  /* If we have (type) (a CMP b) and type is an integral type, return
8774 	     new expression involving the new type.  Canonicalize
8775 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8776 	     non-integral type.
8777 	     Do not fold the result as that would not simplify further, also
8778 	     folding again results in recursions.  */
8779 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
8780 	    return build2_loc (loc, TREE_CODE (op0), type,
8781 			       TREE_OPERAND (op0, 0),
8782 			       TREE_OPERAND (op0, 1));
8783 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8784 		   && TREE_CODE (type) != VECTOR_TYPE)
8785 	    return build3_loc (loc, COND_EXPR, type, op0,
8786 			       constant_boolean_node (true, type),
8787 			       constant_boolean_node (false, type));
8788 	}
8789 
8790       /* Handle (T *)&A.B.C for A being of type T and B and C
8791 	 living at offset zero.  This occurs frequently in
8792 	 C++ upcasting and then accessing the base.  */
8793       if (TREE_CODE (op0) == ADDR_EXPR
8794 	  && POINTER_TYPE_P (type)
8795 	  && handled_component_p (TREE_OPERAND (op0, 0)))
8796         {
8797 	  poly_int64 bitsize, bitpos;
8798 	  tree offset;
8799 	  machine_mode mode;
8800 	  int unsignedp, reversep, volatilep;
8801 	  tree base
8802 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8803 				   &offset, &mode, &unsignedp, &reversep,
8804 				   &volatilep);
8805 	  /* If the reference was to a (constant) zero offset, we can use
8806 	     the address of the base if it has the same base type
8807 	     as the result type and the pointer type is unqualified.  */
8808 	  if (!offset
8809 	      && known_eq (bitpos, 0)
8810 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8811 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8812 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8813 	    return fold_convert_loc (loc, type,
8814 				     build_fold_addr_expr_loc (loc, base));
8815         }
8816 
8817       if (TREE_CODE (op0) == MODIFY_EXPR
8818 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8819 	  /* Detect assigning a bitfield.  */
8820 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8821 	       && DECL_BIT_FIELD
8822 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8823 	{
8824 	  /* Don't leave an assignment inside a conversion
8825 	     unless assigning a bitfield.  */
8826 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8827 	  /* First do the assignment, then return converted constant.  */
8828 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8829 	  TREE_NO_WARNING (tem) = 1;
8830 	  TREE_USED (tem) = 1;
8831 	  return tem;
8832 	}
8833 
8834       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8835 	 constant (if x has signed type, the sign bit cannot be set
8836 	 in c).  This folds extension into the BIT_AND_EXPR.
8837 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8838 	 very likely don't have maximal range for their precision and this
8839 	 transformation effectively doesn't preserve non-maximal ranges.  */
8840       if (TREE_CODE (type) == INTEGER_TYPE
8841 	  && TREE_CODE (op0) == BIT_AND_EXPR
8842 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8843 	{
8844 	  tree and_expr = op0;
8845 	  tree and0 = TREE_OPERAND (and_expr, 0);
8846 	  tree and1 = TREE_OPERAND (and_expr, 1);
8847 	  int change = 0;
8848 
8849 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8850 	      || (TYPE_PRECISION (type)
8851 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8852 	    change = 1;
8853 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
8854 		   <= HOST_BITS_PER_WIDE_INT
8855 		   && tree_fits_uhwi_p (and1))
8856 	    {
8857 	      unsigned HOST_WIDE_INT cst;
8858 
8859 	      cst = tree_to_uhwi (and1);
8860 	      cst &= HOST_WIDE_INT_M1U
8861 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8862 	      change = (cst == 0);
8863 	      if (change
8864 		  && !flag_syntax_only
8865 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8866 		      == ZERO_EXTEND))
8867 		{
8868 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
8869 		  and0 = fold_convert_loc (loc, uns, and0);
8870 		  and1 = fold_convert_loc (loc, uns, and1);
8871 		}
8872 	    }
8873 	  if (change)
8874 	    {
8875 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
8876 				    TREE_OVERFLOW (and1));
8877 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8878 				      fold_convert_loc (loc, type, and0), tem);
8879 	    }
8880 	}
8881 
8882       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8883 	 cast (T1)X will fold away.  We assume that this happens when X itself
8884 	 is a cast.  */
8885       if (POINTER_TYPE_P (type)
8886 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8887 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8888 	{
8889 	  tree arg00 = TREE_OPERAND (arg0, 0);
8890 	  tree arg01 = TREE_OPERAND (arg0, 1);
8891 
8892 	  /* Avoid this optimization in GENERIC for -fsanitize=null
8893 	     when type is a reference type and arg00's type is not,
8894 	     because arg00 could be validly nullptr and if arg01 doesn't return,
8895 	     we don't want false positive binding of reference to nullptr.  */
8896 	  if (TREE_CODE (type) == REFERENCE_TYPE
8897 	      && !in_gimple_form
8898 	      && (flag_sanitize & SANITIZE_NULL) != 0
8899 	      && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
8900 	    return NULL_TREE;
8901 
8902 	  arg00 = fold_convert_loc (loc, type, arg00);
8903 	  return fold_build_pointer_plus_loc (loc, arg00, arg01);
8904 	}
8905 
8906       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8907 	 of the same precision, and X is an integer type not narrower than
8908 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8909       if (INTEGRAL_TYPE_P (type)
8910 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8911 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8912 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8913 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8914 	{
8915 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8916 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8917 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8918 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8919 				fold_convert_loc (loc, type, tem));
8920 	}
8921 
8922       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8923 	 type of X and Y (integer types only).  */
8924       if (INTEGRAL_TYPE_P (type)
8925 	  && TREE_CODE (op0) == MULT_EXPR
8926 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8927 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8928 	{
8929 	  /* Be careful not to introduce new overflows.  */
8930 	  tree mult_type;
8931           if (TYPE_OVERFLOW_WRAPS (type))
8932 	    mult_type = type;
8933 	  else
8934 	    mult_type = unsigned_type_for (type);
8935 
8936 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8937 	    {
8938 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8939 				 fold_convert_loc (loc, mult_type,
8940 						   TREE_OPERAND (op0, 0)),
8941 				 fold_convert_loc (loc, mult_type,
8942 						   TREE_OPERAND (op0, 1)));
8943 	      return fold_convert_loc (loc, type, tem);
8944 	    }
8945 	}
8946 
8947       return NULL_TREE;
8948 
8949     case VIEW_CONVERT_EXPR:
8950       if (TREE_CODE (op0) == MEM_REF)
8951         {
8952 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8953 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8954 	  tem = fold_build2_loc (loc, MEM_REF, type,
8955 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8956 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8957 	  return tem;
8958 	}
8959 
8960       return NULL_TREE;
8961 
8962     case NEGATE_EXPR:
8963       tem = fold_negate_expr (loc, arg0);
8964       if (tem)
8965 	return fold_convert_loc (loc, type, tem);
8966       return NULL_TREE;
8967 
8968     case ABS_EXPR:
8969       /* Convert fabs((double)float) into (double)fabsf(float).  */
8970       if (TREE_CODE (arg0) == NOP_EXPR
8971 	  && TREE_CODE (type) == REAL_TYPE)
8972 	{
8973 	  tree targ0 = strip_float_extensions (arg0);
8974 	  if (targ0 != arg0)
8975 	    return fold_convert_loc (loc, type,
8976 				     fold_build1_loc (loc, ABS_EXPR,
8977 						  TREE_TYPE (targ0),
8978 						  targ0));
8979 	}
8980       return NULL_TREE;
8981 
8982     case BIT_NOT_EXPR:
8983       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8984       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8985 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8986 				    fold_convert_loc (loc, type,
8987 						      TREE_OPERAND (arg0, 0)))))
8988 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8989 				fold_convert_loc (loc, type,
8990 						  TREE_OPERAND (arg0, 1)));
8991       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8992 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8993 			       	     fold_convert_loc (loc, type,
8994 						       TREE_OPERAND (arg0, 1)))))
8995 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8996 			    fold_convert_loc (loc, type,
8997 					      TREE_OPERAND (arg0, 0)), tem);
8998 
8999       return NULL_TREE;
9000 
9001     case TRUTH_NOT_EXPR:
9002       /* Note that the operand of this must be an int
9003 	 and its values must be 0 or 1.
9004 	 ("true" is a fixed value perhaps depending on the language,
9005 	 but we don't handle values other than 1 correctly yet.)  */
9006       tem = fold_truth_not_expr (loc, arg0);
9007       if (!tem)
9008 	return NULL_TREE;
9009       return fold_convert_loc (loc, type, tem);
9010 
9011     case INDIRECT_REF:
9012       /* Fold *&X to X if X is an lvalue.  */
9013       if (TREE_CODE (op0) == ADDR_EXPR)
9014 	{
9015 	  tree op00 = TREE_OPERAND (op0, 0);
9016 	  if ((VAR_P (op00)
9017 	       || TREE_CODE (op00) == PARM_DECL
9018 	       || TREE_CODE (op00) == RESULT_DECL)
9019 	      && !TREE_READONLY (op00))
9020 	    return op00;
9021 	}
9022       return NULL_TREE;
9023 
9024     default:
9025       return NULL_TREE;
9026     } /* switch (code) */
9027 }
9028 
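/* A source-level view of the (T)(X & C) transform above, as an
   illustrative sketch: with signed int X and a mask whose sign bit is
   clear, the widening cast commutes with the AND, so (long) (x & 0xff)
   equals (long) x & 0xffL for every x.  */
#if 0
static long
widen_of_masked (int x)
{
  return (long) x & 0xffL;	/* same value as (long) (x & 0xff) */
}
#endif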
9029 
9030 /* If the operation was a conversion do _not_ mark a resulting constant
9031    with TREE_OVERFLOW if the original constant was not.  These conversions
9032    have implementation defined behavior and retaining the TREE_OVERFLOW
9033    flag here would confuse later passes such as VRP.  */
9034 tree
9035 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9036 				tree type, tree op0)
9037 {
9038   tree res = fold_unary_loc (loc, code, type, op0);
9039   if (res
9040       && TREE_CODE (res) == INTEGER_CST
9041       && TREE_CODE (op0) == INTEGER_CST
9042       && CONVERT_EXPR_CODE_P (code))
9043     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9044 
9045   return res;
9046 }
9047 
9048 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9049    operands OP0 and OP1.  LOC is the location of the resulting expression.
9050    ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9051    Return the folded expression if folding is successful.  Otherwise,
9052    return NULL_TREE.  */
9053 static tree
9054 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9055 		  tree arg0, tree arg1, tree op0, tree op1)
9056 {
9057   tree tem;
9058 
9059   /* We only do these simplifications if we are optimizing.  */
9060   if (!optimize)
9061     return NULL_TREE;
9062 
9063   /* Check for things like (A || B) && (A || C).  We can convert this
9064      to A || (B && C).  Note that either operator can be any of the four
9065      truth and/or operations and the transformation will still be
9066      valid.   Also note that we only care about order for the
9067      ANDIF and ORIF operators.  If B contains side effects, this
9068      might change the truth-value of A.  */
9069   if (TREE_CODE (arg0) == TREE_CODE (arg1)
9070       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9071 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9072 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
9073 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9074       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9075     {
9076       tree a00 = TREE_OPERAND (arg0, 0);
9077       tree a01 = TREE_OPERAND (arg0, 1);
9078       tree a10 = TREE_OPERAND (arg1, 0);
9079       tree a11 = TREE_OPERAND (arg1, 1);
9080       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9081 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9082 			 && (code == TRUTH_AND_EXPR
9083 			     || code == TRUTH_OR_EXPR));
9084 
9085       if (operand_equal_p (a00, a10, 0))
9086 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9087 			    fold_build2_loc (loc, code, type, a01, a11));
9088       else if (commutative && operand_equal_p (a00, a11, 0))
9089 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9090 			    fold_build2_loc (loc, code, type, a01, a10));
9091       else if (commutative && operand_equal_p (a01, a10, 0))
9092 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9093 			    fold_build2_loc (loc, code, type, a00, a11));
9094 
9095       /* This case is tricky because we must either have commutative
9096 	 operators or else A10 must not have side-effects.  */
9097 
9098       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9099 	       && operand_equal_p (a01, a11, 0))
9100 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
9101 			    fold_build2_loc (loc, code, type, a00, a10),
9102 			    a01);
9103     }
9104 
9105   /* See if we can build a range comparison.  */
9106   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9107     return tem;
9108 
9109   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9110       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9111     {
9112       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9113       if (tem)
9114 	return fold_build2_loc (loc, code, type, tem, arg1);
9115     }
9116 
9117   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9118       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9119     {
9120       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9121       if (tem)
9122 	return fold_build2_loc (loc, code, type, arg0, tem);
9123     }
9124 
9125   /* Check for the possibility of merging component references.  If our
9126      lhs is another similar operation, try to merge its rhs with our
9127      rhs.  Then try to merge our lhs and rhs.  */
9128   if (TREE_CODE (arg0) == code
9129       && (tem = fold_truth_andor_1 (loc, code, type,
9130 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
9131     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9132 
9133   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9134     return tem;
9135 
9136   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9137   if (param_logical_op_non_short_circuit != -1)
9138     logical_op_non_short_circuit
9139       = param_logical_op_non_short_circuit;
9140   if (logical_op_non_short_circuit
9141       && !flag_sanitize_coverage
9142       && (code == TRUTH_AND_EXPR
9143           || code == TRUTH_ANDIF_EXPR
9144           || code == TRUTH_OR_EXPR
9145           || code == TRUTH_ORIF_EXPR))
9146     {
9147       enum tree_code ncode, icode;
9148 
9149       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9150 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9151       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9152 
9153       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9154 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9155 	 We don't want to pack more than two leaves into a non-IF AND/OR
9156 	 expression.
9157 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
9158 	 and isn't equal to IF-CODE, then we don't want to add the
9159 	 right-hand operand.  If the inner right-hand side of the
9160 	 left-hand operand has side-effects, or isn't simple, then we
9161 	 can't add to it, as otherwise we might destroy the if-sequence.  */
9162       if (TREE_CODE (arg0) == icode
9163 	  && simple_operand_p_2 (arg1)
9164 	  /* Needed for sequence points to handle trappings, and
9165 	     side-effects.  */
9166 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9167 	{
9168 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9169 				 arg1);
9170 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9171 				  tem);
9172 	}
9173 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9174 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
9175       else if (TREE_CODE (arg1) == icode
9176 	  && simple_operand_p_2 (arg0)
9177 	  /* Needed for sequence points to handle trappings, and
9178 	     side-effects.  */
9179 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9180 	{
9181 	  tem = fold_build2_loc (loc, ncode, type,
9182 				 arg0, TREE_OPERAND (arg1, 0));
9183 	  return fold_build2_loc (loc, icode, type, tem,
9184 				  TREE_OPERAND (arg1, 1));
9185 	}
9186       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9187 	 into (A OR B).
9188 	 For sequence point consistency, we need to check for trapping,
9189 	 and side-effects.  */
9190       else if (code == icode && simple_operand_p_2 (arg0)
9191                && simple_operand_p_2 (arg1))
9192 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
9193     }
9194 
9195   return NULL_TREE;
9196 }
9197 
9198 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9199    by changing CODE to reduce the magnitude of constants involved in
9200    ARG0 of the comparison.
9201    Returns a canonicalized comparison tree if a simplification was
9202    possible, otherwise returns NULL_TREE.
9203    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9204    valid if signed overflow is undefined.  */
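/* A worked example, assuming a signed x with undefined overflow:
   "x - 5 < y" matches the LT_EXPR/MINUS_EXPR case below, so CODE
   becomes LE_EXPR and the constant shrinks by one, producing
   "x - 4 <= y"; the two forms agree for every x and y once the
   subtraction cannot wrap.  */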
9205 
9206 static tree
9207 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9208 				 tree arg0, tree arg1,
9209 				 bool *strict_overflow_p)
9210 {
9211   enum tree_code code0 = TREE_CODE (arg0);
9212   tree t, cst0 = NULL_TREE;
9213   int sgn0;
9214 
9215   /* Match A +- CST code arg1.  We can change this only if overflow
9216      is undefined.  */
9217   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9218 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9219 	/* In principle pointers also have undefined overflow behavior,
9220 	   but that causes problems elsewhere.  */
9221 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
9222 	&& (code0 == MINUS_EXPR
9223 	    || code0 == PLUS_EXPR)
9224 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9225     return NULL_TREE;
9226 
9227   /* Identify the constant in arg0 and its sign.  */
9228   cst0 = TREE_OPERAND (arg0, 1);
9229   sgn0 = tree_int_cst_sgn (cst0);
9230 
9231   /* Overflowed constants and zero will cause problems.  */
9232   if (integer_zerop (cst0)
9233       || TREE_OVERFLOW (cst0))
9234     return NULL_TREE;
9235 
9236   /* See if we can reduce the magnitude of the constant in
9237      arg0 by changing the comparison code.  */
9238   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9239   if (code == LT_EXPR
9240       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9241     code = LE_EXPR;
9242   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9243   else if (code == GT_EXPR
9244 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9245     code = GE_EXPR;
9246   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9247   else if (code == LE_EXPR
9248 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9249     code = LT_EXPR;
9250   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9251   else if (code == GE_EXPR
9252 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9253     code = GT_EXPR;
9254   else
9255     return NULL_TREE;
9256   *strict_overflow_p = true;
9257 
9258   /* Now build the constant reduced in magnitude.  But not if that
9259      would produce one outside of its type's range.  */
9260   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9261       && ((sgn0 == 1
9262 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9263 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9264 	  || (sgn0 == -1
9265 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9266 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9267     return NULL_TREE;
9268 
9269   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9270 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
9271   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9272   t = fold_convert (TREE_TYPE (arg1), t);
9273 
9274   return fold_build2_loc (loc, code, type, t, arg1);
9275 }
9276 
9277 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9278    overflow further.  Try to decrease the magnitude of constants involved
9279    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9280    and put sole constants at the second argument position.
9281    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9282 
9283 static tree
9284 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9285 			       tree arg0, tree arg1)
9286 {
9287   tree t;
9288   bool strict_overflow_p;
9289   const char * const warnmsg = G_("assuming signed overflow does not occur "
9290 				  "when reducing constant in comparison");
9291 
9292   /* Try canonicalization by simplifying arg0.  */
9293   strict_overflow_p = false;
9294   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9295 				       &strict_overflow_p);
9296   if (t)
9297     {
9298       if (strict_overflow_p)
9299 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9300       return t;
9301     }
9302 
9303   /* Try canonicalization by simplifying arg1 using the swapped
9304      comparison.  */
9305   code = swap_tree_comparison (code);
9306   strict_overflow_p = false;
9307   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9308 				       &strict_overflow_p);
9309   if (t && strict_overflow_p)
9310     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9311   return t;
9312 }
9313 
9314 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9315    space.  This is used to avoid issuing overflow warnings for
9316    expressions like &p->x which cannot wrap.  */
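/* Intent, sketched: for an access like &p->x the offset is a small
   constant within the pointed-to object, so OFFSET + BITPOS stays
   below TYPE_SIZE_UNIT and we can return false; a negative BITPOS,
   a non-constant OFFSET, or an overflowing sum conservatively
   returns true.  */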
9317 
9318 static bool
9319 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9320 {
9321   if (!POINTER_TYPE_P (TREE_TYPE (base)))
9322     return true;
9323 
9324   if (maybe_lt (bitpos, 0))
9325     return true;
9326 
9327   poly_wide_int wi_offset;
9328   int precision = TYPE_PRECISION (TREE_TYPE (base));
9329   if (offset == NULL_TREE)
9330     wi_offset = wi::zero (precision);
9331   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9332     return true;
9333   else
9334     wi_offset = wi::to_poly_wide (offset);
9335 
9336   wi::overflow_type overflow;
9337   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9338 				  precision);
9339   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9340   if (overflow)
9341     return true;
9342 
9343   poly_uint64 total_hwi, size;
9344   if (!total.to_uhwi (&total_hwi)
9345       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9346 			   &size)
9347       || known_eq (size, 0U))
9348     return true;
9349 
9350   if (known_le (total_hwi, size))
9351     return false;
9352 
9353   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9354      array.  */
9355   if (TREE_CODE (base) == ADDR_EXPR
9356       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9357 			  &size)
9358       && maybe_ne (size, 0U)
9359       && known_le (total_hwi, size))
9360     return false;
9361 
9362   return true;
9363 }
9364 
9365 /* Return a positive integer when the symbol DECL is known to have
9366    a nonzero address, zero when it's known not to (e.g., it's a weak
9367    symbol), and a negative integer when the symbol is not yet in the
9368    symbol table and so whether or not its address is zero is unknown.
9369    For function-local objects, always return a positive integer.  */
9370 static int
9371 maybe_nonzero_address (tree decl)
9372 {
9373   if (DECL_P (decl) && decl_in_symtab_p (decl))
9374     if (struct symtab_node *symbol = symtab_node::get_create (decl))
9375       return symbol->nonzero_address ();
9376 
9377   /* Function local objects are never NULL.  */
9378   if (DECL_P (decl)
9379       && (DECL_CONTEXT (decl)
9380       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9381       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9382     return 1;
9383 
9384   return -1;
9385 }
9386 
9387 /* Subroutine of fold_binary.  This routine performs all of the
9388    transformations that are common to the equality/inequality
9389    operators (EQ_EXPR and NE_EXPR) and the ordering operators
9390    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
9391    fold_binary should call fold_binary instead of this routine.  Fold a comparison with
9392    tree code CODE and type TYPE with operands OP0 and OP1.  Return
9393    the folded comparison or NULL_TREE.  */
9394 
9395 static tree
9396 fold_comparison (location_t loc, enum tree_code code, tree type,
9397 		 tree op0, tree op1)
9398 {
9399   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9400   tree arg0, arg1, tem;
9401 
9402   arg0 = op0;
9403   arg1 = op1;
9404 
9405   STRIP_SIGN_NOPS (arg0);
9406   STRIP_SIGN_NOPS (arg1);
9407 
9408   /* For comparisons of pointers we can decompose them to a compile-time
9409      comparison of the base objects and the offsets into the object.
9410      This requires at least one operand being an ADDR_EXPR or a
9411      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
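  /* An illustrative case, with a hypothetical "struct S { int a, b; } s;":
     "&s.a == &s.b" decomposes to the common base "s" with differing
     constant bit positions, so the known_ne check below folds the
     equality to false without knowing the address of "s".  */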
9412   if (POINTER_TYPE_P (TREE_TYPE (arg0))
9413       && (TREE_CODE (arg0) == ADDR_EXPR
9414 	  || TREE_CODE (arg1) == ADDR_EXPR
9415 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9416 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9417     {
9418       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9419       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9420       machine_mode mode;
9421       int volatilep, reversep, unsignedp;
9422       bool indirect_base0 = false, indirect_base1 = false;
9423 
9424       /* Get base and offset for the access.  Strip ADDR_EXPR for
9425 	 get_inner_reference, but put it back by stripping INDIRECT_REF
9426 	 off the base object if possible.  indirect_baseN will be true
9427 	 if baseN is not an address but refers to the object itself.  */
9428       base0 = arg0;
9429       if (TREE_CODE (arg0) == ADDR_EXPR)
9430 	{
9431 	  base0
9432 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
9433 				   &bitsize, &bitpos0, &offset0, &mode,
9434 				   &unsignedp, &reversep, &volatilep);
9435 	  if (TREE_CODE (base0) == INDIRECT_REF)
9436 	    base0 = TREE_OPERAND (base0, 0);
9437 	  else
9438 	    indirect_base0 = true;
9439 	}
9440       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9441 	{
9442 	  base0 = TREE_OPERAND (arg0, 0);
9443 	  STRIP_SIGN_NOPS (base0);
9444 	  if (TREE_CODE (base0) == ADDR_EXPR)
9445 	    {
9446 	      base0
9447 		= get_inner_reference (TREE_OPERAND (base0, 0),
9448 				       &bitsize, &bitpos0, &offset0, &mode,
9449 				       &unsignedp, &reversep, &volatilep);
9450 	      if (TREE_CODE (base0) == INDIRECT_REF)
9451 		base0 = TREE_OPERAND (base0, 0);
9452 	      else
9453 		indirect_base0 = true;
9454 	    }
9455 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
9456 	    offset0 = TREE_OPERAND (arg0, 1);
9457 	  else
9458 	    offset0 = size_binop (PLUS_EXPR, offset0,
9459 				  TREE_OPERAND (arg0, 1));
9460 	  if (poly_int_tree_p (offset0))
9461 	    {
9462 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9463 					      TYPE_PRECISION (sizetype));
9464 	      tem <<= LOG2_BITS_PER_UNIT;
9465 	      tem += bitpos0;
9466 	      if (tem.to_shwi (&bitpos0))
9467 		offset0 = NULL_TREE;
9468 	    }
9469 	}
9470 
9471       base1 = arg1;
9472       if (TREE_CODE (arg1) == ADDR_EXPR)
9473 	{
9474 	  base1
9475 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
9476 				   &bitsize, &bitpos1, &offset1, &mode,
9477 				   &unsignedp, &reversep, &volatilep);
9478 	  if (TREE_CODE (base1) == INDIRECT_REF)
9479 	    base1 = TREE_OPERAND (base1, 0);
9480 	  else
9481 	    indirect_base1 = true;
9482 	}
9483       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9484 	{
9485 	  base1 = TREE_OPERAND (arg1, 0);
9486 	  STRIP_SIGN_NOPS (base1);
9487 	  if (TREE_CODE (base1) == ADDR_EXPR)
9488 	    {
9489 	      base1
9490 		= get_inner_reference (TREE_OPERAND (base1, 0),
9491 				       &bitsize, &bitpos1, &offset1, &mode,
9492 				       &unsignedp, &reversep, &volatilep);
9493 	      if (TREE_CODE (base1) == INDIRECT_REF)
9494 		base1 = TREE_OPERAND (base1, 0);
9495 	      else
9496 		indirect_base1 = true;
9497 	    }
9498 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
9499 	    offset1 = TREE_OPERAND (arg1, 1);
9500 	  else
9501 	    offset1 = size_binop (PLUS_EXPR, offset1,
9502 				  TREE_OPERAND (arg1, 1));
9503 	  if (poly_int_tree_p (offset1))
9504 	    {
9505 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9506 					      TYPE_PRECISION (sizetype));
9507 	      tem <<= LOG2_BITS_PER_UNIT;
9508 	      tem += bitpos1;
9509 	      if (tem.to_shwi (&bitpos1))
9510 		offset1 = NULL_TREE;
9511 	    }
9512 	}
9513 
9514       /* If we have equivalent bases we might be able to simplify.  */
9515       if (indirect_base0 == indirect_base1
9516 	  && operand_equal_p (base0, base1,
9517 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
9518 	{
9519 	  /* We can fold this expression to a constant if the non-constant
9520 	     offset parts are equal.  */
9521 	  if ((offset0 == offset1
9522 	       || (offset0 && offset1
9523 		   && operand_equal_p (offset0, offset1, 0)))
9524 	      && (equality_code
9525 		  || (indirect_base0
9526 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9527 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9528 	    {
9529 	      if (!equality_code
9530 		  && maybe_ne (bitpos0, bitpos1)
9531 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9532 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9533 		fold_overflow_warning (("assuming pointer wraparound does not "
9534 					"occur when comparing P +- C1 with "
9535 					"P +- C2"),
9536 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9537 
9538 	      switch (code)
9539 		{
9540 		case EQ_EXPR:
9541 		  if (known_eq (bitpos0, bitpos1))
9542 		    return constant_boolean_node (true, type);
9543 		  if (known_ne (bitpos0, bitpos1))
9544 		    return constant_boolean_node (false, type);
9545 		  break;
9546 		case NE_EXPR:
9547 		  if (known_ne (bitpos0, bitpos1))
9548 		    return constant_boolean_node (true, type);
9549 		  if (known_eq (bitpos0, bitpos1))
9550 		    return constant_boolean_node (false, type);
9551 		  break;
9552 		case LT_EXPR:
9553 		  if (known_lt (bitpos0, bitpos1))
9554 		    return constant_boolean_node (true, type);
9555 		  if (known_ge (bitpos0, bitpos1))
9556 		    return constant_boolean_node (false, type);
9557 		  break;
9558 		case LE_EXPR:
9559 		  if (known_le (bitpos0, bitpos1))
9560 		    return constant_boolean_node (true, type);
9561 		  if (known_gt (bitpos0, bitpos1))
9562 		    return constant_boolean_node (false, type);
9563 		  break;
9564 		case GE_EXPR:
9565 		  if (known_ge (bitpos0, bitpos1))
9566 		    return constant_boolean_node (true, type);
9567 		  if (known_lt (bitpos0, bitpos1))
9568 		    return constant_boolean_node (false, type);
9569 		  break;
9570 		case GT_EXPR:
9571 		  if (known_gt (bitpos0, bitpos1))
9572 		    return constant_boolean_node (true, type);
9573 		  if (known_le (bitpos0, bitpos1))
9574 		    return constant_boolean_node (false, type);
9575 		  break;
9576 		default:;
9577 		}
9578 	    }
9579 	  /* We can simplify the comparison to a comparison of the variable
9580 	     offset parts if the constant offset parts are equal.
9581 	     Be careful to use signed sizetype here because otherwise we
9582 	     mess with array offsets in the wrong way.  This is possible
9583 	     because pointer arithmetic is restricted to remain within an
9584 	     object and overflow on pointer differences is undefined as of
9585 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
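	  /* For example, with a hypothetical "char *p" and non-constant
	     indices i and j, "p + i < p + j" has equal bases and equal
	     (zero) constant parts, so it reduces here to the ssizetype
	     comparison of i and j.  */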
9586 	  else if (known_eq (bitpos0, bitpos1)
9587 		   && (equality_code
9588 		       || (indirect_base0
9589 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9590 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9591 	    {
9592 	      /* By converting to signed sizetype we cover middle-end pointer
9593 	         arithmetic which operates on unsigned pointer types of size
9594 	         type size and ARRAY_REF offsets which are properly sign or
9595 	         zero extended from their type in case it is narrower than
9596 	         sizetype.  */
9597 	      if (offset0 == NULL_TREE)
9598 		offset0 = build_int_cst (ssizetype, 0);
9599 	      else
9600 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9601 	      if (offset1 == NULL_TREE)
9602 		offset1 = build_int_cst (ssizetype, 0);
9603 	      else
9604 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9605 
9606 	      if (!equality_code
9607 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9608 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9609 		fold_overflow_warning (("assuming pointer wraparound does not "
9610 					"occur when comparing P +- C1 with "
9611 					"P +- C2"),
9612 				       WARN_STRICT_OVERFLOW_COMPARISON);
9613 
9614 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9615 	    }
9616 	}
9617       /* For equal offsets we can simplify to a comparison of the
9618 	 base addresses.  */
9619       else if (known_eq (bitpos0, bitpos1)
9620 	       && (indirect_base0
9621 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9622 	       && (indirect_base1
9623 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9624 	       && ((offset0 == offset1)
9625 		   || (offset0 && offset1
9626 		       && operand_equal_p (offset0, offset1, 0))))
9627 	{
9628 	  if (indirect_base0)
9629 	    base0 = build_fold_addr_expr_loc (loc, base0);
9630 	  if (indirect_base1)
9631 	    base1 = build_fold_addr_expr_loc (loc, base1);
9632 	  return fold_build2_loc (loc, code, type, base0, base1);
9633 	}
9634       /* Comparison between an ordinary (non-weak) symbol and a null
9635 	 pointer can be eliminated since such symbols must have a
9636 	 non-null address.  In C, relational expressions between pointers
9637 	 to objects and null pointers are undefined.  The results
9638 	 below follow the C++ rules with the additional property that
9639 	 every object pointer compares greater than a null pointer.
9640       */
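      /* For instance, given a non-weak global "struct S { int a, b; } s;"
	 (hypothetical), "&s.b == 0" reaches this branch with a nonzero
	 bit position and folds to false, while "&s.a == 0", at offset
	 zero, is deliberately left alone per the PR c/44555 note.  */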
9641       else if (((DECL_P (base0)
9642 		 && maybe_nonzero_address (base0) > 0
9643 		 /* Avoid folding references to struct members at offset 0 to
9644 		    prevent tests like '&ptr->firstmember == 0' from getting
9645 		    eliminated.  When ptr is null, although the -> expression
9646 		    is strictly speaking invalid, GCC retains it as a matter
9647 		    of QoI.  See PR c/44555. */
9648 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9649 		|| CONSTANT_CLASS_P (base0))
9650 	       && indirect_base0
9651 	       /* The caller guarantees that when one of the arguments is
9652 		  constant (i.e., null in this case) it is second.  */
9653 	       && integer_zerop (arg1))
9654 	{
9655 	  switch (code)
9656 	    {
9657 	    case EQ_EXPR:
9658 	    case LE_EXPR:
9659 	    case LT_EXPR:
9660 	      return constant_boolean_node (false, type);
9661 	    case GE_EXPR:
9662 	    case GT_EXPR:
9663 	    case NE_EXPR:
9664 	      return constant_boolean_node (true, type);
9665 	    default:
9666 	      gcc_unreachable ();
9667 	    }
9668 	}
9669     }
9670 
9671   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9672      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9673      the resulting offset is smaller in absolute value than the
9674      original one and has the same sign.  */
9675   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9676       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9677       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9678       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9679 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9680       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9681       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9682 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9683     {
9684       tree const1 = TREE_OPERAND (arg0, 1);
9685       tree const2 = TREE_OPERAND (arg1, 1);
9686       tree variable1 = TREE_OPERAND (arg0, 0);
9687       tree variable2 = TREE_OPERAND (arg1, 0);
9688       tree cst;
9689       const char * const warnmsg = G_("assuming signed overflow does not "
9690 				      "occur when combining constants around "
9691 				      "a comparison");
9692 
9693       /* Put the constant on the side where it doesn't overflow and is
9694 	 of lower absolute value and of the same sign as before.  */
9695       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9696 			     ? MINUS_EXPR : PLUS_EXPR,
9697 			     const2, const1);
9698       if (!TREE_OVERFLOW (cst)
9699 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9700 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9701 	{
9702 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9703 	  return fold_build2_loc (loc, code, type,
9704 				  variable1,
9705 				  fold_build2_loc (loc, TREE_CODE (arg1),
9706 						   TREE_TYPE (arg1),
9707 						   variable2, cst));
9708 	}
9709 
9710       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9711 			     ? MINUS_EXPR : PLUS_EXPR,
9712 			     const1, const2);
9713       if (!TREE_OVERFLOW (cst)
9714 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9715 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9716 	{
9717 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9718 	  return fold_build2_loc (loc, code, type,
9719 				  fold_build2_loc (loc, TREE_CODE (arg0),
9720 						   TREE_TYPE (arg0),
9721 						   variable1, cst),
9722 				  variable2);
9723 	}
9724     }
9725 
9726   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9727   if (tem)
9728     return tem;
9729 
9730   /* If we are comparing an expression that just has comparisons
9731      of two integer values, arithmetic expressions of those comparisons,
9732      and constants, we can simplify it.  There are only three cases
9733      to check: the two values can either be equal, the first can be
9734      greater, or the second can be greater.  Fold the expression for
9735      those three values.  Since each value must be 0 or 1, we have
9736      eight possibilities, each of which corresponds to the constant 0
9737      or 1 or one of the six possible comparisons.
9738 
9739      This handles common cases like (a > b) == 0 but also handles
9740      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9741      occur in macroized code.  */
9742 
9743   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9744     {
9745       tree cval1 = 0, cval2 = 0;
9746 
9747       if (twoval_comparison_p (arg0, &cval1, &cval2)
9748 	  /* Don't handle degenerate cases here; they should already
9749 	     have been handled anyway.  */
9750 	  && cval1 != 0 && cval2 != 0
9751 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9752 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9753 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9754 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9755 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9756 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9757 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9758 	{
9759 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9760 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9761 
9762 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9763 	     was the same as ARG1.  */
9764 
9765 	  tree high_result
9766 		= fold_build2_loc (loc, code, type,
9767 			       eval_subst (loc, arg0, cval1, maxval,
9768 					   cval2, minval),
9769 			       arg1);
9770 	  tree equal_result
9771 		= fold_build2_loc (loc, code, type,
9772 			       eval_subst (loc, arg0, cval1, maxval,
9773 					   cval2, maxval),
9774 			       arg1);
9775 	  tree low_result
9776 		= fold_build2_loc (loc, code, type,
9777 			       eval_subst (loc, arg0, cval1, minval,
9778 					   cval2, maxval),
9779 			       arg1);
9780 
9781 	  /* All three of these results should be 0 or 1.  Confirm they are.
9782 	     Then use those values to select the proper code to use.  */
9783 
9784 	  if (TREE_CODE (high_result) == INTEGER_CST
9785 	      && TREE_CODE (equal_result) == INTEGER_CST
9786 	      && TREE_CODE (low_result) == INTEGER_CST)
9787 	    {
9788 	      /* Make a 3-bit mask with the high-order bit being the
9789 		 value for `>', the next for '=', and the low for '<'.  */
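	      /* For instance, a mask of 3 (binary 011) means the result
		 is 1 when the values compare equal or less, which
		 collapses to LE_EXPR; a mask of 5 (binary 101) is true
		 for both greater and less, i.e. NE_EXPR.  */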
9790 	      switch ((integer_onep (high_result) * 4)
9791 		      + (integer_onep (equal_result) * 2)
9792 		      + integer_onep (low_result))
9793 		{
9794 		case 0:
9795 		  /* Always false.  */
9796 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9797 		case 1:
9798 		  code = LT_EXPR;
9799 		  break;
9800 		case 2:
9801 		  code = EQ_EXPR;
9802 		  break;
9803 		case 3:
9804 		  code = LE_EXPR;
9805 		  break;
9806 		case 4:
9807 		  code = GT_EXPR;
9808 		  break;
9809 		case 5:
9810 		  code = NE_EXPR;
9811 		  break;
9812 		case 6:
9813 		  code = GE_EXPR;
9814 		  break;
9815 		case 7:
9816 		  /* Always true.  */
9817 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9818 		}
9819 
9820 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9821 	    }
9822 	}
9823     }
9824 
9825   return NULL_TREE;
9826 }
9827 
9828 
9829 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9830    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9831    argument EXPR represents the expression "z" of type TYPE.  */
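/* The identity written out: for z = a + b*i, conj(z) = a - b*i, so
   z * conj(z) = (a*a + b*b) + 0*i; that is why the code below builds
   rpart*rpart + ipart*ipart and a zero imaginary part.  */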
9832 
9833 static tree
9834 fold_mult_zconjz (location_t loc, tree type, tree expr)
9835 {
9836   tree itype = TREE_TYPE (type);
9837   tree rpart, ipart, tem;
9838 
9839   if (TREE_CODE (expr) == COMPLEX_EXPR)
9840     {
9841       rpart = TREE_OPERAND (expr, 0);
9842       ipart = TREE_OPERAND (expr, 1);
9843     }
9844   else if (TREE_CODE (expr) == COMPLEX_CST)
9845     {
9846       rpart = TREE_REALPART (expr);
9847       ipart = TREE_IMAGPART (expr);
9848     }
9849   else
9850     {
9851       expr = save_expr (expr);
9852       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9853       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9854     }
9855 
9856   rpart = save_expr (rpart);
9857   ipart = save_expr (ipart);
9858   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9859 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9860 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9861   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9862 			  build_zero_cst (itype));
9863 }
9864 
9865 
9866 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9867    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9868    true if successful.  */
9869 
9870 static bool
9871 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9872 {
9873   unsigned HOST_WIDE_INT i, nunits;
9874 
9875   if (TREE_CODE (arg) == VECTOR_CST
9876       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9877     {
9878       for (i = 0; i < nunits; ++i)
9879 	elts[i] = VECTOR_CST_ELT (arg, i);
9880     }
9881   else if (TREE_CODE (arg) == CONSTRUCTOR)
9882     {
9883       constructor_elt *elt;
9884 
9885       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9886 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9887 	  return false;
9888 	else
9889 	  elts[i] = elt->value;
9890     }
9891   else
9892     return false;
9893   for (; i < nelts; i++)
9894     elts[i]
9895       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9896   return true;
9897 }
9898 
9899 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9900    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9901    NULL_TREE otherwise.  */
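/* Indexing sketch: the selector addresses the concatenation of ARG0
   and ARG1, so with four-element vectors a selector of { 0, 5, 2, 7 }
   takes elements 0 and 2 from ARG0 and elements 1 and 3 from ARG1
   (indices 4..7 refer to ARG1).  */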
9902 
9903 tree
9904 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9905 {
9906   unsigned int i;
9907   unsigned HOST_WIDE_INT nelts;
9908   bool need_ctor = false;
9909 
9910   if (!sel.length ().is_constant (&nelts))
9911     return NULL_TREE;
9912   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9913 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9914 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9915   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9916       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9917     return NULL_TREE;
9918 
9919   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9920   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9921       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9922     return NULL_TREE;
9923 
9924   tree_vector_builder out_elts (type, nelts, 1);
9925   for (i = 0; i < nelts; i++)
9926     {
9927       HOST_WIDE_INT index;
9928       if (!sel[i].is_constant (&index))
9929 	return NULL_TREE;
9930       if (!CONSTANT_CLASS_P (in_elts[index]))
9931 	need_ctor = true;
9932       out_elts.quick_push (unshare_expr (in_elts[index]));
9933     }
9934 
9935   if (need_ctor)
9936     {
9937       vec<constructor_elt, va_gc> *v;
9938       vec_alloc (v, nelts);
9939       for (i = 0; i < nelts; i++)
9940 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9941       return build_constructor (type, v);
9942     }
9943   else
9944     return out_elts.build ();
9945 }
9946 
9947 /* Try to fold a pointer difference of type TYPE between two address
9948    expressions of array references AREF0 and AREF1 using location LOC.
9949    Return a simplified expression for the difference or NULL_TREE.  */
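/* For example, with a hypothetical array "a", "&a[i] - &a[j]" has
   equal bases, so the result below is (i - j) * sizeof (a[0]) plus a
   zero base offset; nested references such as "&a[i][k] - &a[j][l]"
   are handled by the recursion on the bases.  */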
9950 
9951 static tree
9952 fold_addr_of_array_ref_difference (location_t loc, tree type,
9953 				   tree aref0, tree aref1,
9954 				   bool use_pointer_diff)
9955 {
9956   tree base0 = TREE_OPERAND (aref0, 0);
9957   tree base1 = TREE_OPERAND (aref1, 0);
9958   tree base_offset = build_int_cst (type, 0);
9959 
9960   /* If the bases are array references as well, recurse.  If the bases
9961      are pointer indirections compute the difference of the pointers.
9962      If the bases are equal, we are set.  */
9963   if ((TREE_CODE (base0) == ARRAY_REF
9964        && TREE_CODE (base1) == ARRAY_REF
9965        && (base_offset
9966 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9967 						use_pointer_diff)))
9968       || (INDIRECT_REF_P (base0)
9969 	  && INDIRECT_REF_P (base1)
9970 	  && (base_offset
9971 	        = use_pointer_diff
9972 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9973 				     TREE_OPERAND (base0, 0),
9974 				     TREE_OPERAND (base1, 0))
9975 		  : fold_binary_loc (loc, MINUS_EXPR, type,
9976 				     fold_convert (type,
9977 						   TREE_OPERAND (base0, 0)),
9978 				     fold_convert (type,
9979 						   TREE_OPERAND (base1, 0)))))
9980       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9981     {
9982       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9983       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9984       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9985       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9986       return fold_build2_loc (loc, PLUS_EXPR, type,
9987 			      base_offset,
9988 			      fold_build2_loc (loc, MULT_EXPR, type,
9989 					       diff, esz));
9990     }
9991   return NULL_TREE;
9992 }
9993 
9994 /* If the real or vector real constant CST of type TYPE has an exact
9995    inverse, return it, else return NULL.  */
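/* For instance, 0.5 yields 2.0 and 0.25 yields 4.0, both inverses
   being exactly representable, whereas 3.0 yields NULL because 1/3
   has no exact binary floating-point representation.  */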
9996 
9997 tree
9998 exact_inverse (tree type, tree cst)
9999 {
10000   REAL_VALUE_TYPE r;
10001   tree unit_type;
10002   machine_mode mode;
10003 
10004   switch (TREE_CODE (cst))
10005     {
10006     case REAL_CST:
10007       r = TREE_REAL_CST (cst);
10008 
10009       if (exact_real_inverse (TYPE_MODE (type), &r))
10010 	return build_real (type, r);
10011 
10012       return NULL_TREE;
10013 
10014     case VECTOR_CST:
10015       {
10016 	unit_type = TREE_TYPE (type);
10017 	mode = TYPE_MODE (unit_type);
10018 
10019 	tree_vector_builder elts;
10020 	if (!elts.new_unary_operation (type, cst, false))
10021 	  return NULL_TREE;
10022 	unsigned int count = elts.encoded_nelts ();
10023 	for (unsigned int i = 0; i < count; ++i)
10024 	  {
10025 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10026 	    if (!exact_real_inverse (mode, &r))
10027 	      return NULL_TREE;
10028 	    elts.quick_push (build_real (unit_type, r));
10029 	  }
10030 
10031 	return elts.build ();
10032       }
10033 
10034     default:
10035       return NULL_TREE;
10036     }
10037 }
10038 
10039 /*  Mask out the tz least significant bits of X of type TYPE where
10040     tz is the number of trailing zeroes in Y.  */
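/* E.g. for Y == 8 (binary 1000), tz is 3 and the result is X & ~7,
   clearing the three low bits of X; an odd Y has no trailing zeroes
   and leaves X unchanged.  */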
10041 static wide_int
10042 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10043 {
10044   int tz = wi::ctz (y);
10045   if (tz > 0)
10046     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10047   return x;
10048 }
10049 
10050 /* Return true when T is an address and is known to be nonzero.
10051    For floating point we further ensure that T is not denormal.
10052    Similar logic is present in nonzero_address in rtlanal.h.
10053 
10054    If the return value is based on the assumption that signed overflow
10055    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10056    change *STRICT_OVERFLOW_P.  */
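/* Some sample cases: the address of a non-weak declaration is
   nonzero, and a call to a function declared with attribute
   "returns_nonnull" counts as nonzero when -fdelete-null-pointer-checks
   is in effect; see the CALL_EXPR case below.  */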
10057 
10058 static bool
10059 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10060 {
10061   tree type = TREE_TYPE (t);
10062   enum tree_code code;
10063 
10064   /* Doing something useful for floating point would need more work.  */
10065   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10066     return false;
10067 
10068   code = TREE_CODE (t);
10069   switch (TREE_CODE_CLASS (code))
10070     {
10071     case tcc_unary:
10072       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10073 					      strict_overflow_p);
10074     case tcc_binary:
10075     case tcc_comparison:
10076       return tree_binary_nonzero_warnv_p (code, type,
10077 					       TREE_OPERAND (t, 0),
10078 					       TREE_OPERAND (t, 1),
10079 					       strict_overflow_p);
10080     case tcc_constant:
10081     case tcc_declaration:
10082     case tcc_reference:
10083       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10084 
10085     default:
10086       break;
10087     }
10088 
10089   switch (code)
10090     {
10091     case TRUTH_NOT_EXPR:
10092       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10093 					      strict_overflow_p);
10094 
10095     case TRUTH_AND_EXPR:
10096     case TRUTH_OR_EXPR:
10097     case TRUTH_XOR_EXPR:
10098       return tree_binary_nonzero_warnv_p (code, type,
10099 					       TREE_OPERAND (t, 0),
10100 					       TREE_OPERAND (t, 1),
10101 					       strict_overflow_p);
10102 
10103     case COND_EXPR:
10104     case CONSTRUCTOR:
10105     case OBJ_TYPE_REF:
10106     case ASSERT_EXPR:
10107     case ADDR_EXPR:
10108     case WITH_SIZE_EXPR:
10109     case SSA_NAME:
10110       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10111 
10112     case COMPOUND_EXPR:
10113     case MODIFY_EXPR:
10114     case BIND_EXPR:
10115       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10116 					strict_overflow_p);
10117 
10118     case SAVE_EXPR:
10119       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10120 					strict_overflow_p);
10121 
10122     case CALL_EXPR:
10123       {
10124 	tree fndecl = get_callee_fndecl (t);
10125 	if (!fndecl) return false;
10126 	if (flag_delete_null_pointer_checks && !flag_check_new
10127 	    && DECL_IS_OPERATOR_NEW_P (fndecl)
10128 	    && !TREE_NOTHROW (fndecl))
10129 	  return true;
10130 	if (flag_delete_null_pointer_checks
10131 	    && lookup_attribute ("returns_nonnull",
10132 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10133 	  return true;
10134 	return alloca_call_p (t);
10135       }
10136 
10137     default:
10138       break;
10139     }
10140   return false;
10141 }
10142 
10143 /* Return true when T is an address and is known to be nonzero.
10144    Handle warnings about undefined signed overflow.  */
10145 
10146 bool
10147 tree_expr_nonzero_p (tree t)
10148 {
10149   bool ret, strict_overflow_p;
10150 
10151   strict_overflow_p = false;
10152   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10153   if (strict_overflow_p)
10154     fold_overflow_warning (("assuming signed overflow does not occur when "
10155 			    "determining that expression is always "
10156 			    "non-zero"),
10157 			   WARN_STRICT_OVERFLOW_MISC);
10158   return ret;
10159 }
10160 
10161 /* Return true if T is known not to be equal to an integer W.  */
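/* E.g. an SSA_NAME whose recorded value range is [1, 7] is known not
   to equal 0 or 8; likewise, if the nonzero-bits mask of T is 0x6,
   any W with a bit set outside that mask, such as 1, cannot equal
   T.  */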
10162 
10163 bool
10164 expr_not_equal_to (tree t, const wide_int &w)
10165 {
10166   wide_int min, max, nz;
10167   value_range_kind rtype;
10168   switch (TREE_CODE (t))
10169     {
10170     case INTEGER_CST:
10171       return wi::to_wide (t) != w;
10172 
10173     case SSA_NAME:
10174       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10175 	return false;
10176       rtype = get_range_info (t, &min, &max);
10177       if (rtype == VR_RANGE)
10178 	{
10179 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10180 	    return true;
10181 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10182 	    return true;
10183 	}
10184       else if (rtype == VR_ANTI_RANGE
10185 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10186 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10187 	return true;
10188       /* If T has some known zero bits and W has any of those bits set,
10189 	 then T is known not to be equal to W.  */
10190       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10191 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
10192 	return true;
10193       return false;
10194 
10195     default:
10196       return false;
10197     }
10198 }
10199 
10200 /* Fold a binary expression of code CODE and type TYPE with operands
10201    OP0 and OP1.  LOC is the location of the resulting expression.
10202    Return the folded expression if folding is successful.  Otherwise,
10203    return NULL_TREE.  */
10204 
10205 tree
10206 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10207 		 tree op0, tree op1)
10208 {
10209   enum tree_code_class kind = TREE_CODE_CLASS (code);
10210   tree arg0, arg1, tem;
10211   tree t1 = NULL_TREE;
10212   bool strict_overflow_p;
10213   unsigned int prec;
10214 
10215   gcc_assert (IS_EXPR_CODE_CLASS (kind)
10216 	      && TREE_CODE_LENGTH (code) == 2
10217 	      && op0 != NULL_TREE
10218 	      && op1 != NULL_TREE);
10219 
10220   arg0 = op0;
10221   arg1 = op1;
10222 
10223   /* Strip any conversions that don't change the mode.  This is
10224      safe for every expression, except for a comparison expression
10225      because its signedness is derived from its operands.  So, in
10226      the latter case, only strip conversions that don't change the
10227      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10228      preserved.
10229 
10230      Note that this is done as an internal manipulation within the
10231      constant folder, in order to find the simplest representation
10232      of the arguments so that their form can be studied.  In any
10233      case, the appropriate type conversions should be put back in
10234      the tree that will get out of the constant folder.  */
10235 
10236   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10237     {
10238       STRIP_SIGN_NOPS (arg0);
10239       STRIP_SIGN_NOPS (arg1);
10240     }
10241   else
10242     {
10243       STRIP_NOPS (arg0);
10244       STRIP_NOPS (arg1);
10245     }
10246 
10247   /* Note that TREE_CONSTANT isn't enough: static var addresses are
10248      constant but we can't do arithmetic on them.  */
10249   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10250     {
10251       tem = const_binop (code, type, arg0, arg1);
10252       if (tem != NULL_TREE)
10253 	{
10254 	  if (TREE_TYPE (tem) != type)
10255 	    tem = fold_convert_loc (loc, type, tem);
10256 	  return tem;
10257 	}
10258     }
10259 
10260   /* If this is a commutative operation, and ARG0 is a constant, move it
10261      to ARG1 to reduce the number of tests below.  */
10262   if (commutative_tree_code (code)
10263       && tree_swap_operands_p (arg0, arg1))
10264     return fold_build2_loc (loc, code, type, op1, op0);
10265 
10266   /* Likewise if this is a comparison, and ARG0 is a constant, move it
10267      to ARG1 to reduce the number of tests below.  */
10268   if (kind == tcc_comparison
10269       && tree_swap_operands_p (arg0, arg1))
10270     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10271 
10272   tem = generic_simplify (loc, code, type, op0, op1);
10273   if (tem)
10274     return tem;
10275 
10276   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10277 
10278      First check for cases where an arithmetic operation is applied to a
10279      compound, conditional, or comparison operation.  Push the arithmetic
10280      operation inside the compound or conditional to see if any folding
10281      can then be done.  Convert comparison to conditional for this purpose.
10282      This also optimizes non-constant cases that used to be done in
10283      expand_expr.
10284 
10285      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10286      where one of the operands is a comparison and the other is a comparison,
10287      a BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10288      code below would make the expression more complex.  Change it to a
10289      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10290      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10291 
10292   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10293        || code == EQ_EXPR || code == NE_EXPR)
10294       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10295       && ((truth_value_p (TREE_CODE (arg0))
10296 	   && (truth_value_p (TREE_CODE (arg1))
10297 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10298 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10299 	  || (truth_value_p (TREE_CODE (arg1))
10300 	      && (truth_value_p (TREE_CODE (arg0))
10301 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10302 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10303     {
10304       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10305 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10306 			 : TRUTH_XOR_EXPR,
10307 			 boolean_type_node,
10308 			 fold_convert_loc (loc, boolean_type_node, arg0),
10309 			 fold_convert_loc (loc, boolean_type_node, arg1));
10310 
10311       if (code == EQ_EXPR)
10312 	tem = invert_truthvalue_loc (loc, tem);
10313 
10314       return fold_convert_loc (loc, type, tem);
10315     }
10316 
10317   if (TREE_CODE_CLASS (code) == tcc_binary
10318       || TREE_CODE_CLASS (code) == tcc_comparison)
10319     {
10320       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10321 	{
10322 	  tem = fold_build2_loc (loc, code, type,
10323 			     fold_convert_loc (loc, TREE_TYPE (op0),
10324 					       TREE_OPERAND (arg0, 1)), op1);
10325 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10326 			     tem);
10327 	}
10328       if (TREE_CODE (arg1) == COMPOUND_EXPR)
10329 	{
10330 	  tem = fold_build2_loc (loc, code, type, op0,
10331 			     fold_convert_loc (loc, TREE_TYPE (op1),
10332 					       TREE_OPERAND (arg1, 1)));
10333 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10334 			     tem);
10335 	}
10336 
10337       if (TREE_CODE (arg0) == COND_EXPR
10338 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10339 	  || COMPARISON_CLASS_P (arg0))
10340 	{
10341 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10342 						     arg0, arg1,
10343 						     /*cond_first_p=*/1);
10344 	  if (tem != NULL_TREE)
10345 	    return tem;
10346 	}
10347 
10348       if (TREE_CODE (arg1) == COND_EXPR
10349 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10350 	  || COMPARISON_CLASS_P (arg1))
10351 	{
10352 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10353 						     arg1, arg0,
10354 					             /*cond_first_p=*/0);
10355 	  if (tem != NULL_TREE)
10356 	    return tem;
10357 	}
10358     }
10359 
10360   switch (code)
10361     {
10362     case MEM_REF:
10363       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10364       if (TREE_CODE (arg0) == ADDR_EXPR
10365 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10366 	{
10367 	  tree iref = TREE_OPERAND (arg0, 0);
10368 	  return fold_build2 (MEM_REF, type,
10369 			      TREE_OPERAND (iref, 0),
10370 			      int_const_binop (PLUS_EXPR, arg1,
10371 					       TREE_OPERAND (iref, 1)));
10372 	}
10373 
10374       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10375       if (TREE_CODE (arg0) == ADDR_EXPR
10376 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10377 	{
10378 	  tree base;
10379 	  poly_int64 coffset;
10380 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10381 						&coffset);
10382 	  if (!base)
10383 	    return NULL_TREE;
10384 	  return fold_build2 (MEM_REF, type,
10385 			      build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10386 			      int_const_binop (PLUS_EXPR, arg1,
10387 					       size_int (coffset)));
10388 	}
10389 
10390       return NULL_TREE;
10391 
10392     case POINTER_PLUS_EXPR:
10393       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10394       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10395 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10396         return fold_convert_loc (loc, type,
10397 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10398 					      fold_convert_loc (loc, sizetype,
10399 								arg1),
10400 					      fold_convert_loc (loc, sizetype,
10401 								arg0)));
10402 
10403       return NULL_TREE;
10404 
10405     case PLUS_EXPR:
10406       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10407 	{
10408 	  /* X + (X / CST) * -CST is X % CST.  */
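	  /* Concretely, with a hypothetical integer x:
	     "x + (x / 16) * -16" has cst1 + cst0 == -16 + 16 == 0, so
	     it folds to "x % 16" via the TRUNC_MOD_EXPR built below.  */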
10409 	  if (TREE_CODE (arg1) == MULT_EXPR
10410 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10411 	      && operand_equal_p (arg0,
10412 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10413 	    {
10414 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10415 	      tree cst1 = TREE_OPERAND (arg1, 1);
10416 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10417 				      cst1, cst0);
10418 	      if (sum && integer_zerop (sum))
10419 		return fold_convert_loc (loc, type,
10420 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10421 						      TREE_TYPE (arg0), arg0,
10422 						      cst0));
10423 	    }
10424 	}
10425 
10426       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10427 	 one.  Make sure the type is not saturating and has the signedness of
10428 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10429 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10430       if ((TREE_CODE (arg0) == MULT_EXPR
10431 	   || TREE_CODE (arg1) == MULT_EXPR)
10432 	  && !TYPE_SATURATING (type)
10433 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10434 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10435 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10436         {
10437 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10438 	  if (tem)
10439 	    return tem;
10440 	}
10441 
10442       if (! FLOAT_TYPE_P (type))
10443 	{
10444 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10445 	     (plus (plus (mult) (mult)) (foo)) so that we can
10446 	     take advantage of the factoring cases below.  */
10447 	  if (ANY_INTEGRAL_TYPE_P (type)
10448 	      && TYPE_OVERFLOW_WRAPS (type)
10449 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10450 		    || TREE_CODE (arg0) == MINUS_EXPR)
10451 		   && TREE_CODE (arg1) == MULT_EXPR)
10452 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10453 		       || TREE_CODE (arg1) == MINUS_EXPR)
10454 		      && TREE_CODE (arg0) == MULT_EXPR)))
10455 	    {
10456 	      tree parg0, parg1, parg, marg;
10457 	      enum tree_code pcode;
10458 
10459 	      if (TREE_CODE (arg1) == MULT_EXPR)
10460 		parg = arg0, marg = arg1;
10461 	      else
10462 		parg = arg1, marg = arg0;
10463 	      pcode = TREE_CODE (parg);
10464 	      parg0 = TREE_OPERAND (parg, 0);
10465 	      parg1 = TREE_OPERAND (parg, 1);
10466 	      STRIP_NOPS (parg0);
10467 	      STRIP_NOPS (parg1);
10468 
10469 	      if (TREE_CODE (parg0) == MULT_EXPR
10470 		  && TREE_CODE (parg1) != MULT_EXPR)
10471 		return fold_build2_loc (loc, pcode, type,
10472 				    fold_build2_loc (loc, PLUS_EXPR, type,
10473 						 fold_convert_loc (loc, type,
10474 								   parg0),
10475 						 fold_convert_loc (loc, type,
10476 								   marg)),
10477 				    fold_convert_loc (loc, type, parg1));
10478 	      if (TREE_CODE (parg0) != MULT_EXPR
10479 		  && TREE_CODE (parg1) == MULT_EXPR)
10480 		return
10481 		  fold_build2_loc (loc, PLUS_EXPR, type,
10482 			       fold_convert_loc (loc, type, parg0),
10483 			       fold_build2_loc (loc, pcode, type,
10484 					    fold_convert_loc (loc, type, marg),
10485 					    fold_convert_loc (loc, type,
10486 							      parg1)));
10487 	    }
10488 	}
10489       else
10490 	{
10491 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10492 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10493 	     if signed zeros are involved.  */
10494 	  if (!HONOR_SNANS (element_mode (arg0))
10495               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10496 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10497 	    {
10498 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10499 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10500 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10501 	      bool arg0rz = false, arg0iz = false;
10502 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10503 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10504 		{
10505 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10506 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10507 		  if (arg0rz && arg1i && real_zerop (arg1i))
10508 		    {
10509 		      tree rp = arg1r ? arg1r
10510 				  : build1 (REALPART_EXPR, rtype, arg1);
10511 		      tree ip = arg0i ? arg0i
10512 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10513 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10514 		    }
10515 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10516 		    {
10517 		      tree rp = arg0r ? arg0r
10518 				  : build1 (REALPART_EXPR, rtype, arg0);
10519 		      tree ip = arg1i ? arg1i
10520 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10521 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10522 		    }
10523 		}
10524 	    }
10525 
10526           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10527              We associate floats only if the user has specified
10528              -fassociative-math.  */
10529           if (flag_associative_math
10530               && TREE_CODE (arg1) == PLUS_EXPR
10531               && TREE_CODE (arg0) != MULT_EXPR)
10532             {
10533               tree tree10 = TREE_OPERAND (arg1, 0);
10534               tree tree11 = TREE_OPERAND (arg1, 1);
10535               if (TREE_CODE (tree11) == MULT_EXPR
10536 		  && TREE_CODE (tree10) == MULT_EXPR)
10537                 {
10538                   tree tree0;
10539                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10540                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10541                 }
10542             }
10543           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10544              We associate floats only if the user has specified
10545              -fassociative-math.  */
10546           if (flag_associative_math
10547               && TREE_CODE (arg0) == PLUS_EXPR
10548               && TREE_CODE (arg1) != MULT_EXPR)
10549             {
10550               tree tree00 = TREE_OPERAND (arg0, 0);
10551               tree tree01 = TREE_OPERAND (arg0, 1);
10552               if (TREE_CODE (tree01) == MULT_EXPR
10553 		  && TREE_CODE (tree00) == MULT_EXPR)
10554                 {
10555                   tree tree0;
10556                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10557                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10558                 }
10559             }
10560 	}
10561 
10562      bit_rotate:
10563       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10564 	 is a rotate of A by C1 bits.  */
10565       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10566 	 is a rotate of A by B bits.
10567 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10568 	 though in this case CODE must be | and not + or ^, otherwise
10569 	 it doesn't return A when B is 0.  */
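      /* For a hypothetical 32-bit unsigned x, "(x << 3) + (x >> 29)"
	 matches the constant form and becomes a left-rotate by 3;
	 "(x << b) | (x >> (32 - b))" and "(x << b) | (x >> (-b & 31))"
	 match the variable forms, the latter only when CODE is |.  */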
10570       {
10571 	enum tree_code code0, code1;
10572 	tree rtype;
10573 	code0 = TREE_CODE (arg0);
10574 	code1 = TREE_CODE (arg1);
10575 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10576 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10577 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10578 			        TREE_OPERAND (arg1, 0), 0)
10579 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10580 	        TYPE_UNSIGNED (rtype))
10581 	    /* Only create rotates in complete modes.  Other cases are not
10582 	       expanded properly.  */
10583 	    && (element_precision (rtype)
10584 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10585 	  {
10586 	    tree tree01, tree11;
10587 	    tree orig_tree01, orig_tree11;
10588 	    enum tree_code code01, code11;
10589 
10590 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10591 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10592 	    STRIP_NOPS (tree01);
10593 	    STRIP_NOPS (tree11);
10594 	    code01 = TREE_CODE (tree01);
10595 	    code11 = TREE_CODE (tree11);
10596 	    if (code11 != MINUS_EXPR
10597 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10598 	      {
10599 		std::swap (code0, code1);
10600 		std::swap (code01, code11);
10601 		std::swap (tree01, tree11);
10602 		std::swap (orig_tree01, orig_tree11);
10603 	      }
10604 	    if (code01 == INTEGER_CST
10605 		&& code11 == INTEGER_CST
10606 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
10607 		    == element_precision (rtype)))
10608 	      {
10609 		tem = build2_loc (loc, LROTATE_EXPR,
10610 				  rtype, TREE_OPERAND (arg0, 0),
10611 				  code0 == LSHIFT_EXPR
10612 				  ? orig_tree01 : orig_tree11);
10613 		return fold_convert_loc (loc, type, tem);
10614 	      }
10615 	    else if (code11 == MINUS_EXPR)
10616 	      {
10617 		tree tree110, tree111;
10618 		tree110 = TREE_OPERAND (tree11, 0);
10619 		tree111 = TREE_OPERAND (tree11, 1);
10620 		STRIP_NOPS (tree110);
10621 		STRIP_NOPS (tree111);
10622 		if (TREE_CODE (tree110) == INTEGER_CST
10623 		    && compare_tree_int (tree110,
10624 					 element_precision (rtype)) == 0
10625 		    && operand_equal_p (tree01, tree111, 0))
10626 		  {
10627 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10628 					    ? LROTATE_EXPR : RROTATE_EXPR),
10629 				      rtype, TREE_OPERAND (arg0, 0),
10630 				      orig_tree01);
10631 		    return fold_convert_loc (loc, type, tem);
10632 		  }
10633 	      }
10634 	    else if (code == BIT_IOR_EXPR
10635 		     && code11 == BIT_AND_EXPR
10636 		     && pow2p_hwi (element_precision (rtype)))
10637 	      {
10638 		tree tree110, tree111;
10639 		tree110 = TREE_OPERAND (tree11, 0);
10640 		tree111 = TREE_OPERAND (tree11, 1);
10641 		STRIP_NOPS (tree110);
10642 		STRIP_NOPS (tree111);
10643 		if (TREE_CODE (tree110) == NEGATE_EXPR
10644 		    && TREE_CODE (tree111) == INTEGER_CST
10645 		    && compare_tree_int (tree111,
10646 					 element_precision (rtype) - 1) == 0
10647 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10648 		  {
10649 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10650 					    ? LROTATE_EXPR : RROTATE_EXPR),
10651 				      rtype, TREE_OPERAND (arg0, 0),
10652 				      orig_tree01);
10653 		    return fold_convert_loc (loc, type, tem);
10654 		  }
10655 	      }
10656 	  }
10657       }
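      /* An illustrative sketch of what the detection above recognizes,
	 assuming a 32-bit unsigned int x:
	     (x << 3) | (x >> 29)         ->  x lrotate 3
	     (x << n) | (x >> (32 - n))   ->  x lrotate n
	     (x << n) | (x >> (-n & 31))  ->  x lrotate n
	 where the last form is only valid for |, as noted above.  */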
10658 
10659     associate:
10660       /* In most languages, we can't associate operations on floats
10661 	 through parentheses.  Rather than remember where the parentheses
10662 	 were, we don't associate floats at all, unless the user has
10663 	 specified -fassociative-math.
10664 	 And we need to make sure the type is not saturating.  */
10665 
10666       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10667 	  && !TYPE_SATURATING (type))
10668 	{
10669 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10670 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10671 	  tree atype = type;
10672 	  bool ok = true;
10673 
10674 	  /* Split both trees into variables, constants, and literals.  Then
10675 	     associate each group together, the constants with literals,
10676 	     then the result with variables.  This increases the chances of
10677 	     literals being recombined later and of generating relocatable
10678 	     expressions for the sum of a constant and literal.  */
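	  /* An illustrative example of the machinery below: folding
	     (x + 1) + (y + 2) splits into var0 = x, lit0 = 1, var1 = y,
	     lit1 = 2 -- four objects in total -- which are then
	     recombined as (x + y) + 3.  */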
10679 	  var0 = split_tree (arg0, type, code,
10680 			     &minus_var0, &con0, &minus_con0,
10681 			     &lit0, &minus_lit0, 0);
10682 	  var1 = split_tree (arg1, type, code,
10683 			     &minus_var1, &con1, &minus_con1,
10684 			     &lit1, &minus_lit1, code == MINUS_EXPR);
10685 
10686 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10687 	  if (code == MINUS_EXPR)
10688 	    code = PLUS_EXPR;
10689 
10690 	  /* With undefined overflow prefer doing association in a type
10691 	     which wraps on overflow, if that is one of the operand types.  */
10692 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10693 	      && !TYPE_OVERFLOW_WRAPS (type))
10694 	    {
10695 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10696 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10697 		atype = TREE_TYPE (arg0);
10698 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10699 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10700 		atype = TREE_TYPE (arg1);
10701 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10702 	    }
10703 
10704 	  /* With undefined overflow we can only associate constants with one
10705 	     variable, and constants whose association doesn't overflow.  */
10706 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10707 	      && !TYPE_OVERFLOW_WRAPS (atype))
10708 	    {
10709 	      if ((var0 && var1) || (minus_var0 && minus_var1))
10710 		{
10711 		  /* ???  If split_tree would handle NEGATE_EXPR we could
10712 		     simply reject these cases and the allowed cases would
10713 		     be the var0/minus_var1 ones.  */
10714 		  tree tmp0 = var0 ? var0 : minus_var0;
10715 		  tree tmp1 = var1 ? var1 : minus_var1;
10716 		  bool one_neg = false;
10717 
10718 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10719 		    {
10720 		      tmp0 = TREE_OPERAND (tmp0, 0);
10721 		      one_neg = !one_neg;
10722 		    }
10723 		  if (CONVERT_EXPR_P (tmp0)
10724 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10725 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10726 			  <= TYPE_PRECISION (atype)))
10727 		    tmp0 = TREE_OPERAND (tmp0, 0);
10728 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10729 		    {
10730 		      tmp1 = TREE_OPERAND (tmp1, 0);
10731 		      one_neg = !one_neg;
10732 		    }
10733 		  if (CONVERT_EXPR_P (tmp1)
10734 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10735 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10736 			  <= TYPE_PRECISION (atype)))
10737 		    tmp1 = TREE_OPERAND (tmp1, 0);
10738 		  /* The only case we can still associate with two variables
10739 		     is if they cancel out.  */
10740 		  if (!one_neg
10741 		      || !operand_equal_p (tmp0, tmp1, 0))
10742 		    ok = false;
10743 		}
10744 	      else if ((var0 && minus_var1
10745 			&& ! operand_equal_p (var0, minus_var1, 0))
10746 		       || (minus_var0 && var1
10747 			   && ! operand_equal_p (minus_var0, var1, 0)))
10748 		ok = false;
10749 	    }
10750 
10751 	  /* Only do something if we found more than two objects.  Otherwise,
10752 	     nothing has changed and we risk infinite recursion.  */
10753 	  if (ok
10754 	      && ((var0 != 0) + (var1 != 0)
10755 		  + (minus_var0 != 0) + (minus_var1 != 0)
10756 		  + (con0 != 0) + (con1 != 0)
10757 		  + (minus_con0 != 0) + (minus_con1 != 0)
10758 		  + (lit0 != 0) + (lit1 != 0)
10759 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10760 	    {
10761 	      var0 = associate_trees (loc, var0, var1, code, atype);
10762 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10763 					    code, atype);
10764 	      con0 = associate_trees (loc, con0, con1, code, atype);
10765 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10766 					    code, atype);
10767 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
10768 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10769 					    code, atype);
10770 
10771 	      if (minus_var0 && var0)
10772 		{
10773 		  var0 = associate_trees (loc, var0, minus_var0,
10774 					  MINUS_EXPR, atype);
10775 		  minus_var0 = 0;
10776 		}
10777 	      if (minus_con0 && con0)
10778 		{
10779 		  con0 = associate_trees (loc, con0, minus_con0,
10780 					  MINUS_EXPR, atype);
10781 		  minus_con0 = 0;
10782 		}
10783 
10784 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10785 		 greater than the positive part.  Otherwise, the multiplicative
10786 		 folding code (i.e. extract_muldiv) may be fooled when
10787 		 unsigned constants are subtracted, as in the following
10788 		 example: ((X*2 + 4) - 8U)/2.  */
10789 	      if (minus_lit0 && lit0)
10790 		{
10791 		  if (TREE_CODE (lit0) == INTEGER_CST
10792 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10793 		      && tree_int_cst_lt (lit0, minus_lit0)
10794 		      /* But avoid ending up with only negated parts.  */
10795 		      && (var0 || con0))
10796 		    {
10797 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10798 						    MINUS_EXPR, atype);
10799 		      lit0 = 0;
10800 		    }
10801 		  else
10802 		    {
10803 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10804 					      MINUS_EXPR, atype);
10805 		      minus_lit0 = 0;
10806 		    }
10807 		}
10808 
10809 	      /* Don't introduce overflows through reassociation.  */
10810 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
10811 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10812 		return NULL_TREE;
10813 
10814 	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10815 	      con0 = associate_trees (loc, con0, lit0, code, atype);
10816 	      lit0 = 0;
10817 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10818 					    code, atype);
10819 	      minus_lit0 = 0;
10820 
10821 	      /* Eliminate minus_con0.  */
10822 	      if (minus_con0)
10823 		{
10824 		  if (con0)
10825 		    con0 = associate_trees (loc, con0, minus_con0,
10826 					    MINUS_EXPR, atype);
10827 		  else if (var0)
10828 		    var0 = associate_trees (loc, var0, minus_con0,
10829 					    MINUS_EXPR, atype);
10830 		  else
10831 		    gcc_unreachable ();
10832 		  minus_con0 = 0;
10833 		}
10834 
10835 	      /* Eliminate minus_var0.  */
10836 	      if (minus_var0)
10837 		{
10838 		  if (con0)
10839 		    con0 = associate_trees (loc, con0, minus_var0,
10840 					    MINUS_EXPR, atype);
10841 		  else
10842 		    gcc_unreachable ();
10843 		  minus_var0 = 0;
10844 		}
10845 
10846 	      return
10847 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10848 							      code, atype));
10849 	    }
10850 	}
10851 
10852       return NULL_TREE;
10853 
10854     case POINTER_DIFF_EXPR:
10855     case MINUS_EXPR:
10856       /* Fold &a[i] - &a[j] to i-j.  */
10857       if (TREE_CODE (arg0) == ADDR_EXPR
10858 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10859 	  && TREE_CODE (arg1) == ADDR_EXPR
10860 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10861         {
10862 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10863 							TREE_OPERAND (arg0, 0),
10864 							TREE_OPERAND (arg1, 0),
10865 							code
10866 							== POINTER_DIFF_EXPR);
10867 	  if (tem)
10868 	    return tem;
10869 	}
10870 
10871       /* Further transformations are not for pointers.  */
10872       if (code == POINTER_DIFF_EXPR)
10873 	return NULL_TREE;
10874 
10875       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10876       if (TREE_CODE (arg0) == NEGATE_EXPR
10877 	  && negate_expr_p (op1)
10878 	  /* If arg0 is e.g. unsigned int and type is int, then this could
10879 	     introduce UB, because if A is INT_MIN at runtime, the original
10880 	     expression can be well defined while the latter is not.
10881 	     See PR83269.  */
10882 	  && !(ANY_INTEGRAL_TYPE_P (type)
10883 	       && TYPE_OVERFLOW_UNDEFINED (type)
10884 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10885 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10886 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10887 			        fold_convert_loc (loc, type,
10888 						  TREE_OPERAND (arg0, 0)));
10889 
10890       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10891 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10892 	 signed zeros are involved.  */
10893       if (!HONOR_SNANS (element_mode (arg0))
10894 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10895 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10896         {
10897 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10898 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10899 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10900 	  bool arg0rz = false, arg0iz = false;
10901 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10902 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10903 	    {
10904 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10905 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10906 	      if (arg0rz && arg1i && real_zerop (arg1i))
10907 	        {
10908 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10909 					 arg1r ? arg1r
10910 					 : build1 (REALPART_EXPR, rtype, arg1));
10911 		  tree ip = arg0i ? arg0i
10912 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10913 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10914 		}
10915 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10916 	        {
10917 		  tree rp = arg0r ? arg0r
10918 		    : build1 (REALPART_EXPR, rtype, arg0);
10919 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10920 					 arg1i ? arg1i
10921 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10922 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10923 		}
10924 	    }
10925 	}
10926 
10927       /* A - B -> A + (-B) if B is easily negatable.  */
10928       if (negate_expr_p (op1)
10929 	  && ! TYPE_OVERFLOW_SANITIZED (type)
10930 	  && ((FLOAT_TYPE_P (type)
10931                /* Avoid this transformation if B is a positive REAL_CST.  */
10932 	       && (TREE_CODE (op1) != REAL_CST
10933 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10934 	      || INTEGRAL_TYPE_P (type)))
10935 	return fold_build2_loc (loc, PLUS_EXPR, type,
10936 				fold_convert_loc (loc, type, arg0),
10937 				negate_expr (op1));
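      /* E.g. x - 5 becomes x + (-5) here for integral types, which the
	 association code above can then combine with other literals.
	 The REAL_CST guard is what keeps us from rewriting x - 2.0 as
	 x + (-2.0) for positive floating-point constants.  */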
10938 
10939       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10940 	 one.  Make sure the type is not saturating and has the signedness of
10941 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10942 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10943       if ((TREE_CODE (arg0) == MULT_EXPR
10944 	   || TREE_CODE (arg1) == MULT_EXPR)
10945 	  && !TYPE_SATURATING (type)
10946 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10947 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10948 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10949         {
10950 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10951 	  if (tem)
10952 	    return tem;
10953 	}
10954 
10955       goto associate;
10956 
10957     case MULT_EXPR:
10958       if (! FLOAT_TYPE_P (type))
10959 	{
10960 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10961 	  if (TREE_CODE (op1) == INTEGER_CST
10962 	      && tree_int_cst_sgn (op1) == -1
10963 	      && negate_expr_p (op0)
10964 	      && negate_expr_p (op1)
10965 	      && (tem = negate_expr (op1)) != op1
10966 	      && ! TREE_OVERFLOW (tem))
10967 	    return fold_build2_loc (loc, MULT_EXPR, type,
10968 				    fold_convert_loc (loc, type,
10969 						      negate_expr (op0)), tem);
10970 
10971 	  strict_overflow_p = false;
10972 	  if (TREE_CODE (arg1) == INTEGER_CST
10973 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10974 					&strict_overflow_p)) != 0)
10975 	    {
10976 	      if (strict_overflow_p)
10977 		fold_overflow_warning (("assuming signed overflow does not "
10978 					"occur when simplifying "
10979 					"multiplication"),
10980 				       WARN_STRICT_OVERFLOW_MISC);
10981 	      return fold_convert_loc (loc, type, tem);
10982 	    }
10983 
10984 	  /* Optimize z * conj(z) for integer complex numbers.  */
10985 	  if (TREE_CODE (arg0) == CONJ_EXPR
10986 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10987 	    return fold_mult_zconjz (loc, type, arg1);
10988 	  if (TREE_CODE (arg1) == CONJ_EXPR
10989 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10990 	    return fold_mult_zconjz (loc, type, arg0);
10991 	}
10992       else
10993 	{
10994 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10995 	     This is not the same for NaNs or if signed zeros are
10996 	     involved.  */
10997 	  if (!HONOR_NANS (arg0)
10998               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10999 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11000 	      && TREE_CODE (arg1) == COMPLEX_CST
11001 	      && real_zerop (TREE_REALPART (arg1)))
11002 	    {
11003 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11004 	      if (real_onep (TREE_IMAGPART (arg1)))
11005 		return
11006 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11007 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11008 							     rtype, arg0)),
11009 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11010 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
11011 		return
11012 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11013 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11014 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11015 							     rtype, arg0)));
11016 	    }
11017 
11018 	  /* Optimize z * conj(z) for floating point complex numbers.
11019 	     Guarded by flag_unsafe_math_optimizations as non-finite
11020 	     imaginary components don't produce scalar results.  */
11021 	  if (flag_unsafe_math_optimizations
11022 	      && TREE_CODE (arg0) == CONJ_EXPR
11023 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11024 	    return fold_mult_zconjz (loc, type, arg1);
11025 	  if (flag_unsafe_math_optimizations
11026 	      && TREE_CODE (arg1) == CONJ_EXPR
11027 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11028 	    return fold_mult_zconjz (loc, type, arg0);
11029 	}
11030       goto associate;
11031 
11032     case BIT_IOR_EXPR:
11033       /* Canonicalize (X & C1) | C2.  */
11034       if (TREE_CODE (arg0) == BIT_AND_EXPR
11035 	  && TREE_CODE (arg1) == INTEGER_CST
11036 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11037 	{
11038 	  int width = TYPE_PRECISION (type), w;
11039 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11040 	  wide_int c2 = wi::to_wide (arg1);
11041 
11042 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11043 	  if ((c1 & c2) == c1)
11044 	    return omit_one_operand_loc (loc, type, arg1,
11045 					 TREE_OPERAND (arg0, 0));
11046 
11047 	  wide_int msk = wi::mask (width, false,
11048 				   TYPE_PRECISION (TREE_TYPE (arg1)));
11049 
11050 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11051 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
11052 	    {
11053 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11054 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11055 	    }
11056 
11057 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11058 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11059 	     mode which allows further optimizations.  */
11060 	  c1 &= msk;
11061 	  c2 &= msk;
11062 	  wide_int c3 = wi::bit_and_not (c1, c2);
11063 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11064 	    {
11065 	      wide_int mask = wi::mask (w, false,
11066 					TYPE_PRECISION (type));
11067 	      if (((c1 | c2) & mask) == mask
11068 		  && wi::bit_and_not (c1, mask) == 0)
11069 		{
11070 		  c3 = mask;
11071 		  break;
11072 		}
11073 	    }
11074 
11075 	  if (c3 != c1)
11076 	    {
11077 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11078 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11079 				     wide_int_to_tree (type, c3));
11080 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11081 	    }
11082 	}
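      /* Illustrative examples of the canonicalizations above, assuming
	 a 32-bit type:
	     (x & 0x0F) | 0x1F  ->  0x1F              (C1 & C2 == C1)
	     (x & 0x64) | 0x30  ->  (x & 0x44) | 0x30 (C1 &= ~C2)
	     (x & 0xF4) | 0x0F  ->  (x & 0xFF) | 0x0F (mode mask kept)
	 The last case keeps C1 a mask of a narrower mode, which tends
	 to expand as a simple zero extension.  */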
11083 
11084       /* See if this can be simplified into a rotate first.  If that
11085 	 is unsuccessful continue in the association code.  */
11086       goto bit_rotate;
11087 
11088     case BIT_XOR_EXPR:
11089       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11090       if (TREE_CODE (arg0) == BIT_AND_EXPR
11091 	  && INTEGRAL_TYPE_P (type)
11092 	  && integer_onep (TREE_OPERAND (arg0, 1))
11093 	  && integer_onep (arg1))
11094 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11095 				build_zero_cst (TREE_TYPE (arg0)));
11096 
11097       /* See if this can be simplified into a rotate first.  If that
11098 	 is unsuccessful continue in the association code.  */
11099       goto bit_rotate;
11100 
11101     case BIT_AND_EXPR:
11102       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11103       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11104 	  && INTEGRAL_TYPE_P (type)
11105 	  && integer_onep (TREE_OPERAND (arg0, 1))
11106 	  && integer_onep (arg1))
11107 	{
11108 	  tree tem2;
11109 	  tem = TREE_OPERAND (arg0, 0);
11110 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11111 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11112 				  tem, tem2);
11113 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11114 				  build_zero_cst (TREE_TYPE (tem)));
11115 	}
11116       /* Fold ~X & 1 as (X & 1) == 0.  */
11117       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11118 	  && INTEGRAL_TYPE_P (type)
11119 	  && integer_onep (arg1))
11120 	{
11121 	  tree tem2;
11122 	  tem = TREE_OPERAND (arg0, 0);
11123 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11124 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11125 				  tem, tem2);
11126 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11127 				  build_zero_cst (TREE_TYPE (tem)));
11128 	}
11129       /* Fold !X & 1 as X == 0.  */
11130       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11131 	  && integer_onep (arg1))
11132 	{
11133 	  tem = TREE_OPERAND (arg0, 0);
11134 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11135 				  build_zero_cst (TREE_TYPE (tem)));
11136 	}
11137 
11138       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11139          multiple of 1 << CST.  */
11140       if (TREE_CODE (arg1) == INTEGER_CST)
11141 	{
11142 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11143 	  wide_int ncst1 = -cst1;
11144 	  if ((cst1 & ncst1) == ncst1
11145 	      && multiple_of_p (type, arg0,
11146 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11147 	    return fold_convert_loc (loc, type, arg0);
11148 	}
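      /* E.g. (x * 12) & -4 -> x * 12, since -4 is -(1 << 2) and
	 x * 12 is always a multiple of 4 (an illustrative sketch).  */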
11149 
11150       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11151          bits from CST2.  */
11152       if (TREE_CODE (arg1) == INTEGER_CST
11153 	  && TREE_CODE (arg0) == MULT_EXPR
11154 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11155 	{
11156 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11157 	  wide_int masked
11158 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11159 
11160 	  if (masked == 0)
11161 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11162 	                                  arg0, arg1);
11163 	  else if (masked != warg1)
11164 	    {
11165 	      /* Avoid the transform if arg1 is a mask of some
11166 	         mode which allows further optimizations.  */
11167 	      int pop = wi::popcount (warg1);
11168 	      if (!(pop >= BITS_PER_UNIT
11169 		    && pow2p_hwi (pop)
11170 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11171 		return fold_build2_loc (loc, code, type, op0,
11172 					wide_int_to_tree (type, masked));
11173 	    }
11174 	}
11175 
11176       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11177       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11178 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11179 	{
11180 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11181 
11182 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11183 	  if (mask == -1)
11184 	    return
11185 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11186 	}
11187 
11188       goto associate;
11189 
11190     case RDIV_EXPR:
11191       /* Don't touch a floating-point divide by zero unless the mode
11192 	 of the constant can represent infinity.  */
11193       if (TREE_CODE (arg1) == REAL_CST
11194 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11195 	  && real_zerop (arg1))
11196 	return NULL_TREE;
11197 
11198       /* (-A) / (-B) -> A / B  */
11199       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11200 	return fold_build2_loc (loc, RDIV_EXPR, type,
11201 			    TREE_OPERAND (arg0, 0),
11202 			    negate_expr (arg1));
11203       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11204 	return fold_build2_loc (loc, RDIV_EXPR, type,
11205 			    negate_expr (arg0),
11206 			    TREE_OPERAND (arg1, 0));
11207       return NULL_TREE;
11208 
11209     case TRUNC_DIV_EXPR:
11210       /* Fall through */
11211 
11212     case FLOOR_DIV_EXPR:
11213       /* Simplify A / (B << N) where A and B are positive and B is
11214 	 a power of 2, to A >> (N + log2(B)).  */
11215       strict_overflow_p = false;
11216       if (TREE_CODE (arg1) == LSHIFT_EXPR
11217 	  && (TYPE_UNSIGNED (type)
11218 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11219 	{
11220 	  tree sval = TREE_OPERAND (arg1, 0);
11221 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11222 	    {
11223 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11224 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11225 					 wi::exact_log2 (wi::to_wide (sval)));
11226 
11227 	      if (strict_overflow_p)
11228 		fold_overflow_warning (("assuming signed overflow does not "
11229 					"occur when simplifying A / (B << N)"),
11230 				       WARN_STRICT_OVERFLOW_MISC);
11231 
11232 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11233 					sh_cnt, pow2);
11234 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11235 				      fold_convert_loc (loc, type, arg0), sh_cnt);
11236 	    }
11237 	}
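      /* E.g. for unsigned x, x / (4 << n) becomes x >> (n + 2), as
	 log2 (4) == 2 (an illustrative sketch).  */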
11238 
11239       /* Fall through */
11240 
11241     case ROUND_DIV_EXPR:
11242     case CEIL_DIV_EXPR:
11243     case EXACT_DIV_EXPR:
11244       if (integer_zerop (arg1))
11245 	return NULL_TREE;
11246 
11247       /* Convert -A / -B to A / B when the type is signed and overflow is
11248 	 undefined.  */
11249       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11250 	  && TREE_CODE (op0) == NEGATE_EXPR
11251 	  && negate_expr_p (op1))
11252 	{
11253 	  if (ANY_INTEGRAL_TYPE_P (type))
11254 	    fold_overflow_warning (("assuming signed overflow does not occur "
11255 				    "when distributing negation across "
11256 				    "division"),
11257 				   WARN_STRICT_OVERFLOW_MISC);
11258 	  return fold_build2_loc (loc, code, type,
11259 				  fold_convert_loc (loc, type,
11260 						    TREE_OPERAND (arg0, 0)),
11261 				  negate_expr (op1));
11262 	}
11263       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11264 	  && TREE_CODE (arg1) == NEGATE_EXPR
11265 	  && negate_expr_p (op0))
11266 	{
11267 	  if (ANY_INTEGRAL_TYPE_P (type))
11268 	    fold_overflow_warning (("assuming signed overflow does not occur "
11269 				    "when distributing negation across "
11270 				    "division"),
11271 				   WARN_STRICT_OVERFLOW_MISC);
11272 	  return fold_build2_loc (loc, code, type,
11273 				  negate_expr (op0),
11274 				  fold_convert_loc (loc, type,
11275 						    TREE_OPERAND (arg1, 0)));
11276 	}
11277 
11278       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11279 	 operation, EXACT_DIV_EXPR.
11280 
11281 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11282 	 At one time others generated faster code; it's not clear if they do
11283 	 after the last round of changes to the DIV code in expmed.c.  */
11284       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11285 	  && multiple_of_p (type, arg0, arg1))
11286 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11287 				fold_convert (type, arg0),
11288 				fold_convert (type, arg1));
11289 
11290       strict_overflow_p = false;
11291       if (TREE_CODE (arg1) == INTEGER_CST
11292 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11293 				    &strict_overflow_p)) != 0)
11294 	{
11295 	  if (strict_overflow_p)
11296 	    fold_overflow_warning (("assuming signed overflow does not occur "
11297 				    "when simplifying division"),
11298 				   WARN_STRICT_OVERFLOW_MISC);
11299 	  return fold_convert_loc (loc, type, tem);
11300 	}
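      /* E.g. with undefined signed overflow, extract_muldiv may rewrite
	 (x * 4) / 2 as x * 2, which is when the warning above can be
	 issued (an illustrative sketch).  */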
11301 
11302       return NULL_TREE;
11303 
11304     case CEIL_MOD_EXPR:
11305     case FLOOR_MOD_EXPR:
11306     case ROUND_MOD_EXPR:
11307     case TRUNC_MOD_EXPR:
11308       strict_overflow_p = false;
11309       if (TREE_CODE (arg1) == INTEGER_CST
11310 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11311 				    &strict_overflow_p)) != 0)
11312 	{
11313 	  if (strict_overflow_p)
11314 	    fold_overflow_warning (("assuming signed overflow does not occur "
11315 				    "when simplifying modulus"),
11316 				   WARN_STRICT_OVERFLOW_MISC);
11317 	  return fold_convert_loc (loc, type, tem);
11318 	}
11319 
11320       return NULL_TREE;
11321 
11322     case LROTATE_EXPR:
11323     case RROTATE_EXPR:
11324     case RSHIFT_EXPR:
11325     case LSHIFT_EXPR:
11326       /* Since negative shift count is not well-defined,
11327 	 don't try to compute it in the compiler.  */
11328       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11329 	return NULL_TREE;
11330 
11331       prec = element_precision (type);
11332 
11333       /* If we have a rotate of a bit operation with the rotate count and
11334 	 the second operand of the bit operation both constant,
11335 	 permute the two operations.  */
11336       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11337 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
11338 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
11339 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
11340 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11341 	{
11342 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11343 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11344 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
11345 				  fold_build2_loc (loc, code, type,
11346 						   arg00, arg1),
11347 				  fold_build2_loc (loc, code, type,
11348 						   arg01, arg1));
11349 	}
11350 
11351       /* Two consecutive rotates adding up to some integer
11352 	 multiple of the precision of the type can be ignored.  */
11353       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11354 	  && TREE_CODE (arg0) == RROTATE_EXPR
11355 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11356 	  && wi::umod_trunc (wi::to_wide (arg1)
11357 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
11358 			     prec) == 0)
11359 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
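      /* E.g. with a 32-bit type, (x r>> 5) r>> 27 collapses to x,
	 since 5 + 27 == 32 (an illustrative sketch).  */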
11360 
11361       return NULL_TREE;
11362 
11363     case MIN_EXPR:
11364     case MAX_EXPR:
11365       goto associate;
11366 
11367     case TRUTH_ANDIF_EXPR:
11368       /* Note that the operands of this must be ints
11369 	 and their values must be 0 or 1.
11370 	 ("true" is a fixed value perhaps depending on the language.)  */
11371       /* If first arg is constant zero, return it.  */
11372       if (integer_zerop (arg0))
11373 	return fold_convert_loc (loc, type, arg0);
11374       /* FALLTHRU */
11375     case TRUTH_AND_EXPR:
11376       /* If either arg is constant true, drop it.  */
11377       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11378 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11379       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11380 	  /* Preserve sequence points.  */
11381 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11382 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11383       /* If second arg is constant zero, result is zero, but first arg
11384 	 must be evaluated.  */
11385       if (integer_zerop (arg1))
11386 	return omit_one_operand_loc (loc, type, arg1, arg0);
11387       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11388 	 case will be handled here.  */
11389       if (integer_zerop (arg0))
11390 	return omit_one_operand_loc (loc, type, arg0, arg1);
11391 
11392       /* !X && X is always false.  */
11393       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11394 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11395 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11396       /* X && !X is always false.  */
11397       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11398 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11399 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11400 
11401       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
11402 	 means A >= Y && A != MAX, but in this case we know that
11403 	 A < X <= MAX.  */
11404 
11405       if (!TREE_SIDE_EFFECTS (arg0)
11406 	  && !TREE_SIDE_EFFECTS (arg1))
11407 	{
11408 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11409 	  if (tem && !operand_equal_p (tem, arg0, 0))
11410 	    return fold_build2_loc (loc, code, type, tem, arg1);
11411 
11412 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11413 	  if (tem && !operand_equal_p (tem, arg1, 0))
11414 	    return fold_build2_loc (loc, code, type, arg0, tem);
11415 	}
11416 
11417       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11418           != NULL_TREE)
11419         return tem;
11420 
11421       return NULL_TREE;
11422 
11423     case TRUTH_ORIF_EXPR:
11424       /* Note that the operands of this must be ints
11425 	 and their values must be 0 or true.
11426 	 ("true" is a fixed value perhaps depending on the language.)  */
11427       /* If first arg is constant true, return it.  */
11428       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11429 	return fold_convert_loc (loc, type, arg0);
11430       /* FALLTHRU */
11431     case TRUTH_OR_EXPR:
11432       /* If either arg is constant zero, drop it.  */
11433       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11434 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11435       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11436 	  /* Preserve sequence points.  */
11437 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11438 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11439       /* If second arg is constant true, result is true, but we must
11440 	 evaluate first arg.  */
11441       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11442 	return omit_one_operand_loc (loc, type, arg1, arg0);
11443       /* Likewise for first arg, but note this only occurs here for
11444 	 TRUTH_OR_EXPR.  */
11445       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11446 	return omit_one_operand_loc (loc, type, arg0, arg1);
11447 
11448       /* !X || X is always true.  */
11449       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11450 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11451 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11452       /* X || !X is always true.  */
11453       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11454 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11455 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11456 
11457       /* (X && !Y) || (!X && Y) is X ^ Y */
11458       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11459 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11460         {
11461 	  tree a0, a1, l0, l1, n0, n1;
11462 
11463 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11464 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11465 
11466 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11467 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11468 
11469 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11470 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11471 
11472 	  if ((operand_equal_p (n0, a0, 0)
11473 	       && operand_equal_p (n1, a1, 0))
11474 	      || (operand_equal_p (n0, a1, 0)
11475 		  && operand_equal_p (n1, a0, 0)))
11476 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11477 	}
11478 
11479       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11480           != NULL_TREE)
11481         return tem;
11482 
11483       return NULL_TREE;
11484 
11485     case TRUTH_XOR_EXPR:
11486       /* If the second arg is constant zero, drop it.  */
11487       if (integer_zerop (arg1))
11488 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11489       /* If the second arg is constant true, this is a logical inversion.  */
11490       if (integer_onep (arg1))
11491 	{
11492 	  tem = invert_truthvalue_loc (loc, arg0);
11493 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11494 	}
11495       /* Identical arguments cancel to zero.  */
11496       if (operand_equal_p (arg0, arg1, 0))
11497 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11498 
11499       /* !X ^ X is always true.  */
11500       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11501 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11502 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11503 
11504       /* X ^ !X is always true.  */
11505       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11506 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11507 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11508 
11509       return NULL_TREE;
11510 
11511     case EQ_EXPR:
11512     case NE_EXPR:
11513       STRIP_NOPS (arg0);
11514       STRIP_NOPS (arg1);
11515 
11516       tem = fold_comparison (loc, code, type, op0, op1);
11517       if (tem != NULL_TREE)
11518 	return tem;
11519 
11520       /* bool_var != 1 becomes !bool_var. */
11521       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11522           && code == NE_EXPR)
11523         return fold_convert_loc (loc, type,
11524 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11525 						  TREE_TYPE (arg0), arg0));
11526 
11527       /* bool_var == 0 becomes !bool_var. */
11528       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11529           && code == EQ_EXPR)
11530         return fold_convert_loc (loc, type,
11531 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11532 						  TREE_TYPE (arg0), arg0));
11533 
11534       /* !exp != 0 becomes !exp */
11535       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11536 	  && code == NE_EXPR)
11537         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11538 
11539       /* If this is an EQ or NE comparison with zero and ARG0 is
11540 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
11541 	 two operations, but the latter can be done in one less insn
11542 	 on machines that have only two-operand insns or on which a
11543 	 constant cannot be the first operand.  */
11544       if (TREE_CODE (arg0) == BIT_AND_EXPR
11545 	  && integer_zerop (arg1))
11546 	{
11547 	  tree arg00 = TREE_OPERAND (arg0, 0);
11548 	  tree arg01 = TREE_OPERAND (arg0, 1);
11549 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
11550 	      && integer_onep (TREE_OPERAND (arg00, 0)))
11551 	    {
11552 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11553 					  arg01, TREE_OPERAND (arg00, 1));
11554 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11555 				     build_one_cst (TREE_TYPE (arg0)));
11556 	      return fold_build2_loc (loc, code, type,
11557 				      fold_convert_loc (loc, TREE_TYPE (arg1),
11558 							tem), arg1);
11559 	    }
11560 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
11561 		   && integer_onep (TREE_OPERAND (arg01, 0)))
11562 	    {
11563 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11564 					  arg00, TREE_OPERAND (arg01, 1));
11565 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11566 				     build_one_cst (TREE_TYPE (arg0)));
11567 	      return fold_build2_loc (loc, code, type,
11568 				      fold_convert_loc (loc, TREE_TYPE (arg1),
11569 							tem), arg1);
11570 	    }
11571 	}
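      /* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0
	 (an illustrative sketch of the rewrite described above).  */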
11572 
11573       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11574 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
11575 	 a single bit.  */
11576       if (TREE_CODE (arg0) == BIT_AND_EXPR
11577 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11578 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11579 	     == INTEGER_CST
11580 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11581 	  && integer_zerop (arg1))
11582 	{
11583 	  tree itype = TREE_TYPE (arg0);
11584 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11585 	  prec = TYPE_PRECISION (itype);
11586 
11587 	  /* Check for a valid shift count.  */
11588 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
11589 	    {
11590 	      tree arg01 = TREE_OPERAND (arg0, 1);
11591 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11592 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11593 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11594 		 can be rewritten as (X & (C2 << C1)) != 0.  */
11595 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11596 		{
11597 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11598 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11599 		  return fold_build2_loc (loc, code, type, tem,
11600 					  fold_convert_loc (loc, itype, arg1));
11601 		}
11602 	      /* Otherwise, for signed (arithmetic) shifts,
11603 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11604 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
11605 	      else if (!TYPE_UNSIGNED (itype))
11606 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11607 				    arg000, build_int_cst (itype, 0));
11608 	      /* Otherwise, for unsigned (logical) shifts,
11609 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11610 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
11611 	      else
11612 		return omit_one_operand_loc (loc, type,
11613 					 code == EQ_EXPR ? integer_one_node
11614 							 : integer_zero_node,
11615 					 arg000);
11616 	    }
11617 	}
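      /* Illustrative sketches of the three outcomes above, assuming a
	 32-bit type:
	     ((x >> 3) & 4) != 0   ->  (x & (4 << 3)) != 0
	     ((x >> 29) & 8) != 0  ->  x < 0   (signed x: tests bit 31)
	     ((x >> 29) & 8) != 0  ->  false   (unsigned x)  */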
11618 
11619       /* If this is a comparison of a field, we may be able to simplify it.  */
11620       if ((TREE_CODE (arg0) == COMPONENT_REF
11621 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
11622 	  /* Handle the constant case even without -O
11623 	     to make sure the warnings are given.  */
11624 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11625 	{
11626 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11627 	  if (t1)
11628 	    return t1;
11629 	}
11630 
11631       /* Optimize comparisons of strlen vs zero to a compare of the
11632 	 first character of the string vs zero.  To wit,
11633 		strlen(ptr) == 0   =>  *ptr == 0
11634 		strlen(ptr) != 0   =>  *ptr != 0
11635 	 Other cases should reduce to one of these two (or a constant)
11636 	 due to the return value of strlen being unsigned.  */
11637       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11638 	{
11639 	  tree fndecl = get_callee_fndecl (arg0);
11640 
11641 	  if (fndecl
11642 	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11643 	      && call_expr_nargs (arg0) == 1
11644 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11645 		  == POINTER_TYPE))
11646 	    {
11647 	      tree ptrtype
11648 		= build_pointer_type (build_qualified_type (char_type_node,
11649 							    TYPE_QUAL_CONST));
11650 	      tree ptr = fold_convert_loc (loc, ptrtype,
11651 					   CALL_EXPR_ARG (arg0, 0));
11652 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
11653 	      return fold_build2_loc (loc, code, type, iref,
11654 				      build_int_cst (TREE_TYPE (iref), 0));
11655 	    }
11656 	}
11657 
11658       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11659 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
11660       if (TREE_CODE (arg0) == RSHIFT_EXPR
11661 	  && integer_zerop (arg1)
11662 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11663 	{
11664 	  tree arg00 = TREE_OPERAND (arg0, 0);
11665 	  tree arg01 = TREE_OPERAND (arg0, 1);
11666 	  tree itype = TREE_TYPE (arg00);
11667 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
11668 	    {
11669 	      if (TYPE_UNSIGNED (itype))
11670 		{
11671 		  itype = signed_type_for (itype);
11672 		  arg00 = fold_convert_loc (loc, itype, arg00);
11673 		}
11674 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11675 				  type, arg00, build_zero_cst (itype));
11676 	    }
11677 	}
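      /* E.g. for 32-bit x, (x >> 31) != 0 becomes x < 0 and
	 (x >> 31) == 0 becomes x >= 0, going through signed_type_for
	 first when x is unsigned (an illustrative sketch).  */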
11678 
11679       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11680 	 (X & C) == 0 when C is a single bit.  */
11681       if (TREE_CODE (arg0) == BIT_AND_EXPR
11682 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11683 	  && integer_zerop (arg1)
11684 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
11685 	{
11686 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11687 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11688 				 TREE_OPERAND (arg0, 1));
11689 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11690 				  type, tem,
11691 				  fold_convert_loc (loc, TREE_TYPE (arg0),
11692 						    arg1));
11693 	}
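      /* E.g. (~x & 16) == 0 becomes (x & 16) != 0: 16 is a single bit,
	 so the test merely flips (an illustrative sketch).  */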
11694 
11695       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11696 	 constant C is a power of two, i.e. a single bit.  */
11697       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11698 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11699 	  && integer_zerop (arg1)
11700 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11701 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11702 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11703 	{
11704 	  tree arg00 = TREE_OPERAND (arg0, 0);
11705 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11706 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
11707 	}
11708 
11709       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11710 	 when C is a power of two, i.e. a single bit.  */
11711       if (TREE_CODE (arg0) == BIT_AND_EXPR
11712 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11713 	  && integer_zerop (arg1)
11714 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11715 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11716 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11717 	{
11718 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11719 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11720 			     arg000, TREE_OPERAND (arg0, 1));
11721 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11722 			      tem, build_int_cst (TREE_TYPE (tem), 0));
11723 	}
11724 
11725       if (integer_zerop (arg1)
11726 	  && tree_expr_nonzero_p (arg0))
11727         {
11728 	  tree res = constant_boolean_node (code == NE_EXPR, type);
11729 	  return omit_one_operand_loc (loc, type, res, arg0);
11730 	}
11731 
11732       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
11733       if (TREE_CODE (arg0) == BIT_AND_EXPR
11734 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11735 	{
11736 	  tree arg00 = TREE_OPERAND (arg0, 0);
11737 	  tree arg01 = TREE_OPERAND (arg0, 1);
11738 	  tree arg10 = TREE_OPERAND (arg1, 0);
11739 	  tree arg11 = TREE_OPERAND (arg1, 1);
11740 	  tree itype = TREE_TYPE (arg0);
11741 
11742 	  if (operand_equal_p (arg01, arg11, 0))
11743 	    {
11744 	      tem = fold_convert_loc (loc, itype, arg10);
11745 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11746 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11747 	      return fold_build2_loc (loc, code, type, tem,
11748 				      build_zero_cst (itype));
11749 	    }
11750 	  if (operand_equal_p (arg01, arg10, 0))
11751 	    {
11752 	      tem = fold_convert_loc (loc, itype, arg11);
11753 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11754 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11755 	      return fold_build2_loc (loc, code, type, tem,
11756 				      build_zero_cst (itype));
11757 	    }
11758 	  if (operand_equal_p (arg00, arg11, 0))
11759 	    {
11760 	      tem = fold_convert_loc (loc, itype, arg10);
11761 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11762 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11763 	      return fold_build2_loc (loc, code, type, tem,
11764 				      build_zero_cst (itype));
11765 	    }
11766 	  if (operand_equal_p (arg00, arg10, 0))
11767 	    {
11768 	      tem = fold_convert_loc (loc, itype, arg11);
11769 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11770 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11771 	      return fold_build2_loc (loc, code, type, tem,
11772 				      build_zero_cst (itype));
11773 	    }
11774 	}
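      /* E.g. (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0, saving one
	 AND on most targets (an illustrative sketch).  */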
11775 
11776       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11777 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
11778 	{
11779 	  tree arg00 = TREE_OPERAND (arg0, 0);
11780 	  tree arg01 = TREE_OPERAND (arg0, 1);
11781 	  tree arg10 = TREE_OPERAND (arg1, 0);
11782 	  tree arg11 = TREE_OPERAND (arg1, 1);
11783 	  tree itype = TREE_TYPE (arg0);
11784 
11785 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11786 	     operand_equal_p guarantees no side-effects so we don't need
11787 	     to use omit_one_operand on Z.  */
11788 	  if (operand_equal_p (arg01, arg11, 0))
11789 	    return fold_build2_loc (loc, code, type, arg00,
11790 				    fold_convert_loc (loc, TREE_TYPE (arg00),
11791 						      arg10));
11792 	  if (operand_equal_p (arg01, arg10, 0))
11793 	    return fold_build2_loc (loc, code, type, arg00,
11794 				    fold_convert_loc (loc, TREE_TYPE (arg00),
11795 						      arg11));
11796 	  if (operand_equal_p (arg00, arg11, 0))
11797 	    return fold_build2_loc (loc, code, type, arg01,
11798 				    fold_convert_loc (loc, TREE_TYPE (arg01),
11799 						      arg10));
11800 	  if (operand_equal_p (arg00, arg10, 0))
11801 	    return fold_build2_loc (loc, code, type, arg01,
11802 				    fold_convert_loc (loc, TREE_TYPE (arg01),
11803 						      arg11));
11804 
11805 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
11806 	  if (TREE_CODE (arg01) == INTEGER_CST
11807 	      && TREE_CODE (arg11) == INTEGER_CST)
11808 	    {
11809 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11810 				     fold_convert_loc (loc, itype, arg11));
11811 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11812 	      return fold_build2_loc (loc, code, type, tem,
11813 				      fold_convert_loc (loc, itype, arg10));
11814 	    }
11815 	}
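      /* E.g. (x ^ 3) == (y ^ 5) becomes (x ^ 6) == y, merging the two
	 constants, since 3 ^ 5 == 6 (an illustrative sketch).  */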
11816 
11817       /* Attempt to simplify equality/inequality comparisons of complex
11818 	 values.  Only lower the comparison if the result is known or
11819 	 can be simplified to a single scalar comparison.  */
11820       if ((TREE_CODE (arg0) == COMPLEX_EXPR
11821 	   || TREE_CODE (arg0) == COMPLEX_CST)
11822 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
11823 	      || TREE_CODE (arg1) == COMPLEX_CST))
11824 	{
11825 	  tree real0, imag0, real1, imag1;
11826 	  tree rcond, icond;
11827 
11828 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
11829 	    {
11830 	      real0 = TREE_OPERAND (arg0, 0);
11831 	      imag0 = TREE_OPERAND (arg0, 1);
11832 	    }
11833 	  else
11834 	    {
11835 	      real0 = TREE_REALPART (arg0);
11836 	      imag0 = TREE_IMAGPART (arg0);
11837 	    }
11838 
11839 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
11840 	    {
11841 	      real1 = TREE_OPERAND (arg1, 0);
11842 	      imag1 = TREE_OPERAND (arg1, 1);
11843 	    }
11844 	  else
11845 	    {
11846 	      real1 = TREE_REALPART (arg1);
11847 	      imag1 = TREE_IMAGPART (arg1);
11848 	    }
11849 
11850 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11851 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11852 	    {
11853 	      if (integer_zerop (rcond))
11854 		{
11855 		  if (code == EQ_EXPR)
11856 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11857 					      imag0, imag1);
11858 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11859 		}
11860 	      else
11861 		{
11862 		  if (code == NE_EXPR)
11863 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11864 					      imag0, imag1);
11865 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11866 		}
11867 	    }
11868 
11869 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11870 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11871 	    {
11872 	      if (integer_zerop (icond))
11873 		{
11874 		  if (code == EQ_EXPR)
11875 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11876 					      real0, real1);
11877 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11878 		}
11879 	      else
11880 		{
11881 		  if (code == NE_EXPR)
11882 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11883 					      real0, real1);
11884 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11885 		}
11886 	    }
11887 	}
11888 
11889       return NULL_TREE;
11890 
11891     case LT_EXPR:
11892     case GT_EXPR:
11893     case LE_EXPR:
11894     case GE_EXPR:
11895       tem = fold_comparison (loc, code, type, op0, op1);
11896       if (tem != NULL_TREE)
11897 	return tem;
11898 
11899       /* Transform comparisons of the form X +- C CMP X.  */
11900       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11901 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11902 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11903 	  && !HONOR_SNANS (arg0))
11904 	{
11905 	  tree arg01 = TREE_OPERAND (arg0, 1);
11906 	  enum tree_code code0 = TREE_CODE (arg0);
11907 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11908 
11909 	  /* (X - c) > X becomes false.  */
11910 	  if (code == GT_EXPR
11911 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11912 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11913 	    return constant_boolean_node (0, type);
11914 
11915 	  /* Likewise (X + c) < X becomes false.  */
11916 	  if (code == LT_EXPR
11917 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11918 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11919 	    return constant_boolean_node (0, type);
11920 
11921 	  /* Convert (X - c) <= X to true.  */
11922 	  if (!HONOR_NANS (arg1)
11923 	      && code == LE_EXPR
11924 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11925 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11926 	    return constant_boolean_node (1, type);
11927 
11928 	  /* Convert (X + c) >= X to true.  */
11929 	  if (!HONOR_NANS (arg1)
11930 	      && code == GE_EXPR
11931 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11932 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11933 	    return constant_boolean_node (1, type);
11934 	}
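      /* E.g. for double x, (x - 1.0) > x folds to false even when NaNs
	 may occur, because an ordered comparison involving a NaN is
	 false anyway, whereas (x - 1.0) <= x folds to true only when
	 NaNs are not honored (an illustrative sketch).  */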
11935 
11936       /* If we are comparing an ABS_EXPR with a constant, we can
11937 	 convert all the cases into explicit comparisons, but they may
11938 	 well not be faster than doing the ABS and one comparison.
11939 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11940 	 and a comparison, and is probably faster.  */
11941       if (code == LE_EXPR
11942 	  && TREE_CODE (arg1) == INTEGER_CST
11943 	  && TREE_CODE (arg0) == ABS_EXPR
11944 	  && ! TREE_SIDE_EFFECTS (arg0)
11945 	  && (tem = negate_expr (arg1)) != 0
11946 	  && TREE_CODE (tem) == INTEGER_CST
11947 	  && !TREE_OVERFLOW (tem))
11948 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11949 			    build2 (GE_EXPR, type,
11950 				    TREE_OPERAND (arg0, 0), tem),
11951 			    build2 (LE_EXPR, type,
11952 				    TREE_OPERAND (arg0, 0), arg1));
11953 
11954       /* Convert ABS_EXPR<x> >= 0 to true.  */
11955       strict_overflow_p = false;
11956       if (code == GE_EXPR
11957 	  && (integer_zerop (arg1)
11958 	      || (! HONOR_NANS (arg0)
11959 		  && real_zerop (arg1)))
11960 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11961 	{
11962 	  if (strict_overflow_p)
11963 	    fold_overflow_warning (("assuming signed overflow does not occur "
11964 				    "when simplifying comparison of "
11965 				    "absolute value and zero"),
11966 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11967 	  return omit_one_operand_loc (loc, type,
11968 				       constant_boolean_node (true, type),
11969 				       arg0);
11970 	}
11971 
11972       /* Convert ABS_EXPR<x> < 0 to false.  */
11973       strict_overflow_p = false;
11974       if (code == LT_EXPR
11975 	  && (integer_zerop (arg1) || real_zerop (arg1))
11976 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11977 	{
11978 	  if (strict_overflow_p)
11979 	    fold_overflow_warning (("assuming signed overflow does not occur "
11980 				    "when simplifying comparison of "
11981 				    "absolute value and zero"),
11982 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11983 	  return omit_one_operand_loc (loc, type,
11984 				       constant_boolean_node (false, type),
11985 				       arg0);
11986 	}
11987 
11988       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11989 	 and similarly for >= into !=.  */
11990       if ((code == LT_EXPR || code == GE_EXPR)
11991 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11992 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11993 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11994 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11995 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11996 				   TREE_OPERAND (arg1, 1)),
11997 			   build_zero_cst (TREE_TYPE (arg0)));
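      /* E.g. for unsigned x, x < (1U << n) becomes (x >> n) == 0 and
	 x >= (1U << n) becomes (x >> n) != 0 (an illustrative
	 sketch).  */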
11998 
11999       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
12000 	 otherwise Y might be >= # of bits in X's type and thus e.g.
12001 	 (unsigned char) (1 << Y) for Y 15 might be 0.
12002 	 If the cast is widening, then 1 << Y should have unsigned type,
12003 	 otherwise if Y is number of bits in the signed shift type minus 1,
12004 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
12005 	 31 might be 0xffffffff80000000.  */
12006       if ((code == LT_EXPR || code == GE_EXPR)
12007 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
12008 	  && CONVERT_EXPR_P (arg1)
12009 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12010 	  && (element_precision (TREE_TYPE (arg1))
12011 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12012 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12013 	      || (element_precision (TREE_TYPE (arg1))
12014 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12015 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12016 	{
12017 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12018 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12019 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12020 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12021 			     build_zero_cst (TREE_TYPE (arg0)));
12022 	}
12023 
12024       return NULL_TREE;
12025 
12026     case UNORDERED_EXPR:
12027     case ORDERED_EXPR:
12028     case UNLT_EXPR:
12029     case UNLE_EXPR:
12030     case UNGT_EXPR:
12031     case UNGE_EXPR:
12032     case UNEQ_EXPR:
12033     case LTGT_EXPR:
12034       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
12035       {
12036 	tree targ0 = strip_float_extensions (arg0);
12037 	tree targ1 = strip_float_extensions (arg1);
12038 	tree newtype = TREE_TYPE (targ0);
12039 
12040 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12041 	  newtype = TREE_TYPE (targ1);
12042 
12043 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12044 	  return fold_build2_loc (loc, code, type,
12045 			      fold_convert_loc (loc, newtype, targ0),
12046 			      fold_convert_loc (loc, newtype, targ1));
12047       }
12048 
12049       return NULL_TREE;
12050 
12051     case COMPOUND_EXPR:
12052       /* When pedantic, a compound expression can be neither an lvalue
12053 	 nor an integer constant expression.  */
12054       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12055 	return NULL_TREE;
12056       /* Don't let (0, 0) be a null pointer constant.  */
12057       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12058 				 : fold_convert_loc (loc, type, arg1);
12059       return pedantic_non_lvalue_loc (loc, tem);
12060 
12061     case ASSERT_EXPR:
12062       /* An ASSERT_EXPR should never be passed to fold_binary.  */
12063       gcc_unreachable ();
12064 
12065     default:
12066       return NULL_TREE;
12067     } /* switch (code) */
12068 }
12069 
12070 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12071    ((A & N) + B) & M -> (A + B) & M
12072    Similarly if (N & M) == 0,
12073    ((A | N) + B) & M -> (A + B) & M
12074    and for - instead of + (or unary - instead of +)
12075    and/or ^ instead of |.
12076    If B is constant and (B & M) == 0, fold into A & M.
12077 
12078    This function is a helper for match.pd patterns.  Return non-NULL
12079    type in which the simplified operation should be performed only
12080    if any optimization is possible.
12081 
12082    ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12083    then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12084    Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12085    +/-.  */
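/* A worked example (illustrative only): with M == 7, i.e. (1 << 3) - 1,
   and the expression ((A & 15) + B) & 7, N == 15 satisfies (N & M) == M,
   so PMOP[0] becomes A, PMOP[1] stays B, and the caller can rebuild
   (A + B) & 7 in the type this function returns.  */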
12086 tree
12087 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12088 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
12089 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
12090 		   tree *pmop)
12091 {
12092   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12093   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12094   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12095   if (~cst1 == 0
12096       || (cst1 & (cst1 + 1)) != 0
12097       || !INTEGRAL_TYPE_P (type)
12098       || (!TYPE_OVERFLOW_WRAPS (type)
12099 	  && TREE_CODE (type) != INTEGER_TYPE)
12100       || (wi::max_value (type) & cst1) != cst1)
12101     return NULL_TREE;
12102 
12103   enum tree_code codes[2] = { code00, code01 };
12104   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12105   int which = 0;
12106   wide_int cst0;
12107 
12108   /* Now we know that arg0 is (C + D) or (C - D) or -C and
12109      arg1 (M) == (1LL << cst) - 1.
12110      Store C into PMOP[0] and D into PMOP[1].  */
12111   pmop[0] = arg00;
12112   pmop[1] = arg01;
12113   which = code != NEGATE_EXPR;
12114 
12115   for (; which >= 0; which--)
12116     switch (codes[which])
12117       {
12118       case BIT_AND_EXPR:
12119       case BIT_IOR_EXPR:
12120       case BIT_XOR_EXPR:
12121 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12122 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12123 	if (codes[which] == BIT_AND_EXPR)
12124 	  {
12125 	    if (cst0 != cst1)
12126 	      break;
12127 	  }
12128 	else if (cst0 != 0)
12129 	  break;
12130 	/* If C or D is of the form (A & N) where
12131 	   (N & M) == M, or of the form (A | N) or
12132 	   (A ^ N) where (N & M) == 0, replace it with A.  */
12133 	pmop[which] = arg0xx[2 * which];
12134 	break;
12135       case ERROR_MARK:
12136 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
12137 	  break;
12138 	/* If C or D is a constant N where (N & M) == 0, it can be
12139 	   omitted (replaced with 0).  */
12140 	if ((code == PLUS_EXPR
12141 	     || (code == MINUS_EXPR && which == 0))
12142 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
12143 	  pmop[which] = build_int_cst (type, 0);
12144 	/* Similarly, with C - N where (-N & M) == 0.  */
12145 	if (code == MINUS_EXPR
12146 	    && which == 1
12147 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
12148 	  pmop[which] = build_int_cst (type, 0);
12149 	break;
12150       default:
12151 	gcc_unreachable ();
12152       }
12153 
12154   /* Only build anything new if we optimized one or both arguments above.  */
12155   if (pmop[0] == arg00 && pmop[1] == arg01)
12156     return NULL_TREE;
12157 
12158   if (TYPE_OVERFLOW_WRAPS (type))
12159     return type;
12160   else
12161     return unsigned_type_for (type);
12162 }
12163 
12164 /* Used by contains_label_[p1].  */
12165 
12166 struct contains_label_data
12167 {
12168   hash_set<tree> *pset;
12169   bool inside_switch_p;
12170 };
12171 
12172 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
12173    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12174    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
12175 
12176 static tree
12177 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12178 {
12179   contains_label_data *d = (contains_label_data *) data;
12180   switch (TREE_CODE (*tp))
12181     {
12182     case LABEL_EXPR:
12183       return *tp;
12184 
12185     case CASE_LABEL_EXPR:
12186       if (!d->inside_switch_p)
12187 	return *tp;
12188       return NULL_TREE;
12189 
12190     case SWITCH_EXPR:
12191       if (!d->inside_switch_p)
12192 	{
12193 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12194 	    return *tp;
12195 	  d->inside_switch_p = true;
12196 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12197 	    return *tp;
12198 	  d->inside_switch_p = false;
12199 	  *walk_subtrees = 0;
12200 	}
12201       return NULL_TREE;
12202 
12203     case GOTO_EXPR:
12204       *walk_subtrees = 0;
12205       return NULL_TREE;
12206 
12207     default:
12208       return NULL_TREE;
12209     }
12210 }
12211 
12212 /* Return whether the sub-tree ST contains a label which is accessible from
12213    outside the sub-tree.  */
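/* E.g. (a sketch): in "c ? ({ l: 0; }) : 1" the label l in the dead arm
   may still be reached by a goto outside that arm, so the arm cannot
   simply be dropped; fold_ternary_loc below checks this.  */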
12214 
12215 static bool
12216 contains_label_p (tree st)
12217 {
12218   hash_set<tree> pset;
12219   contains_label_data data = { &pset, false };
12220   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12221 }
12222 
12223 /* Fold a ternary expression of code CODE and type TYPE with operands
12224    OP0, OP1, and OP2.  Return the folded expression if folding is
12225    successful.  Otherwise, return NULL_TREE.  */
12226 
12227 tree
12228 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12229 		  tree op0, tree op1, tree op2)
12230 {
12231   tree tem;
12232   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12233   enum tree_code_class kind = TREE_CODE_CLASS (code);
12234 
12235   gcc_assert (IS_EXPR_CODE_CLASS (kind)
12236 	      && TREE_CODE_LENGTH (code) == 3);
12237 
12238   /* If this is a commutative operation, and OP0 is a constant, move it
12239      to OP1 to reduce the number of tests below.  */
12240   if (commutative_ternary_tree_code (code)
12241       && tree_swap_operands_p (op0, op1))
12242     return fold_build3_loc (loc, code, type, op1, op0, op2);
12243 
12244   tem = generic_simplify (loc, code, type, op0, op1, op2);
12245   if (tem)
12246     return tem;
12247 
12248   /* Strip any conversions that don't change the mode.  This is safe
12249      for every expression, except for a comparison expression because
12250      its signedness is derived from its operands.  So, in the latter
12251      case, only strip conversions that don't change the signedness.
12252 
12253      Note that this is done as an internal manipulation within the
12254      constant folder, in order to find the simplest representation of
12255      the arguments so that their form can be studied.  In any cases,
12256      the appropriate type conversions should be put back in the tree
12257      that will get out of the constant folder.  */
12258   if (op0)
12259     {
12260       arg0 = op0;
12261       STRIP_NOPS (arg0);
12262     }
12263 
12264   if (op1)
12265     {
12266       arg1 = op1;
12267       STRIP_NOPS (arg1);
12268     }
12269 
12270   if (op2)
12271     {
12272       arg2 = op2;
12273       STRIP_NOPS (arg2);
12274     }
12275 
12276   switch (code)
12277     {
12278     case COMPONENT_REF:
12279       if (TREE_CODE (arg0) == CONSTRUCTOR
12280 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12281 	{
12282 	  unsigned HOST_WIDE_INT idx;
12283 	  tree field, value;
12284 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12285 	    if (field == arg1)
12286 	      return value;
12287 	}
12288       return NULL_TREE;
12289 
12290     case COND_EXPR:
12291     case VEC_COND_EXPR:
12292       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12293 	 so all simple results must be passed through pedantic_non_lvalue.  */
12294       if (TREE_CODE (arg0) == INTEGER_CST)
12295 	{
12296 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
12297 	  tem = integer_zerop (arg0) ? op2 : op1;
12298 	  /* Only optimize constant conditions when the selected branch
12299 	     has the same type as the COND_EXPR.  This avoids optimizing
12300 	     away "c ? x : throw", where the throw has a void type.
12301 	     Also avoid throwing away an operand that contains a label.  */
12302           if ((!TREE_SIDE_EFFECTS (unused_op)
12303                || !contains_label_p (unused_op))
12304               && (! VOID_TYPE_P (TREE_TYPE (tem))
12305                   || VOID_TYPE_P (type)))
12306 	    return pedantic_non_lvalue_loc (loc, tem);
12307 	  return NULL_TREE;
12308 	}
12309       else if (TREE_CODE (arg0) == VECTOR_CST)
12310 	{
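	  /* A constant vector condition selects elementwise: a -1 mask
	     element takes from op1, a 0 takes from op2.  E.g. (a sketch)
	     mask {-1, 0, -1, 0} over {a0,a1,a2,a3} and {b0,b1,b2,b3}
	     yields the permutation {a0, b1, a2, b3}.  */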
12311 	  unsigned HOST_WIDE_INT nelts;
12312 	  if ((TREE_CODE (arg1) == VECTOR_CST
12313 	       || TREE_CODE (arg1) == CONSTRUCTOR)
12314 	      && (TREE_CODE (arg2) == VECTOR_CST
12315 		  || TREE_CODE (arg2) == CONSTRUCTOR)
12316 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12317 	    {
12318 	      vec_perm_builder sel (nelts, nelts, 1);
12319 	      for (unsigned int i = 0; i < nelts; i++)
12320 		{
12321 		  tree val = VECTOR_CST_ELT (arg0, i);
12322 		  if (integer_all_onesp (val))
12323 		    sel.quick_push (i);
12324 		  else if (integer_zerop (val))
12325 		    sel.quick_push (nelts + i);
12326 		  else /* Currently unreachable.  */
12327 		    return NULL_TREE;
12328 		}
12329 	      vec_perm_indices indices (sel, 2, nelts);
12330 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
12331 	      if (t != NULL_TREE)
12332 		return t;
12333 	    }
12334 	}
12335 
12336       /* If we have A op B ? A : C, we may be able to convert this to a
12337 	 simpler expression, depending on the operation and the values
12338 	 of B and C.  Signed zeros prevent all of these transformations,
12339 	 for reasons given above each one.
12340 
12341          Also try swapping the arguments and inverting the conditional.  */
12342       if (COMPARISON_CLASS_P (arg0)
12343 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12344 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12345 	{
12346 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12347 	  if (tem)
12348 	    return tem;
12349 	}
12350 
12351       if (COMPARISON_CLASS_P (arg0)
12352 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12353 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12354 	{
12355 	  location_t loc0 = expr_location_or (arg0, loc);
12356 	  tem = fold_invert_truthvalue (loc0, arg0);
12357 	  if (tem && COMPARISON_CLASS_P (tem))
12358 	    {
12359 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12360 	      if (tem)
12361 		return tem;
12362 	    }
12363 	}
12364 
12365       /* If the second operand is simpler than the third, swap them
12366 	 since that produces better jump optimization results.  */
12367       if (truth_value_p (TREE_CODE (arg0))
12368 	  && tree_swap_operands_p (op1, op2))
12369 	{
12370 	  location_t loc0 = expr_location_or (arg0, loc);
12371 	  /* See if this can be inverted.  If it can't, possibly because
12372 	     it was a floating-point inequality comparison, don't do
12373 	     anything.  */
12374 	  tem = fold_invert_truthvalue (loc0, arg0);
12375 	  if (tem)
12376 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
12377 	}
12378 
12379       /* Convert A ? 1 : 0 to simply A.  */
12380       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12381 				 : (integer_onep (op1)
12382 				    && !VECTOR_TYPE_P (type)))
12383 	  && integer_zerop (op2)
12384 	  /* If we try to convert OP0 to our type, the
12385 	     call to fold will try to move the conversion inside
12386 	     a COND, which will recurse.  In that case, the COND_EXPR
12387 	     is probably the best choice, so leave it alone.  */
12388 	  && type == TREE_TYPE (arg0))
12389 	return pedantic_non_lvalue_loc (loc, arg0);
12390 
12391       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
12392 	 over COND_EXPR in cases such as floating point comparisons.  */
12393       if (integer_zerop (op1)
12394 	  && code == COND_EXPR
12395 	  && integer_onep (op2)
12396 	  && !VECTOR_TYPE_P (type)
12397 	  && truth_value_p (TREE_CODE (arg0)))
12398 	return pedantic_non_lvalue_loc (loc,
12399 				    fold_convert_loc (loc, type,
12400 					      invert_truthvalue_loc (loc,
12401 								     arg0)));
12402 
12403       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
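      /* E.g. (a sketch): for 32-bit int a, "a < 0 ? 0x80000000 : 0"
	 folds to "a & 0x80000000".  */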
12404       if (TREE_CODE (arg0) == LT_EXPR
12405 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12406 	  && integer_zerop (op2)
12407 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12408 	{
12409 	  /* sign_bit_p looks through both zero and sign extensions,
12410 	     but for this optimization only sign extensions are
12411 	     usable.  */
12412 	  tree tem2 = TREE_OPERAND (arg0, 0);
12413 	  while (tem != tem2)
12414 	    {
12415 	      if (TREE_CODE (tem2) != NOP_EXPR
12416 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12417 		{
12418 		  tem = NULL_TREE;
12419 		  break;
12420 		}
12421 	      tem2 = TREE_OPERAND (tem2, 0);
12422 	    }
12423 	  /* sign_bit_p only checks ARG1 bits within A's precision.
12424 	     If <sign bit of A> has a wider type than A, bits outside
12425 	     of A's precision in <sign bit of A> need to be checked.
12426 	     If they are all 0, this optimization needs to be done
12427 	     in unsigned A's type; if they are all 1, in signed A's
12428 	     type; otherwise this can't be done.  */
12429 	  if (tem
12430 	      && TYPE_PRECISION (TREE_TYPE (tem))
12431 		 < TYPE_PRECISION (TREE_TYPE (arg1))
12432 	      && TYPE_PRECISION (TREE_TYPE (tem))
12433 		 < TYPE_PRECISION (type))
12434 	    {
12435 	      int inner_width, outer_width;
12436 	      tree tem_type;
12437 
12438 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12439 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12440 	      if (outer_width > TYPE_PRECISION (type))
12441 		outer_width = TYPE_PRECISION (type);
12442 
12443 	      wide_int mask = wi::shifted_mask
12444 		(inner_width, outer_width - inner_width, false,
12445 		 TYPE_PRECISION (TREE_TYPE (arg1)));
12446 
12447 	      wide_int common = mask & wi::to_wide (arg1);
12448 	      if (common == mask)
12449 		{
12450 		  tem_type = signed_type_for (TREE_TYPE (tem));
12451 		  tem = fold_convert_loc (loc, tem_type, tem);
12452 		}
12453 	      else if (common == 0)
12454 		{
12455 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
12456 		  tem = fold_convert_loc (loc, tem_type, tem);
12457 		}
12458 	      else
12459 		tem = NULL;
12460 	    }
12461 
12462 	  if (tem)
12463 	    return
12464 	      fold_convert_loc (loc, type,
12465 				fold_build2_loc (loc, BIT_AND_EXPR,
12466 					     TREE_TYPE (tem), tem,
12467 					     fold_convert_loc (loc,
12468 							       TREE_TYPE (tem),
12469 							       arg1)));
12470 	}
12471 
12472       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
12473 	 already handled above.  */
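      /* For instance (a sketch): "((a >> 3) & 1) ? 8 : 0" folds to
	 "a & 8", since bit 3 of a decides between 1 << 3 and 0.  */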
12474       if (TREE_CODE (arg0) == BIT_AND_EXPR
12475 	  && integer_onep (TREE_OPERAND (arg0, 1))
12476 	  && integer_zerop (op2)
12477 	  && integer_pow2p (arg1))
12478 	{
12479 	  tree tem = TREE_OPERAND (arg0, 0);
12480 	  STRIP_NOPS (tem);
12481 	  if (TREE_CODE (tem) == RSHIFT_EXPR
12482 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12483               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12484 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12485 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
12486 				    fold_convert_loc (loc, type,
12487 						      TREE_OPERAND (tem, 0)),
12488 				    op1);
12489 	}
12490 
12491       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
12492 	 is probably obsolete because the first operand should be a
12493 	 truth value (that's why we have the two cases above), but let's
12494 	 leave it in until we can confirm this for all front-ends.  */
12495       if (integer_zerop (op2)
12496 	  && TREE_CODE (arg0) == NE_EXPR
12497 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12498 	  && integer_pow2p (arg1)
12499 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12500 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12501 			      arg1, OEP_ONLY_CONST)
12502 	  /* operand_equal_p compares just value, not precision, so e.g.
12503 	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12504 	     second operand 32-bit -128, which is not a power of two (or vice
12505 	     versa).  */
12506 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12507 	return pedantic_non_lvalue_loc (loc,
12508 					fold_convert_loc (loc, type,
12509 							  TREE_OPERAND (arg0,
12510 									0)));
12511 
12512       /* Disable the transformations below for vectors, since
12513 	 fold_binary_op_with_conditional_arg may undo them immediately,
12514 	 yielding an infinite loop.  */
12515       if (code == VEC_COND_EXPR)
12516 	return NULL_TREE;
12517 
12518       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
12519       if (integer_zerop (op2)
12520 	  && truth_value_p (TREE_CODE (arg0))
12521 	  && truth_value_p (TREE_CODE (arg1))
12522 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12523 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12524 							   : TRUTH_ANDIF_EXPR,
12525 				type, fold_convert_loc (loc, type, arg0), op1);
12526 
12527       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
12528       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12529 	  && truth_value_p (TREE_CODE (arg0))
12530 	  && truth_value_p (TREE_CODE (arg1))
12531 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12532 	{
12533 	  location_t loc0 = expr_location_or (arg0, loc);
12534 	  /* Only perform transformation if ARG0 is easily inverted.  */
12535 	  tem = fold_invert_truthvalue (loc0, arg0);
12536 	  if (tem)
12537 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
12538 					 ? BIT_IOR_EXPR
12539 					 : TRUTH_ORIF_EXPR,
12540 				    type, fold_convert_loc (loc, type, tem),
12541 				    op1);
12542 	}
12543 
12544       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
12545       if (integer_zerop (arg1)
12546 	  && truth_value_p (TREE_CODE (arg0))
12547 	  && truth_value_p (TREE_CODE (op2))
12548 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12549 	{
12550 	  location_t loc0 = expr_location_or (arg0, loc);
12551 	  /* Only perform transformation if ARG0 is easily inverted.  */
12552 	  tem = fold_invert_truthvalue (loc0, arg0);
12553 	  if (tem)
12554 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
12555 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12556 				    type, fold_convert_loc (loc, type, tem),
12557 				    op2);
12558 	}
12559 
12560       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
12561       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12562 	  && truth_value_p (TREE_CODE (arg0))
12563 	  && truth_value_p (TREE_CODE (op2))
12564 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12565 	return fold_build2_loc (loc, code == VEC_COND_EXPR
12566 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12567 				type, fold_convert_loc (loc, type, arg0), op2);
12568 
12569       return NULL_TREE;
12570 
12571     case CALL_EXPR:
12572       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
12573 	 of fold_ternary on them.  */
12574       gcc_unreachable ();
12575 
12576     case BIT_FIELD_REF:
12577       if (TREE_CODE (arg0) == VECTOR_CST
12578 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
12579 	      || (VECTOR_TYPE_P (type)
12580 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12581 	  && tree_fits_uhwi_p (op1)
12582 	  && tree_fits_uhwi_p (op2))
12583 	{
12584 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12585 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12586 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12587 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12588 
12589 	  if (n != 0
12590 	      && (idx % width) == 0
12591 	      && (n % width) == 0
12592 	      && known_le ((idx + n) / width,
12593 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12594 	    {
12595 	      idx = idx / width;
12596 	      n = n / width;
12597 
12598 	      if (TREE_CODE (arg0) == VECTOR_CST)
12599 		{
12600 		  if (n == 1)
12601 		    {
12602 		      tem = VECTOR_CST_ELT (arg0, idx);
12603 		      if (VECTOR_TYPE_P (type))
12604 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12605 		      return tem;
12606 		    }
12607 
12608 		  tree_vector_builder vals (type, n, 1);
12609 		  for (unsigned i = 0; i < n; ++i)
12610 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12611 		  return vals.build ();
12612 		}
12613 	    }
12614 	}
12615 
12616       /* On constants we can use native encode/interpret to constant
12617          fold (nearly) all BIT_FIELD_REFs.  */
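      /* A sketch of the idea: a byte-aligned BIT_FIELD_REF of 32 bits at
	 bit position 32 of a constant is folded by native-encoding bytes
	 4..7 of the constant and re-interpreting them in TYPE.  */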
12618       if (CONSTANT_CLASS_P (arg0)
12619 	  && can_native_interpret_type_p (type)
12620 	  && BITS_PER_UNIT == 8
12621 	  && tree_fits_uhwi_p (op1)
12622 	  && tree_fits_uhwi_p (op2))
12623 	{
12624 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12625 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12626 	  /* Limit ourselves to a reasonable amount of work.  To relax the
12627 	     other limitations we need bit-shifting of the buffer
12628 	     and rounding up the size.  */
12629 	  if (bitpos % BITS_PER_UNIT == 0
12630 	      && bitsize % BITS_PER_UNIT == 0
12631 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12632 	    {
12633 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12634 	      unsigned HOST_WIDE_INT len
12635 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12636 				      bitpos / BITS_PER_UNIT);
12637 	      if (len > 0
12638 		  && len * BITS_PER_UNIT >= bitsize)
12639 		{
12640 		  tree v = native_interpret_expr (type, b,
12641 						  bitsize / BITS_PER_UNIT);
12642 		  if (v)
12643 		    return v;
12644 		}
12645 	    }
12646 	}
12647 
12648       return NULL_TREE;
12649 
12650     case VEC_PERM_EXPR:
12651       /* Perform constant folding of VEC_PERM_EXPR.  */
12652       if (TREE_CODE (arg2) == VECTOR_CST
12653 	  && TREE_CODE (op0) == VECTOR_CST
12654 	  && TREE_CODE (op1) == VECTOR_CST)
12655 	{
12656 	  /* Build a vector of integers from the tree mask.  */
12657 	  vec_perm_builder builder;
12658 	  if (!tree_to_vec_perm_builder (&builder, arg2))
12659 	    return NULL_TREE;
12660 
12661 	  /* Create a vec_perm_indices for the integer vector.  */
12662 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12663 	  bool single_arg = (op0 == op1);
12664 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12665 	  return fold_vec_perm (type, op0, op1, sel);
12666 	}
12667       return NULL_TREE;
12668 
12669     case BIT_INSERT_EXPR:
12670       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
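      /* E.g. (a worked sketch): inserting the 8-bit value 0xab at bit
	 position 8 of the 32-bit constant 0x12345678 first masks out the
	 old bits, 0x12345678 & ~0x0000ff00 == 0x12340078, then ORs in the
	 zero-extended, shifted value 0xab00, giving 0x1234ab78.  */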
12671       if (TREE_CODE (arg0) == INTEGER_CST
12672 	  && TREE_CODE (arg1) == INTEGER_CST)
12673 	{
12674 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12675 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12676 	  wide_int tem = (wi::to_wide (arg0)
12677 			  & wi::shifted_mask (bitpos, bitsize, true,
12678 					      TYPE_PRECISION (type)));
12679 	  wide_int tem2
12680 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12681 				    bitsize), bitpos);
12682 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12683 	}
12684       else if (TREE_CODE (arg0) == VECTOR_CST
12685 	       && CONSTANT_CLASS_P (arg1)
12686 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12687 				      TREE_TYPE (arg1)))
12688 	{
12689 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12690 	  unsigned HOST_WIDE_INT elsize
12691 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12692 	  if (bitpos % elsize == 0)
12693 	    {
12694 	      unsigned k = bitpos / elsize;
12695 	      unsigned HOST_WIDE_INT nelts;
12696 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12697 		return arg0;
12698 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12699 		{
12700 		  tree_vector_builder elts (type, nelts, 1);
12701 		  elts.quick_grow (nelts);
12702 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12703 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12704 		  return elts.build ();
12705 		}
12706 	    }
12707 	}
12708       return NULL_TREE;
12709 
12710     default:
12711       return NULL_TREE;
12712     } /* switch (code) */
12713 }
12714 
12715 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12716    of an array (or vector).  *CTOR_IDX, if non-NULL, is updated with the
12717    constructor element index of the value returned.  If the element is
12718    not found, NULL_TREE is returned and *CTOR_IDX is updated to
12719    the index of the element after the ACCESS_INDEX position (which
12720    may be outside of the CTOR array).  */
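/* E.g. (a sketch): for the array CONSTRUCTOR { [0] = a, [2 ... 5] = b },
   ACCESS_INDEX 3 falls inside the RANGE_EXPR element, so b is returned
   and *CTOR_IDX, if non-NULL, is set to 1.  */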
12721 
12722 tree
12723 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12724 				 unsigned *ctor_idx)
12725 {
12726   tree index_type = NULL_TREE;
12727   signop index_sgn = UNSIGNED;
12728   offset_int low_bound = 0;
12729 
12730   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12731     {
12732       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12733       if (domain_type && TYPE_MIN_VALUE (domain_type))
12734 	{
12735 	  /* Static constructors for variably sized objects make no sense.  */
12736 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12737 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12738 	  /* ???  When it is obvious that the range is signed, treat it so.  */
12739 	  if (TYPE_UNSIGNED (index_type)
12740 	      && TYPE_MAX_VALUE (domain_type)
12741 	      && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12742 				  TYPE_MIN_VALUE (domain_type)))
12743 	    {
12744 	      index_sgn = SIGNED;
12745 	      low_bound
12746 		= offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12747 				    SIGNED);
12748 	    }
12749 	  else
12750 	    {
12751 	      index_sgn = TYPE_SIGN (index_type);
12752 	      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12753 	    }
12754 	}
12755     }
12756 
12757   if (index_type)
12758     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12759 			    index_sgn);
12760 
12761   offset_int index = low_bound;
12762   if (index_type)
12763     index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12764 
12765   offset_int max_index = index;
12766   unsigned cnt;
12767   tree cfield, cval;
12768   bool first_p = true;
12769 
12770   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12771     {
12772       /* An array constructor might explicitly set the index, or specify a
12773 	 range, or leave the index NULL, meaning that it is the next index
12774 	 after the previous one.  */
12775       if (cfield)
12776 	{
12777 	  if (TREE_CODE (cfield) == INTEGER_CST)
12778 	    max_index = index
12779 	      = offset_int::from (wi::to_wide (cfield), index_sgn);
12780 	  else
12781 	    {
12782 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12783 	      index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12784 					index_sgn);
12785 	      max_index
12786 	        = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12787 				    index_sgn);
12788 	      gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12789 	    }
12790 	}
12791       else if (!first_p)
12792 	{
12793 	  index = max_index + 1;
12794 	  if (index_type)
12795 	    index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12796 	  gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12797 	  max_index = index;
12798 	}
12799       else
12800 	first_p = false;
12801 
12802       /* Do we have a match?  */
12803       if (wi::cmp (access_index, index, index_sgn) >= 0)
12804 	{
12805 	  if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12806 	    {
12807 	      if (ctor_idx)
12808 		*ctor_idx = cnt;
12809 	      return cval;
12810 	    }
12811 	}
12812       else if (in_gimple_form)
12813 	/* We're past the element we are searching for.  Note that during
12814 	   parsing the elements might not be sorted.
12815 	   ???  We should use a binary search and a flag on the
12816 	   CONSTRUCTOR as to whether elements are sorted in declaration
12817 	   order.  */
12818 	break;
12819     }
12820   if (ctor_idx)
12821     *ctor_idx = cnt;
12822   return NULL_TREE;
12823 }
12824 
12825 /* Perform constant folding and related simplification of EXPR.
12826    The related simplifications include x*1 => x, x*0 => 0, etc.,
12827    and application of the associative law.
12828    NOP_EXPR conversions may be removed freely (as long as we
12829    are careful not to change the type of the overall expression).
12830    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12831    but we can constant-fold them if they have constant operands.  */
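/* E.g. (a sketch): fold on PLUS_EXPR <INTEGER_CST 1, INTEGER_CST 2> of
   type int dispatches to fold_binary_loc and yields INTEGER_CST 3, while
   a CALL_EXPR is routed through fold_call_expr instead.  */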
12832 
12833 #ifdef ENABLE_FOLD_CHECKING
12834 # define fold(x) fold_1 (x)
12835 static tree fold_1 (tree);
12836 static
12837 #endif
12838 tree
12839 fold (tree expr)
12840 {
12841   const tree t = expr;
12842   enum tree_code code = TREE_CODE (t);
12843   enum tree_code_class kind = TREE_CODE_CLASS (code);
12844   tree tem;
12845   location_t loc = EXPR_LOCATION (expr);
12846 
12847   /* Return right away if a constant.  */
12848   if (kind == tcc_constant)
12849     return t;
12850 
12851   /* CALL_EXPR-like objects with variable numbers of operands are
12852      treated specially.  */
12853   if (kind == tcc_vl_exp)
12854     {
12855       if (code == CALL_EXPR)
12856 	{
12857 	  tem = fold_call_expr (loc, expr, false);
12858 	  return tem ? tem : expr;
12859 	}
12860       return expr;
12861     }
12862 
12863   if (IS_EXPR_CODE_CLASS (kind))
12864     {
12865       tree type = TREE_TYPE (t);
12866       tree op0, op1, op2;
12867 
12868       switch (TREE_CODE_LENGTH (code))
12869 	{
12870 	case 1:
12871 	  op0 = TREE_OPERAND (t, 0);
12872 	  tem = fold_unary_loc (loc, code, type, op0);
12873 	  return tem ? tem : expr;
12874 	case 2:
12875 	  op0 = TREE_OPERAND (t, 0);
12876 	  op1 = TREE_OPERAND (t, 1);
12877 	  tem = fold_binary_loc (loc, code, type, op0, op1);
12878 	  return tem ? tem : expr;
12879 	case 3:
12880 	  op0 = TREE_OPERAND (t, 0);
12881 	  op1 = TREE_OPERAND (t, 1);
12882 	  op2 = TREE_OPERAND (t, 2);
12883 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12884 	  return tem ? tem : expr;
12885 	default:
12886 	  break;
12887 	}
12888     }
12889 
12890   switch (code)
12891     {
12892     case ARRAY_REF:
12893       {
12894 	tree op0 = TREE_OPERAND (t, 0);
12895 	tree op1 = TREE_OPERAND (t, 1);
12896 
12897 	if (TREE_CODE (op1) == INTEGER_CST
12898 	    && TREE_CODE (op0) == CONSTRUCTOR
12899 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12900 	  {
12901 	    tree val = get_array_ctor_element_at_index (op0,
12902 							wi::to_offset (op1));
12903 	    if (val)
12904 	      return val;
12905 	  }
12906 
12907 	return t;
12908       }
12909 
12910       /* Return a VECTOR_CST if possible.  */
12911     case CONSTRUCTOR:
12912       {
12913 	tree type = TREE_TYPE (t);
12914 	if (TREE_CODE (type) != VECTOR_TYPE)
12915 	  return t;
12916 
12917 	unsigned i;
12918 	tree val;
12919 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12920 	  if (! CONSTANT_CLASS_P (val))
12921 	    return t;
12922 
12923 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12924       }
12925 
12926     case CONST_DECL:
12927       return fold (DECL_INITIAL (t));
12928 
12929     default:
12930       return t;
12931     } /* switch (code) */
12932 }
12933 
12934 #ifdef ENABLE_FOLD_CHECKING
12935 #undef fold
12936 
12937 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12938 				hash_table<nofree_ptr_hash<const tree_node> > *);
12939 static void fold_check_failed (const_tree, const_tree);
12940 void print_fold_checksum (const_tree);
12941 
12942 /* When --enable-checking=fold, compute a digest of EXPR before
12943    and after the actual fold call to verify that fold did not
12944    accidentally change the original EXPR.  */
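/* A sketch of the failure mode this catches: if fold_1 mutated one of
   EXPR's subtrees in place, the MD5 digests taken before and after the
   call differ and fold_check_failed reports an internal error.  */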
12945 
12946 tree
12947 fold (tree expr)
12948 {
12949   tree ret;
12950   struct md5_ctx ctx;
12951   unsigned char checksum_before[16], checksum_after[16];
12952   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12953 
12954   md5_init_ctx (&ctx);
12955   fold_checksum_tree (expr, &ctx, &ht);
12956   md5_finish_ctx (&ctx, checksum_before);
12957   ht.empty ();
12958 
12959   ret = fold_1 (expr);
12960 
12961   md5_init_ctx (&ctx);
12962   fold_checksum_tree (expr, &ctx, &ht);
12963   md5_finish_ctx (&ctx, checksum_after);
12964 
12965   if (memcmp (checksum_before, checksum_after, 16))
12966     fold_check_failed (expr, ret);
12967 
12968   return ret;
12969 }
12970 
12971 void
12972 print_fold_checksum (const_tree expr)
12973 {
12974   struct md5_ctx ctx;
12975   unsigned char checksum[16], cnt;
12976   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12977 
12978   md5_init_ctx (&ctx);
12979   fold_checksum_tree (expr, &ctx, &ht);
12980   md5_finish_ctx (&ctx, checksum);
12981   for (cnt = 0; cnt < 16; ++cnt)
12982     fprintf (stderr, "%02x", checksum[cnt]);
12983   putc ('\n', stderr);
12984 }
12985 
12986 static void
12987 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12988 {
12989   internal_error ("fold check: original tree changed by fold");
12990 }
12991 
12992 static void
12993 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12994 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12995 {
12996   const tree_node **slot;
12997   enum tree_code code;
12998   union tree_node *buf;
12999   int i, len;
13000 
13001  recursive_label:
13002   if (expr == NULL)
13003     return;
13004   slot = ht->find_slot (expr, INSERT);
13005   if (*slot != NULL)
13006     return;
13007   *slot = expr;
13008   code = TREE_CODE (expr);
13009   if (TREE_CODE_CLASS (code) == tcc_declaration
13010       && HAS_DECL_ASSEMBLER_NAME_P (expr))
13011     {
13012       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
13013       size_t sz = tree_size (expr);
13014       buf = XALLOCAVAR (union tree_node, sz);
13015       memcpy ((char *) buf, expr, sz);
13016       SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13017       buf->decl_with_vis.symtab_node = NULL;
13018       buf->base.nowarning_flag = 0;
13019       expr = (tree) buf;
13020     }
13021   else if (TREE_CODE_CLASS (code) == tcc_type
13022 	   && (TYPE_POINTER_TO (expr)
13023 	       || TYPE_REFERENCE_TO (expr)
13024 	       || TYPE_CACHED_VALUES_P (expr)
13025 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13026 	       || TYPE_NEXT_VARIANT (expr)
13027 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
13028     {
13029       /* Allow these fields to be modified.  */
13030       tree tmp;
13031       size_t sz = tree_size (expr);
13032       buf = XALLOCAVAR (union tree_node, sz);
13033       memcpy ((char *) buf, expr, sz);
13034       expr = tmp = (tree) buf;
13035       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13036       TYPE_POINTER_TO (tmp) = NULL;
13037       TYPE_REFERENCE_TO (tmp) = NULL;
13038       TYPE_NEXT_VARIANT (tmp) = NULL;
13039       TYPE_ALIAS_SET (tmp) = -1;
13040       if (TYPE_CACHED_VALUES_P (tmp))
13041 	{
13042 	  TYPE_CACHED_VALUES_P (tmp) = 0;
13043 	  TYPE_CACHED_VALUES (tmp) = NULL;
13044 	}
13045     }
13046   else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13047     {
13048       /* Allow TREE_NO_WARNING to be set.  Perhaps we shouldn't allow that
13049 	 and change builtins.c etc. instead - see PR89543.  */
13050       size_t sz = tree_size (expr);
13051       buf = XALLOCAVAR (union tree_node, sz);
13052       memcpy ((char *) buf, expr, sz);
13053       buf->base.nowarning_flag = 0;
13054       expr = (tree) buf;
13055     }
13056   md5_process_bytes (expr, tree_size (expr), ctx);
13057   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13058     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13059   if (TREE_CODE_CLASS (code) != tcc_type
13060       && TREE_CODE_CLASS (code) != tcc_declaration
13061       && code != TREE_LIST
13062       && code != SSA_NAME
13063       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13064     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13065   switch (TREE_CODE_CLASS (code))
13066     {
13067     case tcc_constant:
13068       switch (code)
13069 	{
13070 	case STRING_CST:
13071 	  md5_process_bytes (TREE_STRING_POINTER (expr),
13072 			     TREE_STRING_LENGTH (expr), ctx);
13073 	  break;
13074 	case COMPLEX_CST:
13075 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13076 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13077 	  break;
13078 	case VECTOR_CST:
13079 	  len = vector_cst_encoded_nelts (expr);
13080 	  for (i = 0; i < len; ++i)
13081 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13082 	  break;
13083 	default:
13084 	  break;
13085 	}
13086       break;
13087     case tcc_exceptional:
13088       switch (code)
13089 	{
13090 	case TREE_LIST:
13091 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13092 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13093 	  expr = TREE_CHAIN (expr);
13094 	  goto recursive_label;
13095 	  break;
13096 	case TREE_VEC:
13097 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13098 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13099 	  break;
13100 	default:
13101 	  break;
13102 	}
13103       break;
13104     case tcc_expression:
13105     case tcc_reference:
13106     case tcc_comparison:
13107     case tcc_unary:
13108     case tcc_binary:
13109     case tcc_statement:
13110     case tcc_vl_exp:
13111       len = TREE_OPERAND_LENGTH (expr);
13112       for (i = 0; i < len; ++i)
13113 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13114       break;
13115     case tcc_declaration:
13116       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13117       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13118       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13119 	{
13120 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13121 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13122 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13123 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13124 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13125 	}
13126 
13127       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13128 	{
13129 	  if (TREE_CODE (expr) == FUNCTION_DECL)
13130 	    {
13131 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13132 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13133 	    }
13134 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13135 	}
13136       break;
13137     case tcc_type:
13138       if (TREE_CODE (expr) == ENUMERAL_TYPE)
13139         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13140       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13141       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13142       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13143       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13144       if (INTEGRAL_TYPE_P (expr)
13145           || SCALAR_FLOAT_TYPE_P (expr))
13146 	{
13147 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13148 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13149 	}
13150       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13151       if (TREE_CODE (expr) == RECORD_TYPE
13152 	  || TREE_CODE (expr) == UNION_TYPE
13153 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
13154 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13155       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13156       break;
13157     default:
13158       break;
13159     }
13160 }
13161 
13162 /* Helper function for outputting the checksum of a tree T.  When
13163    debugging with gdb, you can "define mynext" to be "next" followed
13164    by "call debug_fold_checksum (op0)", then just trace down till the
13165    outputs differ.  */
13166 
13167 DEBUG_FUNCTION void
13168 debug_fold_checksum (const_tree t)
13169 {
13170   int i;
13171   unsigned char checksum[16];
13172   struct md5_ctx ctx;
13173   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13174 
13175   md5_init_ctx (&ctx);
13176   fold_checksum_tree (t, &ctx, &ht);
13177   md5_finish_ctx (&ctx, checksum);
13178   ht.empty ();
13179 
13180   for (i = 0; i < 16; i++)
13181     fprintf (stderr, "%d ", checksum[i]);
13182 
13183   fprintf (stderr, "\n");
13184 }
13185 
13186 #endif
13187 
13188 /* Fold a unary tree expression with code CODE of type TYPE with an
13189    operand OP0.  LOC is the location of the resulting expression.
13190    Return a folded expression if successful.  Otherwise, return a tree
13191    expression with code CODE of type TYPE with an operand OP0.  */
13192 
13193 tree
13194 fold_build1_loc (location_t loc,
13195 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13196 {
13197   tree tem;
13198 #ifdef ENABLE_FOLD_CHECKING
13199   unsigned char checksum_before[16], checksum_after[16];
13200   struct md5_ctx ctx;
13201   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13202 
13203   md5_init_ctx (&ctx);
13204   fold_checksum_tree (op0, &ctx, &ht);
13205   md5_finish_ctx (&ctx, checksum_before);
13206   ht.empty ();
13207 #endif
13208 
13209   tem = fold_unary_loc (loc, code, type, op0);
13210   if (!tem)
13211     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13212 
13213 #ifdef ENABLE_FOLD_CHECKING
13214   md5_init_ctx (&ctx);
13215   fold_checksum_tree (op0, &ctx, &ht);
13216   md5_finish_ctx (&ctx, checksum_after);
13217 
13218   if (memcmp (checksum_before, checksum_after, 16))
13219     fold_check_failed (op0, tem);
13220 #endif
13221   return tem;
13222 }
13223 
13224 /* Fold a binary tree expression with code CODE of type TYPE with
13225    operands OP0 and OP1.  LOC is the location of the resulting
13226    expression.  Return a folded expression if successful.  Otherwise,
13227    return a tree expression with code CODE of type TYPE with operands
13228    OP0 and OP1.  */
13229 
13230 tree
13231 fold_build2_loc (location_t loc,
13232 		      enum tree_code code, tree type, tree op0, tree op1
13233 		      MEM_STAT_DECL)
13234 {
13235   tree tem;
13236 #ifdef ENABLE_FOLD_CHECKING
13237   unsigned char checksum_before_op0[16],
13238                 checksum_before_op1[16],
13239 		checksum_after_op0[16],
13240 		checksum_after_op1[16];
13241   struct md5_ctx ctx;
13242   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13243 
13244   md5_init_ctx (&ctx);
13245   fold_checksum_tree (op0, &ctx, &ht);
13246   md5_finish_ctx (&ctx, checksum_before_op0);
13247   ht.empty ();
13248 
13249   md5_init_ctx (&ctx);
13250   fold_checksum_tree (op1, &ctx, &ht);
13251   md5_finish_ctx (&ctx, checksum_before_op1);
13252   ht.empty ();
13253 #endif
13254 
13255   tem = fold_binary_loc (loc, code, type, op0, op1);
13256   if (!tem)
13257     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13258 
13259 #ifdef ENABLE_FOLD_CHECKING
13260   md5_init_ctx (&ctx);
13261   fold_checksum_tree (op0, &ctx, &ht);
13262   md5_finish_ctx (&ctx, checksum_after_op0);
13263   ht.empty ();
13264 
13265   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13266     fold_check_failed (op0, tem);
13267 
13268   md5_init_ctx (&ctx);
13269   fold_checksum_tree (op1, &ctx, &ht);
13270   md5_finish_ctx (&ctx, checksum_after_op1);
13271 
13272   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13273     fold_check_failed (op1, tem);
13274 #endif
13275   return tem;
13276 }
13277 
13278 /* Fold a ternary tree expression with code CODE of type TYPE with
13279    operands OP0, OP1, and OP2.  Return a folded expression if
13280    successful.  Otherwise, return a tree expression with code CODE of
13281    type TYPE with operands OP0, OP1, and OP2.  */
13282 
13283 tree
13284 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13285 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
13286 {
13287   tree tem;
13288 #ifdef ENABLE_FOLD_CHECKING
13289   unsigned char checksum_before_op0[16],
13290                 checksum_before_op1[16],
13291                 checksum_before_op2[16],
13292 		checksum_after_op0[16],
13293 		checksum_after_op1[16],
13294 		checksum_after_op2[16];
13295   struct md5_ctx ctx;
13296   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13297 
13298   md5_init_ctx (&ctx);
13299   fold_checksum_tree (op0, &ctx, &ht);
13300   md5_finish_ctx (&ctx, checksum_before_op0);
13301   ht.empty ();
13302 
13303   md5_init_ctx (&ctx);
13304   fold_checksum_tree (op1, &ctx, &ht);
13305   md5_finish_ctx (&ctx, checksum_before_op1);
13306   ht.empty ();
13307 
13308   md5_init_ctx (&ctx);
13309   fold_checksum_tree (op2, &ctx, &ht);
13310   md5_finish_ctx (&ctx, checksum_before_op2);
13311   ht.empty ();
13312 #endif
13313 
13314   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13315   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13316   if (!tem)
13317     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13318 
13319 #ifdef ENABLE_FOLD_CHECKING
13320   md5_init_ctx (&ctx);
13321   fold_checksum_tree (op0, &ctx, &ht);
13322   md5_finish_ctx (&ctx, checksum_after_op0);
13323   ht.empty ();
13324 
13325   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13326     fold_check_failed (op0, tem);
13327 
13328   md5_init_ctx (&ctx);
13329   fold_checksum_tree (op1, &ctx, &ht);
13330   md5_finish_ctx (&ctx, checksum_after_op1);
13331   ht.empty ();
13332 
13333   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13334     fold_check_failed (op1, tem);
13335 
13336   md5_init_ctx (&ctx);
13337   fold_checksum_tree (op2, &ctx, &ht);
13338   md5_finish_ctx (&ctx, checksum_after_op2);
13339 
13340   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13341     fold_check_failed (op2, tem);
13342 #endif
13343   return tem;
13344 }
13345 
13346 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13347    arguments in ARGARRAY, and a null static chain.
13348    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
13349    of type TYPE from the given operands as constructed by build_call_array.  */
13350 
13351 tree
13352 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13353 			   int nargs, tree *argarray)
13354 {
13355   tree tem;
13356 #ifdef ENABLE_FOLD_CHECKING
13357   unsigned char checksum_before_fn[16],
13358                 checksum_before_arglist[16],
13359 		checksum_after_fn[16],
13360 		checksum_after_arglist[16];
13361   struct md5_ctx ctx;
13362   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13363   int i;
13364 
13365   md5_init_ctx (&ctx);
13366   fold_checksum_tree (fn, &ctx, &ht);
13367   md5_finish_ctx (&ctx, checksum_before_fn);
13368   ht.empty ();
13369 
13370   md5_init_ctx (&ctx);
13371   for (i = 0; i < nargs; i++)
13372     fold_checksum_tree (argarray[i], &ctx, &ht);
13373   md5_finish_ctx (&ctx, checksum_before_arglist);
13374   ht.empty ();
13375 #endif
13376 
13377   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13378   if (!tem)
13379     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13380 
13381 #ifdef ENABLE_FOLD_CHECKING
13382   md5_init_ctx (&ctx);
13383   fold_checksum_tree (fn, &ctx, &ht);
13384   md5_finish_ctx (&ctx, checksum_after_fn);
13385   ht.empty ();
13386 
13387   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13388     fold_check_failed (fn, tem);
13389 
13390   md5_init_ctx (&ctx);
13391   for (i = 0; i < nargs; i++)
13392     fold_checksum_tree (argarray[i], &ctx, &ht);
13393   md5_finish_ctx (&ctx, checksum_after_arglist);
13394 
13395   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13396     fold_check_failed (NULL_TREE, tem);
13397 #endif
13398   return tem;
13399 }
13400 
13401 /* Perform constant folding and related simplification of initializer
13402    expression EXPR.  These behave identically to "fold_buildN" but ignore
13403    potential run-time traps and exceptions that fold must preserve.  */
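/* E.g. (a sketch): while folding a static initializer such as
   "const double d = 1.0 / 3.0;", flag_rounding_math and flag_trapping_math
   are temporarily cleared so the division can be folded at compile time.  */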
13404 
13405 #define START_FOLD_INIT \
13406   int saved_signaling_nans = flag_signaling_nans;\
13407   int saved_trapping_math = flag_trapping_math;\
13408   int saved_rounding_math = flag_rounding_math;\
13409   int saved_trapv = flag_trapv;\
13410   int saved_folding_initializer = folding_initializer;\
13411   flag_signaling_nans = 0;\
13412   flag_trapping_math = 0;\
13413   flag_rounding_math = 0;\
13414   flag_trapv = 0;\
13415   folding_initializer = 1;
13416 
13417 #define END_FOLD_INIT \
13418   flag_signaling_nans = saved_signaling_nans;\
13419   flag_trapping_math = saved_trapping_math;\
13420   flag_rounding_math = saved_rounding_math;\
13421   flag_trapv = saved_trapv;\
13422   folding_initializer = saved_folding_initializer;
13423 
13424 tree
13425 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13426 			     tree type, tree op)
13427 {
13428   tree result;
13429   START_FOLD_INIT;
13430 
13431   result = fold_build1_loc (loc, code, type, op);
13432 
13433   END_FOLD_INIT;
13434   return result;
13435 }
13436 
13437 tree
13438 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13439 			     tree type, tree op0, tree op1)
13440 {
13441   tree result;
13442   START_FOLD_INIT;
13443 
13444   result = fold_build2_loc (loc, code, type, op0, op1);
13445 
13446   END_FOLD_INIT;
13447   return result;
13448 }
13449 
13450 tree
13451 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13452 				       int nargs, tree *argarray)
13453 {
13454   tree result;
13455   START_FOLD_INIT;
13456 
13457   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13458 
13459   END_FOLD_INIT;
13460   return result;
13461 }
13462 
13463 #undef START_FOLD_INIT
13464 #undef END_FOLD_INIT
13465 
13466 /* Determine if the first argument is a multiple of the second argument.
13467    Return 0 if it is not, or if we cannot easily determine it to be.
13468 
13469    An example of the sort of thing we care about (at this point; this routine
13470    could surely be made more general, and expanded to do what the *_DIV_EXPR's
13471    fold cases do now) is discovering that
13472 
13473      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13474 
13475    is a multiple of
13476 
13477      SAVE_EXPR (J * 8)
13478 
13479    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13480 
13481    This code also handles discovering that
13482 
13483      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13484 
13485    is a multiple of 8 so we don't have to worry about dealing with a
13486    possible remainder.
13487 
13488    Note that we *look* inside a SAVE_EXPR only to determine how it was
13489    calculated; it is not safe for fold to do much of anything else with the
13490    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13491    at run time.  For example, the latter example above *cannot* be implemented
13492    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13493    evaluation time of the original SAVE_EXPR is not necessarily the same at
13494    the time the new expression is evaluated.  The only optimization of this
13495    sort that would be valid is changing
13496 
13497      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13498 
13499    divided by 8 to
13500 
13501      SAVE_EXPR (I) * SAVE_EXPR (J)
13502 
13503    (where the same SAVE_EXPR (J) is used in the original and the
13504    transformed version).  */
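/* For instance (a sketch): multiple_of_p (type, i * 8 + 16, 8) returns 1,
   since both the MULT_EXPR constant 8 and the addend 16 are multiples of
   8, while multiple_of_p (type, i * 8 + 12, 8) returns 0.  */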
13505 
13506 int
13507 multiple_of_p (tree type, const_tree top, const_tree bottom)
13508 {
13509   gimple *stmt;
13510   tree t1, op1, op2;
13511 
13512   if (operand_equal_p (top, bottom, 0))
13513     return 1;
13514 
13515   if (TREE_CODE (type) != INTEGER_TYPE)
13516     return 0;
13517 
13518   switch (TREE_CODE (top))
13519     {
13520     case BIT_AND_EXPR:
13521       /* Bitwise and provides a power of two multiple.  If the mask is
13522 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
13523       if (!integer_pow2p (bottom))
13524 	return 0;
13525       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13526 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13527 
13528     case MULT_EXPR:
13529       if (TREE_CODE (bottom) == INTEGER_CST)
13530 	{
13531 	  op1 = TREE_OPERAND (top, 0);
13532 	  op2 = TREE_OPERAND (top, 1);
13533 	  if (TREE_CODE (op1) == INTEGER_CST)
13534 	    std::swap (op1, op2);
13535 	  if (TREE_CODE (op2) == INTEGER_CST)
13536 	    {
13537 	      if (multiple_of_p (type, op2, bottom))
13538 		return 1;
13539 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
13540 	      if (multiple_of_p (type, bottom, op2))
13541 		{
13542 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13543 						 wi::to_widest (op2));
13544 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13545 		    {
13546 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13547 		      return multiple_of_p (type, op1, op2);
13548 		    }
13549 		}
13550 	      return multiple_of_p (type, op1, bottom);
13551 	    }
13552 	}
13553       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13554 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13555 
13556     case MINUS_EXPR:
13557       /* It is impossible to prove precisely whether op0 - op1 is a
13558 	 multiple of bottom, so be conservative and check whether both
13559 	 op0 and op1 are multiples of bottom.  Note we check the second
13560 	 operand first since it's usually simpler.  */
13561       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13562 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13563 
13564     case PLUS_EXPR:
13565       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13566 	 as op0 - 3 if the expression has unsigned type.  For example,
13567 	 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not.  */
13568 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
13569       if (TYPE_UNSIGNED (type)
13570 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13571 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
13572       return (multiple_of_p (type, op1, bottom)
13573 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13574 
13575     case LSHIFT_EXPR:
13576       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13577 	{
13578 	  op1 = TREE_OPERAND (top, 1);
13579 	  /* const_binop may not detect overflow correctly,
13580 	     so check for it explicitly here.  */
13581 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13582 			 wi::to_wide (op1))
13583 	      && (t1 = fold_convert (type,
13584 				     const_binop (LSHIFT_EXPR, size_one_node,
13585 						  op1))) != 0
13586 	      && !TREE_OVERFLOW (t1))
13587 	    return multiple_of_p (type, t1, bottom);
13588 	}
13589       return 0;
13590 
13591     case NOP_EXPR:
13592       /* Can't handle conversions from non-integral or wider integral type.  */
13593       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13594 	  || (TYPE_PRECISION (type)
13595 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13596 	return 0;
13597 
13598       /* fall through */
13599 
13600     case SAVE_EXPR:
13601       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13602 
13603     case COND_EXPR:
13604       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13605 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13606 
13607     case INTEGER_CST:
13608       if (TREE_CODE (bottom) != INTEGER_CST
13609 	  || integer_zerop (bottom)
13610 	  || (TYPE_UNSIGNED (type)
13611 	      && (tree_int_cst_sgn (top) < 0
13612 		  || tree_int_cst_sgn (bottom) < 0)))
13613 	return 0;
13614       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13615 				SIGNED);
13616 
13617     case SSA_NAME:
13618       if (TREE_CODE (bottom) == INTEGER_CST
13619 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13620 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
13621 	{
13622 	  enum tree_code code = gimple_assign_rhs_code (stmt);
13623 
13624 	  /* Check for special cases to see if top is defined as multiple
13625 	     of bottom:
13626 
13627 	       top = (X & ~(bottom - 1)) ; bottom is a power of 2
13628 
13629 	     or
13630 
13631 	       Y = X % bottom
13632 	       top = X - Y.  */
13633 	  if (code == BIT_AND_EXPR
13634 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13635 	      && TREE_CODE (op2) == INTEGER_CST
13636 	      && integer_pow2p (bottom)
13637 	      && wi::multiple_of_p (wi::to_widest (op2),
13638 				    wi::to_widest (bottom), UNSIGNED))
13639 	    return 1;
13640 
13641 	  op1 = gimple_assign_rhs1 (stmt);
13642 	  if (code == MINUS_EXPR
13643 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13644 	      && TREE_CODE (op2) == SSA_NAME
13645 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13646 	      && gimple_code (stmt) == GIMPLE_ASSIGN
13647 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13648 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13649 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13650 	    return 1;
13651 	}
13652 
13653       /* fall through */
13654 
13655     default:
13656       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13657 	return multiple_p (wi::to_poly_widest (top),
13658 			   wi::to_poly_widest (bottom));
13659 
13660       return 0;
13661     }
13662 }
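
/* A minimal usage sketch for multiple_of_p (illustrative only, hence
   kept under #if 0; the VAR_DECL "x" is a hypothetical stand-in for
   whatever expression a caller already has).  A zero result only means
   the property could not be proven, never that it is false.  */
#if 0
static void
example_multiple_of_p (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), integer_type_node);
  /* (x * 8) + 16 is provably a multiple of 8: the PLUS_EXPR case
     requires both operands to be multiples, the MULT_EXPR case accepts
     the constant factor 8, and 16 is handled by the INTEGER_CST case.  */
  tree top = fold_build2 (PLUS_EXPR, integer_type_node,
			  fold_build2 (MULT_EXPR, integer_type_node,
				       x, eight),
			  build_int_cst (integer_type_node, 16));
  gcc_checking_assert (multiple_of_p (integer_type_node, top, eight));
}
#endif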
13663 
13664 #define tree_expr_nonnegative_warnv_p(X, Y) \
13665   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13666 
13667 #define RECURSE(X) \
13668   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13669 
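/* The #define above deliberately poisons the name: a direct recursive
   call to tree_expr_nonnegative_warnv_p in the code below would expand
   to a _Pragma that raises a compile-time error.  RECURSE instead
   writes "(tree_expr_nonnegative_warnv_p) (...)", where the extra
   parentheses suppress function-like macro expansion, and it bumps
   DEPTH so the recursion stays bounded.  */
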
13670 /* Return true if CODE or TYPE is known to be non-negative. */
13671 
13672 static bool
13673 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13674 {
13675   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13676       && truth_value_p (code))
13677     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13678        have a signed:1 type (where the value is -1 and 0).  */
13679     return true;
13680   return false;
13681 }
13682 
13683 /* Return true if (CODE OP0) is known to be non-negative.  If the return
13684    value is based on the assumption that signed overflow is undefined,
13685    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13686    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13687 
13688 bool
13689 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13690 				bool *strict_overflow_p, int depth)
13691 {
13692   if (TYPE_UNSIGNED (type))
13693     return true;
13694 
13695   switch (code)
13696     {
13697     case ABS_EXPR:
13698       /* We can't return 1 if flag_wrapv is set because
13699 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
13700       if (!ANY_INTEGRAL_TYPE_P (type))
13701 	return true;
13702       if (TYPE_OVERFLOW_UNDEFINED (type))
13703 	{
13704 	  *strict_overflow_p = true;
13705 	  return true;
13706 	}
13707       break;
13708 
13709     case NON_LVALUE_EXPR:
13710     case FLOAT_EXPR:
13711     case FIX_TRUNC_EXPR:
13712       return RECURSE (op0);
13713 
13714     CASE_CONVERT:
13715       {
13716 	tree inner_type = TREE_TYPE (op0);
13717 	tree outer_type = type;
13718 
13719 	if (TREE_CODE (outer_type) == REAL_TYPE)
13720 	  {
13721 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13722 	      return RECURSE (op0);
13723 	    if (INTEGRAL_TYPE_P (inner_type))
13724 	      {
13725 		if (TYPE_UNSIGNED (inner_type))
13726 		  return true;
13727 		return RECURSE (op0);
13728 	      }
13729 	  }
13730 	else if (INTEGRAL_TYPE_P (outer_type))
13731 	  {
13732 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13733 	      return RECURSE (op0);
13734 	    if (INTEGRAL_TYPE_P (inner_type))
13735 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13736 		      && TYPE_UNSIGNED (inner_type);
13737 	  }
13738       }
13739       break;
13740 
13741     default:
13742       return tree_simple_nonnegative_warnv_p (code, type);
13743     }
13744 
13745   /* We don't know the sign of `t', so be conservative and return false.  */
13746   return false;
13747 }
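
/* Illustrative cases for the CASE_CONVERT rules above, assuming the
   usual 32-bit int and 16-bit short:

     (int) (unsigned short) x  -> nonnegative: the inner type is
				  unsigned and strictly narrower.
     (int) (unsigned int) x    -> unknown: same precision, so the sign
				  bit of the result may be set.
     (double) (unsigned int) x -> nonnegative: unsigned-to-real.  */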
13748 
13749 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
13750    value is based on the assumption that signed overflow is undefined,
13751    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13752    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13753 
13754 bool
13755 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13756 				 tree op1, bool *strict_overflow_p,
13757 				 int depth)
13758 {
13759   if (TYPE_UNSIGNED (type))
13760     return true;
13761 
13762   switch (code)
13763     {
13764     case POINTER_PLUS_EXPR:
13765     case PLUS_EXPR:
13766       if (FLOAT_TYPE_P (type))
13767 	return RECURSE (op0) && RECURSE (op1);
13768 
13769       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13770 	 both unsigned and at least 2 bits shorter than the result.  */
13771       if (TREE_CODE (type) == INTEGER_TYPE
13772 	  && TREE_CODE (op0) == NOP_EXPR
13773 	  && TREE_CODE (op1) == NOP_EXPR)
13774 	{
13775 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13776 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13777 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13778 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13779 	    {
13780 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
13781 				       TYPE_PRECISION (inner2)) + 1;
13782 	      return prec < TYPE_PRECISION (type);
13783 	    }
13784 	}
13785       break;
13786 
13787     case MULT_EXPR:
13788       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13789 	{
13790 	  /* x * x is always non-negative for floating-point x,
13791 	     and for integers when signed overflow is undefined.  */
13792 	  if (operand_equal_p (op0, op1, 0)
13793 	      || (RECURSE (op0) && RECURSE (op1)))
13794 	    {
13795 	      if (ANY_INTEGRAL_TYPE_P (type)
13796 		  && TYPE_OVERFLOW_UNDEFINED (type))
13797 		*strict_overflow_p = true;
13798 	      return true;
13799 	    }
13800 	}
13801 
13802       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13803 	 both unsigned and the sum of their widths is less than that of the result.  */
13804       if (TREE_CODE (type) == INTEGER_TYPE
13805 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13806 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13807 	{
13808 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13809 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
13810 	    : TREE_TYPE (op0);
13811 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13812 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
13813 	    : TREE_TYPE (op1);
13814 
13815 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
13816 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
13817 
13818 	  if (TREE_CODE (op0) == INTEGER_CST)
13819 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13820 
13821 	  if (TREE_CODE (op1) == INTEGER_CST)
13822 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13823 
13824 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13825 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13826 	    {
13827 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13828 		? tree_int_cst_min_precision (op0, UNSIGNED)
13829 		: TYPE_PRECISION (inner0);
13830 
13831 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13832 		? tree_int_cst_min_precision (op1, UNSIGNED)
13833 		: TYPE_PRECISION (inner1);
13834 
13835 	      return precision0 + precision1 < TYPE_PRECISION (type);
13836 	    }
13837 	}
13838       return false;
13839 
13840     case BIT_AND_EXPR:
13841     case MAX_EXPR:
13842       return RECURSE (op0) || RECURSE (op1);
13843 
13844     case BIT_IOR_EXPR:
13845     case BIT_XOR_EXPR:
13846     case MIN_EXPR:
13847     case RDIV_EXPR:
13848     case TRUNC_DIV_EXPR:
13849     case CEIL_DIV_EXPR:
13850     case FLOOR_DIV_EXPR:
13851     case ROUND_DIV_EXPR:
13852       return RECURSE (op0) && RECURSE (op1);
13853 
13854     case TRUNC_MOD_EXPR:
13855       return RECURSE (op0);
13856 
13857     case FLOOR_MOD_EXPR:
13858       return RECURSE (op1);
13859 
13860     case CEIL_MOD_EXPR:
13861     case ROUND_MOD_EXPR:
13862     default:
13863       return tree_simple_nonnegative_warnv_p (code, type);
13864     }
13865 
13866   /* We don't know the sign of `t', so be conservative and return false.  */
13867   return false;
13868 }
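
/* A worked example for the widening rules above, assuming 8-bit
   unsigned char and 32-bit int.  For unsigned char a and b:

     (int) a + (int) b  is at most 255 + 255 = 510,
     (int) a * (int) b  is at most 255 * 255 = 65025,

   both well below 2^31, so PLUS_EXPR and MULT_EXPR are known
   nonnegative; the tests "MAX (prec0, prec1) + 1 < 32" and
   "prec0 + prec1 < 32" encode exactly these bounds.  */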
13869 
13870 /* Return true if T is known to be non-negative.  If the return
13871    value is based on the assumption that signed overflow is undefined,
13872    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13873    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13874 
13875 bool
13876 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13877 {
13878   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13879     return true;
13880 
13881   switch (TREE_CODE (t))
13882     {
13883     case INTEGER_CST:
13884       return tree_int_cst_sgn (t) >= 0;
13885 
13886     case REAL_CST:
13887       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13888 
13889     case FIXED_CST:
13890       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13891 
13892     case COND_EXPR:
13893       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13894 
13895     case SSA_NAME:
13896       /* Limit the depth of recursion to avoid quadratic behavior.
13897 	 This is expected to catch almost all occurrences in practice.
13898 	 If this code misses important cases that unbounded recursion
13899 	 would not, passes that need this information could be revised
13900 	 to provide it through dataflow propagation.  */
13901       return (!name_registered_for_update_p (t)
13902 	      && depth < param_max_ssa_name_query_depth
13903 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13904 						  strict_overflow_p, depth));
13905 
13906     default:
13907       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13908     }
13909 }
13910 
13911 /* Return true if T is known to be non-negative.  If the return
13912    value is based on the assumption that signed overflow is undefined,
13913    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13914    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13915 
13916 bool
13917 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13918 			       bool *strict_overflow_p, int depth)
13919 {
13920   switch (fn)
13921     {
13922     CASE_CFN_ACOS:
13923     CASE_CFN_ACOSH:
13924     CASE_CFN_CABS:
13925     CASE_CFN_COSH:
13926     CASE_CFN_ERFC:
13927     CASE_CFN_EXP:
13928     CASE_CFN_EXP10:
13929     CASE_CFN_EXP2:
13930     CASE_CFN_FABS:
13931     CASE_CFN_FDIM:
13932     CASE_CFN_HYPOT:
13933     CASE_CFN_POW10:
13934     CASE_CFN_FFS:
13935     CASE_CFN_PARITY:
13936     CASE_CFN_POPCOUNT:
13937     CASE_CFN_CLZ:
13938     CASE_CFN_CLRSB:
13939     case CFN_BUILT_IN_BSWAP32:
13940     case CFN_BUILT_IN_BSWAP64:
13941       /* Always true.  */
13942       return true;
13943 
13944     CASE_CFN_SQRT:
13945     CASE_CFN_SQRT_FN:
13946       /* sqrt(-0.0) is -0.0.  */
13947       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13948 	return true;
13949       return RECURSE (arg0);
13950 
13951     CASE_CFN_ASINH:
13952     CASE_CFN_ATAN:
13953     CASE_CFN_ATANH:
13954     CASE_CFN_CBRT:
13955     CASE_CFN_CEIL:
13956     CASE_CFN_CEIL_FN:
13957     CASE_CFN_ERF:
13958     CASE_CFN_EXPM1:
13959     CASE_CFN_FLOOR:
13960     CASE_CFN_FLOOR_FN:
13961     CASE_CFN_FMOD:
13962     CASE_CFN_FREXP:
13963     CASE_CFN_ICEIL:
13964     CASE_CFN_IFLOOR:
13965     CASE_CFN_IRINT:
13966     CASE_CFN_IROUND:
13967     CASE_CFN_LCEIL:
13968     CASE_CFN_LDEXP:
13969     CASE_CFN_LFLOOR:
13970     CASE_CFN_LLCEIL:
13971     CASE_CFN_LLFLOOR:
13972     CASE_CFN_LLRINT:
13973     CASE_CFN_LLROUND:
13974     CASE_CFN_LRINT:
13975     CASE_CFN_LROUND:
13976     CASE_CFN_MODF:
13977     CASE_CFN_NEARBYINT:
13978     CASE_CFN_NEARBYINT_FN:
13979     CASE_CFN_RINT:
13980     CASE_CFN_RINT_FN:
13981     CASE_CFN_ROUND:
13982     CASE_CFN_ROUND_FN:
13983     CASE_CFN_ROUNDEVEN:
13984     CASE_CFN_ROUNDEVEN_FN:
13985     CASE_CFN_SCALB:
13986     CASE_CFN_SCALBLN:
13987     CASE_CFN_SCALBN:
13988     CASE_CFN_SIGNBIT:
13989     CASE_CFN_SIGNIFICAND:
13990     CASE_CFN_SINH:
13991     CASE_CFN_TANH:
13992     CASE_CFN_TRUNC:
13993     CASE_CFN_TRUNC_FN:
13994       /* True if the 1st argument is nonnegative.  */
13995       return RECURSE (arg0);
13996 
13997     CASE_CFN_FMAX:
13998     CASE_CFN_FMAX_FN:
13999       /* True if the 1st OR 2nd arguments are nonnegative.  */
14000       return RECURSE (arg0) || RECURSE (arg1);
14001 
14002     CASE_CFN_FMIN:
14003     CASE_CFN_FMIN_FN:
14004       /* True if the 1st AND 2nd arguments are nonnegative.  */
14005       return RECURSE (arg0) && RECURSE (arg1);
14006 
14007     CASE_CFN_COPYSIGN:
14008     CASE_CFN_COPYSIGN_FN:
14009       /* True if the 2nd argument is nonnegative.  */
14010       return RECURSE (arg1);
14011 
14012     CASE_CFN_POWI:
14013       /* True if the 1st argument is nonnegative or the second
14014 	 argument is an even integer.  */
14015       if (TREE_CODE (arg1) == INTEGER_CST
14016 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14017 	return true;
14018       return RECURSE (arg0);
14019 
14020     CASE_CFN_POW:
14021       /* True if the 1st argument is nonnegative or the second
14022 	 argument is an even integer valued real.  */
14023       if (TREE_CODE (arg1) == REAL_CST)
14024 	{
14025 	  REAL_VALUE_TYPE c;
14026 	  HOST_WIDE_INT n;
14027 
14028 	  c = TREE_REAL_CST (arg1);
14029 	  n = real_to_integer (&c);
14030 	  if ((n & 1) == 0)
14031 	    {
14032 	      REAL_VALUE_TYPE cint;
14033 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
14034 	      if (real_identical (&c, &cint))
14035 		return true;
14036 	    }
14037 	}
14038       return RECURSE (arg0);
14039 
14040     default:
14041       break;
14042     }
14043   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14044 }
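
/* For example, pow (x, 2.0) is known nonnegative for any x by the
   CASE_CFN_POW rule above: 2.0 is a REAL_CST whose integer value 2 is
   even and round-trips exactly through real_to_integer and
   real_from_integer.  pow (x, 2.5) gets no such guarantee and instead
   recurses on the first argument.  */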
14045 
14046 /* Return true if T is known to be non-negative.  If the return
14047    value is based on the assumption that signed overflow is undefined,
14048    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14049    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14050 
14051 static bool
14052 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14053 {
14054   enum tree_code code = TREE_CODE (t);
14055   if (TYPE_UNSIGNED (TREE_TYPE (t)))
14056     return true;
14057 
14058   switch (code)
14059     {
14060     case TARGET_EXPR:
14061       {
14062 	tree temp = TARGET_EXPR_SLOT (t);
14063 	t = TARGET_EXPR_INITIAL (t);
14064 
14065 	/* If the initializer is non-void, then it's a normal expression
14066 	   that will be assigned to the slot.  */
14067 	if (!VOID_TYPE_P (t))
14068 	  return RECURSE (t);
14069 
14070 	/* Otherwise, the initializer sets the slot in some way.  One common
14071 	   way is an assignment statement at the end of the initializer.  */
14072 	while (1)
14073 	  {
14074 	    if (TREE_CODE (t) == BIND_EXPR)
14075 	      t = expr_last (BIND_EXPR_BODY (t));
14076 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14077 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
14078 	      t = expr_last (TREE_OPERAND (t, 0));
14079 	    else if (TREE_CODE (t) == STATEMENT_LIST)
14080 	      t = expr_last (t);
14081 	    else
14082 	      break;
14083 	  }
14084 	if (TREE_CODE (t) == MODIFY_EXPR
14085 	    && TREE_OPERAND (t, 0) == temp)
14086 	  return RECURSE (TREE_OPERAND (t, 1));
14087 
14088 	return false;
14089       }
14090 
14091     case CALL_EXPR:
14092       {
14093 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
14094 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
14095 
14096 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14097 					      get_call_combined_fn (t),
14098 					      arg0,
14099 					      arg1,
14100 					      strict_overflow_p, depth);
14101       }
14102     case COMPOUND_EXPR:
14103     case MODIFY_EXPR:
14104       return RECURSE (TREE_OPERAND (t, 1));
14105 
14106     case BIND_EXPR:
14107       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14108 
14109     case SAVE_EXPR:
14110       return RECURSE (TREE_OPERAND (t, 0));
14111 
14112     default:
14113       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14114     }
14115 }
14116 
14117 #undef RECURSE
14118 #undef tree_expr_nonnegative_warnv_p
14119 
14120 /* Return true if T is known to be non-negative.  If the return
14121    value is based on the assumption that signed overflow is undefined,
14122    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14123    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14124 
14125 bool
14126 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14127 {
14128   enum tree_code code;
14129   if (t == error_mark_node)
14130     return false;
14131 
14132   code = TREE_CODE (t);
14133   switch (TREE_CODE_CLASS (code))
14134     {
14135     case tcc_binary:
14136     case tcc_comparison:
14137       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14138 					      TREE_TYPE (t),
14139 					      TREE_OPERAND (t, 0),
14140 					      TREE_OPERAND (t, 1),
14141 					      strict_overflow_p, depth);
14142 
14143     case tcc_unary:
14144       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14145 					     TREE_TYPE (t),
14146 					     TREE_OPERAND (t, 0),
14147 					     strict_overflow_p, depth);
14148 
14149     case tcc_constant:
14150     case tcc_declaration:
14151     case tcc_reference:
14152       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14153 
14154     default:
14155       break;
14156     }
14157 
14158   switch (code)
14159     {
14160     case TRUTH_AND_EXPR:
14161     case TRUTH_OR_EXPR:
14162     case TRUTH_XOR_EXPR:
14163       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14164 					      TREE_TYPE (t),
14165 					      TREE_OPERAND (t, 0),
14166 					      TREE_OPERAND (t, 1),
14167 					      strict_overflow_p, depth);
14168     case TRUTH_NOT_EXPR:
14169       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14170 					     TREE_TYPE (t),
14171 					     TREE_OPERAND (t, 0),
14172 					     strict_overflow_p, depth);
14173 
14174     case COND_EXPR:
14175     case CONSTRUCTOR:
14176     case OBJ_TYPE_REF:
14177     case ASSERT_EXPR:
14178     case ADDR_EXPR:
14179     case WITH_SIZE_EXPR:
14180     case SSA_NAME:
14181       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14182 
14183     default:
14184       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14185     }
14186 }
14187 
14188 /* Return true if `t' is known to be non-negative.  Handle warnings
14189    about undefined signed overflow.  */
14190 
14191 bool
14192 tree_expr_nonnegative_p (tree t)
14193 {
14194   bool ret, strict_overflow_p;
14195 
14196   strict_overflow_p = false;
14197   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14198   if (strict_overflow_p)
14199     fold_overflow_warning (("assuming signed overflow does not occur when "
14200 			    "determining that expression is always "
14201 			    "non-negative"),
14202 			   WARN_STRICT_OVERFLOW_MISC);
14203   return ret;
14204 }
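
/* A minimal sketch of using the public entry point (illustrative only,
   hence #if 0; "expr" is a hypothetical tree in the caller's hands).  */
#if 0
  if (tree_expr_nonnegative_p (expr))
    {
      /* expr can be treated as >= 0 here.  If the proof relied on
	 signed overflow being undefined, tree_expr_nonnegative_p has
	 already issued the -Wstrict-overflow note on our behalf.  */
    }
#endif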
14205 
14206 
14207 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14208    For floating point we further ensure that T is not denormal.
14209    Similar logic is present in nonzero_address in rtlanal.h.
14210 
14211    If the return value is based on the assumption that signed overflow
14212    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14213    change *STRICT_OVERFLOW_P.  */
14214 
14215 bool
14216 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14217 			    bool *strict_overflow_p)
14218 {
14219   switch (code)
14220     {
14221     case ABS_EXPR:
14222       return tree_expr_nonzero_warnv_p (op0,
14223 					strict_overflow_p);
14224 
14225     case NOP_EXPR:
14226       {
14227 	tree inner_type = TREE_TYPE (op0);
14228 	tree outer_type = type;
14229 
14230 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14231 		&& tree_expr_nonzero_warnv_p (op0,
14232 					      strict_overflow_p));
14233       }
14234       break;
14235 
14236     case NON_LVALUE_EXPR:
14237       return tree_expr_nonzero_warnv_p (op0,
14238 					strict_overflow_p);
14239 
14240     default:
14241       break;
14242   }
14243 
14244   return false;
14245 }
14246 
14247 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14248    For floating point we further ensure that T is not denormal.
14249    Similar logic is present in nonzero_address in rtlanal.h.
14250 
14251    If the return value is based on the assumption that signed overflow
14252    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14253    change *STRICT_OVERFLOW_P.  */
14254 
14255 bool
14256 tree_binary_nonzero_warnv_p (enum tree_code code,
14257 			     tree type,
14258 			     tree op0,
14259 			     tree op1, bool *strict_overflow_p)
14260 {
14261   bool sub_strict_overflow_p;
14262   switch (code)
14263     {
14264     case POINTER_PLUS_EXPR:
14265     case PLUS_EXPR:
14266       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14267 	{
14268 	  /* In the presence of negative values it is hard
14269 	     to say anything.  */
14270 	  sub_strict_overflow_p = false;
14271 	  if (!tree_expr_nonnegative_warnv_p (op0,
14272 					      &sub_strict_overflow_p)
14273 	      || !tree_expr_nonnegative_warnv_p (op1,
14274 						 &sub_strict_overflow_p))
14275 	    return false;
14276 	  /* One of the operands must be positive and the other non-negative.  */
14277 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
14278 	     overflows, on a twos-complement machine the sum of two
14279 	     nonnegative numbers can never be zero.  */
14280 	  return (tree_expr_nonzero_warnv_p (op0,
14281 					     strict_overflow_p)
14282 		  || tree_expr_nonzero_warnv_p (op1,
14283 						strict_overflow_p));
14284 	}
14285       break;
14286 
14287     case MULT_EXPR:
14288       if (TYPE_OVERFLOW_UNDEFINED (type))
14289 	{
14290 	  if (tree_expr_nonzero_warnv_p (op0,
14291 					 strict_overflow_p)
14292 	      && tree_expr_nonzero_warnv_p (op1,
14293 					    strict_overflow_p))
14294 	    {
14295 	      *strict_overflow_p = true;
14296 	      return true;
14297 	    }
14298 	}
14299       break;
14300 
14301     case MIN_EXPR:
14302       sub_strict_overflow_p = false;
14303       if (tree_expr_nonzero_warnv_p (op0,
14304 				     &sub_strict_overflow_p)
14305 	  && tree_expr_nonzero_warnv_p (op1,
14306 					&sub_strict_overflow_p))
14307 	{
14308 	  if (sub_strict_overflow_p)
14309 	    *strict_overflow_p = true;
14310 	}
14311       break;
14312 
14313     case MAX_EXPR:
14314       sub_strict_overflow_p = false;
14315       if (tree_expr_nonzero_warnv_p (op0,
14316 				     &sub_strict_overflow_p))
14317 	{
14318 	  if (sub_strict_overflow_p)
14319 	    *strict_overflow_p = true;
14320 
14321 	  /* When both operands are nonzero, then MAX must be too.  */
14322 	  if (tree_expr_nonzero_warnv_p (op1,
14323 					 strict_overflow_p))
14324 	    return true;
14325 
14326 	  /* MAX where operand 0 is positive is positive.  */
14327 	  return tree_expr_nonnegative_warnv_p (op0,
14328 					       strict_overflow_p);
14329 	}
14330       /* MAX where operand 1 is positive is positive.  */
14331       else if (tree_expr_nonzero_warnv_p (op1,
14332 					  &sub_strict_overflow_p)
14333 	       && tree_expr_nonnegative_warnv_p (op1,
14334 						 &sub_strict_overflow_p))
14335 	{
14336 	  if (sub_strict_overflow_p)
14337 	    *strict_overflow_p = true;
14338 	  return true;
14339 	}
14340       break;
14341 
14342     case BIT_IOR_EXPR:
14343       return (tree_expr_nonzero_warnv_p (op1,
14344 					 strict_overflow_p)
14345 	      || tree_expr_nonzero_warnv_p (op0,
14346 					    strict_overflow_p));
14347 
14348     default:
14349       break;
14350   }
14351 
14352   return false;
14353 }
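
/* Example for the PLUS_EXPR rule above: when signed overflow is
   undefined, if x >= 0 and y >= 1 then x + y cannot be zero, since two
   nonnegative values only sum to zero when both are zero.  Once either
   operand may be negative, no conclusion is drawn.  */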
14354 
14355 /* Return true when T is an address and is known to be nonzero.
14356    For floating point we further ensure that T is not denormal.
14357    Similar logic is present in nonzero_address in rtlanal.h.
14358 
14359    If the return value is based on the assumption that signed overflow
14360    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14361    change *STRICT_OVERFLOW_P.  */
14362 
14363 bool
14364 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14365 {
14366   bool sub_strict_overflow_p;
14367   switch (TREE_CODE (t))
14368     {
14369     case INTEGER_CST:
14370       return !integer_zerop (t);
14371 
14372     case ADDR_EXPR:
14373       {
14374 	tree base = TREE_OPERAND (t, 0);
14375 
14376 	if (!DECL_P (base))
14377 	  base = get_base_address (base);
14378 
14379 	if (base && TREE_CODE (base) == TARGET_EXPR)
14380 	  base = TARGET_EXPR_SLOT (base);
14381 
14382 	if (!base)
14383 	  return false;
14384 
14385 	/* For objects in symbol table check if we know they are non-zero.
14386 	   Don't do anything for variables and functions before symtab is built;
14387 	   it is quite possible that they will be declared weak later.  */
14388 	int nonzero_addr = maybe_nonzero_address (base);
14389 	if (nonzero_addr >= 0)
14390 	  return nonzero_addr;
14391 
14392 	/* Constants are never weak.  */
14393 	if (CONSTANT_CLASS_P (base))
14394 	  return true;
14395 
14396 	return false;
14397       }
14398 
14399     case COND_EXPR:
14400       sub_strict_overflow_p = false;
14401       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14402 				     &sub_strict_overflow_p)
14403 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14404 					&sub_strict_overflow_p))
14405 	{
14406 	  if (sub_strict_overflow_p)
14407 	    *strict_overflow_p = true;
14408 	  return true;
14409 	}
14410       break;
14411 
14412     case SSA_NAME:
14413       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14414 	break;
14415       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14416 
14417     default:
14418       break;
14419     }
14420   return false;
14421 }
14422 
14423 #define integer_valued_real_p(X) \
14424   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14425 
14426 #define RECURSE(X) \
14427   ((integer_valued_real_p) (X, depth + 1))
14428 
14429 /* Return true if the floating point result of (CODE OP0) has an
14430    integer value.  We also allow +Inf, -Inf and NaN to be considered
14431    integer values. Return false for signaling NaN.
14432 
14433    DEPTH is the current nesting depth of the query.  */
14434 
14435 bool
14436 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14437 {
14438   switch (code)
14439     {
14440     case FLOAT_EXPR:
14441       return true;
14442 
14443     case ABS_EXPR:
14444       return RECURSE (op0);
14445 
14446     CASE_CONVERT:
14447       {
14448 	tree type = TREE_TYPE (op0);
14449 	if (TREE_CODE (type) == INTEGER_TYPE)
14450 	  return true;
14451 	if (TREE_CODE (type) == REAL_TYPE)
14452 	  return RECURSE (op0);
14453 	break;
14454       }
14455 
14456     default:
14457       break;
14458     }
14459   return false;
14460 }
14461 
14462 /* Return true if the floating point result of (CODE OP0 OP1) has an
14463    integer value.  We also allow +Inf, -Inf and NaN to be considered
14464    integer values. Return false for signaling NaN.
14465 
14466    DEPTH is the current nesting depth of the query.  */
14467 
14468 bool
14469 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14470 {
14471   switch (code)
14472     {
14473     case PLUS_EXPR:
14474     case MINUS_EXPR:
14475     case MULT_EXPR:
14476     case MIN_EXPR:
14477     case MAX_EXPR:
14478       return RECURSE (op0) && RECURSE (op1);
14479 
14480     default:
14481       break;
14482     }
14483   return false;
14484 }
14485 
14486 /* Return true if the floating point result of calling FN with arguments
14487    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
14488    considered integer values.  Return false for signaling NaN.  If FN
14489    takes fewer than 2 arguments, the remaining ARGn are null.
14490 
14491    DEPTH is the current nesting depth of the query.  */
14492 
14493 bool
14494 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14495 {
14496   switch (fn)
14497     {
14498     CASE_CFN_CEIL:
14499     CASE_CFN_CEIL_FN:
14500     CASE_CFN_FLOOR:
14501     CASE_CFN_FLOOR_FN:
14502     CASE_CFN_NEARBYINT:
14503     CASE_CFN_NEARBYINT_FN:
14504     CASE_CFN_RINT:
14505     CASE_CFN_RINT_FN:
14506     CASE_CFN_ROUND:
14507     CASE_CFN_ROUND_FN:
14508     CASE_CFN_ROUNDEVEN:
14509     CASE_CFN_ROUNDEVEN_FN:
14510     CASE_CFN_TRUNC:
14511     CASE_CFN_TRUNC_FN:
14512       return true;
14513 
14514     CASE_CFN_FMIN:
14515     CASE_CFN_FMIN_FN:
14516     CASE_CFN_FMAX:
14517     CASE_CFN_FMAX_FN:
14518       return RECURSE (arg0) && RECURSE (arg1);
14519 
14520     default:
14521       break;
14522     }
14523   return false;
14524 }
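
/* For instance, trunc (x) is integer-valued for any x by the
   CASE_CFN_TRUNC rule, and fmax (floor (a), ceil (b)) is
   integer-valued because both arguments recurse to true.  A call such
   as sin (x) is not listed, so the predicate conservatively returns
   false for it.  */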
14525 
14526 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14527    has an integer value.  We also allow +Inf, -Inf and NaN to be
14528    considered integer values. Return false for signaling NaN.
14529 
14530    DEPTH is the current nesting depth of the query.  */
14531 
14532 bool
14533 integer_valued_real_single_p (tree t, int depth)
14534 {
14535   switch (TREE_CODE (t))
14536     {
14537     case REAL_CST:
14538       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14539 
14540     case COND_EXPR:
14541       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14542 
14543     case SSA_NAME:
14544       /* Limit the depth of recursion to avoid quadratic behavior.
14545 	 This is expected to catch almost all occurrences in practice.
14546 	 If this code misses important cases that unbounded recursion
14547 	 would not, passes that need this information could be revised
14548 	 to provide it through dataflow propagation.  */
14549       return (!name_registered_for_update_p (t)
14550 	      && depth < param_max_ssa_name_query_depth
14551 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14552 						    depth));
14553 
14554     default:
14555       break;
14556     }
14557   return false;
14558 }
14559 
14560 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14561    has an integer value.  We also allow +Inf, -Inf and NaN to be
14562    considered integer values. Return false for signaling NaN.
14563 
14564    DEPTH is the current nesting depth of the query.  */
14565 
14566 static bool
14567 integer_valued_real_invalid_p (tree t, int depth)
14568 {
14569   switch (TREE_CODE (t))
14570     {
14571     case COMPOUND_EXPR:
14572     case MODIFY_EXPR:
14573     case BIND_EXPR:
14574       return RECURSE (TREE_OPERAND (t, 1));
14575 
14576     case SAVE_EXPR:
14577       return RECURSE (TREE_OPERAND (t, 0));
14578 
14579     default:
14580       break;
14581     }
14582   return false;
14583 }
14584 
14585 #undef RECURSE
14586 #undef integer_valued_real_p
14587 
14588 /* Return true if the floating point expression T has an integer value.
14589    We also allow +Inf, -Inf and NaN to be considered integer values.
14590    Return false for signaling NaN.
14591 
14592    DEPTH is the current nesting depth of the query.  */
14593 
14594 bool
14595 integer_valued_real_p (tree t, int depth)
14596 {
14597   if (t == error_mark_node)
14598     return false;
14599 
14600   STRIP_ANY_LOCATION_WRAPPER (t);
14601 
14602   tree_code code = TREE_CODE (t);
14603   switch (TREE_CODE_CLASS (code))
14604     {
14605     case tcc_binary:
14606     case tcc_comparison:
14607       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14608 					   TREE_OPERAND (t, 1), depth);
14609 
14610     case tcc_unary:
14611       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14612 
14613     case tcc_constant:
14614     case tcc_declaration:
14615     case tcc_reference:
14616       return integer_valued_real_single_p (t, depth);
14617 
14618     default:
14619       break;
14620     }
14621 
14622   switch (code)
14623     {
14624     case COND_EXPR:
14625     case SSA_NAME:
14626       return integer_valued_real_single_p (t, depth);
14627 
14628     case CALL_EXPR:
14629       {
14630 	tree arg0 = (call_expr_nargs (t) > 0
14631 		     ? CALL_EXPR_ARG (t, 0)
14632 		     : NULL_TREE);
14633 	tree arg1 = (call_expr_nargs (t) > 1
14634 		     ? CALL_EXPR_ARG (t, 1)
14635 		     : NULL_TREE);
14636 	return integer_valued_real_call_p (get_call_combined_fn (t),
14637 					   arg0, arg1, depth);
14638       }
14639 
14640     default:
14641       return integer_valued_real_invalid_p (t, depth);
14642     }
14643 }
14644 
14645 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14646    attempt to fold the expression to a constant without modifying TYPE,
14647    OP0 or OP1.
14648 
14649    If the expression could be simplified to a constant, then return
14650    the constant.  If the expression would not be simplified to a
14651    constant, then return NULL_TREE.  */
14652 
14653 tree
14654 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14655 {
14656   tree tem = fold_binary (code, type, op0, op1);
14657   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14658 }
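
/* A minimal sketch (illustrative only, hence #if 0): folding 2 + 3 to
   the constant 5.  With a non-constant operand, fold_binary may still
   simplify, but the result would fail TREE_CONSTANT and NULL_TREE
   would be returned instead.  */
#if 0
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      build_int_cst (integer_type_node, 2),
				      build_int_cst (integer_type_node, 3));
  /* sum is an INTEGER_CST with value 5.  */
#endif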
14659 
14660 /* Given the components of a unary expression CODE, TYPE and OP0,
14661    attempt to fold the expression to a constant without modifying
14662    TYPE or OP0.
14663 
14664    If the expression could be simplified to a constant, then return
14665    the constant.  If the expression would not be simplified to a
14666    constant, then return NULL_TREE.  */
14667 
14668 tree
14669 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14670 {
14671   tree tem = fold_unary (code, type, op0);
14672   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14673 }
14674 
14675 /* If EXP represents referencing an element in a constant string
14676    (either via pointer arithmetic or array indexing), return the
14677    tree representing the value accessed, otherwise return NULL.  */
14678 
14679 tree
14680 fold_read_from_constant_string (tree exp)
14681 {
14682   if ((TREE_CODE (exp) == INDIRECT_REF
14683        || TREE_CODE (exp) == ARRAY_REF)
14684       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14685     {
14686       tree exp1 = TREE_OPERAND (exp, 0);
14687       tree index;
14688       tree string;
14689       location_t loc = EXPR_LOCATION (exp);
14690 
14691       if (TREE_CODE (exp) == INDIRECT_REF)
14692 	string = string_constant (exp1, &index, NULL, NULL);
14693       else
14694 	{
14695 	  tree low_bound = array_ref_low_bound (exp);
14696 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14697 
14698 	  /* Optimize the special-case of a zero lower bound.
14699 
14700 	     We convert the low_bound to sizetype to avoid some problems
14701 	     with constant folding.  (E.g. suppose the lower bound is 1,
14702 	     and its mode is QI.  Without the conversion, (ARRAY
14703 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14704 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14705 	  if (! integer_zerop (low_bound))
14706 	    index = size_diffop_loc (loc, index,
14707 				 fold_convert_loc (loc, sizetype, low_bound));
14708 
14709 	  string = exp1;
14710 	}
14711 
14712       scalar_int_mode char_mode;
14713       if (string
14714 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14715 	  && TREE_CODE (string) == STRING_CST
14716 	  && tree_fits_uhwi_p (index)
14717 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14718 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14719 			  &char_mode)
14720 	  && GET_MODE_SIZE (char_mode) == 1)
14721 	return build_int_cst_type (TREE_TYPE (exp),
14722 				   (TREE_STRING_POINTER (string)
14723 				    [TREE_INT_CST_LOW (index)]));
14724     }
14725   return NULL;
14726 }
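
/* For example, for the C expression "abc"[1], EXP is an ARRAY_REF
   whose base is a STRING_CST and whose index is the constant 1; the
   checks above confirm the index is an in-range constant and that the
   element mode is a one-byte integer mode, so the function returns the
   character constant 'b' in the type of EXP.  */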
14727 
14728 /* Folds a read from vector element at IDX of vector ARG.  */
14729 
14730 tree
14731 fold_read_from_vector (tree arg, poly_uint64 idx)
14732 {
14733   unsigned HOST_WIDE_INT i;
14734   if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14735       && known_ge (idx, 0u)
14736       && idx.is_constant (&i))
14737     {
14738       if (TREE_CODE (arg) == VECTOR_CST)
14739 	return VECTOR_CST_ELT (arg, i);
14740       else if (TREE_CODE (arg) == CONSTRUCTOR)
14741 	{
14742 	  if (CONSTRUCTOR_NELTS (arg)
14743 	      && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
14744 	    return NULL_TREE;
14745 	  if (i >= CONSTRUCTOR_NELTS (arg))
14746 	    return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14747 	  return CONSTRUCTOR_ELT (arg, i)->value;
14748 	}
14749     }
14750   return NULL_TREE;
14751 }
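
/* For example, reading element 3 of a four-element VECTOR_CST returns
   that element directly, while reading element 3 of a CONSTRUCTOR that
   lists only two scalar initializers returns a zero of the element
   type, matching the implicit zero-padding of vector constructors.  */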
14752 
14753 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14754    an integer constant, real, or fixed-point constant.
14755 
14756    TYPE is the type of the result.  */
14757 
14758 static tree
14759 fold_negate_const (tree arg0, tree type)
14760 {
14761   tree t = NULL_TREE;
14762 
14763   switch (TREE_CODE (arg0))
14764     {
14765     case REAL_CST:
14766       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14767       break;
14768 
14769     case FIXED_CST:
14770       {
14771         FIXED_VALUE_TYPE f;
14772         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14773 					    &(TREE_FIXED_CST (arg0)), NULL,
14774 					    TYPE_SATURATING (type));
14775 	t = build_fixed (type, f);
14776 	/* Propagate overflow flags.  */
14777 	if (overflow_p | TREE_OVERFLOW (arg0))
14778 	  TREE_OVERFLOW (t) = 1;
14779 	break;
14780       }
14781 
14782     default:
14783       if (poly_int_tree_p (arg0))
14784 	{
14785 	  wi::overflow_type overflow;
14786 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14787 	  t = force_fit_type (type, res, 1,
14788 			      (overflow && ! TYPE_UNSIGNED (type))
14789 			      || TREE_OVERFLOW (arg0));
14790 	  break;
14791 	}
14792 
14793       gcc_unreachable ();
14794     }
14795 
14796   return t;
14797 }
14798 
14799 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14800    an integer constant or real constant.
14801 
14802    TYPE is the type of the result.  */
14803 
14804 tree
14805 fold_abs_const (tree arg0, tree type)
14806 {
14807   tree t = NULL_TREE;
14808 
14809   switch (TREE_CODE (arg0))
14810     {
14811     case INTEGER_CST:
14812       {
14813         /* If the value is unsigned or non-negative, then the absolute value
14814 	   is the same as the ordinary value.  */
14815 	wide_int val = wi::to_wide (arg0);
14816 	wi::overflow_type overflow = wi::OVF_NONE;
14817 	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14818 	  ;
14819 
14820 	/* If the value is negative, then the absolute value is
14821 	   its negation.  */
14822 	else
14823 	  val = wi::neg (val, &overflow);
14824 
14825 	/* Force to the destination type, set TREE_OVERFLOW for signed
14826 	   TYPE only.  */
14827 	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14828       }
14829     break;
14830 
14831     case REAL_CST:
14832       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14833 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14834       else
14835 	t = arg0;
14836       break;
14837 
14838     default:
14839       gcc_unreachable ();
14840     }
14841 
14842   return t;
14843 }
14844 
14845 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14846    constant.  TYPE is the type of the result.  */
14847 
14848 static tree
14849 fold_not_const (const_tree arg0, tree type)
14850 {
14851   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14852 
14853   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14854 }
14855 
14856 /* Given CODE, a relational operator, the target type, TYPE and two
14857    constant operands OP0 and OP1, return the result of the
14858    relational operation.  If the result is not a compile time
14859    constant, then return NULL_TREE.  */
14860 
14861 static tree
14862 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14863 {
14864   int result, invert;
14865 
14866   /* From here on, the only cases we handle are when the result is
14867      known to be a constant.  */
14868 
14869   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14870     {
14871       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14872       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14873 
14874       /* Handle the cases where either operand is a NaN.  */
14875       if (real_isnan (c0) || real_isnan (c1))
14876 	{
14877 	  switch (code)
14878 	    {
14879 	    case EQ_EXPR:
14880 	    case ORDERED_EXPR:
14881 	      result = 0;
14882 	      break;
14883 
14884 	    case NE_EXPR:
14885 	    case UNORDERED_EXPR:
14886 	    case UNLT_EXPR:
14887 	    case UNLE_EXPR:
14888 	    case UNGT_EXPR:
14889 	    case UNGE_EXPR:
14890 	    case UNEQ_EXPR:
14891               result = 1;
14892 	      break;
14893 
14894 	    case LT_EXPR:
14895 	    case LE_EXPR:
14896 	    case GT_EXPR:
14897 	    case GE_EXPR:
14898 	    case LTGT_EXPR:
14899 	      if (flag_trapping_math)
14900 		return NULL_TREE;
14901 	      result = 0;
14902 	      break;
14903 
14904 	    default:
14905 	      gcc_unreachable ();
14906 	    }
14907 
14908 	  return constant_boolean_node (result, type);
14909 	}
14910 
14911       return constant_boolean_node (real_compare (code, c0, c1), type);
14912     }
14913 
14914   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14915     {
14916       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14917       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14918       return constant_boolean_node (fixed_compare (code, c0, c1), type);
14919     }
14920 
14921   /* Handle equality/inequality of complex constants.  */
14922   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14923     {
14924       tree rcond = fold_relational_const (code, type,
14925 					  TREE_REALPART (op0),
14926 					  TREE_REALPART (op1));
14927       tree icond = fold_relational_const (code, type,
14928 					  TREE_IMAGPART (op0),
14929 					  TREE_IMAGPART (op1));
14930       if (code == EQ_EXPR)
14931 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14932       else if (code == NE_EXPR)
14933 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14934       else
14935 	return NULL_TREE;
14936     }
14937 
14938   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14939     {
14940       if (!VECTOR_TYPE_P (type))
14941 	{
14942 	  /* Have vector comparison with scalar boolean result.  */
14943 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14944 		      && known_eq (VECTOR_CST_NELTS (op0),
14945 				   VECTOR_CST_NELTS (op1)));
14946 	  unsigned HOST_WIDE_INT nunits;
14947 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14948 	    return NULL_TREE;
14949 	  for (unsigned i = 0; i < nunits; i++)
14950 	    {
14951 	      tree elem0 = VECTOR_CST_ELT (op0, i);
14952 	      tree elem1 = VECTOR_CST_ELT (op1, i);
14953 	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14954 	      if (tmp == NULL_TREE)
14955 		return NULL_TREE;
14956 	      if (integer_zerop (tmp))
14957 		return constant_boolean_node (code == NE_EXPR, type);
14958 	    }
14959 	  return constant_boolean_node (code == EQ_EXPR, type);
14960 	}
14961       tree_vector_builder elts;
14962       if (!elts.new_binary_operation (type, op0, op1, false))
14963 	return NULL_TREE;
14964       unsigned int count = elts.encoded_nelts ();
14965       for (unsigned i = 0; i < count; i++)
14966 	{
14967 	  tree elem_type = TREE_TYPE (type);
14968 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14969 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14970 
14971 	  tree tem = fold_relational_const (code, elem_type,
14972 					    elem0, elem1);
14973 
14974 	  if (tem == NULL_TREE)
14975 	    return NULL_TREE;
14976 
14977 	  elts.quick_push (build_int_cst (elem_type,
14978 					  integer_zerop (tem) ? 0 : -1));
14979 	}
14980 
14981       return elts.build ();
14982     }
14983 
14984   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14985 
14986      To compute GT, swap the arguments and do LT.
14987      To compute GE, do LT and invert the result.
14988      To compute LE, swap the arguments, do LT and invert the result.
14989      To compute NE, do EQ and invert the result.
14990 
14991      Therefore, the code below must handle only EQ and LT.  */
14992 
14993   if (code == LE_EXPR || code == GT_EXPR)
14994     {
14995       std::swap (op0, op1);
14996       code = swap_tree_comparison (code);
14997     }
14998 
14999   /* Note that it is safe to invert for real values here because we
15000      have already handled the one case where it matters.  */
15001 
15002   invert = 0;
15003   if (code == NE_EXPR || code == GE_EXPR)
15004     {
15005       invert = 1;
15006       code = invert_tree_comparison (code, false);
15007     }
15008 
15009   /* Compute a result for LT or EQ if args permit;
15010      otherwise return NULL_TREE.  */
15011   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15012     {
15013       if (code == EQ_EXPR)
15014 	result = tree_int_cst_equal (op0, op1);
15015       else
15016 	result = tree_int_cst_lt (op0, op1);
15017     }
15018   else
15019     return NULL_TREE;
15020 
15021   if (invert)
15022     result ^= 1;
15023   return constant_boolean_node (result, type);
15024 }
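
/* A worked example of the canonicalization above: folding 3 >= 5
   rewrites GE_EXPR as inverted LT_EXPR (invert = 1), then
   tree_int_cst_lt (3, 5) yields 1, and the final inversion produces
   the constant false.  */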
15025 
15026 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15027    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
15028    itself.  */
15029 
15030 tree
15031 fold_build_cleanup_point_expr (tree type, tree expr)
15032 {
15033   /* If the expression does not have side effects then we don't have to wrap
15034      it with a cleanup point expression.  */
15035   if (!TREE_SIDE_EFFECTS (expr))
15036     return expr;
15037 
15038   /* If the expression is a return, check whether the expression inside the
15039      return, or the right-hand side of the modify expression inside the return,
15040      has no side effects.  If either has none, we don't need to wrap the
15041      expression in a cleanup point expression.  Note we don't check the
15042      left-hand side of the modify because it should always be the return decl.  */
15043   if (TREE_CODE (expr) == RETURN_EXPR)
15044     {
15045       tree op = TREE_OPERAND (expr, 0);
15046       if (!op || !TREE_SIDE_EFFECTS (op))
15047         return expr;
15048       op = TREE_OPERAND (op, 1);
15049       if (!TREE_SIDE_EFFECTS (op))
15050         return expr;
15051     }
15052 
15053   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15054 }
15055 
15056 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15057    of an indirection through OP0, or NULL_TREE if no simplification is
15058    possible.  */
15059 
15060 tree
15061 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15062 {
15063   tree sub = op0;
15064   tree subtype;
15065   poly_uint64 const_op01;
15066 
15067   STRIP_NOPS (sub);
15068   subtype = TREE_TYPE (sub);
15069   if (!POINTER_TYPE_P (subtype)
15070       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15071     return NULL_TREE;
15072 
15073   if (TREE_CODE (sub) == ADDR_EXPR)
15074     {
15075       tree op = TREE_OPERAND (sub, 0);
15076       tree optype = TREE_TYPE (op);
15077 
15078       /* *&CONST_DECL -> to the value of the const decl.  */
15079       if (TREE_CODE (op) == CONST_DECL)
15080 	return DECL_INITIAL (op);
15081       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
15082       if (type == optype)
15083 	{
15084 	  tree fop = fold_read_from_constant_string (op);
15085 	  if (fop)
15086 	    return fop;
15087 	  else
15088 	    return op;
15089 	}
15090       /* *(foo *)&fooarray => fooarray[0] */
15091       else if (TREE_CODE (optype) == ARRAY_TYPE
15092 	       && type == TREE_TYPE (optype)
15093 	       && (!in_gimple_form
15094 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15095 	{
15096 	  tree type_domain = TYPE_DOMAIN (optype);
15097 	  tree min_val = size_zero_node;
15098 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
15099 	    min_val = TYPE_MIN_VALUE (type_domain);
15100 	  if (in_gimple_form
15101 	      && TREE_CODE (min_val) != INTEGER_CST)
15102 	    return NULL_TREE;
15103 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
15104 			     NULL_TREE, NULL_TREE);
15105 	}
15106       /* *(foo *)&complexfoo => __real__ complexfoo */
15107       else if (TREE_CODE (optype) == COMPLEX_TYPE
15108 	       && type == TREE_TYPE (optype))
15109 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
15110       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15111       else if (VECTOR_TYPE_P (optype)
15112 	       && type == TREE_TYPE (optype))
15113 	{
15114 	  tree part_width = TYPE_SIZE (type);
15115 	  tree index = bitsize_int (0);
15116 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15117 				  index);
15118 	}
15119     }
15120 
15121   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15122       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15123     {
15124       tree op00 = TREE_OPERAND (sub, 0);
15125       tree op01 = TREE_OPERAND (sub, 1);
15126 
15127       STRIP_NOPS (op00);
15128       if (TREE_CODE (op00) == ADDR_EXPR)
15129 	{
15130 	  tree op00type;
15131 	  op00 = TREE_OPERAND (op00, 0);
15132 	  op00type = TREE_TYPE (op00);
15133 
15134 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15135 	  if (VECTOR_TYPE_P (op00type)
15136 	      && type == TREE_TYPE (op00type)
15137 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15138 		 but we want to treat offsets with MSB set as negative.
15139 		 For the code below negative offsets are invalid and
15140 		 TYPE_SIZE of the element is something unsigned, so
15141 		 check whether op01 fits into poly_int64, which implies
15142 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15143 		 then just use poly_uint64 because we want to treat the
15144 		 value as unsigned.  */
15145 	      && tree_fits_poly_int64_p (op01))
15146 	    {
15147 	      tree part_width = TYPE_SIZE (type);
15148 	      poly_uint64 max_offset
15149 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
15150 		   * TYPE_VECTOR_SUBPARTS (op00type));
15151 	      if (known_lt (const_op01, max_offset))
15152 		{
15153 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15154 		  return fold_build3_loc (loc,
15155 					  BIT_FIELD_REF, type, op00,
15156 					  part_width, index);
15157 		}
15158 	    }
15159 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15160 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
15161 		   && type == TREE_TYPE (op00type))
15162 	    {
15163 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15164 			    const_op01))
15165 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15166 	    }
15167 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
15168 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
15169 		   && type == TREE_TYPE (op00type))
15170 	    {
15171 	      tree type_domain = TYPE_DOMAIN (op00type);
15172 	      tree min_val = size_zero_node;
15173 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
15174 		min_val = TYPE_MIN_VALUE (type_domain);
15175 	      poly_uint64 type_size, index;
15176 	      if (poly_int_tree_p (min_val)
15177 		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15178 		  && multiple_p (const_op01, type_size, &index))
15179 		{
15180 		  poly_offset_int off = index + wi::to_poly_offset (min_val);
15181 		  op01 = wide_int_to_tree (sizetype, off);
15182 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
15183 				     NULL_TREE, NULL_TREE);
15184 		}
15185 	    }
15186 	}
15187     }
15188 
15189   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15190   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15191       && type == TREE_TYPE (TREE_TYPE (subtype))
15192       && (!in_gimple_form
15193 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15194     {
15195       tree type_domain;
15196       tree min_val = size_zero_node;
15197       sub = build_fold_indirect_ref_loc (loc, sub);
15198       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15199       if (type_domain && TYPE_MIN_VALUE (type_domain))
15200 	min_val = TYPE_MIN_VALUE (type_domain);
15201       if (in_gimple_form
15202 	  && TREE_CODE (min_val) != INTEGER_CST)
15203 	return NULL_TREE;
15204       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15205 			 NULL_TREE);
15206     }
15207 
15208   return NULL_TREE;
15209 }
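
/* Editorial illustration, not from the original source: at the C level
   the simplifications above correspond roughly to these rewrites,
   assuming the casts are otherwise valid and (for the array cases)
   "int a[4];" with 32-bit int:

     *(int *) &a          =>  a[0]            (array case)
     ((int *) &a)[2]      =>  a[2]            (POINTER_PLUS_EXPR case)
     *(double *) &c       =>  __real__ c      (for _Complex double c)
     ((double *) &c)[1]   =>  __imag__ c

   When no pattern matches, fold_indirect_ref_1 returns NULL_TREE and
   the caller keeps the original indirection.  */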
15210 
15211 /* Builds an expression for an indirection through T, simplifying some
15212    cases.  */
15213 
15214 tree
15215 build_fold_indirect_ref_loc (location_t loc, tree t)
15216 {
15217   tree type = TREE_TYPE (TREE_TYPE (t));
15218   tree sub = fold_indirect_ref_1 (loc, type, t);
15219 
15220   if (sub)
15221     return sub;
15222 
15223   return build1_loc (loc, INDIRECT_REF, type, t);
15224 }
15225 
15226 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
15227 
15228 tree
15229 fold_indirect_ref_loc (location_t loc, tree t)
15230 {
15231   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15232 
15233   if (sub)
15234     return sub;
15235   else
15236     return t;
15237 }
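
/* Editorial usage sketch, not from the original source: a caller that
   takes the address of a declaration and immediately dereferences it
   gets the declaration back via the *&p => p case above, e.g.:

     tree addr = build_fold_addr_expr_loc (loc, decl);
     tree deref = build_fold_indirect_ref_loc (loc, addr);
     // deref is DECL itself, not an INDIRECT_REF node.  */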
15238 
15239 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15240    whose result is ignored.  The type of the returned tree need not be
15241    the same as the original expression.  */
15242 
15243 tree
15244 fold_ignored_result (tree t)
15245 {
15246   if (!TREE_SIDE_EFFECTS (t))
15247     return integer_zero_node;
15248 
15249   for (;;)
15250     switch (TREE_CODE_CLASS (TREE_CODE (t)))
15251       {
15252       case tcc_unary:
15253 	t = TREE_OPERAND (t, 0);
15254 	break;
15255 
15256       case tcc_binary:
15257       case tcc_comparison:
15258 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15259 	  t = TREE_OPERAND (t, 0);
15260 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15261 	  t = TREE_OPERAND (t, 1);
15262 	else
15263 	  return t;
15264 	break;
15265 
15266       case tcc_expression:
15267 	switch (TREE_CODE (t))
15268 	  {
15269 	  case COMPOUND_EXPR:
15270 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15271 	      return t;
15272 	    t = TREE_OPERAND (t, 0);
15273 	    break;
15274 
15275 	  case COND_EXPR:
15276 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15277 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15278 	      return t;
15279 	    t = TREE_OPERAND (t, 0);
15280 	    break;
15281 
15282 	  default:
15283 	    return t;
15284 	  }
15285 	break;
15286 
15287       default:
15288 	return t;
15289       }
15290 }
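
/* Editorial worked example, not from the original source: for a
   discarded expression such as

     (void) (x + y++);

   the tcc_binary case drops the side-effect-free operand "x" and loops
   on "y++", whose POSTINCREMENT_EXPR hits the default case and is
   returned as-is.  Had the whole expression been free of side effects,
   the first check would have reduced it to integer_zero_node.  */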
15291 
15292 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */
15293 
15294 tree
15295 round_up_loc (location_t loc, tree value, unsigned int divisor)
15296 {
15297   tree div = NULL_TREE;
15298 
15299   if (divisor == 1)
15300     return value;
15301 
15302   /* See if VALUE is already a multiple of DIVISOR.  If so, there is
15303      nothing to do.  Only perform this check when VALUE is not a
15304      constant, because for constants the check is more expensive
15305      than simply doing the rounding below.  */
15306   if (TREE_CODE (value) != INTEGER_CST)
15307     {
15308       div = build_int_cst (TREE_TYPE (value), divisor);
15309 
15310       if (multiple_of_p (TREE_TYPE (value), value, div))
15311 	return value;
15312     }
15313 
15314   /* If divisor is a power of two, simplify this to bit manipulation.  */
15315   if (pow2_or_zerop (divisor))
15316     {
15317       if (TREE_CODE (value) == INTEGER_CST)
15318 	{
15319 	  wide_int val = wi::to_wide (value);
15320 	  bool overflow_p;
15321 
15322 	  if ((val & (divisor - 1)) == 0)
15323 	    return value;
15324 
15325 	  overflow_p = TREE_OVERFLOW (value);
15326 	  val += divisor - 1;
15327 	  val &= (int) -divisor;
15328 	  if (val == 0)
15329 	    overflow_p = true;
15330 
15331 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15332 	}
15333       else
15334 	{
15335 	  tree t;
15336 
15337 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
15338 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
15339 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15340 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15341 	}
15342     }
15343   else
15344     {
15345       if (!div)
15346 	div = build_int_cst (TREE_TYPE (value), divisor);
15347       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15348       value = size_binop_loc (loc, MULT_EXPR, value, div);
15349     }
15350 
15351   return value;
15352 }
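
/* Editorial worked example, not from the original source: for a
   power-of-two divisor D the fold above computes (value + D-1) & -D.
   With value = 21 and D = 8:  21 + 7 = 28, and 28 & ~7 = 24, so 21
   rounds up to 24.  A hedged standalone sketch of the same arithmetic:

     static unsigned HOST_WIDE_INT
     round_up_sketch (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
     {
       // Assumes d is a nonzero power of two.
       return (v + d - 1) & -d;
     }
*/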
15353 
15354 /* Likewise, but round down.  */
15355 
15356 tree
15357 round_down_loc (location_t loc, tree value, int divisor)
15358 {
15359   tree div = NULL_TREE;
15360 
15361   gcc_assert (divisor > 0);
15362   if (divisor == 1)
15363     return value;
15364 
15365   /* See if VALUE is already a multiple of DIVISOR.  If so, there is
15366      nothing to do.  Only perform this check when VALUE is not a
15367      constant, because for constants the check is more expensive
15368      than simply doing the rounding below.  */
15369   if (TREE_CODE (value) != INTEGER_CST)
15370     {
15371       div = build_int_cst (TREE_TYPE (value), divisor);
15372 
15373       if (multiple_of_p (TREE_TYPE (value), value, div))
15374 	return value;
15375     }
15376 
15377   /* If divisor is a power of two, simplify this to bit manipulation.  */
15378   if (pow2_or_zerop (divisor))
15379     {
15380       tree t;
15381 
15382       t = build_int_cst (TREE_TYPE (value), -divisor);
15383       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15384     }
15385   else
15386     {
15387       if (!div)
15388 	div = build_int_cst (TREE_TYPE (value), divisor);
15389       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15390       value = size_binop_loc (loc, MULT_EXPR, value, div);
15391     }
15392 
15393   return value;
15394 }
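
/* Editorial worked example, not from the original source: for a
   power-of-two divisor the fold reduces to a single mask, value & -D;
   with value = 21 and D = 8, 21 & ~7 = 16.  For other divisors the
   FLOOR_DIV_EXPR/MULT_EXPR pair does it the long way: with D = 6,
   (21 / 6) * 6 = 18.  */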
15395 
15396 /* Return a pointer to the base of the object addressed by EXP, and
15397    extract information about the offset of the access, storing it
15398    in *PBITPOS and *POFFSET.  */
15399 
15400 static tree
15401 split_address_to_core_and_offset (tree exp,
15402 				  poly_int64_pod *pbitpos, tree *poffset)
15403 {
15404   tree core;
15405   machine_mode mode;
15406   int unsignedp, reversep, volatilep;
15407   poly_int64 bitsize;
15408   location_t loc = EXPR_LOCATION (exp);
15409 
15410   if (TREE_CODE (exp) == ADDR_EXPR)
15411     {
15412       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15413 				  poffset, &mode, &unsignedp, &reversep,
15414 				  &volatilep);
15415       core = build_fold_addr_expr_loc (loc, core);
15416     }
15417   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15418     {
15419       core = TREE_OPERAND (exp, 0);
15420       STRIP_NOPS (core);
15421       *pbitpos = 0;
15422       *poffset = TREE_OPERAND (exp, 1);
15423       if (poly_int_tree_p (*poffset))
15424 	{
15425 	  poly_offset_int tem
15426 	    = wi::sext (wi::to_poly_offset (*poffset),
15427 			TYPE_PRECISION (TREE_TYPE (*poffset)));
15428 	  tem <<= LOG2_BITS_PER_UNIT;
15429 	  if (tem.to_shwi (pbitpos))
15430 	    *poffset = NULL_TREE;
15431 	}
15432     }
15433   else
15434     {
15435       core = exp;
15436       *pbitpos = 0;
15437       *poffset = NULL_TREE;
15438     }
15439 
15440   return core;
15441 }
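
/* Editorial illustration, not from the original source, assuming
   32-bit int and "int a[10];": for EXP = &a[3] the ADDR_EXPR arm uses
   get_inner_reference to peel the access, yielding core = &a,
   *pbitpos = 96 (3 * 32 bits) and *poffset = NULL_TREE.  For
   EXP = p + n (a POINTER_PLUS_EXPR with variable offset), it yields
   core = p, *pbitpos = 0 and *poffset = n.  */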
15442 
15443 /* Returns true if addresses of E1 and E2 differ by a constant, false
15444    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
15445 
15446 bool
15447 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15448 {
15449   tree core1, core2;
15450   poly_int64 bitpos1, bitpos2;
15451   tree toffset1, toffset2, tdiff, type;
15452 
15453   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15454   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15455 
15456   poly_int64 bytepos1, bytepos2;
15457   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15458       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15459       || !operand_equal_p (core1, core2, 0))
15460     return false;
15461 
15462   if (toffset1 && toffset2)
15463     {
15464       type = TREE_TYPE (toffset1);
15465       if (type != TREE_TYPE (toffset2))
15466 	toffset2 = fold_convert (type, toffset2);
15467 
15468       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15469       if (!cst_and_fits_in_hwi (tdiff))
15470 	return false;
15471 
15472       *diff = int_cst_value (tdiff);
15473     }
15474   else if (toffset1 || toffset2)
15475     {
15476       /* If only one of the offsets is non-constant, the difference cannot
15477 	 be a constant.  */
15478       return false;
15479     }
15480   else
15481     *diff = 0;
15482 
15483   *diff += bytepos1 - bytepos2;
15484   return true;
15485 }
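
/* Editorial worked example, not from the original source, assuming
   32-bit int and "int a[10];": for e1 = &a[5] and e2 = &a[2] both
   cores fold to &a, bytepos1 = 20 and bytepos2 = 8, so the function
   returns true with *diff = 12.  If only one side carries a variable
   offset the result is false; if both do, it succeeds only when their
   difference folds to a constant that fits in a HOST_WIDE_INT.  */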
15486 
15487 /* Return OFF converted to a pointer offset type suitable as offset for
15488    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
15489 tree
15490 convert_to_ptrofftype_loc (location_t loc, tree off)
15491 {
15492   return fold_convert_loc (loc, sizetype, off);
15493 }
15494 
15495 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
15496 tree
15497 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15498 {
15499   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15500 			  ptr, convert_to_ptrofftype_loc (loc, off));
15501 }
15502 
15503 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
15504 tree
15505 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15506 {
15507   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15508 			  ptr, size_int (off));
15509 }
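
/* Editorial usage sketch, not from the original source: advancing a
   pointer by a byte count goes through these helpers so the offset
   lands in sizetype first, e.g.:

     tree next = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
     // builds (and possibly folds) the tree for: ptr p+ 4

   The fold_build2_loc call may simplify further, for instance folding
   a constant offset into an existing ADDR_EXPR.  */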
15510 
15511 /* Return a pointer P to a NUL-terminated string containing the sequence
15512    of bytes corresponding to the representation of the object referred to
15513    by SRC (or a subsequence of such bytes within it if SRC is a reference
15514    to an initialized constant array plus some constant offset).
15515    If STRSIZE is non-null, store the number of bytes in the constant
15516    sequence including the terminating NUL byte.  *STRSIZE is equal to
15517    sizeof(A) - OFFSET where A is the array that stores the constant
15518    sequence that SRC points to and OFFSET is the byte offset of SRC from
15519    the beginning of A.  SRC need not point to a string or even an array
15520    of characters but may point to an object of any type.  */
15521 
15522 const char *
15523 c_getstr (tree src, unsigned HOST_WIDE_INT *strsize /* = NULL */)
15524 {
15525   /* The offset into the array A storing the string, and A's byte size.  */
15526   tree offset_node;
15527   tree mem_size;
15528 
15529   if (strsize)
15530     *strsize = 0;
15531 
15532   src = string_constant (src, &offset_node, &mem_size, NULL);
15533   if (!src)
15534     return NULL;
15535 
15536   unsigned HOST_WIDE_INT offset = 0;
15537   if (offset_node != NULL_TREE)
15538     {
15539       if (!tree_fits_uhwi_p (offset_node))
15540 	return NULL;
15541       else
15542 	offset = tree_to_uhwi (offset_node);
15543     }
15544 
15545   if (!tree_fits_uhwi_p (mem_size))
15546     return NULL;
15547 
15548   /* ARRAY_SIZE is the byte size of the array the constant sequence
15549      is stored in and equal to sizeof A.  INIT_BYTES is the number
15550      of bytes in the constant sequence used to initialize the array,
15551      including any embedded NULs as well as the terminating NUL (for
15552      strings), but not including any trailing zeros/NULs past
15553      the terminating one appended implicitly to a string literal to
15554      zero out the remainder of the array it's stored in.  For example,
15555      given:
15556        const char a[7] = "abc\0d";
15557        n = strlen (a + 1);
15558      ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
15559      (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15560      is equal to strlen (A) + 1.  */
15561   const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
15562   unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15563 
15564   /* Ideally this would turn into a gcc_checking_assert over time.  */
15565   if (init_bytes > array_size)
15566     init_bytes = array_size;
15567 
15568   const char *string = TREE_STRING_POINTER (src);
15569 
15574   if (init_bytes == 0 || offset >= array_size)
15575     return NULL;
15576 
15577   if (strsize)
15578     {
15579       /* Compute and store the number of characters from the beginning
15580 	 of the substring at OFFSET to the end, including the terminating
15581 	 nul.  Offsets past the initial length refer to the empty string.  */
15582       if (offset < init_bytes)
15583 	*strsize = init_bytes - offset;
15584       else
15585 	*strsize = 1;
15586     }
15587   else
15588     {
15589       tree eltype = TREE_TYPE (TREE_TYPE (src));
15590       /* Support only properly NUL-terminated single byte strings.  */
15591       if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15592 	return NULL;
15593       if (string[init_bytes - 1] != '\0')
15594 	return NULL;
15595     }
15596 
15597   return offset < init_bytes ? string + offset : "";
15598 }
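
/* Editorial worked example, not from the original source, reusing the
   array from the comment above:

     const char a[7] = "abc\0d";
     unsigned HOST_WIDE_INT n;
     const char *p = c_getstr (ref, &n);   // REF is a tree for &a[1]

   Here ARRAY_SIZE = 7, INIT_BYTES = 6 and OFFSET = 1, so the call
   returns a pointer reading as "bc" and sets n = 5, the bytes from
   offset 1 through the initializer's terminating NUL.  */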
15599 
15600 /* Given a tree T, compute which bits in T may be nonzero.  */
15601 
15602 wide_int
15603 tree_nonzero_bits (const_tree t)
15604 {
15605   switch (TREE_CODE (t))
15606     {
15607     case INTEGER_CST:
15608       return wi::to_wide (t);
15609     case SSA_NAME:
15610       return get_nonzero_bits (t);
15611     case NON_LVALUE_EXPR:
15612     case SAVE_EXPR:
15613       return tree_nonzero_bits (TREE_OPERAND (t, 0));
15614     case BIT_AND_EXPR:
15615       return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15616 			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
15617     case BIT_IOR_EXPR:
15618     case BIT_XOR_EXPR:
15619       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15620 			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15621     case COND_EXPR:
15622       return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15623 			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15624     CASE_CONVERT:
15625       return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15626 			     TYPE_PRECISION (TREE_TYPE (t)),
15627 			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15628     case PLUS_EXPR:
15629       if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15630 	{
15631 	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15632 	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15633 	  if (wi::bit_and (nzbits1, nzbits2) == 0)
15634 	    return wi::bit_or (nzbits1, nzbits2);
15635 	}
15636       break;
15637     case LSHIFT_EXPR:
15638       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15639 	{
15640 	  tree type = TREE_TYPE (t);
15641 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15642 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15643 				       TYPE_PRECISION (type));
15644 	  return wi::neg_p (arg1)
15645 		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15646 		 : wi::lshift (nzbits, arg1);
15647 	}
15648       break;
15649     case RSHIFT_EXPR:
15650       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15651         {
15652 	  tree type = TREE_TYPE (t);
15653 	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15654 	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15655 				       TYPE_PRECISION (type));
15656 	  return wi::neg_p (arg1)
15657 		 ? wi::lshift (nzbits, -arg1)
15658 		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15659         }
15660       break;
15661     default:
15662       break;
15663     }
15664 
15665   return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15666 }
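
/* Editorial worked examples, not from the original source, writing
   N(t) for tree_nonzero_bits (t) on 32-bit int operands:

     N (x & 0xF0)           == 0xF0
     N ((x & 3) | (y & 12)) == 0xF
     N ((x & 3) << 4)       == 0x30

   The PLUS_EXPR case relies on the operands' masks being disjoint:
   with no overlapping nonzero bits the addition cannot carry, so the
   union of the masks is exact.  Unhandled codes fall through to the
   all-ones mask, i.e. "every bit may be nonzero".  */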
15667 
15668 #if CHECKING_P
15669 
15670 namespace selftest {
15671 
15672 /* Helper functions for writing tests of folding trees.  */
15673 
15674 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
15675 
15676 static void
15677 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15678 			     tree constant)
15679 {
15680   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15681 }
15682 
15683 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15684    wrapping WRAPPED_EXPR.  */
15685 
15686 static void
15687 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15688 				 tree wrapped_expr)
15689 {
15690   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15691   ASSERT_NE (wrapped_expr, result);
15692   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15693   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15694 }
15695 
15696 /* Verify that various arithmetic binary operations are folded
15697    correctly.  */
15698 
15699 static void
15700 test_arithmetic_folding ()
15701 {
15702   tree type = integer_type_node;
15703   tree x = create_tmp_var_raw (type, "x");
15704   tree zero = build_zero_cst (type);
15705   tree one = build_int_cst (type, 1);
15706 
15707   /* Addition.  */
15708   /* 1 <-- (0 + 1) */
15709   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15710 			       one);
15711   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15712 			       one);
15713 
15714   /* (nonlvalue)x <-- (x + 0) */
15715   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15716 				   x);
15717 
15718   /* Subtraction.  */
15719   /* 0 <-- (x - x) */
15720   assert_binop_folds_to_const (x, MINUS_EXPR, x,
15721 			       zero);
15722   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15723 				   x);
15724 
15725   /* Multiplication.  */
15726   /* 0 <-- (x * 0) */
15727   assert_binop_folds_to_const (x, MULT_EXPR, zero,
15728 			       zero);
15729 
15730   /* (nonlvalue)x <-- (x * 1) */
15731   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15732 				   x);
15733 }
15734 
15735 /* Verify that various binary operations on vectors are folded
15736    correctly.  */
15737 
15738 static void
15739 test_vector_folding ()
15740 {
15741   tree inner_type = integer_type_node;
15742   tree type = build_vector_type (inner_type, 4);
15743   tree zero = build_zero_cst (type);
15744   tree one = build_one_cst (type);
15745   tree index = build_index_vector (type, 0, 1);
15746 
15747   /* Verify equality tests that return a scalar boolean result.  */
15748   tree res_type = boolean_type_node;
15749   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15750   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15751   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15752   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15753   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15754   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15755 					       index, one)));
15756   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15757 					      index, index)));
15758   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15759 					      index, index)));
15760 }
15761 
15762 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
15763 
15764 static void
15765 test_vec_duplicate_folding ()
15766 {
15767   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15768   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15769   /* This will be 1 if VEC_MODE isn't a vector mode.  */
15770   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15771 
15772   tree type = build_vector_type (ssizetype, nunits);
15773   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15774   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15775   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15776 }
15777 
15778 /* Run all of the selftests within this file.  */
15779 
15780 void
15781 fold_const_c_tests ()
15782 {
15783   test_arithmetic_folding ();
15784   test_vector_folding ();
15785   test_vec_duplicate_folding ();
15786 }
15787 
15788 } // namespace selftest
15789 
15790 #endif /* CHECKING_P */
15791