xref: /netbsd-src/external/gpl3/gcc/dist/gcc/c/c-fold.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* Support for fully folding sub-trees of an expression for C compiler.
2    Copyright (C) 1992-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "bitmap.h"
26 #include "c-tree.h"
27 #include "intl.h"
28 #include "gimplify.h"
29 
30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
31 				   bool);
32 
33 /* If DISABLE is true, stop issuing warnings.  This is used when
34    parsing code that we know will not be executed.  This function may
35    be called multiple times, and works as a stack.  */
36 
37 static void
c_disable_warnings(bool disable)38 c_disable_warnings (bool disable)
39 {
40   if (disable)
41     {
42       ++c_inhibit_evaluation_warnings;
43       fold_defer_overflow_warnings ();
44     }
45 }
46 
47 /* If ENABLE is true, reenable issuing warnings.  */
48 
49 static void
c_enable_warnings(bool enable)50 c_enable_warnings (bool enable)
51 {
52   if (enable)
53     {
54       --c_inhibit_evaluation_warnings;
55       fold_undefer_and_ignore_overflow_warnings ();
56     }
57 }
58 
59 /* Try to fold ARRAY_REF ary[index] if possible and not handled by
60    normal fold, return NULL_TREE otherwise.  */
61 
62 static tree
c_fold_array_ref(tree type,tree ary,tree index)63 c_fold_array_ref (tree type, tree ary, tree index)
64 {
65   if (TREE_CODE (ary) != STRING_CST
66       || TREE_CODE (index) != INTEGER_CST
67       || TREE_OVERFLOW (index)
68       || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
69       || !tree_fits_uhwi_p (index))
70     return NULL_TREE;
71 
72   tree elem_type = TREE_TYPE (TREE_TYPE (ary));
73   unsigned elem_nchars = (TYPE_PRECISION (elem_type)
74 			  / TYPE_PRECISION (char_type_node));
75   unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
76   tree nelts = array_type_nelts (TREE_TYPE (ary));
77   bool dummy1 = true, dummy2 = true;
78   nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
79   unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
80   if (!tree_int_cst_le (index, nelts)
81       || i >= len
82       || i + elem_nchars > len)
83     return NULL_TREE;
84 
85   if (elem_nchars == 1)
86     return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);
87 
88   const unsigned char *ptr
89     = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars);
90   return native_interpret_expr (type, ptr, elem_nchars);
91 }
92 
93 /* Fully fold EXPR, an expression that was not folded (beyond integer
94    constant expressions and null pointer constants) when being built
95    up.  If IN_INIT, this is in a static initializer and certain
96    changes are made to the folding done.  Clear *MAYBE_CONST if
97    MAYBE_CONST is not NULL and EXPR is definitely not a constant
98    expression because it contains an evaluated operator (in C99) or an
99    operator outside of sizeof returning an integer constant (in C90)
100    not permitted in constant expressions, or because it contains an
101    evaluated arithmetic overflow.  (*MAYBE_CONST should typically be
102    set to true by callers before calling this function.)  Return the
103    folded expression.  Function arguments have already been folded
104    before calling this function, as have the contents of SAVE_EXPR,
105    TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
106    C_MAYBE_CONST_EXPR.  LVAL is true if it should be treated as an
107    lvalue.  */
108 
109 tree
c_fully_fold(tree expr,bool in_init,bool * maybe_const,bool lval)110 c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
111 {
112   tree ret;
113   tree eptype = NULL_TREE;
114   bool dummy = true;
115   bool maybe_const_itself = true;
116   location_t loc = EXPR_LOCATION (expr);
117 
118   if (!maybe_const)
119     maybe_const = &dummy;
120   if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
121     {
122       eptype = TREE_TYPE (expr);
123       expr = TREE_OPERAND (expr, 0);
124     }
125   ret = c_fully_fold_internal (expr, in_init, maybe_const,
126 			       &maybe_const_itself, false, lval);
127   if (eptype)
128     ret = fold_convert_loc (loc, eptype, ret);
129   *maybe_const &= maybe_const_itself;
130   return ret;
131 }
132 
/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.
   LVAL indicates folding of lvalue, where we can't replace it with
   an rvalue.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  /* Per-operand constancy tracking for short-circuit and conditional
     operators, where an unevaluated arm must not poison the result.  */
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = warning_suppressed_p (expr, OPT_Woverflow);
  bool unused_p;
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables which we can optimize to its initializer.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
	{
	  if (in_init)
	    ret = decl_constant_value_1 (expr, true);
	  else
	    {
	      ret = decl_constant_value (expr);
	      /* Do not substitute BLKmode or array-typed values; keep
		 the variable reference instead.  */
	      if (ret != expr
		  && (TYPE_MODE (TREE_TYPE (ret)) == BLKmode
		      || TREE_CODE (TREE_TYPE (ret)) == ARRAY_TYPE))
		return expr;
	    }
	  /* Avoid unwanted tree sharing between the initializer and current
	     function's body where the tree can be modified e.g. by the
	     gimplifier.  */
	  if (ret != expr && TREE_STATIC (expr))
	    ret = unshare_expr (ret);
	  return ret;
	}
      return expr;
    }

  /* Remember the source range so it can be restored on the folded
     replacement at "out".  */
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  /* Recurse with FOR_INT_CONST so inner folding stops if any
	     operand fails to become an INTEGER_CST.  */
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true, lval);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  /* Carry qualifier-derived flags over to the rebuilt ref.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
	{
	  ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
	  if (ret)
	    goto out;
	  ret = expr;
	}
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      /* The first operand of these is an lvalue being modified.  */
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case MEM_REF:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Warn for overflow newly introduced by this fold, not one
	 already present on an operand.  */
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !(BINARY_CLASS_P (op0) && TREE_OVERFLOW_P (TREE_OPERAND (op0, 1)))
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      /* Shift warnings are only issued when folding turned a
	 non-constant operand into an INTEGER_CST; otherwise the
	 front end has already warned when building the expression.  */
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (orig_op0))
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      if (code == MEM_REF
	  && ret != expr
	  && TREE_CODE (ret) == MEM_REF)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ADDR_EXPR:
      /* Taking the address requires the operand as an lvalue.  */
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* These preserve lvalue-ness of the whole expression.  */
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    case PAREN_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_offsetof (op0, TREE_TYPE (expr));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0)
	;
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret, op0);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      /* OP1 is unevaluated when OP0 short-circuits the result;
	 suppress warnings from folding it in that case.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* OP0 always contributes to constancy; OP1 only when it may be
	 evaluated (or pre-C99, regardless, for *maybe_const_operands).  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      /* Suppress warnings from whichever arm a constant condition
	 proves unevaluated.  */
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      /* Unlike COND_EXPR, all three operands are always evaluated.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
	{
	  op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				       maybe_const_itself, for_int_const,
				       false);
	  TREE_OPERAND (expr, 0) = op0;
	  SAVE_EXPR_FOLDED_P (expr) = true;
	}
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
	ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= warning_suppressed_p (ret, OPT_Woverflow);
  STRIP_TYPE_NOPS (ret);
  /* Re-apply overflow-warning suppression if stripping removed the
     node that carried it.  */
  if (nowarning && !warning_suppressed_p (ret, OPT_Woverflow))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      suppress_warning (ret, OPT_Woverflow);
    }
  /* Preserve the original location and source range on the folded
     replacement.  */
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
691 
/* Fold X for consideration by one of the warning functions when checking
   whether an expression has a constant value.  Shared interface with the
   C++ front end, which does fold here; in C it is a no-op.  */

tree
fold_for_warn (tree x)
{
  /* The C front-end has already folded X appropriately.  */
  return x;
}
701