1 /* Support for fully folding sub-trees of an expression for C compiler. 2 Copyright (C) 1992-2017 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify it under 7 the terms of the GNU General Public License as published by the Free 8 Software Foundation; either version 3, or (at your option) any later 9 version. 10 11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12 WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 #include "config.h" 21 #include "system.h" 22 #include "coretypes.h" 23 #include "target.h" 24 #include "function.h" 25 #include "bitmap.h" 26 #include "c-tree.h" 27 #include "intl.h" 28 #include "gimplify.h" 29 30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool); 31 32 /* If DISABLE is true, stop issuing warnings. This is used when 33 parsing code that we know will not be executed. This function may 34 be called multiple times, and works as a stack. */ 35 36 static void 37 c_disable_warnings (bool disable) 38 { 39 if (disable) 40 { 41 ++c_inhibit_evaluation_warnings; 42 fold_defer_overflow_warnings (); 43 } 44 } 45 46 /* If ENABLE is true, reenable issuing warnings. */ 47 48 static void 49 c_enable_warnings (bool enable) 50 { 51 if (enable) 52 { 53 --c_inhibit_evaluation_warnings; 54 fold_undefer_and_ignore_overflow_warnings (); 55 } 56 } 57 58 /* Fully fold EXPR, an expression that was not folded (beyond integer 59 constant expressions and null pointer constants) when being built 60 up. If IN_INIT, this is in a static initializer and certain 61 changes are made to the folding done. 
Clear *MAYBE_CONST if 62 MAYBE_CONST is not NULL and EXPR is definitely not a constant 63 expression because it contains an evaluated operator (in C99) or an 64 operator outside of sizeof returning an integer constant (in C90) 65 not permitted in constant expressions, or because it contains an 66 evaluated arithmetic overflow. (*MAYBE_CONST should typically be 67 set to true by callers before calling this function.) Return the 68 folded expression. Function arguments have already been folded 69 before calling this function, as have the contents of SAVE_EXPR, 70 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and 71 C_MAYBE_CONST_EXPR. */ 72 73 tree 74 c_fully_fold (tree expr, bool in_init, bool *maybe_const) 75 { 76 tree ret; 77 tree eptype = NULL_TREE; 78 bool dummy = true; 79 bool maybe_const_itself = true; 80 location_t loc = EXPR_LOCATION (expr); 81 82 if (!maybe_const) 83 maybe_const = &dummy; 84 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR) 85 { 86 eptype = TREE_TYPE (expr); 87 expr = TREE_OPERAND (expr, 0); 88 } 89 ret = c_fully_fold_internal (expr, in_init, maybe_const, 90 &maybe_const_itself, false); 91 if (eptype) 92 ret = fold_convert_loc (loc, eptype, ret); 93 *maybe_const &= maybe_const_itself; 94 return ret; 95 } 96 97 /* Internal helper for c_fully_fold. EXPR and IN_INIT are as for 98 c_fully_fold. *MAYBE_CONST_OPERANDS is cleared because of operands 99 not permitted, while *MAYBE_CONST_ITSELF is cleared because of 100 arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from 101 both evaluated and unevaluated subexpressions while 102 *MAYBE_CONST_ITSELF is carried from only evaluated 103 subexpressions). FOR_INT_CONST indicates if EXPR is an expression 104 with integer constant operands, and if any of the operands doesn't 105 get folded to an integer constant, don't fold the expression itself. 
*/

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, SAVE_EXPRs and
     anything else not counted as an expression cannot usefully be
     folded further at this point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement
      || code == SAVE_EXPR)
    return expr;

  /* Remember the source range now: folding below may build a fresh
     tree and the range must be copied onto it at "out".  (The guard
     is always true here given the early return above; kept for
     safety.)  */
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  /* An expression with integer constant operands: fold the
	     inner expression with FOR_INT_CONST set so that folding
	     stops if any operand fails to become an INTEGER_CST.  */
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      /* The contents of these have already been folded; do not
	 recurse into them.  */
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      /* Fold only the object operand; the field (op1) and offset
	 alignment (op2) operands are left untouched.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  /* A freshly built node must carry the original's qualifier
	     flags.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      /* Fold the array (op0) and index (op1) operands; op2/op3 are
	 the type/alignment operands and stay as-is.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      /* Do not substitute a variable's constant value on the LHS of
	 an assignment or in/decrement: that operand is written to.  */
      if (code != MODIFY_EXPR
	  && code != PREDECREMENT_EXPR
	  && code != PREINCREMENT_EXPR
	  && code != POSTDECREMENT_EXPR
	  && code != POSTINCREMENT_EXPR)
	op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Warn about overflow introduced by this fold, not one already
	 present in the operands.  */
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret);
      /* Shift diagnostics below fire only when folding turned a
	 non-constant operand into an INTEGER_CST; constant operands
	 were already diagnosed when the expression was built.  */
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      /* For address-taking and complex-part references the operand
	 must stay an lvalue, so no constant substitution.  */
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
	op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_offsetof (op0, TREE_TYPE (expr));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);

      /* OP1 is unevaluated when OP0 short-circuits the result;
	 suppress warnings while folding it.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Merge constancy flags: OP1's flags only matter when OP1 is
	 evaluated (C99), or unconditionally for *MAYBE_CONST_OPERANDS
	 in C90.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);

      STRIP_TYPE_NOPS (op0);
      /* Each arm is unevaluated when the condition selects the other
	 arm; suppress warnings accordingly.  */
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      /* As for TRUTH_ANDIF_EXPR above: an arm's constancy flags only
	 matter when that arm is evaluated (C99) or always for C90
	 operands.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      /* Unlike COND_EXPR, all three operands are always evaluated.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      /* Re-attach the no-warning flag lost by stripping, wrapping in
	 a NOP_EXPR if the stripped node cannot carry it.  */
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      /* Propagate location information onto the replacement tree.  */
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}

/* If not optimizing, EXP is not a VAR_DECL, or EXP has array type,
   return EXP.  Otherwise, return either EXP or its known constant
   value (if it has one), but return EXP if EXP has mode BLKmode.  ???
   Is the BLKmode test appropriate?  */

tree
decl_constant_value_for_optimization (tree exp)
{
  tree ret;

  if (!optimize
      || !VAR_P (exp)
      || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
      || DECL_MODE (exp) == BLKmode)
    return exp;

  ret = decl_constant_value (exp);
  /* Avoid unwanted tree sharing between the initializer and current
     function's body where the tree can be modified e.g. by the
     gimplifier.  */
  if (ret != exp && TREE_STATIC (exp))
    ret = unshare_expr (ret);
  return ret;
}