/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "bitmap.h"
#include "c-tree.h"
#include "intl.h"
#include "gimplify.h"

static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool);

/* If DISABLE is true, stop issuing warnings.  This is used when
   parsing code that we know will not be executed.  This function may
   be called multiple times, and works as a stack.  */

static void
c_disable_warnings (bool disable)
{
  if (disable)
    {
      ++c_inhibit_evaluation_warnings;
      fold_defer_overflow_warnings ();
    }
}

/* If ENABLE is true, reenable issuing warnings.  */

static void
c_enable_warnings (bool enable)
{
  if (enable)
    {
      --c_inhibit_evaluation_warnings;
      fold_undefer_and_ignore_overflow_warnings ();
    }
}

/* Fully fold EXPR, an expression that was not folded (beyond integer
   constant expressions and null pointer constants) when being built
   up.  If IN_INIT, this is in a static initializer and certain
   changes are made to the folding done.  Clear *MAYBE_CONST if
   MAYBE_CONST is not NULL and EXPR is definitely not a constant
   expression because it contains an evaluated operator (in C99) or an
   operator outside of sizeof returning an integer constant (in C90)
   not permitted in constant expressions, or because it contains an
   evaluated arithmetic overflow.  (*MAYBE_CONST should typically be
   set to true by callers before calling this function.)  Return the
   folded expression.  Function arguments have already been folded
   before calling this function, as have the contents of SAVE_EXPR,
   TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree expr, bool in_init, bool *maybe_const)
{
  tree ret;
  tree eptype = NULL_TREE;
  bool dummy = true;
  bool maybe_const_itself = true;
  location_t loc = EXPR_LOCATION (expr);

  if (!maybe_const)
    maybe_const = &dummy;
  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr);
      expr = TREE_OPERAND (expr, 0);
    }
  ret = c_fully_fold_internal (expr, in_init, maybe_const,
                               &maybe_const_itself, false);
  if (eptype)
    ret = fold_convert_loc (loc, eptype, ret);
  *maybe_const &= maybe_const_itself;
  return ret;
}
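
/* Illustrative usage sketch, not part of GCC itself: a caller that has
   finished building up an expression might fold it and test for
   definite non-constancy roughly as follows, where UNFOLDED_EXPR is a
   placeholder for the tree produced by the parser:

     bool maybe_const = true;
     tree folded = c_fully_fold (unfolded_expr, false, &maybe_const);
     if (!maybe_const)
       ...diagnose if the context requires a constant expression...

   As documented above, function arguments and the contents of
   SAVE_EXPR, TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
   C_MAYBE_CONST_EXPR must already have been folded before the call.  */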

/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
                       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, SAVE_EXPRs and
     anything else not counted as an expression cannot usefully be
     folded further at this point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement
      || code == SAVE_EXPR)
    return expr;

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
        *maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
        {
          *maybe_const_itself = false;
          inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
                                         maybe_const_itself, true);
        }
      if (pre && !in_init)
        ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
        ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }
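
  /* Illustrative example, not taken from the surrounding code: an
     expression such as

       (i = 1, 0)

     reaches the MODIFY_EXPR and COMPOUND_EXPR cases above, so
     *MAYBE_CONST_OPERANDS is cleared and the whole expression is
     treated as definitely not a constant expression even though its
     value is known.  */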

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
         C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
        ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
        ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
         decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != MODIFY_EXPR
          && code != PREDECREMENT_EXPR
          && code != PREINCREMENT_EXPR
          && code != POSTDECREMENT_EXPR
          && code != POSTINCREMENT_EXPR)
        op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
         expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
        op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
                            || TREE_CODE (op1) != INTEGER_CST))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
          && !TREE_OVERFLOW_P (op0)
          && !TREE_OVERFLOW_P (op1))
        overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret);
      if (code == LSHIFT_EXPR
          && TREE_CODE (orig_op0) != INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
          && TREE_CODE (op0) == INTEGER_CST
          && c_inhibit_evaluation_warnings == 0
          && tree_int_cst_sgn (op0) < 0)
        warning_at (loc, OPT_Wshift_negative_value,
                    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
              || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
          && c_inhibit_evaluation_warnings == 0)
        {
          if (tree_int_cst_sgn (op1) < 0)
            warning_at (loc, OPT_Wshift_count_negative,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count is negative")
                         : G_("right shift count is negative")));
          else if (compare_tree_int (op1,
                                     TYPE_PRECISION (TREE_TYPE (orig_op0)))
                   >= 0)
            warning_at (loc, OPT_Wshift_count_overflow,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count >= width of type")
                         : G_("right shift count >= width of type")));
        }
      if (code == LSHIFT_EXPR
          /* If either OP0 has been folded to INTEGER_CST...  */
          && ((TREE_CODE (orig_op0) != INTEGER_CST
               && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
               && TREE_CODE (op0) == INTEGER_CST)
              /* ...or if OP1 has been folded to INTEGER_CST...  */
              || (TREE_CODE (orig_op1) != INTEGER_CST
                  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
                  && TREE_CODE (op1) == INTEGER_CST))
          && c_inhibit_evaluation_warnings == 0)
        /* ...then maybe we can detect an overflow.  */
        maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
           || code == CEIL_DIV_EXPR
           || code == FLOOR_DIV_EXPR
           || code == EXACT_DIV_EXPR
           || code == TRUNC_MOD_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
              || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
        warn_for_div_by_zero (loc, op1);
      goto out;
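
      /* Illustrative example for the shift and division diagnostics
         above (not from the GCC sources): given

           const int n = -1;

         using that variable as a shift count in "x << n" is, when
         optimizing, replaced by -1 only by the folding done here (via
         decl_constant_value_for_optimization), so
         -Wshift-count-negative is diagnosed at this point; the
         TREE_CODE (orig_op1) != INTEGER_CST checks avoid repeating
         warnings already issued when the expression was built.  */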

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
        op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
        goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
         not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
          && code == ADDR_EXPR
          && (op1 = get_base_address (op0)) != NULL_TREE
          && INDIRECT_REF_P (op1)
          && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
        ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
        ret = in_init
          ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
          : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
        ret = fold (expr);
      if (code == INDIRECT_REF
          && ret != expr
          && INDIRECT_REF_P (ret))
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      switch (code)
        {
        case FIX_TRUNC_EXPR:
        case FLOAT_EXPR:
        CASE_CONVERT:
          /* Don't warn about explicit conversions.  We will already
             have warned about suspect implicit conversions.  */
          break;

        default:
          if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
            overflow_warning (EXPR_LOCATION (expr), ret);
          break;
        }
      goto out;
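
      /* Note for the short-circuit cases below (illustrative): when the
         first operand folds to a constant that already decides the
         result, as in

           0 && anything

         the other operand is known not to be evaluated, so its folding
         is bracketed with c_disable_warnings / c_enable_warnings to
         avoid diagnostics about code that will never execute.  */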

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
         arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
                          ? truthvalue_false_node
                          : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
              || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
                                   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Only the evaluated operand must be an INTEGER_CST.  */
              || (op0 == truthvalue_true_node
                  ? TREE_CODE (op1) != INTEGER_CST
                  : TREE_CODE (op2) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_operands &= op2_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_itself &= op2_const_self;
      goto out;
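
      /* Illustrative note on the flag_isoc99 tests above: in

           0 ? i++ : 4

         the increment lies in the operand that is not evaluated, so
         under the C99-style rule it does not clear
         *MAYBE_CONST_OPERANDS once the condition is constant, while
         the C90-style rule still clears it; *MAYBE_CONST_ITSELF only
         ever receives contributions from evaluated subexpressions, as
         described before c_fully_fold_internal.  */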

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
         encountered must remove the EXCESS_PRECISION_EXPR around
         inner operands and possibly put one around the whole
         expression or possibly convert to the semantic type (which
         c_fully_fold does); we cannot tell at this stage which is
         appropriate in any particular case.  */
      gcc_unreachable ();

    default:
      /* Various codes may appear through folding built-in functions
         and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
        ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
        set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}

/* If not optimizing, EXP is not a VAR_DECL, or EXP has array type,
   return EXP.  Otherwise, return either EXP or its known constant
   value (if it has one), but return EXP if EXP has mode BLKmode.  ???
   Is the BLKmode test appropriate?  */

tree
decl_constant_value_for_optimization (tree exp)
{
  tree ret;

  if (!optimize
      || !VAR_P (exp)
      || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
      || DECL_MODE (exp) == BLKmode)
    return exp;

  ret = decl_constant_value (exp);
  /* Avoid unwanted tree sharing between the initializer and current
     function's body where the tree can be modified e.g. by the
     gimplifier.  */
  if (ret != exp && TREE_STATIC (exp))
    ret = unshare_expr (ret);
  return ret;
}
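
/* Illustrative example, not part of GCC: given

     const int n = 8;

   a later use of that variable inside a function may be replaced by
   the constant 8 by decl_constant_value_for_optimization when
   optimizing; the unshare_expr call above keeps the initializer of a
   static variable from being shared with, and later modified through,
   the trees of the current function's body.  */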