/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
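
/* The four bits encode, from least to most significant, the LT, EQ,
   GT and UNORD relations, so COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ
   and COMPCODE_NE is COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD;
   ANDing or ORing two comparisons therefore reduces to a bitwise AND
   or OR of their codes.  */
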
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
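
/* For example, with 8-bit values 0x70 + 0x70 = 0xE0: a ^ b is 0, so
   ~(a ^ b) has the sign bit set, and a ^ sum is 0x90, whose sign bit
   is also set, so the macro yields nonzero -- the addition of two
   positive numbers produced a negative result.  */
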
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
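
/* For instance, with a 64-bit HOST_WIDE_INT, BASE is 2^32, and the
   two-word integer HI:LOW = 0x1:0x123456789ABCDEF0 is unpacked into
   the half-words { 0x9ABCDEF0, 0x12345678, 0x1, 0x0 }; decode
   reassembles the pieces by the inverse computation.  */
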
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

/* Force the double-int HIGH:LOW into the range of the type TYPE by
   sign- or zero-extending it.  OVERFLOWABLE indicates whether we are
   interested in overflow of the value: when >0 we are only interested
   in signed overflow, for <0 we are interested in any overflow.
   OVERFLOWED indicates whether overflow has already occurred.  We set
   TREE_OVERFLOW on the result if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
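
/* As an example, fitting the value 0x1FF into an 8-bit signed type
   first masks it to 0xFF, then sign-extends to -1; since the result
   differs from the argument, fit_double_type reports overflow.  */
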
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
		       + (unsigned HOST_WIDE_INT) h2
		       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
	    || (h == h1
		&& l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
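
/* The multiplication loop above is the classic grade-school method in
   base 2^(HOST_BITS_PER_WIDE_INT / 2): because every digit is at most
   BASE - 1, each partial product plus the running carry still fits in
   a HOST_WIDE_INT, as the inline comments note.  */
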
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
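
/* Note the split shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   used above when moving bits between the two halves: a single shift
   by HOST_BITS_PER_WIDE_INT - count would be undefined for
   count == 0, since shifting by the full word width is not portable
   C.  */
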
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
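
/* Both rotates are composed from two logical shifts.  For PREC == 8,
   rotating 0x96 left by 4 ORs together (0x96 << 4) and (0x96 >> 4)
   within 8 bits: 0x60 | 0x09 == 0x69.  */
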
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order divisor digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);
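
  /* This is Knuth's Algorithm D: thanks to the scaling step, the
     estimate quo_est starts out at most 2 too high, the refinement
     above brings it to at most 1 too high, and the add-back step
     repairs that final case, so each digit needs only one trial
     division.  */
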
 finish_up:
  /* If the quotient is negative, negate it now.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
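
/* For example, dividing -7 by 2 yields quotient -3 and remainder -1
   under TRUNC_DIV_EXPR, quotient -4 and remainder 1 under
   FLOOR_DIV_EXPR, and quotient -4 under ROUND_DIV_EXPR as well, since
   2 * |rem| >= |den| rounds the quotient away from zero.  */
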
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is taken from operand two; that does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
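
/* A typical caller brackets a batch of folding like this (a sketch;
   the middle line stands for whatever work the caller does):

     fold_defer_overflow_warnings ();
     ... call fold () and examine the result ...
     fold_undefer_overflow_warnings (result_was_used, stmt, 0);

   so any "assuming signed overflow does not occur" warnings are
   emitted only if the folded result is actually used.  */
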
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
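
/* In an 8-bit signed type the only value rejected here is -128 (bit
   pattern 0x80): its negation, 128, does not fit, and the final
   comparison against 1 << (prec - 1) catches exactly that bit
   pattern.  */
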
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
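
/* For instance, negate_expr_p approves -(A + B) whenever either
   operand is cheaply negatable, since fold_negate_expr below can then
   rewrite the expression as (-B) - A or (-A) - B without
   materializing a NEGATE_EXPR of the whole sum.  */
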
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.  If negate_expr_p would return true for T,
   NULL_TREE will never be returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */
    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
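
/* The RSHIFT_EXPR case relies on the identity
   -((int) x >> 31) == (unsigned) x >> 31 for 32-bit x: the arithmetic
   shift produces 0 or -1, and negating that gives 0 or 1, exactly the
   logical shift of the sign bit.  */
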
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "Constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
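
/* As an example, splitting IN = x + 4 with CODE == PLUS_EXPR stores 4
   in *LITP and returns the variable part x; for IN = x - 4 the
   literal goes through *MINUS_LITP instead, recording that it was
   subtracted.  */
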
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    {
	      tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (t1, 0)));
	      goto associate_trees_exit;
	    }
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    {
	      tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (t2, 0)));
	      goto associate_trees_exit;
	    }
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
		    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
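
/* For the shift and rotate codes the second operand only supplies a
   count, so int_binop_types_match_p deliberately accepts mismatched
   signedness, precision and mode for them.  */
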
*/ 1647 1648 tree 1649 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc) 1650 { 1651 unsigned HOST_WIDE_INT int1l, int2l; 1652 HOST_WIDE_INT int1h, int2h; 1653 unsigned HOST_WIDE_INT low; 1654 HOST_WIDE_INT hi; 1655 unsigned HOST_WIDE_INT garbagel; 1656 HOST_WIDE_INT garbageh; 1657 tree t; 1658 tree type = TREE_TYPE (arg1); 1659 int uns = TYPE_UNSIGNED (type); 1660 int is_sizetype 1661 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)); 1662 int overflow = 0; 1663 1664 int1l = TREE_INT_CST_LOW (arg1); 1665 int1h = TREE_INT_CST_HIGH (arg1); 1666 int2l = TREE_INT_CST_LOW (arg2); 1667 int2h = TREE_INT_CST_HIGH (arg2); 1668 1669 switch (code) 1670 { 1671 case BIT_IOR_EXPR: 1672 low = int1l | int2l, hi = int1h | int2h; 1673 break; 1674 1675 case BIT_XOR_EXPR: 1676 low = int1l ^ int2l, hi = int1h ^ int2h; 1677 break; 1678 1679 case BIT_AND_EXPR: 1680 low = int1l & int2l, hi = int1h & int2h; 1681 break; 1682 1683 case RSHIFT_EXPR: 1684 int2l = -int2l; 1685 case LSHIFT_EXPR: 1686 /* It's unclear from the C standard whether shifts can overflow. 1687 The following code ignores overflow; perhaps a C standard 1688 interpretation ruling is needed. */ 1689 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type), 1690 &low, &hi, !uns); 1691 break; 1692 1693 case RROTATE_EXPR: 1694 int2l = - int2l; 1695 case LROTATE_EXPR: 1696 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type), 1697 &low, &hi); 1698 break; 1699 1700 case PLUS_EXPR: 1701 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi); 1702 break; 1703 1704 case MINUS_EXPR: 1705 neg_double (int2l, int2h, &low, &hi); 1706 add_double (int1l, int1h, low, hi, &low, &hi); 1707 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h); 1708 break; 1709 1710 case MULT_EXPR: 1711 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi); 1712 break; 1713 1714 case TRUNC_DIV_EXPR: 1715 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: 1716 case EXACT_DIV_EXPR: 1717 /* This is a shortcut for a common special case. */ 1718 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 1719 && !TREE_OVERFLOW (arg1) 1720 && !TREE_OVERFLOW (arg2) 1721 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) 1722 { 1723 if (code == CEIL_DIV_EXPR) 1724 int1l += int2l - 1; 1725 1726 low = int1l / int2l, hi = 0; 1727 break; 1728 } 1729 1730 /* ... fall through ... */ 1731 1732 case ROUND_DIV_EXPR: 1733 if (int2h == 0 && int2l == 0) 1734 return NULL_TREE; 1735 if (int2h == 0 && int2l == 1) 1736 { 1737 low = int1l, hi = int1h; 1738 break; 1739 } 1740 if (int1l == int2l && int1h == int2h 1741 && ! (int1l == 0 && int1h == 0)) 1742 { 1743 low = 1, hi = 0; 1744 break; 1745 } 1746 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h, 1747 &low, &hi, &garbagel, &garbageh); 1748 break; 1749 1750 case TRUNC_MOD_EXPR: 1751 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: 1752 /* This is a shortcut for a common special case. */ 1753 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 1754 && !TREE_OVERFLOW (arg1) 1755 && !TREE_OVERFLOW (arg2) 1756 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) 1757 { 1758 if (code == CEIL_MOD_EXPR) 1759 int1l += int2l - 1; 1760 low = int1l % int2l, hi = 0; 1761 break; 1762 } 1763 1764 /* ... fall through ... 

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
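
/* A minimal sketch of a caller folding `2 + 3' at compile time,
   assuming ARG1 and ARG2 are INTEGER_CSTs of the same type:

     tree sum = int_const_binop (PLUS_EXPR, arg1, arg2, 0);

   With NOTRUNC zero, the result is forced into the type's precision
   and TREE_OVERFLOW is set through force_fit_type_double as
   needed.  */
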
*/ 1892 if (flag_trapping_math 1893 && MODE_HAS_INFINITIES (mode) 1894 && REAL_VALUE_ISINF (result) 1895 && !REAL_VALUE_ISINF (d1) 1896 && !REAL_VALUE_ISINF (d2)) 1897 return NULL_TREE; 1898 1899 /* Don't constant fold this floating point operation if the 1900 result may depend upon the run-time rounding mode and 1901 flag_rounding_math is set, or if GCC's software emulation 1902 is unable to accurately represent the result. */ 1903 if ((flag_rounding_math 1904 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations)) 1905 && (inexact || !real_identical (&result, &value))) 1906 return NULL_TREE; 1907 1908 t = build_real (type, result); 1909 1910 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2); 1911 return t; 1912 } 1913 1914 if (TREE_CODE (arg1) == FIXED_CST) 1915 { 1916 FIXED_VALUE_TYPE f1; 1917 FIXED_VALUE_TYPE f2; 1918 FIXED_VALUE_TYPE result; 1919 tree t, type; 1920 int sat_p; 1921 bool overflow_p; 1922 1923 /* The following codes are handled by fixed_arithmetic. */ 1924 switch (code) 1925 { 1926 case PLUS_EXPR: 1927 case MINUS_EXPR: 1928 case MULT_EXPR: 1929 case TRUNC_DIV_EXPR: 1930 f2 = TREE_FIXED_CST (arg2); 1931 break; 1932 1933 case LSHIFT_EXPR: 1934 case RSHIFT_EXPR: 1935 f2.data.high = TREE_INT_CST_HIGH (arg2); 1936 f2.data.low = TREE_INT_CST_LOW (arg2); 1937 f2.mode = SImode; 1938 break; 1939 1940 default: 1941 return NULL_TREE; 1942 } 1943 1944 f1 = TREE_FIXED_CST (arg1); 1945 type = TREE_TYPE (arg1); 1946 sat_p = TYPE_SATURATING (type); 1947 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p); 1948 t = build_fixed (type, result); 1949 /* Propagate overflow flags. */ 1950 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) 1951 TREE_OVERFLOW (t) = 1; 1952 return t; 1953 } 1954 1955 if (TREE_CODE (arg1) == COMPLEX_CST) 1956 { 1957 tree type = TREE_TYPE (arg1); 1958 tree r1 = TREE_REALPART (arg1); 1959 tree i1 = TREE_IMAGPART (arg1); 1960 tree r2 = TREE_REALPART (arg2); 1961 tree i2 = TREE_IMAGPART (arg2); 1962 tree real, imag; 1963 1964 switch (code) 1965 { 1966 case PLUS_EXPR: 1967 case MINUS_EXPR: 1968 real = const_binop (code, r1, r2, notrunc); 1969 imag = const_binop (code, i1, i2, notrunc); 1970 break; 1971 1972 case MULT_EXPR: 1973 if (COMPLEX_FLOAT_TYPE_P (type)) 1974 return do_mpc_arg2 (arg1, arg2, type, 1975 /* do_nonfinite= */ folding_initializer, 1976 mpc_mul); 1977 1978 real = const_binop (MINUS_EXPR, 1979 const_binop (MULT_EXPR, r1, r2, notrunc), 1980 const_binop (MULT_EXPR, i1, i2, notrunc), 1981 notrunc); 1982 imag = const_binop (PLUS_EXPR, 1983 const_binop (MULT_EXPR, r1, i2, notrunc), 1984 const_binop (MULT_EXPR, i1, r2, notrunc), 1985 notrunc); 1986 break; 1987 1988 case RDIV_EXPR: 1989 if (COMPLEX_FLOAT_TYPE_P (type)) 1990 return do_mpc_arg2 (arg1, arg2, type, 1991 /* do_nonfinite= */ folding_initializer, 1992 mpc_div); 1993 /* Fallthru ... */ 1994 case TRUNC_DIV_EXPR: 1995 case CEIL_DIV_EXPR: 1996 case FLOOR_DIV_EXPR: 1997 case ROUND_DIV_EXPR: 1998 if (flag_complex_method == 0) 1999 { 2000 /* Keep this algorithm in sync with 2001 tree-complex.c:expand_complex_div_straight(). 2002 2003 Expand complex division to scalars, straightforward algorithm.
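Writing a = ar + i*ai and b = br + i*bi: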
2004 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) 2005 t = br*br + bi*bi 2006 */ 2007 tree magsquared 2008 = const_binop (PLUS_EXPR, 2009 const_binop (MULT_EXPR, r2, r2, notrunc), 2010 const_binop (MULT_EXPR, i2, i2, notrunc), 2011 notrunc); 2012 tree t1 2013 = const_binop (PLUS_EXPR, 2014 const_binop (MULT_EXPR, r1, r2, notrunc), 2015 const_binop (MULT_EXPR, i1, i2, notrunc), 2016 notrunc); 2017 tree t2 2018 = const_binop (MINUS_EXPR, 2019 const_binop (MULT_EXPR, i1, r2, notrunc), 2020 const_binop (MULT_EXPR, r1, i2, notrunc), 2021 notrunc); 2022 2023 real = const_binop (code, t1, magsquared, notrunc); 2024 imag = const_binop (code, t2, magsquared, notrunc); 2025 } 2026 else 2027 { 2028 /* Keep this algorithm in sync with 2029 tree-complex.c:expand_complex_div_wide(). 2030 2031 Expand complex division to scalars, modified algorithm to minimize 2032 overflow with wide input ranges. */ 2033 tree compare = fold_build2 (LT_EXPR, boolean_type_node, 2034 fold_abs_const (r2, TREE_TYPE (type)), 2035 fold_abs_const (i2, TREE_TYPE (type))); 2036 2037 if (integer_nonzerop (compare)) 2038 { 2039 /* In the TRUE branch, we compute 2040 ratio = br/bi; 2041 div = (br * ratio) + bi; 2042 tr = (ar * ratio) + ai; 2043 ti = (ai * ratio) - ar; 2044 tr = tr / div; 2045 ti = ti / div; */ 2046 tree ratio = const_binop (code, r2, i2, notrunc); 2047 tree div = const_binop (PLUS_EXPR, i2, 2048 const_binop (MULT_EXPR, r2, ratio, 2049 notrunc), 2050 notrunc); 2051 real = const_binop (MULT_EXPR, r1, ratio, notrunc); 2052 real = const_binop (PLUS_EXPR, real, i1, notrunc); 2053 real = const_binop (code, real, div, notrunc); 2054 2055 imag = const_binop (MULT_EXPR, i1, ratio, notrunc); 2056 imag = const_binop (MINUS_EXPR, imag, r1, notrunc); 2057 imag = const_binop (code, imag, div, notrunc); 2058 } 2059 else 2060 { 2061 /* In the FALSE branch, we compute 2062 ratio = bi/br; 2063 div = (bi * ratio) + br; 2064 tr = (ai * ratio) + ar; 2065 ti = ai - (ar * ratio); 2066 tr = tr / div; 2067 ti = ti / div; */ 2068 tree ratio = const_binop (code, i2, r2, notrunc); 2069 tree div = const_binop (PLUS_EXPR, r2, 2070 const_binop (MULT_EXPR, i2, ratio, 2071 notrunc), 2072 notrunc); 2073 2074 real = const_binop (MULT_EXPR, i1, ratio, notrunc); 2075 real = const_binop (PLUS_EXPR, real, r1, notrunc); 2076 real = const_binop (code, real, div, notrunc); 2077 2078 imag = const_binop (MULT_EXPR, r1, ratio, notrunc); 2079 imag = const_binop (MINUS_EXPR, i1, imag, notrunc); 2080 imag = const_binop (code, imag, div, notrunc); 2081 } 2082 } 2083 break; 2084 2085 default: 2086 return NULL_TREE; 2087 } 2088 2089 if (real && imag) 2090 return build_complex (type, real, imag); 2091 } 2092 2093 if (TREE_CODE (arg1) == VECTOR_CST) 2094 { 2095 tree type = TREE_TYPE (arg1); 2096 int count = TYPE_VECTOR_SUBPARTS (type), i; 2097 tree elements1, elements2, list = NULL_TREE; 2098 2099 if (TREE_CODE (arg2) != VECTOR_CST) 2100 return NULL_TREE; 2101 2102 elements1 = TREE_VECTOR_CST_ELTS (arg1); 2103 elements2 = TREE_VECTOR_CST_ELTS (arg2); 2104 2105 for (i = 0; i < count; i++) 2106 { 2107 tree elem1, elem2, elem; 2108 2109 /* The trailing elements can be empty and should be treated as 0. */ 2110 if (!elements1) 2111 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 2112 else 2113 { 2114 elem1 = TREE_VALUE (elements1); 2115 elements1 = TREE_CHAIN (elements1); 2116 } 2117 2118 if (!elements2) 2119 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 2120 else 2121 { 2122 elem2 = TREE_VALUE (elements2); 2123 elements2
= TREE_CHAIN (elements2); 2124 } 2125 2126 elem = const_binop (code, elem1, elem2, notrunc); 2127 2128 /* It is possible that const_binop cannot handle the given 2129 code and returns NULL_TREE. */ 2130 if (elem == NULL_TREE) 2131 return NULL_TREE; 2132 2133 list = tree_cons (NULL_TREE, elem, list); 2134 } 2135 return build_vector (type, nreverse (list)); 2136 } 2137 return NULL_TREE; 2138 } 2139 2140 /* Create a size type INT_CST node with NUMBER sign extended. KIND 2141 indicates which particular sizetype to create. */ 2142 2143 tree 2144 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) 2145 { 2146 return build_int_cst (sizetype_tab[(int) kind], number); 2147 } 2148 2149 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE 2150 is a tree code. The type of the result is taken from the operands. 2151 Both must be equivalent integer types, a la int_binop_types_match_p. 2152 If the operands are constant, so is the result. */ 2153 2154 tree 2155 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1) 2156 { 2157 tree type = TREE_TYPE (arg0); 2158 2159 if (arg0 == error_mark_node || arg1 == error_mark_node) 2160 return error_mark_node; 2161 2162 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), 2163 TREE_TYPE (arg1))); 2164 2165 /* Handle the special case of two integer constants faster. */ 2166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 2167 { 2168 /* And some specific cases even faster than that. */ 2169 if (code == PLUS_EXPR) 2170 { 2171 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) 2172 return arg1; 2173 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 2174 return arg0; 2175 } 2176 else if (code == MINUS_EXPR) 2177 { 2178 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 2179 return arg0; 2180 } 2181 else if (code == MULT_EXPR) 2182 { 2183 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) 2184 return arg1; 2185 } 2186 2187 /* Handle the general case of two integer constants. */ 2188 return int_const_binop (code, arg0, arg1, 0); 2189 } 2190 2191 return fold_build2_loc (loc, code, type, arg0, arg1); 2192 } 2193 2194 /* Given two values, either both of sizetype or both of bitsizetype, 2195 compute the difference between the two values. Return the value 2196 in the signed type corresponding to the type of the operands. */ 2197 2198 tree 2199 size_diffop_loc (location_t loc, tree arg0, tree arg1) 2200 { 2201 tree type = TREE_TYPE (arg0); 2202 tree ctype; 2203 2204 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0), 2205 TREE_TYPE (arg1))); 2206 2207 /* If the type is already signed, just do the simple thing. */ 2208 if (!TYPE_UNSIGNED (type)) 2209 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1); 2210 2211 if (type == sizetype) 2212 ctype = ssizetype; 2213 else if (type == bitsizetype) 2214 ctype = sbitsizetype; 2215 else 2216 ctype = signed_type_for (type); 2217 2218 /* If either operand is not a constant, do the conversions to the signed 2219 type and subtract. The hardware will do the right thing with any 2220 overflow in the subtraction. */ 2221 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST) 2222 return size_binop_loc (loc, MINUS_EXPR, 2223 fold_convert_loc (loc, ctype, arg0), 2224 fold_convert_loc (loc, ctype, arg1)); 2225 2226 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE. 2227 Otherwise, subtract the other way, convert to CTYPE (we know that can't 2228 overflow) and negate (which can't either).
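For example, given sizetype operands ARG0 == 2 and ARG1 == 5, we compute 5 - 2 == 3 in sizetype, convert that to ssizetype, and return 0 - 3 == -3.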
Special-case a result 2229 of zero while we're here. */ 2230 if (tree_int_cst_equal (arg0, arg1)) 2231 return build_int_cst (ctype, 0); 2232 else if (tree_int_cst_lt (arg1, arg0)) 2233 return fold_convert_loc (loc, ctype, 2234 size_binop_loc (loc, MINUS_EXPR, arg0, arg1)); 2235 else 2236 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0), 2237 fold_convert_loc (loc, ctype, 2238 size_binop_loc (loc, 2239 MINUS_EXPR, 2240 arg1, arg0))); 2241 } 2242 2243 /* A subroutine of fold_convert_const handling conversions of an 2244 INTEGER_CST to another integer type. */ 2245 2246 static tree 2247 fold_convert_const_int_from_int (tree type, const_tree arg1) 2248 { 2249 tree t; 2250 2251 /* Given an integer constant, make new constant with new type, 2252 appropriately sign-extended or truncated. */ 2253 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1), 2254 TREE_INT_CST_HIGH (arg1), 2255 /* Don't set the overflow when 2256 converting from a pointer, */ 2257 !POINTER_TYPE_P (TREE_TYPE (arg1)) 2258 /* or to a sizetype with same signedness 2259 and the precision is unchanged. 2260 ??? sizetype is always sign-extended, 2261 but its signedness depends on the 2262 frontend. Thus we see spurious overflows 2263 here if we do not check this. */ 2264 && !((TYPE_PRECISION (TREE_TYPE (arg1)) 2265 == TYPE_PRECISION (type)) 2266 && (TYPE_UNSIGNED (TREE_TYPE (arg1)) 2267 == TYPE_UNSIGNED (type)) 2268 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE 2269 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1))) 2270 || (TREE_CODE (type) == INTEGER_TYPE 2271 && TYPE_IS_SIZETYPE (type)))), 2272 (TREE_INT_CST_HIGH (arg1) < 0 2273 && (TYPE_UNSIGNED (type) 2274 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 2275 | TREE_OVERFLOW (arg1)); 2276 2277 return t; 2278 } 2279 2280 /* A subroutine of fold_convert_const handling conversions of a REAL_CST 2281 to an integer type. */ 2282 2283 static tree 2284 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1) 2285 { 2286 int overflow = 0; 2287 tree t; 2288 2289 /* The following code implements the floating point to integer 2290 conversion rules required by the Java Language Specification, 2291 that IEEE NaNs are mapped to zero and values that overflow 2292 the target precision saturate, i.e. values greater than 2293 INT_MAX are mapped to INT_MAX, and values less than INT_MIN 2294 are mapped to INT_MIN. These semantics are allowed by the 2295 C and C++ standards, which simply state that the behavior of 2296 FP-to-integer conversion is unspecified upon overflow. */ 2297 2298 HOST_WIDE_INT high, low; 2299 REAL_VALUE_TYPE r; 2300 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); 2301 2302 switch (code) 2303 { 2304 case FIX_TRUNC_EXPR: 2305 real_trunc (&r, VOIDmode, &x); 2306 break; 2307 2308 default: 2309 gcc_unreachable (); 2310 } 2311 2312 /* If R is NaN, return zero and show we have an overflow. */ 2313 if (REAL_VALUE_ISNAN (r)) 2314 { 2315 overflow = 1; 2316 high = 0; 2317 low = 0; 2318 } 2319 2320 /* See if R is less than the lower bound or greater than the 2321 upper bound. */ 2322 2323 if (! overflow) 2324 { 2325 tree lt = TYPE_MIN_VALUE (type); 2326 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); 2327 if (REAL_VALUES_LESS (r, l)) 2328 { 2329 overflow = 1; 2330 high = TREE_INT_CST_HIGH (lt); 2331 low = TREE_INT_CST_LOW (lt); 2332 } 2333 } 2334 2335 if (!
overflow) 2336 { 2337 tree ut = TYPE_MAX_VALUE (type); 2338 if (ut) 2339 { 2340 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); 2341 if (REAL_VALUES_LESS (u, r)) 2342 { 2343 overflow = 1; 2344 high = TREE_INT_CST_HIGH (ut); 2345 low = TREE_INT_CST_LOW (ut); 2346 } 2347 } 2348 } 2349 2350 if (! overflow) 2351 REAL_VALUE_TO_INT (&low, &high, r); 2352 2353 t = force_fit_type_double (type, low, high, -1, 2354 overflow | TREE_OVERFLOW (arg1)); 2355 return t; 2356 } 2357 2358 /* A subroutine of fold_convert_const handling conversions of a 2359 FIXED_CST to an integer type. */ 2360 2361 static tree 2362 fold_convert_const_int_from_fixed (tree type, const_tree arg1) 2363 { 2364 tree t; 2365 double_int temp, temp_trunc; 2366 unsigned int mode; 2367 2368 /* Right shift FIXED_CST to temp by fbit. */ 2369 temp = TREE_FIXED_CST (arg1).data; 2370 mode = TREE_FIXED_CST (arg1).mode; 2371 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT) 2372 { 2373 lshift_double (temp.low, temp.high, 2374 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, 2375 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode)); 2376 2377 /* Left shift temp to temp_trunc by fbit. */ 2378 lshift_double (temp.low, temp.high, 2379 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, 2380 &temp_trunc.low, &temp_trunc.high, 2381 SIGNED_FIXED_POINT_MODE_P (mode)); 2382 } 2383 else 2384 { 2385 temp.low = 0; 2386 temp.high = 0; 2387 temp_trunc.low = 0; 2388 temp_trunc.high = 0; 2389 } 2390 2391 /* If FIXED_CST is negative, we need to round the value toward 0. 2392 We do this by adding 1 to temp when the discarded fractional 2393 bits are nonzero. */ 2394 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0 2395 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc)) 2396 { 2397 double_int one; 2398 one.low = 1; 2399 one.high = 0; 2400 temp = double_int_add (temp, one); 2401 } 2402 2403 /* Given a fixed-point constant, make new constant with new type, 2404 appropriately sign-extended or truncated. */ 2405 t = force_fit_type_double (type, temp.low, temp.high, -1, 2406 (temp.high < 0 2407 && (TYPE_UNSIGNED (type) 2408 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 2409 | TREE_OVERFLOW (arg1)); 2410 2411 return t; 2412 } 2413 2414 /* A subroutine of fold_convert_const handling conversions of a REAL_CST 2415 to another floating point type. */ 2416 2417 static tree 2418 fold_convert_const_real_from_real (tree type, const_tree arg1) 2419 { 2420 REAL_VALUE_TYPE value; 2421 tree t; 2422 2423 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); 2424 t = build_real (type, value); 2425 2426 /* If converting an infinity or NAN to a representation that doesn't 2427 have one, set the overflow bit so that we can produce some kind of 2428 error message at the appropriate point if necessary. It's not the 2429 most user-friendly message, but it's better than nothing. */ 2430 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1)) 2431 && !MODE_HAS_INFINITIES (TYPE_MODE (type))) 2432 TREE_OVERFLOW (t) = 1; 2433 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 2434 && !MODE_HAS_NANS (TYPE_MODE (type))) 2435 TREE_OVERFLOW (t) = 1; 2436 /* Regular overflow: the conversion produced an infinity in a mode that can't represent infinities.
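That is, the source value was finite, but it is too large for the target format.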
*/ 2437 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) 2438 && REAL_VALUE_ISINF (value) 2439 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1))) 2440 TREE_OVERFLOW (t) = 1; 2441 else 2442 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 2443 return t; 2444 } 2445 2446 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST 2447 to a floating point type. */ 2448 2449 static tree 2450 fold_convert_const_real_from_fixed (tree type, const_tree arg1) 2451 { 2452 REAL_VALUE_TYPE value; 2453 tree t; 2454 2455 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1)); 2456 t = build_real (type, value); 2457 2458 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 2459 return t; 2460 } 2461 2462 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST 2463 to another fixed-point type. */ 2464 2465 static tree 2466 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1) 2467 { 2468 FIXED_VALUE_TYPE value; 2469 tree t; 2470 bool overflow_p; 2471 2472 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1), 2473 TYPE_SATURATING (type)); 2474 t = build_fixed (type, value); 2475 2476 /* Propagate overflow flags. */ 2477 if (overflow_p | TREE_OVERFLOW (arg1)) 2478 TREE_OVERFLOW (t) = 1; 2479 return t; 2480 } 2481 2482 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST 2483 to a fixed-point type. */ 2484 2485 static tree 2486 fold_convert_const_fixed_from_int (tree type, const_tree arg1) 2487 { 2488 FIXED_VALUE_TYPE value; 2489 tree t; 2490 bool overflow_p; 2491 2492 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), 2493 TREE_INT_CST (arg1), 2494 TYPE_UNSIGNED (TREE_TYPE (arg1)), 2495 TYPE_SATURATING (type)); 2496 t = build_fixed (type, value); 2497 2498 /* Propagate overflow flags. */ 2499 if (overflow_p | TREE_OVERFLOW (arg1)) 2500 TREE_OVERFLOW (t) = 1; 2501 return t; 2502 } 2503 2504 /* A subroutine of fold_convert_const handling conversions of a REAL_CST 2505 to a fixed-point type. */ 2506 2507 static tree 2508 fold_convert_const_fixed_from_real (tree type, const_tree arg1) 2509 { 2510 FIXED_VALUE_TYPE value; 2511 tree t; 2512 bool overflow_p; 2513 2514 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type), 2515 &TREE_REAL_CST (arg1), 2516 TYPE_SATURATING (type)); 2517 t = build_fixed (type, value); 2518 2519 /* Propagate overflow flags. */ 2520 if (overflow_p | TREE_OVERFLOW (arg1)) 2521 TREE_OVERFLOW (t) = 1; 2522 return t; 2523 } 2524 2525 /* Attempt to fold type conversion operation CODE of expression ARG1 to 2526 type TYPE. If no simplification can be done, return NULL_TREE.
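For example, folding FIX_TRUNC_EXPR of the double REAL_CST 3.7 to int yields the INTEGER_CST 3, while a request we cannot handle simply yields NULL_TREE.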
*/ 2527 2528 static tree 2529 fold_convert_const (enum tree_code code, tree type, tree arg1) 2530 { 2531 if (TREE_TYPE (arg1) == type) 2532 return arg1; 2533 2534 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type) 2535 || TREE_CODE (type) == OFFSET_TYPE) 2536 { 2537 if (TREE_CODE (arg1) == INTEGER_CST) 2538 return fold_convert_const_int_from_int (type, arg1); 2539 else if (TREE_CODE (arg1) == REAL_CST) 2540 return fold_convert_const_int_from_real (code, type, arg1); 2541 else if (TREE_CODE (arg1) == FIXED_CST) 2542 return fold_convert_const_int_from_fixed (type, arg1); 2543 } 2544 else if (TREE_CODE (type) == REAL_TYPE) 2545 { 2546 if (TREE_CODE (arg1) == INTEGER_CST) 2547 return build_real_from_int_cst (type, arg1); 2548 else if (TREE_CODE (arg1) == REAL_CST) 2549 return fold_convert_const_real_from_real (type, arg1); 2550 else if (TREE_CODE (arg1) == FIXED_CST) 2551 return fold_convert_const_real_from_fixed (type, arg1); 2552 } 2553 else if (TREE_CODE (type) == FIXED_POINT_TYPE) 2554 { 2555 if (TREE_CODE (arg1) == FIXED_CST) 2556 return fold_convert_const_fixed_from_fixed (type, arg1); 2557 else if (TREE_CODE (arg1) == INTEGER_CST) 2558 return fold_convert_const_fixed_from_int (type, arg1); 2559 else if (TREE_CODE (arg1) == REAL_CST) 2560 return fold_convert_const_fixed_from_real (type, arg1); 2561 } 2562 return NULL_TREE; 2563 } 2564 2565 /* Construct a vector of zero elements of vector type TYPE. */ 2566 2567 static tree 2568 build_zero_vector (tree type) 2569 { 2570 tree elem, list; 2571 int i, units; 2572 2573 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 2574 units = TYPE_VECTOR_SUBPARTS (type); 2575 2576 list = NULL_TREE; 2577 for (i = 0; i < units; i++) 2578 list = tree_cons (NULL_TREE, elem, list); 2579 return build_vector (type, list); 2580 } 2581 2582 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */ 2583 2584 bool 2585 fold_convertible_p (const_tree type, const_tree arg) 2586 { 2587 tree orig = TREE_TYPE (arg); 2588 2589 if (type == orig) 2590 return true; 2591 2592 if (TREE_CODE (arg) == ERROR_MARK 2593 || TREE_CODE (type) == ERROR_MARK 2594 || TREE_CODE (orig) == ERROR_MARK) 2595 return false; 2596 2597 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 2598 return true; 2599 2600 switch (TREE_CODE (type)) 2601 { 2602 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2603 case POINTER_TYPE: case REFERENCE_TYPE: 2604 case OFFSET_TYPE: 2605 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2606 || TREE_CODE (orig) == OFFSET_TYPE) 2607 return true; 2608 return (TREE_CODE (orig) == VECTOR_TYPE 2609 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2610 2611 case REAL_TYPE: 2612 case FIXED_POINT_TYPE: 2613 case COMPLEX_TYPE: 2614 case VECTOR_TYPE: 2615 case VOID_TYPE: 2616 return TREE_CODE (type) == TREE_CODE (orig); 2617 2618 default: 2619 return false; 2620 } 2621 } 2622 2623 /* Convert expression ARG to type TYPE. Used by the middle-end for 2624 simple conversions in preference to calling the front-end's convert.
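For example, converting integer_one_node to double_type_node yields the REAL_CST 1.0.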
*/ 2625 2626 tree 2627 fold_convert_loc (location_t loc, tree type, tree arg) 2628 { 2629 tree orig = TREE_TYPE (arg); 2630 tree tem; 2631 2632 if (type == orig) 2633 return arg; 2634 2635 if (TREE_CODE (arg) == ERROR_MARK 2636 || TREE_CODE (type) == ERROR_MARK 2637 || TREE_CODE (orig) == ERROR_MARK) 2638 return error_mark_node; 2639 2640 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 2641 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2642 2643 switch (TREE_CODE (type)) 2644 { 2645 case POINTER_TYPE: 2646 case REFERENCE_TYPE: 2647 /* Handle conversions between pointers to different address spaces. */ 2648 if (POINTER_TYPE_P (orig) 2649 && (TYPE_ADDR_SPACE (TREE_TYPE (type)) 2650 != TYPE_ADDR_SPACE (TREE_TYPE (orig)))) 2651 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg); 2652 /* fall through */ 2653 2654 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2655 case OFFSET_TYPE: 2656 if (TREE_CODE (arg) == INTEGER_CST) 2657 { 2658 tem = fold_convert_const (NOP_EXPR, type, arg); 2659 if (tem != NULL_TREE) 2660 return tem; 2661 } 2662 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2663 || TREE_CODE (orig) == OFFSET_TYPE) 2664 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2665 if (TREE_CODE (orig) == COMPLEX_TYPE) 2666 return fold_convert_loc (loc, type, 2667 fold_build1_loc (loc, REALPART_EXPR, 2668 TREE_TYPE (orig), arg)); 2669 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE 2670 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2671 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2672 2673 case REAL_TYPE: 2674 if (TREE_CODE (arg) == INTEGER_CST) 2675 { 2676 tem = fold_convert_const (FLOAT_EXPR, type, arg); 2677 if (tem != NULL_TREE) 2678 return tem; 2679 } 2680 else if (TREE_CODE (arg) == REAL_CST) 2681 { 2682 tem = fold_convert_const (NOP_EXPR, type, arg); 2683 if (tem != NULL_TREE) 2684 return tem; 2685 } 2686 else if (TREE_CODE (arg) == FIXED_CST) 2687 { 2688 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 2689 if (tem != NULL_TREE) 2690 return tem; 2691 } 2692 2693 switch (TREE_CODE (orig)) 2694 { 2695 case INTEGER_TYPE: 2696 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2697 case POINTER_TYPE: case REFERENCE_TYPE: 2698 return fold_build1_loc (loc, FLOAT_EXPR, type, arg); 2699 2700 case REAL_TYPE: 2701 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2702 2703 case FIXED_POINT_TYPE: 2704 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 2705 2706 case COMPLEX_TYPE: 2707 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 2708 return fold_convert_loc (loc, type, tem); 2709 2710 default: 2711 gcc_unreachable (); 2712 } 2713 2714 case FIXED_POINT_TYPE: 2715 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST 2716 || TREE_CODE (arg) == REAL_CST) 2717 { 2718 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 2719 if (tem != NULL_TREE) 2720 goto fold_convert_exit; 2721 } 2722 2723 switch (TREE_CODE (orig)) 2724 { 2725 case FIXED_POINT_TYPE: 2726 case INTEGER_TYPE: 2727 case ENUMERAL_TYPE: 2728 case BOOLEAN_TYPE: 2729 case REAL_TYPE: 2730 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 2731 2732 case COMPLEX_TYPE: 2733 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 2734 return fold_convert_loc (loc, type, tem); 2735 2736 default: 2737 gcc_unreachable (); 2738 } 2739 2740 case COMPLEX_TYPE: 2741 switch (TREE_CODE (orig)) 2742 { 2743 case INTEGER_TYPE: 2744 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2745 case POINTER_TYPE: case REFERENCE_TYPE: 2746 case 
REAL_TYPE: 2747 case FIXED_POINT_TYPE: 2748 return fold_build2_loc (loc, COMPLEX_EXPR, type, 2749 fold_convert_loc (loc, TREE_TYPE (type), arg), 2750 fold_convert_loc (loc, TREE_TYPE (type), 2751 integer_zero_node)); 2752 case COMPLEX_TYPE: 2753 { 2754 tree rpart, ipart; 2755 2756 if (TREE_CODE (arg) == COMPLEX_EXPR) 2757 { 2758 rpart = fold_convert_loc (loc, TREE_TYPE (type), 2759 TREE_OPERAND (arg, 0)); 2760 ipart = fold_convert_loc (loc, TREE_TYPE (type), 2761 TREE_OPERAND (arg, 1)); 2762 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 2763 } 2764 2765 arg = save_expr (arg); 2766 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 2767 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg); 2768 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart); 2769 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart); 2770 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 2771 } 2772 2773 default: 2774 gcc_unreachable (); 2775 } 2776 2777 case VECTOR_TYPE: 2778 if (integer_zerop (arg)) 2779 return build_zero_vector (type); 2780 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2781 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2782 || TREE_CODE (orig) == VECTOR_TYPE); 2783 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); 2784 2785 case VOID_TYPE: 2786 tem = fold_ignored_result (arg); 2787 return fold_build1_loc (loc, NOP_EXPR, type, tem); 2788 2789 default: 2790 gcc_unreachable (); 2791 } 2792 fold_convert_exit: 2793 protected_set_expr_location (tem, loc); 2794 return tem; 2795 } 2796 2797 /* Return false if expr can be assumed not to be an lvalue, true 2798 otherwise. */ 2799 2800 static bool 2801 maybe_lvalue_p (const_tree x) 2802 { 2803 /* We only need to wrap lvalue tree codes. */ 2804 switch (TREE_CODE (x)) 2805 { 2806 case VAR_DECL: 2807 case PARM_DECL: 2808 case RESULT_DECL: 2809 case LABEL_DECL: 2810 case FUNCTION_DECL: 2811 case SSA_NAME: 2812 2813 case COMPONENT_REF: 2814 case INDIRECT_REF: 2815 case ALIGN_INDIRECT_REF: 2816 case MISALIGNED_INDIRECT_REF: 2817 case ARRAY_REF: 2818 case ARRAY_RANGE_REF: 2819 case BIT_FIELD_REF: 2820 case OBJ_TYPE_REF: 2821 2822 case REALPART_EXPR: 2823 case IMAGPART_EXPR: 2824 case PREINCREMENT_EXPR: 2825 case PREDECREMENT_EXPR: 2826 case SAVE_EXPR: 2827 case TRY_CATCH_EXPR: 2828 case WITH_CLEANUP_EXPR: 2829 case COMPOUND_EXPR: 2830 case MODIFY_EXPR: 2831 case TARGET_EXPR: 2832 case COND_EXPR: 2833 case BIND_EXPR: 2834 break; 2835 2836 default: 2837 /* Assume the worst for front-end tree codes. */ 2838 if ((int)TREE_CODE (x) >= NUM_TREE_CODES) 2839 break; 2840 return false; 2841 } 2842 2843 return true; 2844 } 2845 2846 /* Return an expr equal to X but certainly not valid as an lvalue. */ 2847 2848 tree 2849 non_lvalue_loc (location_t loc, tree x) 2850 { 2851 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to 2852 us. */ 2853 if (in_gimple_form) 2854 return x; 2855 2856 if (! maybe_lvalue_p (x)) 2857 return x; 2858 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x); 2859 SET_EXPR_LOCATION (x, loc); 2860 return x; 2861 } 2862 2863 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C. 2864 Zero means allow extended lvalues. */ 2865 2866 int pedantic_lvalues; 2867 2868 /* When pedantic, return an expr equal to X but certainly not valid as a 2869 pedantic lvalue. Otherwise, return X. 
*/ 2870 2871 static tree 2872 pedantic_non_lvalue_loc (location_t loc, tree x) 2873 { 2874 if (pedantic_lvalues) 2875 return non_lvalue_loc (loc, x); 2876 2877 if (CAN_HAVE_LOCATION_P (x) 2878 && EXPR_LOCATION (x) != loc 2879 && !(TREE_CODE (x) == SAVE_EXPR 2880 || TREE_CODE (x) == TARGET_EXPR 2881 || TREE_CODE (x) == BIND_EXPR)) 2882 { 2883 x = copy_node (x); 2884 SET_EXPR_LOCATION (x, loc); 2885 } 2886 return x; 2887 } 2888 2889 /* Given a tree comparison code, return the code that is the logical inverse 2890 of the given code. It is not safe to do this for floating-point 2891 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS 2892 as well: if reversing the comparison is unsafe, return ERROR_MARK. */ 2893 2894 enum tree_code 2895 invert_tree_comparison (enum tree_code code, bool honor_nans) 2896 { 2897 if (honor_nans && flag_trapping_math) 2898 return ERROR_MARK; 2899 2900 switch (code) 2901 { 2902 case EQ_EXPR: 2903 return NE_EXPR; 2904 case NE_EXPR: 2905 return EQ_EXPR; 2906 case GT_EXPR: 2907 return honor_nans ? UNLE_EXPR : LE_EXPR; 2908 case GE_EXPR: 2909 return honor_nans ? UNLT_EXPR : LT_EXPR; 2910 case LT_EXPR: 2911 return honor_nans ? UNGE_EXPR : GE_EXPR; 2912 case LE_EXPR: 2913 return honor_nans ? UNGT_EXPR : GT_EXPR; 2914 case LTGT_EXPR: 2915 return UNEQ_EXPR; 2916 case UNEQ_EXPR: 2917 return LTGT_EXPR; 2918 case UNGT_EXPR: 2919 return LE_EXPR; 2920 case UNGE_EXPR: 2921 return LT_EXPR; 2922 case UNLT_EXPR: 2923 return GE_EXPR; 2924 case UNLE_EXPR: 2925 return GT_EXPR; 2926 case ORDERED_EXPR: 2927 return UNORDERED_EXPR; 2928 case UNORDERED_EXPR: 2929 return ORDERED_EXPR; 2930 default: 2931 gcc_unreachable (); 2932 } 2933 } 2934 2935 /* Similar, but return the comparison that results if the operands are 2936 swapped. This is safe for floating-point. */ 2937 2938 enum tree_code 2939 swap_tree_comparison (enum tree_code code) 2940 { 2941 switch (code) 2942 { 2943 case EQ_EXPR: 2944 case NE_EXPR: 2945 case ORDERED_EXPR: 2946 case UNORDERED_EXPR: 2947 case LTGT_EXPR: 2948 case UNEQ_EXPR: 2949 return code; 2950 case GT_EXPR: 2951 return LT_EXPR; 2952 case GE_EXPR: 2953 return LE_EXPR; 2954 case LT_EXPR: 2955 return GT_EXPR; 2956 case LE_EXPR: 2957 return GE_EXPR; 2958 case UNGT_EXPR: 2959 return UNLT_EXPR; 2960 case UNGE_EXPR: 2961 return UNLE_EXPR; 2962 case UNLT_EXPR: 2963 return UNGT_EXPR; 2964 case UNLE_EXPR: 2965 return UNGE_EXPR; 2966 default: 2967 gcc_unreachable (); 2968 } 2969 } 2970 2971 2972 /* Convert a comparison tree code from an enum tree_code representation 2973 into a compcode bit-based encoding. This function is the inverse of 2974 compcode_to_comparison.
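The encoding assigns one bit each to "less", "equal", "greater" and "unordered", so, for example, COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ), and combining two comparisons of the same operands with AND or OR reduces to a bitwise AND or OR of their compcodes.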
*/ 2975 2976 static enum comparison_code 2977 comparison_to_compcode (enum tree_code code) 2978 { 2979 switch (code) 2980 { 2981 case LT_EXPR: 2982 return COMPCODE_LT; 2983 case EQ_EXPR: 2984 return COMPCODE_EQ; 2985 case LE_EXPR: 2986 return COMPCODE_LE; 2987 case GT_EXPR: 2988 return COMPCODE_GT; 2989 case NE_EXPR: 2990 return COMPCODE_NE; 2991 case GE_EXPR: 2992 return COMPCODE_GE; 2993 case ORDERED_EXPR: 2994 return COMPCODE_ORD; 2995 case UNORDERED_EXPR: 2996 return COMPCODE_UNORD; 2997 case UNLT_EXPR: 2998 return COMPCODE_UNLT; 2999 case UNEQ_EXPR: 3000 return COMPCODE_UNEQ; 3001 case UNLE_EXPR: 3002 return COMPCODE_UNLE; 3003 case UNGT_EXPR: 3004 return COMPCODE_UNGT; 3005 case LTGT_EXPR: 3006 return COMPCODE_LTGT; 3007 case UNGE_EXPR: 3008 return COMPCODE_UNGE; 3009 default: 3010 gcc_unreachable (); 3011 } 3012 } 3013 3014 /* Convert a compcode bit-based encoding of a comparison operator back 3015 to GCC's enum tree_code representation. This function is the 3016 inverse of comparison_to_compcode. */ 3017 3018 static enum tree_code 3019 compcode_to_comparison (enum comparison_code code) 3020 { 3021 switch (code) 3022 { 3023 case COMPCODE_LT: 3024 return LT_EXPR; 3025 case COMPCODE_EQ: 3026 return EQ_EXPR; 3027 case COMPCODE_LE: 3028 return LE_EXPR; 3029 case COMPCODE_GT: 3030 return GT_EXPR; 3031 case COMPCODE_NE: 3032 return NE_EXPR; 3033 case COMPCODE_GE: 3034 return GE_EXPR; 3035 case COMPCODE_ORD: 3036 return ORDERED_EXPR; 3037 case COMPCODE_UNORD: 3038 return UNORDERED_EXPR; 3039 case COMPCODE_UNLT: 3040 return UNLT_EXPR; 3041 case COMPCODE_UNEQ: 3042 return UNEQ_EXPR; 3043 case COMPCODE_UNLE: 3044 return UNLE_EXPR; 3045 case COMPCODE_UNGT: 3046 return UNGT_EXPR; 3047 case COMPCODE_LTGT: 3048 return LTGT_EXPR; 3049 case COMPCODE_UNGE: 3050 return UNGE_EXPR; 3051 default: 3052 gcc_unreachable (); 3053 } 3054 } 3055 3056 /* Return a tree for the comparison which is the combination of 3057 doing the AND or OR (depending on CODE) of the two operations LCODE 3058 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account 3059 the possibility of trapping if the mode has NaNs, and return NULL_TREE 3060 if this makes the transformation invalid. */ 3061 3062 tree 3063 combine_comparisons (location_t loc, 3064 enum tree_code code, enum tree_code lcode, 3065 enum tree_code rcode, tree truth_type, 3066 tree ll_arg, tree lr_arg) 3067 { 3068 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg))); 3069 enum comparison_code lcompcode = comparison_to_compcode (lcode); 3070 enum comparison_code rcompcode = comparison_to_compcode (rcode); 3071 int compcode; 3072 3073 switch (code) 3074 { 3075 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR: 3076 compcode = lcompcode & rcompcode; 3077 break; 3078 3079 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR: 3080 compcode = lcompcode | rcompcode; 3081 break; 3082 3083 default: 3084 return NULL_TREE; 3085 } 3086 3087 if (!honor_nans) 3088 { 3089 /* Eliminate unordered comparisons, as well as LTGT and ORD 3090 which are not used unless the mode has NaNs. */ 3091 compcode &= ~COMPCODE_UNORD; 3092 if (compcode == COMPCODE_LTGT) 3093 compcode = COMPCODE_NE; 3094 else if (compcode == COMPCODE_ORD) 3095 compcode = COMPCODE_TRUE; 3096 } 3097 else if (flag_trapping_math) 3098 { 3099 /* Check that the original operation and the optimized ones will trap 3100 under the same condition. 
*/ 3101 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0 3102 && (lcompcode != COMPCODE_EQ) 3103 && (lcompcode != COMPCODE_ORD); 3104 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0 3105 && (rcompcode != COMPCODE_EQ) 3106 && (rcompcode != COMPCODE_ORD); 3107 bool trap = (compcode & COMPCODE_UNORD) == 0 3108 && (compcode != COMPCODE_EQ) 3109 && (compcode != COMPCODE_ORD); 3110 3111 /* In a short-circuited boolean expression the LHS might be 3112 such that the RHS, if evaluated, will never trap. For 3113 example, in ORD (x, y) && (x < y), we evaluate the RHS only 3114 if neither x nor y is NaN. (This is a mixed blessing: for 3115 example, the expression above will never trap, hence 3116 optimizing it to x < y would be invalid). */ 3117 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD)) 3118 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD))) 3119 rtrap = false; 3120 3121 /* If the comparison was short-circuited, and only the RHS 3122 trapped, we may now generate a spurious trap. */ 3123 if (rtrap && !ltrap 3124 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 3125 return NULL_TREE; 3126 3127 /* If we changed the conditions that cause a trap, we lose. */ 3128 if ((ltrap || rtrap) != trap) 3129 return NULL_TREE; 3130 } 3131 3132 if (compcode == COMPCODE_TRUE) 3133 return constant_boolean_node (true, truth_type); 3134 else if (compcode == COMPCODE_FALSE) 3135 return constant_boolean_node (false, truth_type); 3136 else 3137 { 3138 enum tree_code tcode; 3139 3140 tcode = compcode_to_comparison ((enum comparison_code) compcode); 3141 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); 3142 } 3143 } 3144 3145 /* Return nonzero if two operands (typically of the same tree node) 3146 are necessarily equal. If either argument has side-effects this 3147 function returns zero. FLAGS modifies behavior as follows: 3148 3149 If OEP_ONLY_CONST is set, only return nonzero for constants. 3150 This function tests whether the operands are indistinguishable; 3151 it does not test whether they are equal using C's == operation. 3152 The distinction is important for IEEE floating point, because 3153 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and 3154 (2) two NaNs may be indistinguishable, but NaN!=NaN. 3155 3156 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself 3157 even though it may hold multiple values during a function. 3158 This is because a GCC tree node guarantees that nothing else is 3159 executed between the evaluation of its "operands" (which may often 3160 be evaluated in arbitrary order). Hence if the operands themselves 3161 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the 3162 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST 3163 unset means assuming isochronic (or instantaneous) tree equivalence. 3164 Unless comparing arbitrary expression trees, such as from different 3165 statements, this flag can usually be left unset. 3166 3167 If OEP_PURE_SAME is set, then pure functions with identical arguments 3168 are considered the same. It is used when the caller has other ways 3169 to ensure that global memory is unchanged in between. */ 3170 3171 int 3172 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) 3173 { 3174 /* If either is ERROR_MARK, they aren't equal. 
*/ 3175 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK 3176 || TREE_TYPE (arg0) == error_mark_node 3177 || TREE_TYPE (arg1) == error_mark_node) 3178 return 0; 3179 3180 /* Check equality of integer constants before bailing out due to 3181 precision differences. */ 3182 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 3183 return tree_int_cst_equal (arg0, arg1); 3184 3185 /* If both types don't have the same signedness, then we can't consider 3186 them equal. We must check this before the STRIP_NOPS calls 3187 because they may change the signedness of the arguments. As pointers 3188 strictly don't have a signedness, require either two pointers or 3189 two non-pointers as well. */ 3190 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)) 3191 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1))) 3192 return 0; 3193 3194 /* We cannot consider pointers to different address spaces equal. */ 3195 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1)) 3196 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) 3197 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) 3198 return 0; 3199 3200 /* If both types don't have the same precision, then it is not safe 3201 to strip NOPs. */ 3202 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1))) 3203 return 0; 3204 3205 STRIP_NOPS (arg0); 3206 STRIP_NOPS (arg1); 3207 3208 /* In case both args are comparisons but with different comparison 3209 codes, try to swap the comparison operands of one arg to produce 3210 a match and compare that variant. */ 3211 if (TREE_CODE (arg0) != TREE_CODE (arg1) 3212 && COMPARISON_CLASS_P (arg0) 3213 && COMPARISON_CLASS_P (arg1)) 3214 { 3215 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1)); 3216 3217 if (TREE_CODE (arg0) == swap_code) 3218 return operand_equal_p (TREE_OPERAND (arg0, 0), 3219 TREE_OPERAND (arg1, 1), flags) 3220 && operand_equal_p (TREE_OPERAND (arg0, 1), 3221 TREE_OPERAND (arg1, 0), flags); 3222 } 3223 3224 if (TREE_CODE (arg0) != TREE_CODE (arg1) 3225 /* This is needed for conversions and for COMPONENT_REF. 3226 Might as well play it safe and always test this. */ 3227 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK 3228 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK 3229 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))) 3230 return 0; 3231 3232 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. 3233 We don't care about side effects in that case because the SAVE_EXPR 3234 takes care of that for us. In all other cases, two expressions are 3235 equal if they have no side effects. If we have two identical 3236 expressions with side effects that should be treated the same due 3237 to the only side effects being identical SAVE_EXPR's, that will 3238 be detected in the recursive calls below. */ 3239 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) 3240 && (TREE_CODE (arg0) == SAVE_EXPR 3241 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1)))) 3242 return 1; 3243 3244 /* Next handle constant cases, those for which we can return 1 even 3245 if ONLY_CONST is set.
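(For example, two distinct INTEGER_CST nodes that both hold the value 42 are equal here.)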
*/ 3246 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)) 3247 switch (TREE_CODE (arg0)) 3248 { 3249 case INTEGER_CST: 3250 return tree_int_cst_equal (arg0, arg1); 3251 3252 case FIXED_CST: 3253 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0), 3254 TREE_FIXED_CST (arg1)); 3255 3256 case REAL_CST: 3257 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), 3258 TREE_REAL_CST (arg1))) 3259 return 1; 3260 3261 3262 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))) 3263 { 3264 /* If we do not distinguish between signed and unsigned zero, 3265 consider them equal. */ 3266 if (real_zerop (arg0) && real_zerop (arg1)) 3267 return 1; 3268 } 3269 return 0; 3270 3271 case VECTOR_CST: 3272 { 3273 tree v1, v2; 3274 3275 v1 = TREE_VECTOR_CST_ELTS (arg0); 3276 v2 = TREE_VECTOR_CST_ELTS (arg1); 3277 while (v1 && v2) 3278 { 3279 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 3280 flags)) 3281 return 0; 3282 v1 = TREE_CHAIN (v1); 3283 v2 = TREE_CHAIN (v2); 3284 } 3285 3286 return v1 == v2; 3287 } 3288 3289 case COMPLEX_CST: 3290 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), 3291 flags) 3292 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1), 3293 flags)); 3294 3295 case STRING_CST: 3296 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1) 3297 && ! memcmp (TREE_STRING_POINTER (arg0), 3298 TREE_STRING_POINTER (arg1), 3299 TREE_STRING_LENGTH (arg0))); 3300 3301 case ADDR_EXPR: 3302 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 3303 0); 3304 default: 3305 break; 3306 } 3307 3308 if (flags & OEP_ONLY_CONST) 3309 return 0; 3310 3311 /* Define macros to test an operand from arg0 and arg1 for equality and a 3312 variant that allows null and views null as being different from any 3313 non-null value. In the latter case, if either is null, both 3314 must be; otherwise, do the normal comparison. */ 3315 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \ 3316 TREE_OPERAND (arg1, N), flags) 3317 3318 #define OP_SAME_WITH_NULL(N) \ 3319 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \ 3320 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N)) 3321 3322 switch (TREE_CODE_CLASS (TREE_CODE (arg0))) 3323 { 3324 case tcc_unary: 3325 /* Two conversions are equal only if signedness and modes match. */ 3326 switch (TREE_CODE (arg0)) 3327 { 3328 CASE_CONVERT: 3329 case FIX_TRUNC_EXPR: 3330 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) 3331 != TYPE_UNSIGNED (TREE_TYPE (arg1))) 3332 return 0; 3333 break; 3334 default: 3335 break; 3336 } 3337 3338 return OP_SAME (0); 3339 3340 3341 case tcc_comparison: 3342 case tcc_binary: 3343 if (OP_SAME (0) && OP_SAME (1)) 3344 return 1; 3345 3346 /* For commutative ops, allow the other order. */ 3347 return (commutative_tree_code (TREE_CODE (arg0)) 3348 && operand_equal_p (TREE_OPERAND (arg0, 0), 3349 TREE_OPERAND (arg1, 1), flags) 3350 && operand_equal_p (TREE_OPERAND (arg0, 1), 3351 TREE_OPERAND (arg1, 0), flags)); 3352 3353 case tcc_reference: 3354 /* If either of the pointer (or reference) expressions we are 3355 dereferencing contains a side effect, these cannot be equal. */ 3356 if (TREE_SIDE_EFFECTS (arg0) 3357 || TREE_SIDE_EFFECTS (arg1)) 3358 return 0; 3359 3360 switch (TREE_CODE (arg0)) 3361 { 3362 case INDIRECT_REF: 3363 case ALIGN_INDIRECT_REF: 3364 case MISALIGNED_INDIRECT_REF: 3365 case REALPART_EXPR: 3366 case IMAGPART_EXPR: 3367 return OP_SAME (0); 3368 3369 case ARRAY_REF: 3370 case ARRAY_RANGE_REF: 3371 /* Operands 2 and 3 may be null.
3372 Compare the array index by value if it is constant first as we 3373 may have different types but same value here. */ 3374 return (OP_SAME (0) 3375 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1), 3376 TREE_OPERAND (arg1, 1)) 3377 || OP_SAME (1)) 3378 && OP_SAME_WITH_NULL (2) 3379 && OP_SAME_WITH_NULL (3)); 3380 3381 case COMPONENT_REF: 3382 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 3383 may be NULL when we're called to compare MEM_EXPRs. */ 3384 return OP_SAME_WITH_NULL (0) 3385 && OP_SAME (1) 3386 && OP_SAME_WITH_NULL (2); 3387 3388 case BIT_FIELD_REF: 3389 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 3390 3391 default: 3392 return 0; 3393 } 3394 3395 case tcc_expression: 3396 switch (TREE_CODE (arg0)) 3397 { 3398 case ADDR_EXPR: 3399 case TRUTH_NOT_EXPR: 3400 return OP_SAME (0); 3401 3402 case TRUTH_ANDIF_EXPR: 3403 case TRUTH_ORIF_EXPR: 3404 return OP_SAME (0) && OP_SAME (1); 3405 3406 case TRUTH_AND_EXPR: 3407 case TRUTH_OR_EXPR: 3408 case TRUTH_XOR_EXPR: 3409 if (OP_SAME (0) && OP_SAME (1)) 3410 return 1; 3411 3412 /* Otherwise take into account this is a commutative operation. */ 3413 return (operand_equal_p (TREE_OPERAND (arg0, 0), 3414 TREE_OPERAND (arg1, 1), flags) 3415 && operand_equal_p (TREE_OPERAND (arg0, 1), 3416 TREE_OPERAND (arg1, 0), flags)); 3417 3418 case COND_EXPR: 3419 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 3420 3421 default: 3422 return 0; 3423 } 3424 3425 case tcc_vl_exp: 3426 switch (TREE_CODE (arg0)) 3427 { 3428 case CALL_EXPR: 3429 /* If the CALL_EXPRs call different functions, then they 3430 clearly can not be equal. */ 3431 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), 3432 flags)) 3433 return 0; 3434 3435 { 3436 unsigned int cef = call_expr_flags (arg0); 3437 if (flags & OEP_PURE_SAME) 3438 cef &= ECF_CONST | ECF_PURE; 3439 else 3440 cef &= ECF_CONST; 3441 if (!cef) 3442 return 0; 3443 } 3444 3445 /* Now see if all the arguments are the same. */ 3446 { 3447 const_call_expr_arg_iterator iter0, iter1; 3448 const_tree a0, a1; 3449 for (a0 = first_const_call_expr_arg (arg0, &iter0), 3450 a1 = first_const_call_expr_arg (arg1, &iter1); 3451 a0 && a1; 3452 a0 = next_const_call_expr_arg (&iter0), 3453 a1 = next_const_call_expr_arg (&iter1)) 3454 if (! operand_equal_p (a0, a1, flags)) 3455 return 0; 3456 3457 /* If we get here and both argument lists are exhausted 3458 then the CALL_EXPRs are equal. */ 3459 return ! (a0 || a1); 3460 } 3461 default: 3462 return 0; 3463 } 3464 3465 case tcc_declaration: 3466 /* Consider __builtin_sqrt equal to sqrt. */ 3467 return (TREE_CODE (arg0) == FUNCTION_DECL 3468 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) 3469 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) 3470 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); 3471 3472 default: 3473 return 0; 3474 } 3475 3476 #undef OP_SAME 3477 #undef OP_SAME_WITH_NULL 3478 } 3479 3480 /* Similar to operand_equal_p, but see if ARG0 might have been made by 3481 shorten_compare from ARG1 when ARG1 was being compared with OTHER. 3482 3483 When in doubt, return 0. */ 3484 3485 static int 3486 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) 3487 { 3488 int unsignedp1, unsignedpo; 3489 tree primarg0, primarg1, primother; 3490 unsigned int correct_width; 3491 3492 if (operand_equal_p (arg0, arg1, 0)) 3493 return 1; 3494 3495 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 3496 || ! 
INTEGRAL_TYPE_P (TREE_TYPE (arg1))) 3497 return 0; 3498 3499 /* Discard any conversions that don't change the modes of ARG0 and ARG1 3500 and see if the inner values are the same. This removes any 3501 signedness comparison, which doesn't matter here. */ 3502 primarg0 = arg0, primarg1 = arg1; 3503 STRIP_NOPS (primarg0); 3504 STRIP_NOPS (primarg1); 3505 if (operand_equal_p (primarg0, primarg1, 0)) 3506 return 1; 3507 3508 /* Duplicate what shorten_compare does to ARG1 and see if that gives the 3509 actual comparison operand, ARG0. 3510 3511 First throw away any conversions to wider types 3512 already present in the operands. */ 3513 3514 primarg1 = get_narrower (arg1, &unsignedp1); 3515 primother = get_narrower (other, &unsignedpo); 3516 3517 correct_width = TYPE_PRECISION (TREE_TYPE (arg1)); 3518 if (unsignedp1 == unsignedpo 3519 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width 3520 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width) 3521 { 3522 tree type = TREE_TYPE (arg0); 3523 3524 /* Make sure the shorter operand is extended the right way 3525 to match the longer operand. */ 3526 primarg1 = fold_convert (signed_or_unsigned_type_for 3527 (unsignedp1, TREE_TYPE (primarg1)), primarg1); 3528 3529 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) 3530 return 1; 3531 } 3532 3533 return 0; 3534 } 3535 3536 /* See if ARG is an expression that is either a comparison or is performing 3537 arithmetic on comparisons. The comparisons must only be comparing 3538 two different values, which will be stored in *CVAL1 and *CVAL2; if 3539 they are nonzero it means that some operands have already been found. 3540 No variables may be used anywhere else in the expression except in the 3541 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around 3542 the expression and save_expr needs to be called with CVAL1 and CVAL2. 3543 3544 If this is true, return 1. Otherwise, return zero. */ 3545 3546 static int 3547 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p) 3548 { 3549 enum tree_code code = TREE_CODE (arg); 3550 enum tree_code_class tclass = TREE_CODE_CLASS (code); 3551 3552 /* We can handle some of the tcc_expression cases here. */ 3553 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) 3554 tclass = tcc_unary; 3555 else if (tclass == tcc_expression 3556 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR 3557 || code == COMPOUND_EXPR)) 3558 tclass = tcc_binary; 3559 3560 else if (tclass == tcc_expression && code == SAVE_EXPR 3561 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0))) 3562 { 3563 /* If we've already found a CVAL1 or CVAL2, this expression is 3564 too complex to handle.
*/ 3565 if (*cval1 || *cval2) 3566 return 0; 3567 3568 tclass = tcc_unary; 3569 *save_p = 1; 3570 } 3571 3572 switch (tclass) 3573 { 3574 case tcc_unary: 3575 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p); 3576 3577 case tcc_binary: 3578 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p) 3579 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3580 cval1, cval2, save_p)); 3581 3582 case tcc_constant: 3583 return 1; 3584 3585 case tcc_expression: 3586 if (code == COND_EXPR) 3587 return (twoval_comparison_p (TREE_OPERAND (arg, 0), 3588 cval1, cval2, save_p) 3589 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3590 cval1, cval2, save_p) 3591 && twoval_comparison_p (TREE_OPERAND (arg, 2), 3592 cval1, cval2, save_p)); 3593 return 0; 3594 3595 case tcc_comparison: 3596 /* First see if we can handle the first operand, then the second. For 3597 the second operand, we know *CVAL1 can't be zero. It must be that 3598 one side of the comparison is each of the values; test for the 3599 case where this isn't true by failing if the two operands 3600 are the same. */ 3601 3602 if (operand_equal_p (TREE_OPERAND (arg, 0), 3603 TREE_OPERAND (arg, 1), 0)) 3604 return 0; 3605 3606 if (*cval1 == 0) 3607 *cval1 = TREE_OPERAND (arg, 0); 3608 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0)) 3609 ; 3610 else if (*cval2 == 0) 3611 *cval2 = TREE_OPERAND (arg, 0); 3612 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0)) 3613 ; 3614 else 3615 return 0; 3616 3617 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0)) 3618 ; 3619 else if (*cval2 == 0) 3620 *cval2 = TREE_OPERAND (arg, 1); 3621 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0)) 3622 ; 3623 else 3624 return 0; 3625 3626 return 1; 3627 3628 default: 3629 return 0; 3630 } 3631 } 3632 3633 /* ARG is a tree that is known to contain just arithmetic operations and 3634 comparisons. Evaluate the operations in the tree substituting NEW0 for 3635 any occurrence of OLD0 as an operand of a comparison and likewise for 3636 NEW1 and OLD1. */ 3637 3638 static tree 3639 eval_subst (location_t loc, tree arg, tree old0, tree new0, 3640 tree old1, tree new1) 3641 { 3642 tree type = TREE_TYPE (arg); 3643 enum tree_code code = TREE_CODE (arg); 3644 enum tree_code_class tclass = TREE_CODE_CLASS (code); 3645 3646 /* We can handle some of the tcc_expression cases here. 
*/ 3647 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) 3648 tclass = tcc_unary; 3649 else if (tclass == tcc_expression 3650 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 3651 tclass = tcc_binary; 3652 3653 switch (tclass) 3654 { 3655 case tcc_unary: 3656 return fold_build1_loc (loc, code, type, 3657 eval_subst (loc, TREE_OPERAND (arg, 0), 3658 old0, new0, old1, new1)); 3659 3660 case tcc_binary: 3661 return fold_build2_loc (loc, code, type, 3662 eval_subst (loc, TREE_OPERAND (arg, 0), 3663 old0, new0, old1, new1), 3664 eval_subst (loc, TREE_OPERAND (arg, 1), 3665 old0, new0, old1, new1)); 3666 3667 case tcc_expression: 3668 switch (code) 3669 { 3670 case SAVE_EXPR: 3671 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, 3672 old1, new1); 3673 3674 case COMPOUND_EXPR: 3675 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, 3676 old1, new1); 3677 3678 case COND_EXPR: 3679 return fold_build3_loc (loc, code, type, 3680 eval_subst (loc, TREE_OPERAND (arg, 0), 3681 old0, new0, old1, new1), 3682 eval_subst (loc, TREE_OPERAND (arg, 1), 3683 old0, new0, old1, new1), 3684 eval_subst (loc, TREE_OPERAND (arg, 2), 3685 old0, new0, old1, new1)); 3686 default: 3687 break; 3688 } 3689 /* Fall through - ??? */ 3690 3691 case tcc_comparison: 3692 { 3693 tree arg0 = TREE_OPERAND (arg, 0); 3694 tree arg1 = TREE_OPERAND (arg, 1); 3695 3696 /* We need to check both for exact equality and tree equality. The 3697 former will be true if the operand has a side-effect. In that 3698 case, we know the operand occurred exactly once. */ 3699 3700 if (arg0 == old0 || operand_equal_p (arg0, old0, 0)) 3701 arg0 = new0; 3702 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0)) 3703 arg0 = new1; 3704 3705 if (arg1 == old0 || operand_equal_p (arg1, old0, 0)) 3706 arg1 = new0; 3707 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) 3708 arg1 = new1; 3709 3710 return fold_build2_loc (loc, code, type, arg0, arg1); 3711 } 3712 3713 default: 3714 return arg; 3715 } 3716 } 3717 3718 /* Return a tree for the case when the result of an expression is RESULT 3719 converted to TYPE and OMITTED was previously an operand of the expression 3720 but is now not needed (e.g., we folded OMITTED * 0). 3721 3722 If OMITTED has side effects, we must evaluate it. Otherwise, just do 3723 the conversion of RESULT to TYPE. */ 3724 3725 tree 3726 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted) 3727 { 3728 tree t = fold_convert_loc (loc, type, result); 3729 3730 /* If the resulting operand is an empty statement, just return the omitted 3731 statement cast to void. */ 3732 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) 3733 { 3734 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); 3735 goto omit_one_operand_exit; 3736 } 3737 3738 if (TREE_SIDE_EFFECTS (omitted)) 3739 { 3740 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); 3741 goto omit_one_operand_exit; 3742 } 3743 3744 return non_lvalue_loc (loc, t); 3745 3746 omit_one_operand_exit: 3747 protected_set_expr_location (t, loc); 3748 return t; 3749 } 3750 3751 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ 3752 3753 static tree 3754 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result, 3755 tree omitted) 3756 { 3757 tree t = fold_convert_loc (loc, type, result); 3758 3759 /* If the resulting operand is an empty statement, just return the omitted 3760 statement cast to void.
*/ 3761 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) 3762 { 3763 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); 3764 goto pedantic_omit_one_operand_exit; 3765 } 3766 3767 if (TREE_SIDE_EFFECTS (omitted)) 3768 { 3769 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); 3770 goto pedantic_omit_one_operand_exit; 3771 } 3772 3773 return pedantic_non_lvalue_loc (loc, t); 3774 3775 pedantic_omit_one_operand_exit: 3776 protected_set_expr_location (t, loc); 3777 return t; 3778 } 3779 3780 /* Return a tree for the case when the result of an expression is RESULT 3781 converted to TYPE and OMITTED1 and OMITTED2 were previously operands 3782 of the expression but are now not needed. 3783 3784 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. 3785 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is 3786 evaluated before OMITTED2. Otherwise, if neither has side effects, 3787 just do the conversion of RESULT to TYPE. */ 3788 3789 tree 3790 omit_two_operands_loc (location_t loc, tree type, tree result, 3791 tree omitted1, tree omitted2) 3792 { 3793 tree t = fold_convert_loc (loc, type, result); 3794 3795 if (TREE_SIDE_EFFECTS (omitted2)) 3796 { 3797 t = build2 (COMPOUND_EXPR, type, omitted2, t); 3798 SET_EXPR_LOCATION (t, loc); 3799 } 3800 if (TREE_SIDE_EFFECTS (omitted1)) 3801 { 3802 t = build2 (COMPOUND_EXPR, type, omitted1, t); 3803 SET_EXPR_LOCATION (t, loc); 3804 } 3805 3806 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t; 3807 } 3808 3809 3810 /* Return a simplified tree node for the truth-negation of ARG. This 3811 never alters ARG itself. We assume that ARG is an operation that 3812 returns a truth value (0 or 1). 3813 3814 FIXME: one would think we would fold the result, but it causes 3815 problems with the dominator optimizer. */ 3816 3817 tree 3818 fold_truth_not_expr (location_t loc, tree arg) 3819 { 3820 tree t, type = TREE_TYPE (arg); 3821 enum tree_code code = TREE_CODE (arg); 3822 location_t loc1, loc2; 3823 3824 /* If this is a comparison, we can simply invert it, except for 3825 floating-point non-equality comparisons, in which case we just 3826 enclose a TRUTH_NOT_EXPR around what we have. 
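   For example, ! (a < b) can simply be rewritten as a >= b for integral
   operands.  For floating point the exact inverse of LT_EXPR is the
   unordered UNGE_EXPR, and under -ftrapping-math that rewrite could
   change which comparisons trap on NaN operands, so in that case we
   return NULL_TREE below and let the caller wrap the whole expression
   in a TRUTH_NOT_EXPR instead.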
*/ 3827 3828 if (TREE_CODE_CLASS (code) == tcc_comparison) 3829 { 3830 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0)); 3831 if (FLOAT_TYPE_P (op_type) 3832 && flag_trapping_math 3833 && code != ORDERED_EXPR && code != UNORDERED_EXPR 3834 && code != NE_EXPR && code != EQ_EXPR) 3835 return NULL_TREE; 3836 3837 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type))); 3838 if (code == ERROR_MARK) 3839 return NULL_TREE; 3840 3841 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1)); 3842 SET_EXPR_LOCATION (t, loc); 3843 return t; 3844 } 3845 3846 switch (code) 3847 { 3848 case INTEGER_CST: 3849 return constant_boolean_node (integer_zerop (arg), type); 3850 3851 case TRUTH_AND_EXPR: 3852 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3853 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3854 if (loc1 == UNKNOWN_LOCATION) 3855 loc1 = loc; 3856 if (loc2 == UNKNOWN_LOCATION) 3857 loc2 = loc; 3858 t = build2 (TRUTH_OR_EXPR, type, 3859 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3860 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3861 break; 3862 3863 case TRUTH_OR_EXPR: 3864 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3865 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3866 if (loc1 == UNKNOWN_LOCATION) 3867 loc1 = loc; 3868 if (loc2 == UNKNOWN_LOCATION) 3869 loc2 = loc; 3870 t = build2 (TRUTH_AND_EXPR, type, 3871 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3872 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3873 break; 3874 3875 case TRUTH_XOR_EXPR: 3876 /* Here we can invert either operand. We invert the first operand 3877 unless the second operand is a TRUTH_NOT_EXPR in which case our 3878 result is the XOR of the first operand with the inside of the 3879 negation of the second operand. */ 3880 3881 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR) 3882 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0), 3883 TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); 3884 else 3885 t = build2 (TRUTH_XOR_EXPR, type, 3886 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), 3887 TREE_OPERAND (arg, 1)); 3888 break; 3889 3890 case TRUTH_ANDIF_EXPR: 3891 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3892 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3893 if (loc1 == UNKNOWN_LOCATION) 3894 loc1 = loc; 3895 if (loc2 == UNKNOWN_LOCATION) 3896 loc2 = loc; 3897 t = build2 (TRUTH_ORIF_EXPR, type, 3898 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3899 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3900 break; 3901 3902 case TRUTH_ORIF_EXPR: 3903 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3904 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3905 if (loc1 == UNKNOWN_LOCATION) 3906 loc1 = loc; 3907 if (loc2 == UNKNOWN_LOCATION) 3908 loc2 = loc; 3909 t = build2 (TRUTH_ANDIF_EXPR, type, 3910 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3911 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3912 break; 3913 3914 case TRUTH_NOT_EXPR: 3915 return TREE_OPERAND (arg, 0); 3916 3917 case COND_EXPR: 3918 { 3919 tree arg1 = TREE_OPERAND (arg, 1); 3920 tree arg2 = TREE_OPERAND (arg, 2); 3921 3922 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3923 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2)); 3924 if (loc1 == UNKNOWN_LOCATION) 3925 loc1 = loc; 3926 if (loc2 == UNKNOWN_LOCATION) 3927 loc2 = loc; 3928 3929 /* A COND_EXPR may have a throw as one operand, which 3930 then has void type. Just leave void operands 3931 as they are. */ 3932 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0), 3933 VOID_TYPE_P (TREE_TYPE (arg1)) 3934 ? 
arg1 : invert_truthvalue_loc (loc1, arg1), 3935 VOID_TYPE_P (TREE_TYPE (arg2)) 3936 ? arg2 : invert_truthvalue_loc (loc2, arg2)); 3937 break; 3938 } 3939 3940 case COMPOUND_EXPR: 3941 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); 3942 if (loc1 == UNKNOWN_LOCATION) 3943 loc1 = loc; 3944 t = build2 (COMPOUND_EXPR, type, 3945 TREE_OPERAND (arg, 0), 3946 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); 3947 break; 3948 3949 case NON_LVALUE_EXPR: 3950 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3951 if (loc1 == UNKNOWN_LOCATION) 3952 loc1 = loc; 3953 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); 3954 3955 CASE_CONVERT: 3956 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) 3957 { 3958 t = build1 (TRUTH_NOT_EXPR, type, arg); 3959 break; 3960 } 3961 3962 /* ... fall through ... */ 3963 3964 case FLOAT_EXPR: 3965 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3966 if (loc1 == UNKNOWN_LOCATION) 3967 loc1 = loc; 3968 t = build1 (TREE_CODE (arg), type, 3969 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3970 break; 3971 3972 case BIT_AND_EXPR: 3973 if (!integer_onep (TREE_OPERAND (arg, 1))) 3974 return NULL_TREE; 3975 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0)); 3976 break; 3977 3978 case SAVE_EXPR: 3979 t = build1 (TRUTH_NOT_EXPR, type, arg); 3980 break; 3981 3982 case CLEANUP_POINT_EXPR: 3983 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); 3984 if (loc1 == UNKNOWN_LOCATION) 3985 loc1 = loc; 3986 t = build1 (CLEANUP_POINT_EXPR, type, 3987 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3988 break; 3989 3990 default: 3991 t = NULL_TREE; 3992 break; 3993 } 3994 3995 if (t) 3996 SET_EXPR_LOCATION (t, loc); 3997 3998 return t; 3999 } 4000 4001 /* Return a simplified tree node for the truth-negation of ARG. This 4002 never alters ARG itself. We assume that ARG is an operation that 4003 returns a truth value (0 or 1). 4004 4005 FIXME: one would think we would fold the result, but it causes 4006 problems with the dominator optimizer. */ 4007 4008 tree 4009 invert_truthvalue_loc (location_t loc, tree arg) 4010 { 4011 tree tem; 4012 4013 if (TREE_CODE (arg) == ERROR_MARK) 4014 return arg; 4015 4016 tem = fold_truth_not_expr (loc, arg); 4017 if (!tem) 4018 { 4019 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); 4020 SET_EXPR_LOCATION (tem, loc); 4021 } 4022 4023 return tem; 4024 } 4025 4026 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both 4027 operands are another bit-wise operation with a common input. If so, 4028 distribute the bit operations to save an operation and possibly two if 4029 constants are involved. For example, convert 4030 (A | B) & (A | C) into A | (B & C) 4031 Further simplification will occur if B and C are constants. 4032 4033 If this optimization cannot be done, 0 will be returned. 
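   A quick sanity check of the identity in plain C (illustrative only):

     int a = 0x0f, b = 0x10, c = 0x20;
     assert (((a | b) & (a | c)) == (a | (b & c)));

   both sides evaluate to 0x0f here, and when B and C are constants the
   inner B & C folds to a constant.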
*/ 4034 4035 static tree 4036 distribute_bit_expr (location_t loc, enum tree_code code, tree type, 4037 tree arg0, tree arg1) 4038 { 4039 tree common; 4040 tree left, right; 4041 4042 if (TREE_CODE (arg0) != TREE_CODE (arg1) 4043 || TREE_CODE (arg0) == code 4044 || (TREE_CODE (arg0) != BIT_AND_EXPR 4045 && TREE_CODE (arg0) != BIT_IOR_EXPR)) 4046 return 0; 4047 4048 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) 4049 { 4050 common = TREE_OPERAND (arg0, 0); 4051 left = TREE_OPERAND (arg0, 1); 4052 right = TREE_OPERAND (arg1, 1); 4053 } 4054 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) 4055 { 4056 common = TREE_OPERAND (arg0, 0); 4057 left = TREE_OPERAND (arg0, 1); 4058 right = TREE_OPERAND (arg1, 0); 4059 } 4060 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) 4061 { 4062 common = TREE_OPERAND (arg0, 1); 4063 left = TREE_OPERAND (arg0, 0); 4064 right = TREE_OPERAND (arg1, 1); 4065 } 4066 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) 4067 { 4068 common = TREE_OPERAND (arg0, 1); 4069 left = TREE_OPERAND (arg0, 0); 4070 right = TREE_OPERAND (arg1, 0); 4071 } 4072 else 4073 return 0; 4074 4075 common = fold_convert_loc (loc, type, common); 4076 left = fold_convert_loc (loc, type, left); 4077 right = fold_convert_loc (loc, type, right); 4078 return fold_build2_loc (loc, TREE_CODE (arg0), type, common, 4079 fold_build2_loc (loc, code, type, left, right)); 4080 } 4081 4082 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation 4083 with code CODE. This optimization is unsafe. */ 4084 static tree 4085 distribute_real_division (location_t loc, enum tree_code code, tree type, 4086 tree arg0, tree arg1) 4087 { 4088 bool mul0 = TREE_CODE (arg0) == MULT_EXPR; 4089 bool mul1 = TREE_CODE (arg1) == MULT_EXPR; 4090 4091 /* (A / C) +- (B / C) -> (A +- B) / C. */ 4092 if (mul0 == mul1 4093 && operand_equal_p (TREE_OPERAND (arg0, 1), 4094 TREE_OPERAND (arg1, 1), 0)) 4095 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type, 4096 fold_build2_loc (loc, code, type, 4097 TREE_OPERAND (arg0, 0), 4098 TREE_OPERAND (arg1, 0)), 4099 TREE_OPERAND (arg0, 1)); 4100 4101 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */ 4102 if (operand_equal_p (TREE_OPERAND (arg0, 0), 4103 TREE_OPERAND (arg1, 0), 0) 4104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 4105 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 4106 { 4107 REAL_VALUE_TYPE r0, r1; 4108 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); 4109 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); 4110 if (!mul0) 4111 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); 4112 if (!mul1) 4113 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); 4114 real_arithmetic (&r0, code, &r0, &r1); 4115 return fold_build2_loc (loc, MULT_EXPR, type, 4116 TREE_OPERAND (arg0, 0), 4117 build_real (type, r0)); 4118 } 4119 4120 return NULL_TREE; 4121 } 4122 4123 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER 4124 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. 
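   Roughly speaking (a sketch that ignores BYTES_BIG_ENDIAN bit
   numbering), a reference to BITSIZE = 3 bits at BITPOS = 5 of an
   unsigned 32-bit word x extracts the value

     (x >> 5) & 7

   but as a single BIT_FIELD_REF node rather than explicit shift and
   mask operations.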
*/ 4125 4126 static tree 4127 make_bit_field_ref (location_t loc, tree inner, tree type, 4128 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp) 4129 { 4130 tree result, bftype; 4131 4132 if (bitpos == 0) 4133 { 4134 tree size = TYPE_SIZE (TREE_TYPE (inner)); 4135 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) 4136 || POINTER_TYPE_P (TREE_TYPE (inner))) 4137 && host_integerp (size, 0) 4138 && tree_low_cst (size, 0) == bitsize) 4139 return fold_convert_loc (loc, type, inner); 4140 } 4141 4142 bftype = type; 4143 if (TYPE_PRECISION (bftype) != bitsize 4144 || TYPE_UNSIGNED (bftype) == !unsignedp) 4145 bftype = build_nonstandard_integer_type (bitsize, 0); 4146 4147 result = build3 (BIT_FIELD_REF, bftype, inner, 4148 size_int (bitsize), bitsize_int (bitpos)); 4149 SET_EXPR_LOCATION (result, loc); 4150 4151 if (bftype != type) 4152 result = fold_convert_loc (loc, type, result); 4153 4154 return result; 4155 } 4156 4157 /* Optimize a bit-field compare. 4158 4159 There are two cases: First is a compare against a constant and the 4160 second is a comparison of two items where the fields are at the same 4161 bit position relative to the start of a chunk (byte, halfword, word) 4162 large enough to contain it. In these cases we can avoid the shift 4163 implicit in bitfield extractions. 4164 4165 For constants, we emit a compare of the shifted constant with the 4166 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being 4167 compared. For two fields at the same position, we do the ANDs with the 4168 similar mask and compare the result of the ANDs. 4169 4170 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR. 4171 COMPARE_TYPE is the type of the comparison, and LHS and RHS 4172 are the left and right operands of the comparison, respectively. 4173 4174 If the optimization described above can be done, we return the resulting 4175 tree. Otherwise we return zero. */ 4176 4177 static tree 4178 optimize_bit_field_compare (location_t loc, enum tree_code code, 4179 tree compare_type, tree lhs, tree rhs) 4180 { 4181 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; 4182 tree type = TREE_TYPE (lhs); 4183 tree signed_type, unsigned_type; 4184 int const_p = TREE_CODE (rhs) == INTEGER_CST; 4185 enum machine_mode lmode, rmode, nmode; 4186 int lunsignedp, runsignedp; 4187 int lvolatilep = 0, rvolatilep = 0; 4188 tree linner, rinner = NULL_TREE; 4189 tree mask; 4190 tree offset; 4191 4192 /* Get all the information about the extractions being done. If the bit size 4193 is the same as the size of the underlying object, we aren't doing an 4194 extraction at all and so can do nothing. We also don't want to 4195 do anything if the inner expression is a PLACEHOLDER_EXPR since we 4196 then will no longer be able to replace it. */ 4197 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, 4198 &lunsignedp, &lvolatilep, false); 4199 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 4200 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR) 4201 return 0; 4202 4203 if (!const_p) 4204 { 4205 /* If this is not a constant, we can only do something if bit positions, 4206 sizes, and signedness are the same.
*/ 4207 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, 4208 &runsignedp, &rvolatilep, false); 4209 4210 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize 4211 || lunsignedp != runsignedp || offset != 0 4212 || TREE_CODE (rinner) == PLACEHOLDER_EXPR) 4213 return 0; 4214 } 4215 4216 /* See if we can find a mode to refer to this field. We should be able to, 4217 but fail if we can't. */ 4218 nmode = get_best_mode (lbitsize, lbitpos, 4219 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) 4220 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), 4221 TYPE_ALIGN (TREE_TYPE (rinner))), 4222 word_mode, lvolatilep || rvolatilep); 4223 if (nmode == VOIDmode) 4224 return 0; 4225 4226 /* Set signed and unsigned types of the precision of this mode for the 4227 shifts below. */ 4228 signed_type = lang_hooks.types.type_for_mode (nmode, 0); 4229 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); 4230 4231 /* Compute the bit position and size for the new reference and our offset 4232 within it. If the new reference is the same size as the original, we 4233 won't optimize anything, so return zero. */ 4234 nbitsize = GET_MODE_BITSIZE (nmode); 4235 nbitpos = lbitpos & ~ (nbitsize - 1); 4236 lbitpos -= nbitpos; 4237 if (nbitsize == lbitsize) 4238 return 0; 4239 4240 if (BYTES_BIG_ENDIAN) 4241 lbitpos = nbitsize - lbitsize - lbitpos; 4242 4243 /* Make the mask to be used against the extracted field. */ 4244 mask = build_int_cst_type (unsigned_type, -1); 4245 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0); 4246 mask = const_binop (RSHIFT_EXPR, mask, 4247 size_int (nbitsize - lbitsize - lbitpos), 0); 4248 4249 if (! const_p) 4250 /* If not comparing with constant, just rework the comparison 4251 and return. */ 4252 return fold_build2_loc (loc, code, compare_type, 4253 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 4254 make_bit_field_ref (loc, linner, 4255 unsigned_type, 4256 nbitsize, nbitpos, 4257 1), 4258 mask), 4259 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 4260 make_bit_field_ref (loc, rinner, 4261 unsigned_type, 4262 nbitsize, nbitpos, 4263 1), 4264 mask)); 4265 4266 /* Otherwise, we are handling the constant case. See if the constant is too 4267 big for the field. Warn and return a tree for 0 (false) if so. We do 4268 this not only for its own sake, but to avoid having to test for this 4269 error case below. If we didn't, we might generate wrong code. 4270 4271 For unsigned fields, the constant shifted right by the field length should 4272 be all zero. For signed fields, the high-order bits should agree with 4273 the sign bit. */ 4274 4275 if (lunsignedp) 4276 { 4277 if (! integer_zerop (const_binop (RSHIFT_EXPR, 4278 fold_convert_loc (loc, 4279 unsigned_type, rhs), 4280 size_int (lbitsize), 0))) 4281 { 4282 warning (0, "comparison is always %d due to width of bit-field", 4283 code == NE_EXPR); 4284 return constant_boolean_node (code == NE_EXPR, compare_type); 4285 } 4286 } 4287 else 4288 { 4289 tree tem = const_binop (RSHIFT_EXPR, 4290 fold_convert_loc (loc, signed_type, rhs), 4291 size_int (lbitsize - 1), 0); 4292 if (! integer_zerop (tem) && ! integer_all_onesp (tem)) 4293 { 4294 warning (0, "comparison is always %d due to width of bit-field", 4295 code == NE_EXPR); 4296 return constant_boolean_node (code == NE_EXPR, compare_type); 4297 } 4298 } 4299 4300 /* Single-bit compares should always be against zero. */ 4301 if (lbitsize == 1 && ! integer_zerop (rhs)) 4302 { 4303 code = code == EQ_EXPR ?
NE_EXPR : EQ_EXPR; 4304 rhs = build_int_cst (type, 0); 4305 } 4306 4307 /* Make a new bitfield reference, shift the constant over the 4308 appropriate number of bits and mask it with the computed mask 4309 (in case this was a signed field). If we changed it, make a new one. */ 4310 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1); 4311 if (lvolatilep) 4312 { 4313 TREE_SIDE_EFFECTS (lhs) = 1; 4314 TREE_THIS_VOLATILE (lhs) = 1; 4315 } 4316 4317 rhs = const_binop (BIT_AND_EXPR, 4318 const_binop (LSHIFT_EXPR, 4319 fold_convert_loc (loc, unsigned_type, rhs), 4320 size_int (lbitpos), 0), 4321 mask, 0); 4322 4323 lhs = build2 (code, compare_type, 4324 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), 4325 rhs); 4326 SET_EXPR_LOCATION (lhs, loc); 4327 return lhs; 4328 } 4329 4330 /* Subroutine for fold_truthop: decode a field reference. 4331 4332 If EXP is a comparison reference, we return the innermost reference. 4333 4334 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is 4335 set to the starting bit number. 4336 4337 If the innermost field can be completely contained in a mode-sized 4338 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode. 4339 4340 *PVOLATILEP is set to 1 if any expression encountered is volatile; 4341 otherwise it is not changed. 4342 4343 *PUNSIGNEDP is set to the signedness of the field. 4344 4345 *PMASK is set to the mask used. This is either contained in a 4346 BIT_AND_EXPR or derived from the width of the field. 4347 4348 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. 4349 4350 Return 0 if this is not a component reference or is one that we can't 4351 do anything with. */ 4352 4353 static tree 4354 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, 4355 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode, 4356 int *punsignedp, int *pvolatilep, 4357 tree *pmask, tree *pand_mask) 4358 { 4359 tree outer_type = 0; 4360 tree and_mask = 0; 4361 tree mask, inner, offset; 4362 tree unsigned_type; 4363 unsigned int precision; 4364 4365 /* All the optimizations using this function assume integer fields. 4366 There are problems with FP fields since the type_for_size call 4367 below can fail for, e.g., XFmode. */ 4368 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp))) 4369 return 0; 4370 4371 /* We are interested in the bare arrangement of bits, so strip everything 4372 that doesn't affect the machine mode. However, record the type of the 4373 outermost expression if it may matter below. */ 4374 if (CONVERT_EXPR_P (exp) 4375 || TREE_CODE (exp) == NON_LVALUE_EXPR) 4376 outer_type = TREE_TYPE (exp); 4377 STRIP_NOPS (exp); 4378 4379 if (TREE_CODE (exp) == BIT_AND_EXPR) 4380 { 4381 and_mask = TREE_OPERAND (exp, 1); 4382 exp = TREE_OPERAND (exp, 0); 4383 STRIP_NOPS (exp); STRIP_NOPS (and_mask); 4384 if (TREE_CODE (and_mask) != INTEGER_CST) 4385 return 0; 4386 } 4387 4388 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, 4389 punsignedp, pvolatilep, false); 4390 if ((inner == exp && and_mask == 0) 4391 || *pbitsize < 0 || offset != 0 4392 || TREE_CODE (inner) == PLACEHOLDER_EXPR) 4393 return 0; 4394 4395 /* If the number of bits in the reference is the same as the bitsize of 4396 the outer type, then the outer type gives the signedness. Otherwise 4397 (in case of a small bitfield) the signedness is unchanged. */ 4398 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) 4399 *punsignedp = TYPE_UNSIGNED (outer_type); 4400 4401 /* Compute the mask to access the bitfield.
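   A sketch of the construction below in plain C, assuming an unsigned
   type of PRECISION bits (illustrative only):

     mask = ~0u;
     mask <<= precision - bitsize;   (zeroes the low bits)
     mask >>= precision - bitsize;   (only the low BITSIZE bits stay set)

   and, when a BIT_AND_EXPR mask was found, the two masks are then ANDed
   together.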
*/ 4402 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1); 4403 precision = TYPE_PRECISION (unsigned_type); 4404 4405 mask = build_int_cst_type (unsigned_type, -1); 4406 4407 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); 4408 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); 4409 4410 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */ 4411 if (and_mask != 0) 4412 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 4413 fold_convert_loc (loc, unsigned_type, and_mask), mask); 4414 4415 *pmask = mask; 4416 *pand_mask = and_mask; 4417 return inner; 4418 } 4419 4420 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order 4421 bit positions. */ 4422 4423 static int 4424 all_ones_mask_p (const_tree mask, int size) 4425 { 4426 tree type = TREE_TYPE (mask); 4427 unsigned int precision = TYPE_PRECISION (type); 4428 tree tmask; 4429 4430 tmask = build_int_cst_type (signed_type_for (type), -1); 4431 4432 return 4433 tree_int_cst_equal (mask, 4434 const_binop (RSHIFT_EXPR, 4435 const_binop (LSHIFT_EXPR, tmask, 4436 size_int (precision - size), 4437 0), 4438 size_int (precision - size), 0)); 4439 } 4440 4441 /* Subroutine for fold: determine if VAL is the INTEGER_CST that 4442 represents the sign bit of EXP's type. If EXP represents a sign 4443 or zero extension, also test VAL against the unextended type. 4444 The return value is the (sub)expression whose sign bit is VAL, 4445 or NULL_TREE otherwise. */ 4446 4447 static tree 4448 sign_bit_p (tree exp, const_tree val) 4449 { 4450 unsigned HOST_WIDE_INT mask_lo, lo; 4451 HOST_WIDE_INT mask_hi, hi; 4452 int width; 4453 tree t; 4454 4455 /* Tree EXP must have an integral type. */ 4456 t = TREE_TYPE (exp); 4457 if (! INTEGRAL_TYPE_P (t)) 4458 return NULL_TREE; 4459 4460 /* Tree VAL must be an integer constant. */ 4461 if (TREE_CODE (val) != INTEGER_CST 4462 || TREE_OVERFLOW (val)) 4463 return NULL_TREE; 4464 4465 width = TYPE_PRECISION (t); 4466 if (width > HOST_BITS_PER_WIDE_INT) 4467 { 4468 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1); 4469 lo = 0; 4470 4471 mask_hi = ((unsigned HOST_WIDE_INT) -1 4472 >> (2 * HOST_BITS_PER_WIDE_INT - width)); 4473 mask_lo = -1; 4474 } 4475 else 4476 { 4477 hi = 0; 4478 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1); 4479 4480 mask_hi = 0; 4481 mask_lo = ((unsigned HOST_WIDE_INT) -1 4482 >> (HOST_BITS_PER_WIDE_INT - width)); 4483 } 4484 4485 /* We mask off those bits beyond TREE_TYPE (exp) so that we can 4486 treat VAL as if it were unsigned. */ 4487 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi 4488 && (TREE_INT_CST_LOW (val) & mask_lo) == lo) 4489 return exp; 4490 4491 /* Handle extension from a narrower type. */ 4492 if (TREE_CODE (exp) == NOP_EXPR 4493 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) 4494 return sign_bit_p (TREE_OPERAND (exp, 0), val); 4495 4496 return NULL_TREE; 4497 } 4498 4499 /* Subroutine for fold_truthop: determine if an operand is simple enough 4500 to be evaluated unconditionally. */ 4501 4502 static int 4503 simple_operand_p (const_tree exp) 4504 { 4505 /* Strip any conversions that don't change the machine mode. */ 4506 STRIP_NOPS (exp); 4507 4508 return (CONSTANT_CLASS_P (exp) 4509 || TREE_CODE (exp) == SSA_NAME 4510 || (DECL_P (exp) 4511 && ! TREE_ADDRESSABLE (exp) 4512 && ! TREE_THIS_VOLATILE (exp) 4513 && ! DECL_NONLOCAL (exp) 4514 /* Don't regard global variables as simple.
They may be 4515 allocated in ways unknown to the compiler (shared memory, 4516 #pragma weak, etc). */ 4517 && ! TREE_PUBLIC (exp) 4518 && ! DECL_EXTERNAL (exp) 4519 /* Loading a static variable is unduly expensive, but global 4520 registers aren't expensive. */ 4521 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); 4522 } 4523 4524 /* The following functions are subroutines to fold_range_test and allow it to 4525 try to change a logical combination of comparisons into a range test. 4526 4527 For example, both 4528 X == 2 || X == 3 || X == 4 || X == 5 4529 and 4530 X >= 2 && X <= 5 4531 are converted to 4532 (unsigned) (X - 2) <= 3 4533 4534 We describe each set of comparisons as being either inside or outside 4535 a range, using a variable named like IN_P, and then describe the 4536 range with a lower and upper bound. If one of the bounds is omitted, 4537 it represents either the highest or lowest value of the type. 4538 4539 In the comments below, we represent a range by two numbers in brackets 4540 preceded by a "+" to designate being inside that range, or a "-" to 4541 designate being outside that range, so the condition can be inverted by 4542 flipping the prefix. An omitted bound is represented by a "-". For 4543 example, "- [-, 10]" means being outside the range starting at the lowest 4544 possible value and ending at 10, in other words, being greater than 10. 4545 The range "+ [-, -]" is always true and hence the range "- [-, -]" is 4546 always false. 4547 4548 We set up things so that the missing bounds are handled in a consistent 4549 manner so neither a missing bound nor "true" and "false" need to be 4550 handled using a special case. */ 4551 4552 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case 4553 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P 4554 and UPPER1_P are nonzero if the respective argument is an upper bound 4555 and zero for a lower. TYPE, if nonzero, is the type of the result; it 4556 must be specified for a comparison. ARG1 will be converted to ARG0's 4557 type if both are specified. */ 4558 4559 static tree 4560 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, 4561 tree arg1, int upper1_p) 4562 { 4563 tree tem; 4564 int result; 4565 int sgn0, sgn1; 4566 4567 /* If neither arg represents infinity, do the normal operation. 4568 Else, if not a comparison, return infinity. Else handle the special 4569 comparison rules. Note that most of the cases below won't occur, but 4570 are handled for consistency. */ 4571 4572 if (arg0 != 0 && arg1 != 0) 4573 { 4574 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0), 4575 arg0, fold_convert (TREE_TYPE (arg0), arg1)); 4576 STRIP_NOPS (tem); 4577 return TREE_CODE (tem) == INTEGER_CST ? tem : 0; 4578 } 4579 4580 if (TREE_CODE_CLASS (code) != tcc_comparison) 4581 return 0; 4582 4583 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0 4584 for neither. In real maths, we cannot assume open ended ranges are 4585 the same. But, this is computer arithmetic, where numbers are finite. 4586 We can therefore make the transformation of any unbounded range with 4587 the value Z, Z being greater than any representable number. This permits 4588 us to treat unbounded ranges as equal. */ 4589 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1); 4590 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 
1 : -1); 4591 switch (code) 4592 { 4593 case EQ_EXPR: 4594 result = sgn0 == sgn1; 4595 break; 4596 case NE_EXPR: 4597 result = sgn0 != sgn1; 4598 break; 4599 case LT_EXPR: 4600 result = sgn0 < sgn1; 4601 break; 4602 case LE_EXPR: 4603 result = sgn0 <= sgn1; 4604 break; 4605 case GT_EXPR: 4606 result = sgn0 > sgn1; 4607 break; 4608 case GE_EXPR: 4609 result = sgn0 >= sgn1; 4610 break; 4611 default: 4612 gcc_unreachable (); 4613 } 4614 4615 return constant_boolean_node (result, type); 4616 } 4617 4618 /* Given EXP, a logical expression, set the range it is testing into 4619 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression 4620 actually being tested. *PLOW and *PHIGH will be made of the same 4621 type as the returned expression. If EXP is not a comparison, we 4622 will most likely not be returning a useful value and range. Set 4623 *STRICT_OVERFLOW_P to true if the return value is only valid 4624 because signed overflow is undefined; otherwise, do not change 4625 *STRICT_OVERFLOW_P. */ 4626 4627 tree 4628 make_range (tree exp, int *pin_p, tree *plow, tree *phigh, 4629 bool *strict_overflow_p) 4630 { 4631 enum tree_code code; 4632 tree arg0 = NULL_TREE, arg1 = NULL_TREE; 4633 tree exp_type = NULL_TREE, arg0_type = NULL_TREE; 4634 int in_p, n_in_p; 4635 tree low, high, n_low, n_high; 4636 location_t loc = EXPR_LOCATION (exp); 4637 4638 /* Start with simply saying "EXP != 0" and then look at the code of EXP 4639 and see if we can refine the range. Some of the cases below may not 4640 happen, but it doesn't seem worth worrying about this. We "continue" 4641 the outer loop when we've changed something; otherwise we "break" 4642 the switch, which will "break" the while. */ 4643 4644 in_p = 0; 4645 low = high = build_int_cst (TREE_TYPE (exp), 0); 4646 4647 while (1) 4648 { 4649 code = TREE_CODE (exp); 4650 exp_type = TREE_TYPE (exp); 4651 4652 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 4653 { 4654 if (TREE_OPERAND_LENGTH (exp) > 0) 4655 arg0 = TREE_OPERAND (exp, 0); 4656 if (TREE_CODE_CLASS (code) == tcc_comparison 4657 || TREE_CODE_CLASS (code) == tcc_unary 4658 || TREE_CODE_CLASS (code) == tcc_binary) 4659 arg0_type = TREE_TYPE (arg0); 4660 if (TREE_CODE_CLASS (code) == tcc_binary 4661 || TREE_CODE_CLASS (code) == tcc_comparison 4662 || (TREE_CODE_CLASS (code) == tcc_expression 4663 && TREE_OPERAND_LENGTH (exp) > 1)) 4664 arg1 = TREE_OPERAND (exp, 1); 4665 } 4666 4667 switch (code) 4668 { 4669 case TRUTH_NOT_EXPR: 4670 in_p = ! in_p, exp = arg0; 4671 continue; 4672 4673 case EQ_EXPR: case NE_EXPR: 4674 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: 4675 /* We can only do something if the range is testing for zero 4676 and if the second operand is an integer constant. Note that 4677 saying something is "in" the range we make is done by 4678 complementing IN_P since it will set in the initial case of 4679 being not equal to zero; "out" is leaving it alone. */ 4680 if (low == 0 || high == 0 4681 || ! integer_zerop (low) || ! integer_zerop (high) 4682 || TREE_CODE (arg1) != INTEGER_CST) 4683 break; 4684 4685 switch (code) 4686 { 4687 case NE_EXPR: /* - [c, c] */ 4688 low = high = arg1; 4689 break; 4690 case EQ_EXPR: /* + [c, c] */ 4691 in_p = ! in_p, low = high = arg1; 4692 break; 4693 case GT_EXPR: /* - [-, c] */ 4694 low = 0, high = arg1; 4695 break; 4696 case GE_EXPR: /* + [c, -] */ 4697 in_p = ! in_p, low = arg1, high = 0; 4698 break; 4699 case LT_EXPR: /* - [c, -] */ 4700 low = arg1, high = 0; 4701 break; 4702 case LE_EXPR: /* + [-, c] */ 4703 in_p = ! 
in_p, low = 0, high = arg1; 4704 break; 4705 default: 4706 gcc_unreachable (); 4707 } 4708 4709 /* If this is an unsigned comparison, we also know that EXP is 4710 greater than or equal to zero. We base the range tests we make 4711 on that fact, so we record it here so we can parse existing 4712 range tests. We test arg0_type since often the return type 4713 of, e.g. EQ_EXPR, is boolean. */ 4714 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) 4715 { 4716 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4717 in_p, low, high, 1, 4718 build_int_cst (arg0_type, 0), 4719 NULL_TREE)) 4720 break; 4721 4722 in_p = n_in_p, low = n_low, high = n_high; 4723 4724 /* If the high bound is missing, but we have a nonzero low 4725 bound, reverse the range so it goes from zero to the low bound 4726 minus 1. */ 4727 if (high == 0 && low && ! integer_zerop (low)) 4728 { 4729 in_p = ! in_p; 4730 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, 4731 integer_one_node, 0); 4732 low = build_int_cst (arg0_type, 0); 4733 } 4734 } 4735 4736 exp = arg0; 4737 continue; 4738 4739 case NEGATE_EXPR: 4740 /* (-x) IN [a,b] -> x in [-b, -a] */ 4741 n_low = range_binop (MINUS_EXPR, exp_type, 4742 build_int_cst (exp_type, 0), 4743 0, high, 1); 4744 n_high = range_binop (MINUS_EXPR, exp_type, 4745 build_int_cst (exp_type, 0), 4746 0, low, 0); 4747 if (n_high != 0 && TREE_OVERFLOW (n_high)) 4748 break; 4749 goto normalize; 4750 4751 case BIT_NOT_EXPR: 4752 /* ~ X -> -X - 1 */ 4753 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0), 4754 build_int_cst (exp_type, 1)); 4755 SET_EXPR_LOCATION (exp, loc); 4756 continue; 4757 4758 case PLUS_EXPR: case MINUS_EXPR: 4759 if (TREE_CODE (arg1) != INTEGER_CST) 4760 break; 4761 4762 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot 4763 move a constant to the other side. */ 4764 if (!TYPE_UNSIGNED (arg0_type) 4765 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4766 break; 4767 4768 /* If EXP is signed, any overflow in the computation is undefined, 4769 so we don't worry about it so long as our computations on 4770 the bounds don't overflow. For unsigned, overflow is defined 4771 and this is exactly the right thing. */ 4772 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4773 arg0_type, low, 0, arg1, 0); 4774 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4775 arg0_type, high, 1, arg1, 0); 4776 if ((n_low != 0 && TREE_OVERFLOW (n_low)) 4777 || (n_high != 0 && TREE_OVERFLOW (n_high))) 4778 break; 4779 4780 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4781 *strict_overflow_p = true; 4782 4783 normalize: 4784 /* Check for an unsigned range which has wrapped around the maximum 4785 value thus making n_high < n_low, and normalize it. */ 4786 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) 4787 { 4788 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, 4789 integer_one_node, 0); 4790 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, 4791 integer_one_node, 0); 4792 4793 /* If the range is of the form +/- [ x+1, x ], we won't 4794 be able to normalize it. But then, it represents the 4795 whole range or the empty set, so make it 4796 +/- [ -, - ]. */ 4797 if (tree_int_cst_equal (n_low, low) 4798 && tree_int_cst_equal (n_high, high)) 4799 low = high = 0; 4800 else 4801 in_p = ! in_p; 4802 } 4803 else 4804 low = n_low, high = n_high; 4805 4806 exp = arg0; 4807 continue; 4808 4809 CASE_CONVERT: case NON_LVALUE_EXPR: 4810 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) 4811 break; 4812 4813 if (! 
INTEGRAL_TYPE_P (arg0_type) 4814 || (low != 0 && ! int_fits_type_p (low, arg0_type)) 4815 || (high != 0 && ! int_fits_type_p (high, arg0_type))) 4816 break; 4817 4818 n_low = low, n_high = high; 4819 4820 if (n_low != 0) 4821 n_low = fold_convert_loc (loc, arg0_type, n_low); 4822 4823 if (n_high != 0) 4824 n_high = fold_convert_loc (loc, arg0_type, n_high); 4825 4826 4827 /* If we're converting arg0 from an unsigned type, to exp, 4828 a signed type, we will be doing the comparison as unsigned. 4829 The tests above have already verified that LOW and HIGH 4830 are both positive. 4831 4832 So we have to ensure that we will handle large unsigned 4833 values the same way that the current signed bounds treat 4834 negative values. */ 4835 4836 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) 4837 { 4838 tree high_positive; 4839 tree equiv_type; 4840 /* For fixed-point modes, we need to pass the saturating flag 4841 as the 2nd parameter. */ 4842 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type))) 4843 equiv_type = lang_hooks.types.type_for_mode 4844 (TYPE_MODE (arg0_type), 4845 TYPE_SATURATING (arg0_type)); 4846 else 4847 equiv_type = lang_hooks.types.type_for_mode 4848 (TYPE_MODE (arg0_type), 1); 4849 4850 /* A range without an upper bound is, naturally, unbounded. 4851 Since convert would have cropped a very large value, use 4852 the max value for the destination type. */ 4853 high_positive 4854 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) 4855 : TYPE_MAX_VALUE (arg0_type); 4856 4857 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) 4858 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, 4859 fold_convert_loc (loc, arg0_type, 4860 high_positive), 4861 build_int_cst (arg0_type, 1)); 4862 4863 /* If the low bound is specified, "and" the range with the 4864 range for which the original unsigned value will be 4865 positive. */ 4866 if (low != 0) 4867 { 4868 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4869 1, n_low, n_high, 1, 4870 fold_convert_loc (loc, arg0_type, 4871 integer_zero_node), 4872 high_positive)) 4873 break; 4874 4875 in_p = (n_in_p == in_p); 4876 } 4877 else 4878 { 4879 /* Otherwise, "or" the range with the range of the input 4880 that will be interpreted as negative. */ 4881 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4882 0, n_low, n_high, 1, 4883 fold_convert_loc (loc, arg0_type, 4884 integer_zero_node), 4885 high_positive)) 4886 break; 4887 4888 in_p = (in_p != n_in_p); 4889 } 4890 } 4891 4892 exp = arg0; 4893 low = n_low, high = n_high; 4894 continue; 4895 4896 default: 4897 break; 4898 } 4899 4900 break; 4901 } 4902 4903 /* If EXP is a constant, we can evaluate whether this is true or false. */ 4904 if (TREE_CODE (exp) == INTEGER_CST) 4905 { 4906 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node, 4907 exp, 0, low, 0)) 4908 && integer_onep (range_binop (LE_EXPR, integer_type_node, 4909 exp, 1, high, 1))); 4910 low = high = 0; 4911 exp = 0; 4912 } 4913 4914 *pin_p = in_p, *plow = low, *phigh = high; 4915 return exp; 4916 } 4917 4918 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result 4919 type, TYPE, return an expression to test if EXP is in (or out of, depending 4920 on IN_P) the range. Return 0 if the test couldn't be created. 
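   The central trick below, sketched in C assuming wrap-around unsigned
   arithmetic (illustrative only): the two-sided test

     low <= x && x <= high

   becomes the single comparison

     (unsigned) (x - low) <= (unsigned) (high - low)

   e.g. 2 <= x && x <= 5 turns into (unsigned) (x - 2) <= 3.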
*/ 4921 4922 tree 4923 build_range_check (location_t loc, tree type, tree exp, int in_p, 4924 tree low, tree high) 4925 { 4926 tree etype = TREE_TYPE (exp), value; 4927 4928 #ifdef HAVE_canonicalize_funcptr_for_compare 4929 /* Disable this optimization for function pointer expressions 4930 on targets that require function pointer canonicalization. */ 4931 if (HAVE_canonicalize_funcptr_for_compare 4932 && TREE_CODE (etype) == POINTER_TYPE 4933 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) 4934 return NULL_TREE; 4935 #endif 4936 4937 if (! in_p) 4938 { 4939 value = build_range_check (loc, type, exp, 1, low, high); 4940 if (value != 0) 4941 return invert_truthvalue_loc (loc, value); 4942 4943 return 0; 4944 } 4945 4946 if (low == 0 && high == 0) 4947 return build_int_cst (type, 1); 4948 4949 if (low == 0) 4950 return fold_build2_loc (loc, LE_EXPR, type, exp, 4951 fold_convert_loc (loc, etype, high)); 4952 4953 if (high == 0) 4954 return fold_build2_loc (loc, GE_EXPR, type, exp, 4955 fold_convert_loc (loc, etype, low)); 4956 4957 if (operand_equal_p (low, high, 0)) 4958 return fold_build2_loc (loc, EQ_EXPR, type, exp, 4959 fold_convert_loc (loc, etype, low)); 4960 4961 if (integer_zerop (low)) 4962 { 4963 if (! TYPE_UNSIGNED (etype)) 4964 { 4965 etype = unsigned_type_for (etype); 4966 high = fold_convert_loc (loc, etype, high); 4967 exp = fold_convert_loc (loc, etype, exp); 4968 } 4969 return build_range_check (loc, type, exp, 1, 0, high); 4970 } 4971 4972 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */ 4973 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) 4974 { 4975 unsigned HOST_WIDE_INT lo; 4976 HOST_WIDE_INT hi; 4977 int prec; 4978 4979 prec = TYPE_PRECISION (etype); 4980 if (prec <= HOST_BITS_PER_WIDE_INT) 4981 { 4982 hi = 0; 4983 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1; 4984 } 4985 else 4986 { 4987 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1; 4988 lo = (unsigned HOST_WIDE_INT) -1; 4989 } 4990 4991 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo) 4992 { 4993 if (TYPE_UNSIGNED (etype)) 4994 { 4995 tree signed_etype = signed_type_for (etype); 4996 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype)) 4997 etype 4998 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0); 4999 else 5000 etype = signed_etype; 5001 exp = fold_convert_loc (loc, etype, exp); 5002 } 5003 return fold_build2_loc (loc, GT_EXPR, type, exp, 5004 build_int_cst (etype, 0)); 5005 } 5006 } 5007 5008 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). 5009 This requires wrap-around arithmetics for the type of the expression. 5010 First make sure that arithmetics in this type is valid, then make sure 5011 that it wraps around. */ 5012 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE) 5013 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 5014 TYPE_UNSIGNED (etype)); 5015 5016 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype)) 5017 { 5018 tree utype, minv, maxv; 5019 5020 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN 5021 for the type in question, as we rely on this here. 
*/ 5022 utype = unsigned_type_for (etype); 5023 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype)); 5024 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, 5025 integer_one_node, 1); 5026 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype)); 5027 5028 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, 5029 minv, 1, maxv, 1))) 5030 etype = utype; 5031 else 5032 return 0; 5033 } 5034 5035 high = fold_convert_loc (loc, etype, high); 5036 low = fold_convert_loc (loc, etype, low); 5037 exp = fold_convert_loc (loc, etype, exp); 5038 5039 value = const_binop (MINUS_EXPR, high, low, 0); 5040 5041 5042 if (POINTER_TYPE_P (etype)) 5043 { 5044 if (value != 0 && !TREE_OVERFLOW (value)) 5045 { 5046 low = fold_convert_loc (loc, sizetype, low); 5047 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low); 5048 return build_range_check (loc, type, 5049 fold_build2_loc (loc, POINTER_PLUS_EXPR, 5050 etype, exp, low), 5051 1, build_int_cst (etype, 0), value); 5052 } 5053 return 0; 5054 } 5055 5056 if (value != 0 && !TREE_OVERFLOW (value)) 5057 return build_range_check (loc, type, 5058 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), 5059 1, build_int_cst (etype, 0), value); 5060 5061 return 0; 5062 } 5063 5064 /* Return the predecessor of VAL in its type, handling the infinite case. */ 5065 5066 static tree 5067 range_predecessor (tree val) 5068 { 5069 tree type = TREE_TYPE (val); 5070 5071 if (INTEGRAL_TYPE_P (type) 5072 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) 5073 return 0; 5074 else 5075 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 5076 } 5077 5078 /* Return the successor of VAL in its type, handling the infinite case. */ 5079 5080 static tree 5081 range_successor (tree val) 5082 { 5083 tree type = TREE_TYPE (val); 5084 5085 if (INTEGRAL_TYPE_P (type) 5086 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) 5087 return 0; 5088 else 5089 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 5090 } 5091 5092 /* Given two ranges, see if we can merge them into one. Return 1 if we 5093 can, 0 if we can't. Set the output range into the specified parameters. */ 5094 5095 bool 5096 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, 5097 tree high0, int in1_p, tree low1, tree high1) 5098 { 5099 int no_overlap; 5100 int subset; 5101 int temp; 5102 tree tem; 5103 int in_p; 5104 tree low, high; 5105 int lowequal = ((low0 == 0 && low1 == 0) 5106 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 5107 low0, 0, low1, 0))); 5108 int highequal = ((high0 == 0 && high1 == 0) 5109 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 5110 high0, 1, high1, 1))); 5111 5112 /* Make range 0 be the range that starts first, or ends last if they 5113 start at the same value. Swap them if it isn't. */ 5114 if (integer_onep (range_binop (GT_EXPR, integer_type_node, 5115 low0, 0, low1, 0)) 5116 || (lowequal 5117 && integer_onep (range_binop (GT_EXPR, integer_type_node, 5118 high1, 1, high0, 1)))) 5119 { 5120 temp = in0_p, in0_p = in1_p, in1_p = temp; 5121 tem = low0, low0 = low1, low1 = tem; 5122 tem = high0, high0 = high1, high1 = tem; 5123 } 5124 5125 /* Now flag two cases, whether the ranges are disjoint or whether the 5126 second range is totally subsumed in the first. Note that the tests 5127 below are simplified by the ones above. 
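   A worked example with both ranges included: merging + [2, 9] with
   + [4, 5] hits the subset case and yields + [4, 5]; merging + [2, 5]
   with + [4, 9] overlaps without subsumption and also yields + [4, 5],
   from the start of the second range to the end of the first.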
*/ 5128 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node, 5129 high0, 1, low1, 0)); 5130 subset = integer_onep (range_binop (LE_EXPR, integer_type_node, 5131 high1, 1, high0, 1)); 5132 5133 /* We now have four cases, depending on whether we are including or 5134 excluding the two ranges. */ 5135 if (in0_p && in1_p) 5136 { 5137 /* If they don't overlap, the result is false. If the second range 5138 is a subset it is the result. Otherwise, the range is from the start 5139 of the second to the end of the first. */ 5140 if (no_overlap) 5141 in_p = 0, low = high = 0; 5142 else if (subset) 5143 in_p = 1, low = low1, high = high1; 5144 else 5145 in_p = 1, low = low1, high = high0; 5146 } 5147 5148 else if (in0_p && ! in1_p) 5149 { 5150 /* If they don't overlap, the result is the first range. If they are 5151 equal, the result is false. If the second range is a subset of the 5152 first, and the ranges begin at the same place, we go from just after 5153 the end of the second range to the end of the first. If the second 5154 range is not a subset of the first, or if it is a subset and both 5155 ranges end at the same place, the range starts at the start of the 5156 first range and ends just before the second range. 5157 Otherwise, we can't describe this as a single range. */ 5158 if (no_overlap) 5159 in_p = 1, low = low0, high = high0; 5160 else if (lowequal && highequal) 5161 in_p = 0, low = high = 0; 5162 else if (subset && lowequal) 5163 { 5164 low = range_successor (high1); 5165 high = high0; 5166 in_p = 1; 5167 if (low == 0) 5168 { 5169 /* We are in the weird situation where high0 > high1 but 5170 high1 has no successor. Punt. */ 5171 return 0; 5172 } 5173 } 5174 else if (! subset || highequal) 5175 { 5176 low = low0; 5177 high = range_predecessor (low1); 5178 in_p = 1; 5179 if (high == 0) 5180 { 5181 /* low0 < low1 but low1 has no predecessor. Punt. */ 5182 return 0; 5183 } 5184 } 5185 else 5186 return 0; 5187 } 5188 5189 else if (! in0_p && in1_p) 5190 { 5191 /* If they don't overlap, the result is the second range. If the second 5192 is a subset of the first, the result is false. Otherwise, 5193 the range starts just after the first range and ends at the 5194 end of the second. */ 5195 if (no_overlap) 5196 in_p = 1, low = low1, high = high1; 5197 else if (subset || highequal) 5198 in_p = 0, low = high = 0; 5199 else 5200 { 5201 low = range_successor (high0); 5202 high = high1; 5203 in_p = 1; 5204 if (low == 0) 5205 { 5206 /* high1 > high0 but high0 has no successor. Punt. */ 5207 return 0; 5208 } 5209 } 5210 } 5211 5212 else 5213 { 5214 /* The case where we are excluding both ranges. Here the complex case 5215 is if they don't overlap. In that case, the only time we have a 5216 range is if they are adjacent. If the second is a subset of the 5217 first, the result is the first. Otherwise, the range to exclude 5218 starts at the beginning of the first range and ends at the end of the 5219 second. */ 5220 if (no_overlap) 5221 { 5222 if (integer_onep (range_binop (EQ_EXPR, integer_type_node, 5223 range_successor (high0), 5224 1, low1, 0))) 5225 in_p = 0, low = low0, high = high1; 5226 else 5227 { 5228 /* Canonicalize - [min, x] into - [-, x]. 
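   E.g. for a 32-bit int, - [INT_MIN, 10] carries no more information
   than - [-, 10] (both mean x > 10), and dropping the explicit minimum
   lets the adjacency test further below fire.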
*/ 5229 if (low0 && TREE_CODE (low0) == INTEGER_CST) 5230 switch (TREE_CODE (TREE_TYPE (low0))) 5231 { 5232 case ENUMERAL_TYPE: 5233 if (TYPE_PRECISION (TREE_TYPE (low0)) 5234 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0)))) 5235 break; 5236 /* FALLTHROUGH */ 5237 case INTEGER_TYPE: 5238 if (tree_int_cst_equal (low0, 5239 TYPE_MIN_VALUE (TREE_TYPE (low0)))) 5240 low0 = 0; 5241 break; 5242 case POINTER_TYPE: 5243 if (TYPE_UNSIGNED (TREE_TYPE (low0)) 5244 && integer_zerop (low0)) 5245 low0 = 0; 5246 break; 5247 default: 5248 break; 5249 } 5250 5251 /* Canonicalize - [x, max] into - [x, -]. */ 5252 if (high1 && TREE_CODE (high1) == INTEGER_CST) 5253 switch (TREE_CODE (TREE_TYPE (high1))) 5254 { 5255 case ENUMERAL_TYPE: 5256 if (TYPE_PRECISION (TREE_TYPE (high1)) 5257 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1)))) 5258 break; 5259 /* FALLTHROUGH */ 5260 case INTEGER_TYPE: 5261 if (tree_int_cst_equal (high1, 5262 TYPE_MAX_VALUE (TREE_TYPE (high1)))) 5263 high1 = 0; 5264 break; 5265 case POINTER_TYPE: 5266 if (TYPE_UNSIGNED (TREE_TYPE (high1)) 5267 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, 5268 high1, 1, 5269 integer_one_node, 1))) 5270 high1 = 0; 5271 break; 5272 default: 5273 break; 5274 } 5275 5276 /* The ranges might be also adjacent between the maximum and 5277 minimum values of the given type. For 5278 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y 5279 return + [x + 1, y - 1]. */ 5280 if (low0 == 0 && high1 == 0) 5281 { 5282 low = range_successor (high0); 5283 high = range_predecessor (low1); 5284 if (low == 0 || high == 0) 5285 return 0; 5286 5287 in_p = 1; 5288 } 5289 else 5290 return 0; 5291 } 5292 } 5293 else if (subset) 5294 in_p = 0, low = low0, high = high0; 5295 else 5296 in_p = 0, low = low0, high = high1; 5297 } 5298 5299 *pin_p = in_p, *plow = low, *phigh = high; 5300 return 1; 5301 } 5302 5303 5304 /* Subroutine of fold, looking inside expressions of the form 5305 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands 5306 of the COND_EXPR. This function is being used also to optimize 5307 A op B ? C : A, by reversing the comparison first. 5308 5309 Return a folded expression whose code is not a COND_EXPR 5310 anymore, or NULL_TREE if no folding opportunity is found. */ 5311 5312 static tree 5313 fold_cond_expr_with_comparison (location_t loc, tree type, 5314 tree arg0, tree arg1, tree arg2) 5315 { 5316 enum tree_code comp_code = TREE_CODE (arg0); 5317 tree arg00 = TREE_OPERAND (arg0, 0); 5318 tree arg01 = TREE_OPERAND (arg0, 1); 5319 tree arg1_type = TREE_TYPE (arg1); 5320 tree tem; 5321 5322 STRIP_NOPS (arg1); 5323 STRIP_NOPS (arg2); 5324 5325 /* If we have A op 0 ? A : -A, consider applying the following 5326 transformations: 5327 5328 A == 0? A : -A same as -A 5329 A != 0? A : -A same as A 5330 A >= 0? A : -A same as abs (A) 5331 A > 0? A : -A same as abs (A) 5332 A <= 0? A : -A same as -abs (A) 5333 A < 0? A : -A same as -abs (A) 5334 5335 None of these transformations work for modes with signed 5336 zeros. If A is +/-0, the first two transformations will 5337 change the sign of the result (from +0 to -0, or vice 5338 versa). The last four will fix the sign of the result, 5339 even though the original expressions could be positive or 5340 negative, depending on the sign of A. 5341 5342 Note that all these transformations are correct if A is 5343 NaN, since the two alternatives (A and -A) are also NaNs. */ 5344 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 5345 && (FLOAT_TYPE_P (TREE_TYPE (arg01)) 5346 ? 
real_zerop (arg01) 5347 : integer_zerop (arg01)) 5348 && ((TREE_CODE (arg2) == NEGATE_EXPR 5349 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) 5350 /* In the case that A is of the form X-Y, '-A' (arg2) may 5351 have already been folded to Y-X, check for that. */ 5352 || (TREE_CODE (arg1) == MINUS_EXPR 5353 && TREE_CODE (arg2) == MINUS_EXPR 5354 && operand_equal_p (TREE_OPERAND (arg1, 0), 5355 TREE_OPERAND (arg2, 1), 0) 5356 && operand_equal_p (TREE_OPERAND (arg1, 1), 5357 TREE_OPERAND (arg2, 0), 0)))) 5358 switch (comp_code) 5359 { 5360 case EQ_EXPR: 5361 case UNEQ_EXPR: 5362 tem = fold_convert_loc (loc, arg1_type, arg1); 5363 return pedantic_non_lvalue_loc (loc, 5364 fold_convert_loc (loc, type, 5365 negate_expr (tem))); 5366 case NE_EXPR: 5367 case LTGT_EXPR: 5368 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 5369 case UNGE_EXPR: 5370 case UNGT_EXPR: 5371 if (flag_trapping_math) 5372 break; 5373 /* Fall through. */ 5374 case GE_EXPR: 5375 case GT_EXPR: 5376 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 5377 arg1 = fold_convert_loc (loc, signed_type_for 5378 (TREE_TYPE (arg1)), arg1); 5379 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 5380 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 5381 case UNLE_EXPR: 5382 case UNLT_EXPR: 5383 if (flag_trapping_math) 5384 break; 5385 case LE_EXPR: 5386 case LT_EXPR: 5387 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 5388 arg1 = fold_convert_loc (loc, signed_type_for 5389 (TREE_TYPE (arg1)), arg1); 5390 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 5391 return negate_expr (fold_convert_loc (loc, type, tem)); 5392 default: 5393 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 5394 break; 5395 } 5396 5397 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise 5398 A == 0 ? A : 0 is always 0 unless A is -0. Note that 5399 both transformations are correct when A is NaN: A != 0 5400 is then true, and A == 0 is false. */ 5401 5402 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 5403 && integer_zerop (arg01) && integer_zerop (arg2)) 5404 { 5405 if (comp_code == NE_EXPR) 5406 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 5407 else if (comp_code == EQ_EXPR) 5408 return build_int_cst (type, 0); 5409 } 5410 5411 /* Try some transformations of A op B ? A : B. 5412 5413 A == B? A : B same as B 5414 A != B? A : B same as A 5415 A >= B? A : B same as max (A, B) 5416 A > B? A : B same as max (B, A) 5417 A <= B? A : B same as min (A, B) 5418 A < B? A : B same as min (B, A) 5419 5420 As above, these transformations don't work in the presence 5421 of signed zeros. For example, if A and B are zeros of 5422 opposite sign, the first two transformations will change 5423 the sign of the result. In the last four, the original 5424 expressions give different results for (A=+0, B=-0) and 5425 (A=-0, B=+0), but the transformed expressions do not. 5426 5427 The first two transformations are correct if either A or B 5428 is a NaN. In the first transformation, the condition will 5429 be false, and B will indeed be chosen. In the case of the 5430 second transformation, the condition A != B will be true, 5431 and A will be chosen. 5432 5433 The conversions to max() and min() are not correct if B is 5434 a number and A is not. The conditions in the original 5435 expressions will be false, so all four give B. The min() 5436 and max() versions would give a NaN instead. 
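   Concretely (a sketch): with A = NaN and B = 1.0, the original
   A < B ? A : B evaluates to B, because A < B is false for a NaN,
   whereas a MIN_EXPR may return either operand and could yield the NaN;
   this is why the min/max cases below are guarded by ! HONOR_NANS.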
*/ 5437 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 5438 && operand_equal_for_comparison_p (arg01, arg2, arg00) 5439 /* Avoid these transformations if the COND_EXPR may be used 5440 as an lvalue in the C++ front-end. PR c++/19199. */ 5441 && (in_gimple_form 5442 || (strcmp (lang_hooks.name, "GNU C++") != 0 5443 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) 5444 || ! maybe_lvalue_p (arg1) 5445 || ! maybe_lvalue_p (arg2))) 5446 { 5447 tree comp_op0 = arg00; 5448 tree comp_op1 = arg01; 5449 tree comp_type = TREE_TYPE (comp_op0); 5450 5451 /* Avoid adding NOP_EXPRs in case this is an lvalue. */ 5452 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type)) 5453 { 5454 comp_type = type; 5455 comp_op0 = arg1; 5456 comp_op1 = arg2; 5457 } 5458 5459 switch (comp_code) 5460 { 5461 case EQ_EXPR: 5462 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2)); 5463 case NE_EXPR: 5464 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 5465 case LE_EXPR: 5466 case LT_EXPR: 5467 case UNLE_EXPR: 5468 case UNLT_EXPR: 5469 /* In C++ a ?: expression can be an lvalue, so put the 5470 operand which will be used if they are equal first 5471 so that we can convert this back to the 5472 corresponding COND_EXPR. */ 5473 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 5474 { 5475 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 5476 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 5477 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) 5478 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) 5479 : fold_build2_loc (loc, MIN_EXPR, comp_type, 5480 comp_op1, comp_op0); 5481 return pedantic_non_lvalue_loc (loc, 5482 fold_convert_loc (loc, type, tem)); 5483 } 5484 break; 5485 case GE_EXPR: 5486 case GT_EXPR: 5487 case UNGE_EXPR: 5488 case UNGT_EXPR: 5489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 5490 { 5491 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 5492 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 5493 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) 5494 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) 5495 : fold_build2_loc (loc, MAX_EXPR, comp_type, 5496 comp_op1, comp_op0); 5497 return pedantic_non_lvalue_loc (loc, 5498 fold_convert_loc (loc, type, tem)); 5499 } 5500 break; 5501 case UNEQ_EXPR: 5502 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 5503 return pedantic_non_lvalue_loc (loc, 5504 fold_convert_loc (loc, type, arg2)); 5505 break; 5506 case LTGT_EXPR: 5507 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 5508 return pedantic_non_lvalue_loc (loc, 5509 fold_convert_loc (loc, type, arg1)); 5510 break; 5511 default: 5512 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 5513 break; 5514 } 5515 } 5516 5517 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers, 5518 we might still be able to simplify this. For example, 5519 if C1 is one less or one more than C2, this might have started 5520 out as a MIN or MAX and been transformed by this function. 5521 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */ 5522 5523 if (INTEGRAL_TYPE_P (type) 5524 && TREE_CODE (arg01) == INTEGER_CST 5525 && TREE_CODE (arg2) == INTEGER_CST) 5526 switch (comp_code) 5527 { 5528 case EQ_EXPR: 5529 if (TREE_CODE (arg1) == INTEGER_CST) 5530 break; 5531 /* We can replace A with C1 in this case. 
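   E.g. a == 3 ? a : 7 becomes a == 3 ? 3 : 7, giving two constant arms
   that later folding can exploit.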
*/ 5532 arg1 = fold_convert_loc (loc, type, arg01); 5533 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2); 5534 5535 case LT_EXPR: 5536 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for 5537 MIN_EXPR, to preserve the signedness of the comparison. */ 5538 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 5539 OEP_ONLY_CONST) 5540 && operand_equal_p (arg01, 5541 const_binop (PLUS_EXPR, arg2, 5542 build_int_cst (type, 1), 0), 5543 OEP_ONLY_CONST)) 5544 { 5545 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 5546 fold_convert_loc (loc, TREE_TYPE (arg00), 5547 arg2)); 5548 return pedantic_non_lvalue_loc (loc, 5549 fold_convert_loc (loc, type, tem)); 5550 } 5551 break; 5552 5553 case LE_EXPR: 5554 /* If C1 is C2 - 1, this is min(A, C2), with the same care 5555 as above. */ 5556 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 5557 OEP_ONLY_CONST) 5558 && operand_equal_p (arg01, 5559 const_binop (MINUS_EXPR, arg2, 5560 build_int_cst (type, 1), 0), 5561 OEP_ONLY_CONST)) 5562 { 5563 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 5564 fold_convert_loc (loc, TREE_TYPE (arg00), 5565 arg2)); 5566 return pedantic_non_lvalue_loc (loc, 5567 fold_convert_loc (loc, type, tem)); 5568 } 5569 break; 5570 5571 case GT_EXPR: 5572 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for 5573 MAX_EXPR, to preserve the signedness of the comparison. */ 5574 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 5575 OEP_ONLY_CONST) 5576 && operand_equal_p (arg01, 5577 const_binop (MINUS_EXPR, arg2, 5578 build_int_cst (type, 1), 0), 5579 OEP_ONLY_CONST)) 5580 { 5581 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 5582 fold_convert_loc (loc, TREE_TYPE (arg00), 5583 arg2)); 5584 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 5585 } 5586 break; 5587 5588 case GE_EXPR: 5589 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */ 5590 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 5591 OEP_ONLY_CONST) 5592 && operand_equal_p (arg01, 5593 const_binop (PLUS_EXPR, arg2, 5594 build_int_cst (type, 1), 0), 5595 OEP_ONLY_CONST)) 5596 { 5597 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 5598 fold_convert_loc (loc, TREE_TYPE (arg00), 5599 arg2)); 5600 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 5601 } 5602 break; 5603 case NE_EXPR: 5604 break; 5605 default: 5606 gcc_unreachable (); 5607 } 5608 5609 return NULL_TREE; 5610 } 5611 5612 5613 5614 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT 5615 #define LOGICAL_OP_NON_SHORT_CIRCUIT \ 5616 (BRANCH_COST (optimize_function_for_speed_p (cfun), \ 5617 false) >= 2) 5618 #endif 5619 5620 /* EXP is some logical combination of boolean tests. See if we can 5621 merge it into some range test. Return the new tree if so. */ 5622 5623 static tree 5624 fold_range_test (location_t loc, enum tree_code code, tree type, 5625 tree op0, tree op1) 5626 { 5627 int or_op = (code == TRUTH_ORIF_EXPR 5628 || code == TRUTH_OR_EXPR); 5629 int in0_p, in1_p, in_p; 5630 tree low0, low1, low, high0, high1, high; 5631 bool strict_overflow_p = false; 5632 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); 5633 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); 5634 tree tem; 5635 const char * const warnmsg = G_("assuming signed overflow does not occur " 5636 "when simplifying range test"); 5637 5638 /* If this is an OR operation, invert both sides; we will invert 5639 again at the end. 
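   For example (added illustration): for ch < '0' || ch > '9' the
   inverted tests are ch >= '0' and ch <= '9'; those merge into the
   single range '0' <= ch && ch <= '9', and the final inversion
   restores the original sense as one range test.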
*/
5640   if (or_op)
5641     in0_p = ! in0_p, in1_p = ! in1_p;
5642
5643   /* If both expressions are the same, if we can merge the ranges, and we
5644      can build the range test, return it or it inverted.  If one of the
5645      ranges is always true or always false, consider it to be the same
5646      expression as the other.  */
5647   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5648       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5649                        in1_p, low1, high1)
5650       && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5651                                          lhs != 0 ? lhs
5652                                          : rhs != 0 ? rhs : integer_zero_node,
5653                                          in_p, low, high))))
5654     {
5655       if (strict_overflow_p)
5656         fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5657       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5658     }
5659
5660   /* On machines where the branch cost is expensive, if this is a
5661      short-circuited branch and the underlying object on both sides
5662      is the same, make a non-short-circuit operation.  */
5663   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5664            && lhs != 0 && rhs != 0
5665            && (code == TRUTH_ANDIF_EXPR
5666                || code == TRUTH_ORIF_EXPR)
5667            && operand_equal_p (lhs, rhs, 0))
5668     {
5669       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5670          unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5671          which cases we can't do this.  */
5672       if (simple_operand_p (lhs))
5673         {
5674           tem = build2 (code == TRUTH_ANDIF_EXPR
5675                         ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5676                         type, op0, op1);
5677           SET_EXPR_LOCATION (tem, loc);
5678           return tem;
5679         }
5680
5681       else if (lang_hooks.decls.global_bindings_p () == 0
5682                && ! CONTAINS_PLACEHOLDER_P (lhs))
5683         {
5684           tree common = save_expr (lhs);
5685
5686           if (0 != (lhs = build_range_check (loc, type, common,
5687                                              or_op ? ! in0_p : in0_p,
5688                                              low0, high0))
5689               && (0 != (rhs = build_range_check (loc, type, common,
5690                                                  or_op ? ! in1_p : in1_p,
5691                                                  low1, high1))))
5692             {
5693               if (strict_overflow_p)
5694                 fold_overflow_warning (warnmsg,
5695                                        WARN_STRICT_OVERFLOW_COMPARISON);
5696               tem = build2 (code == TRUTH_ANDIF_EXPR
5697                             ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5698                             type, lhs, rhs);
5699               SET_EXPR_LOCATION (tem, loc);
5700               return tem;
5701             }
5702         }
5703     }
5704
5705   return 0;
5706 }
5707
5708 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5709    bit value.  Arrange things so the extra bits will be set to zero if and
5710    only if C is sign-extended to its full width.  If MASK is nonzero,
5711    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5712
5713 static tree
5714 unextend (tree c, int p, int unsignedp, tree mask)
5715 {
5716   tree type = TREE_TYPE (c);
5717   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5718   tree temp;
5719
5720   if (p == modesize || unsignedp)
5721     return c;
5722
5723   /* We work by getting just the sign bit into the low-order bit, then
5724      into the high-order bit, then sign-extend.  We then XOR that value
5725      with C.  */
5726   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5727   temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5728
5729   /* We must use a signed type in order to get an arithmetic right shift.
5730      However, we must also avoid introducing accidental overflows, so that
5731      a subsequent call to integer_zerop will work.  Hence we must
5732      do the type conversion here.  At this point, the constant is either
5733      zero or one, and the conversion to a signed type can never overflow.
5734      We could get an overflow if this conversion is done anywhere else.
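   A worked example (editorial addition): with modesize == 32, p == 8
   and C == 0xffffff80 (the 8-bit value -128 sign-extended), the
   routine computes
     temp = (C >> 7) & 1          == 1
     temp = temp << 31            == 0x80000000
     temp = temp >> (32 - 8 - 1)  == 0xffffff00  (arithmetic shift)
     C ^ temp                     == 0x00000080
   so the extra bits end up zero exactly because C was sign-extended.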
*/
5735   if (TYPE_UNSIGNED (type))
5736     temp = fold_convert (signed_type_for (type), temp);
5737
5738   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5739   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5740   if (mask != 0)
5741     temp = const_binop (BIT_AND_EXPR, temp,
5742                         fold_convert (TREE_TYPE (c), mask),
5743                         0);
5744   /* If necessary, convert the type back to match the type of C.  */
5745   if (TYPE_UNSIGNED (type))
5746     temp = fold_convert (type, temp);
5747
5748   return fold_convert (type,
5749                        const_binop (BIT_XOR_EXPR, c, temp, 0));
5750 }
5751
5752 /* Find ways of folding logical expressions of LHS and RHS:
5753    Try to merge two comparisons to the same innermost item.
5754    Look for range tests like "ch >= '0' && ch <= '9'".
5755    Look for combinations of simple terms on machines with expensive branches
5756    and evaluate the RHS unconditionally.
5757
5758    For example, if we have p->a == 2 && p->b == 4 and we can make an
5759    object large enough to span both A and B, we can do this with a comparison
5760    against the object ANDed with a mask.
5761
5762    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5763    operations to do this with one comparison.
5764
5765    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5766    function and the one above.
5767
5768    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5769    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5770
5771    TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5772    two operands.
5773
5774    We return the simplified tree or 0 if no optimization is possible.  */
5775
5776 static tree
5777 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5778               tree lhs, tree rhs)
5779 {
5780   /* If this is the "or" of two comparisons, we can do something if
5781      the comparisons are NE_EXPR.  If this is the "and", we can do something
5782      if the comparisons are EQ_EXPR.  I.e.,
5783      (a->b == 2 && a->c == 4) can become (a->new == NEW).
5784
5785      WANTED_CODE is this operation code.  For single bit fields, we can
5786      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5787      comparison for one-bit fields.  */
5788
5789   enum tree_code wanted_code;
5790   enum tree_code lcode, rcode;
5791   tree ll_arg, lr_arg, rl_arg, rr_arg;
5792   tree ll_inner, lr_inner, rl_inner, rr_inner;
5793   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5794   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5795   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5796   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5797   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5798   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5799   enum machine_mode lnmode, rnmode;
5800   tree ll_mask, lr_mask, rl_mask, rr_mask;
5801   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5802   tree l_const, r_const;
5803   tree lntype, rntype, result;
5804   HOST_WIDE_INT first_bit, end_bit;
5805   int volatilep;
5806   tree orig_lhs = lhs, orig_rhs = rhs;
5807   enum tree_code orig_code = code;
5808
5809   /* Start by getting the comparison codes.  Fail if anything is volatile.
5810      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5811      it were surrounded with a NE_EXPR.
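   E.g. (added illustration): in (x & 1) && (y & 1), each side is
   handled as if it were written (x & 1) != 0, yielding two
   comparisons that the code below may be able to merge.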
*/ 5812 5813 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs)) 5814 return 0; 5815 5816 lcode = TREE_CODE (lhs); 5817 rcode = TREE_CODE (rhs); 5818 5819 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1))) 5820 { 5821 lhs = build2 (NE_EXPR, truth_type, lhs, 5822 build_int_cst (TREE_TYPE (lhs), 0)); 5823 lcode = NE_EXPR; 5824 } 5825 5826 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1))) 5827 { 5828 rhs = build2 (NE_EXPR, truth_type, rhs, 5829 build_int_cst (TREE_TYPE (rhs), 0)); 5830 rcode = NE_EXPR; 5831 } 5832 5833 if (TREE_CODE_CLASS (lcode) != tcc_comparison 5834 || TREE_CODE_CLASS (rcode) != tcc_comparison) 5835 return 0; 5836 5837 ll_arg = TREE_OPERAND (lhs, 0); 5838 lr_arg = TREE_OPERAND (lhs, 1); 5839 rl_arg = TREE_OPERAND (rhs, 0); 5840 rr_arg = TREE_OPERAND (rhs, 1); 5841 5842 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */ 5843 if (simple_operand_p (ll_arg) 5844 && simple_operand_p (lr_arg)) 5845 { 5846 tree result; 5847 if (operand_equal_p (ll_arg, rl_arg, 0) 5848 && operand_equal_p (lr_arg, rr_arg, 0)) 5849 { 5850 result = combine_comparisons (loc, code, lcode, rcode, 5851 truth_type, ll_arg, lr_arg); 5852 if (result) 5853 return result; 5854 } 5855 else if (operand_equal_p (ll_arg, rr_arg, 0) 5856 && operand_equal_p (lr_arg, rl_arg, 0)) 5857 { 5858 result = combine_comparisons (loc, code, lcode, 5859 swap_tree_comparison (rcode), 5860 truth_type, ll_arg, lr_arg); 5861 if (result) 5862 return result; 5863 } 5864 } 5865 5866 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) 5867 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); 5868 5869 /* If the RHS can be evaluated unconditionally and its operands are 5870 simple, it wins to evaluate the RHS unconditionally on machines 5871 with expensive branches. In this case, this isn't a comparison 5872 that can be merged. Avoid doing this if the RHS is a floating-point 5873 comparison since those can trap. */ 5874 5875 if (BRANCH_COST (optimize_function_for_speed_p (cfun), 5876 false) >= 2 5877 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) 5878 && simple_operand_p (rl_arg) 5879 && simple_operand_p (rr_arg)) 5880 { 5881 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */ 5882 if (code == TRUTH_OR_EXPR 5883 && lcode == NE_EXPR && integer_zerop (lr_arg) 5884 && rcode == NE_EXPR && integer_zerop (rr_arg) 5885 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5886 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5887 { 5888 result = build2 (NE_EXPR, truth_type, 5889 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5890 ll_arg, rl_arg), 5891 build_int_cst (TREE_TYPE (ll_arg), 0)); 5892 goto fold_truthop_exit; 5893 } 5894 5895 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */ 5896 if (code == TRUTH_AND_EXPR 5897 && lcode == EQ_EXPR && integer_zerop (lr_arg) 5898 && rcode == EQ_EXPR && integer_zerop (rr_arg) 5899 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5900 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5901 { 5902 result = build2 (EQ_EXPR, truth_type, 5903 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5904 ll_arg, rl_arg), 5905 build_int_cst (TREE_TYPE (ll_arg), 0)); 5906 goto fold_truthop_exit; 5907 } 5908 5909 if (LOGICAL_OP_NON_SHORT_CIRCUIT) 5910 { 5911 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs) 5912 { 5913 result = build2 (code, truth_type, lhs, rhs); 5914 goto fold_truthop_exit; 5915 } 5916 return NULL_TREE; 5917 } 5918 } 5919 5920 /* See if the comparisons can be merged. Then get all the parameters for 5921 each side. 
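   (Illustrative case, editorial addition: for adjacent bit-fields,
   s.a == 2 && s.b == 3 may merge into a single load of the word
   containing both fields, one mask, and one compare against the
   combined constant.)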
*/ 5922 5923 if ((lcode != EQ_EXPR && lcode != NE_EXPR) 5924 || (rcode != EQ_EXPR && rcode != NE_EXPR)) 5925 return 0; 5926 5927 volatilep = 0; 5928 ll_inner = decode_field_reference (loc, ll_arg, 5929 &ll_bitsize, &ll_bitpos, &ll_mode, 5930 &ll_unsignedp, &volatilep, &ll_mask, 5931 &ll_and_mask); 5932 lr_inner = decode_field_reference (loc, lr_arg, 5933 &lr_bitsize, &lr_bitpos, &lr_mode, 5934 &lr_unsignedp, &volatilep, &lr_mask, 5935 &lr_and_mask); 5936 rl_inner = decode_field_reference (loc, rl_arg, 5937 &rl_bitsize, &rl_bitpos, &rl_mode, 5938 &rl_unsignedp, &volatilep, &rl_mask, 5939 &rl_and_mask); 5940 rr_inner = decode_field_reference (loc, rr_arg, 5941 &rr_bitsize, &rr_bitpos, &rr_mode, 5942 &rr_unsignedp, &volatilep, &rr_mask, 5943 &rr_and_mask); 5944 5945 /* It must be true that the inner operation on the lhs of each 5946 comparison must be the same if we are to be able to do anything. 5947 Then see if we have constants. If not, the same must be true for 5948 the rhs's. */ 5949 if (volatilep || ll_inner == 0 || rl_inner == 0 5950 || ! operand_equal_p (ll_inner, rl_inner, 0)) 5951 return 0; 5952 5953 if (TREE_CODE (lr_arg) == INTEGER_CST 5954 && TREE_CODE (rr_arg) == INTEGER_CST) 5955 l_const = lr_arg, r_const = rr_arg; 5956 else if (lr_inner == 0 || rr_inner == 0 5957 || ! operand_equal_p (lr_inner, rr_inner, 0)) 5958 return 0; 5959 else 5960 l_const = r_const = 0; 5961 5962 /* If either comparison code is not correct for our logical operation, 5963 fail. However, we can convert a one-bit comparison against zero into 5964 the opposite comparison against that bit being set in the field. */ 5965 5966 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR); 5967 if (lcode != wanted_code) 5968 { 5969 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask)) 5970 { 5971 /* Make the left operand unsigned, since we are only interested 5972 in the value of one bit. Otherwise we are doing the wrong 5973 thing below. */ 5974 ll_unsignedp = 1; 5975 l_const = ll_mask; 5976 } 5977 else 5978 return 0; 5979 } 5980 5981 /* This is analogous to the code for l_const above. */ 5982 if (rcode != wanted_code) 5983 { 5984 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask)) 5985 { 5986 rl_unsignedp = 1; 5987 r_const = rl_mask; 5988 } 5989 else 5990 return 0; 5991 } 5992 5993 /* See if we can find a mode that contains both fields being compared on 5994 the left. If we can't, fail. Otherwise, update all constants and masks 5995 to be relative to a field of that size. 
*/ 5996 first_bit = MIN (ll_bitpos, rl_bitpos); 5997 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); 5998 lnmode = get_best_mode (end_bit - first_bit, first_bit, 5999 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, 6000 volatilep); 6001 if (lnmode == VOIDmode) 6002 return 0; 6003 6004 lnbitsize = GET_MODE_BITSIZE (lnmode); 6005 lnbitpos = first_bit & ~ (lnbitsize - 1); 6006 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); 6007 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; 6008 6009 if (BYTES_BIG_ENDIAN) 6010 { 6011 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; 6012 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; 6013 } 6014 6015 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask), 6016 size_int (xll_bitpos), 0); 6017 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask), 6018 size_int (xrl_bitpos), 0); 6019 6020 if (l_const) 6021 { 6022 l_const = fold_convert_loc (loc, lntype, l_const); 6023 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); 6024 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0); 6025 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const, 6026 fold_build1_loc (loc, BIT_NOT_EXPR, 6027 lntype, ll_mask), 6028 0))) 6029 { 6030 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 6031 6032 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 6033 } 6034 } 6035 if (r_const) 6036 { 6037 r_const = fold_convert_loc (loc, lntype, r_const); 6038 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); 6039 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0); 6040 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, 6041 fold_build1_loc (loc, BIT_NOT_EXPR, 6042 lntype, rl_mask), 6043 0))) 6044 { 6045 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 6046 6047 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 6048 } 6049 } 6050 6051 /* If the right sides are not constant, do the same for it. Also, 6052 disallow this optimization if a size or signedness mismatch occurs 6053 between the left and right sides. */ 6054 if (l_const == 0) 6055 { 6056 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize 6057 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp 6058 /* Make sure the two fields on the right 6059 correspond to the left without being swapped. */ 6060 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) 6061 return 0; 6062 6063 first_bit = MIN (lr_bitpos, rr_bitpos); 6064 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); 6065 rnmode = get_best_mode (end_bit - first_bit, first_bit, 6066 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, 6067 volatilep); 6068 if (rnmode == VOIDmode) 6069 return 0; 6070 6071 rnbitsize = GET_MODE_BITSIZE (rnmode); 6072 rnbitpos = first_bit & ~ (rnbitsize - 1); 6073 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); 6074 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; 6075 6076 if (BYTES_BIG_ENDIAN) 6077 { 6078 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; 6079 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; 6080 } 6081 6082 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 6083 rntype, lr_mask), 6084 size_int (xlr_bitpos), 0); 6085 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 6086 rntype, rr_mask), 6087 size_int (xrr_bitpos), 0); 6088 6089 /* Make a mask that corresponds to both fields being compared. 6090 Do this for both items being compared. 
If the operands are the 6091 same size and the bits being compared are in the same position 6092 then we can do this by masking both and comparing the masked 6093 results. */ 6094 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); 6095 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0); 6096 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) 6097 { 6098 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 6099 ll_unsignedp || rl_unsignedp); 6100 if (! all_ones_mask_p (ll_mask, lnbitsize)) 6101 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); 6102 6103 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, 6104 lr_unsignedp || rr_unsignedp); 6105 if (! all_ones_mask_p (lr_mask, rnbitsize)) 6106 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); 6107 6108 result = build2 (wanted_code, truth_type, lhs, rhs); 6109 goto fold_truthop_exit; 6110 } 6111 6112 /* There is still another way we can do something: If both pairs of 6113 fields being compared are adjacent, we may be able to make a wider 6114 field containing them both. 6115 6116 Note that we still must mask the lhs/rhs expressions. Furthermore, 6117 the mask must be shifted to account for the shift done by 6118 make_bit_field_ref. */ 6119 if ((ll_bitsize + ll_bitpos == rl_bitpos 6120 && lr_bitsize + lr_bitpos == rr_bitpos) 6121 || (ll_bitpos == rl_bitpos + rl_bitsize 6122 && lr_bitpos == rr_bitpos + rr_bitsize)) 6123 { 6124 tree type; 6125 6126 lhs = make_bit_field_ref (loc, ll_inner, lntype, 6127 ll_bitsize + rl_bitsize, 6128 MIN (ll_bitpos, rl_bitpos), ll_unsignedp); 6129 rhs = make_bit_field_ref (loc, lr_inner, rntype, 6130 lr_bitsize + rr_bitsize, 6131 MIN (lr_bitpos, rr_bitpos), lr_unsignedp); 6132 6133 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, 6134 size_int (MIN (xll_bitpos, xrl_bitpos)), 0); 6135 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, 6136 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0); 6137 6138 /* Convert to the smaller type before masking out unwanted bits. */ 6139 type = lntype; 6140 if (lntype != rntype) 6141 { 6142 if (lnbitsize > rnbitsize) 6143 { 6144 lhs = fold_convert_loc (loc, rntype, lhs); 6145 ll_mask = fold_convert_loc (loc, rntype, ll_mask); 6146 type = rntype; 6147 } 6148 else if (lnbitsize < rnbitsize) 6149 { 6150 rhs = fold_convert_loc (loc, lntype, rhs); 6151 lr_mask = fold_convert_loc (loc, lntype, lr_mask); 6152 type = lntype; 6153 } 6154 } 6155 6156 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize)) 6157 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask); 6158 6159 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize)) 6160 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask); 6161 6162 result = build2 (wanted_code, truth_type, lhs, rhs); 6163 goto fold_truthop_exit; 6164 } 6165 6166 return 0; 6167 } 6168 6169 /* Handle the case of comparisons with constants. If there is something in 6170 common between the masks, those bits of the constants must be the same. 6171 If not, the condition is always false. Test for this to avoid generating 6172 incorrect code below. */ 6173 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0); 6174 if (! 
integer_zerop (result) 6175 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0), 6176 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1) 6177 { 6178 if (wanted_code == NE_EXPR) 6179 { 6180 warning (0, "%<or%> of unmatched not-equal tests is always 1"); 6181 return constant_boolean_node (true, truth_type); 6182 } 6183 else 6184 { 6185 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); 6186 return constant_boolean_node (false, truth_type); 6187 } 6188 } 6189 6190 /* Construct the expression we will return. First get the component 6191 reference we will make. Unless the mask is all ones the width of 6192 that field, perform the mask operation. Then compare with the 6193 merged constant. */ 6194 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 6195 ll_unsignedp || rl_unsignedp); 6196 6197 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); 6198 if (! all_ones_mask_p (ll_mask, lnbitsize)) 6199 { 6200 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask); 6201 SET_EXPR_LOCATION (result, loc); 6202 } 6203 6204 result = build2 (wanted_code, truth_type, result, 6205 const_binop (BIT_IOR_EXPR, l_const, r_const, 0)); 6206 6207 fold_truthop_exit: 6208 SET_EXPR_LOCATION (result, loc); 6209 return result; 6210 } 6211 6212 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a 6213 constant. */ 6214 6215 static tree 6216 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type, 6217 tree op0, tree op1) 6218 { 6219 tree arg0 = op0; 6220 enum tree_code op_code; 6221 tree comp_const; 6222 tree minmax_const; 6223 int consts_equal, consts_lt; 6224 tree inner; 6225 6226 STRIP_SIGN_NOPS (arg0); 6227 6228 op_code = TREE_CODE (arg0); 6229 minmax_const = TREE_OPERAND (arg0, 1); 6230 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1); 6231 consts_equal = tree_int_cst_equal (minmax_const, comp_const); 6232 consts_lt = tree_int_cst_lt (minmax_const, comp_const); 6233 inner = TREE_OPERAND (arg0, 0); 6234 6235 /* If something does not permit us to optimize, return the original tree. */ 6236 if ((op_code != MIN_EXPR && op_code != MAX_EXPR) 6237 || TREE_CODE (comp_const) != INTEGER_CST 6238 || TREE_OVERFLOW (comp_const) 6239 || TREE_CODE (minmax_const) != INTEGER_CST 6240 || TREE_OVERFLOW (minmax_const)) 6241 return NULL_TREE; 6242 6243 /* Now handle all the various comparison codes. We only handle EQ_EXPR 6244 and GT_EXPR, doing the rest with recursive calls using logical 6245 simplifications. 
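   For example (added illustration): MAX (X, 2) <= 5 is handled by
   inverting it to ! (MAX (X, 2) > 5), and MAX (X, 2) >= 5 expands to
   MAX (X, 2) == 5 || MAX (X, 2) > 5.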
*/ 6246 switch (code) 6247 { 6248 case NE_EXPR: case LT_EXPR: case LE_EXPR: 6249 { 6250 tree tem 6251 = optimize_minmax_comparison (loc, 6252 invert_tree_comparison (code, false), 6253 type, op0, op1); 6254 if (tem) 6255 return invert_truthvalue_loc (loc, tem); 6256 return NULL_TREE; 6257 } 6258 6259 case GE_EXPR: 6260 return 6261 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 6262 optimize_minmax_comparison 6263 (loc, EQ_EXPR, type, arg0, comp_const), 6264 optimize_minmax_comparison 6265 (loc, GT_EXPR, type, arg0, comp_const)); 6266 6267 case EQ_EXPR: 6268 if (op_code == MAX_EXPR && consts_equal) 6269 /* MAX (X, 0) == 0 -> X <= 0 */ 6270 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const); 6271 6272 else if (op_code == MAX_EXPR && consts_lt) 6273 /* MAX (X, 0) == 5 -> X == 5 */ 6274 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 6275 6276 else if (op_code == MAX_EXPR) 6277 /* MAX (X, 0) == -1 -> false */ 6278 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 6279 6280 else if (consts_equal) 6281 /* MIN (X, 0) == 0 -> X >= 0 */ 6282 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const); 6283 6284 else if (consts_lt) 6285 /* MIN (X, 0) == 5 -> false */ 6286 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 6287 6288 else 6289 /* MIN (X, 0) == -1 -> X == -1 */ 6290 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 6291 6292 case GT_EXPR: 6293 if (op_code == MAX_EXPR && (consts_equal || consts_lt)) 6294 /* MAX (X, 0) > 0 -> X > 0 6295 MAX (X, 0) > 5 -> X > 5 */ 6296 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 6297 6298 else if (op_code == MAX_EXPR) 6299 /* MAX (X, 0) > -1 -> true */ 6300 return omit_one_operand_loc (loc, type, integer_one_node, inner); 6301 6302 else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) 6303 /* MIN (X, 0) > 0 -> false 6304 MIN (X, 0) > 5 -> false */ 6305 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 6306 6307 else 6308 /* MIN (X, 0) > -1 -> X > -1 */ 6309 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 6310 6311 default: 6312 return NULL_TREE; 6313 } 6314 } 6315 6316 /* T is an integer expression that is being multiplied, divided, or taken a 6317 modulus (CODE says which and what kind of divide or modulus) by a 6318 constant C. See if we can eliminate that operation by folding it with 6319 other operations already in T. WIDE_TYPE, if non-null, is a type that 6320 should be used for the computation if wider than our type. 6321 6322 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return 6323 (X * 2) + (Y * 4). We must, however, be assured that either the original 6324 expression would not overflow or that overflow is undefined for the type 6325 in the language in question. 6326 6327 If we return a non-null expression, it is an equivalent form of the 6328 original computation, but need not be in the original type. 6329 6330 We set *STRICT_OVERFLOW_P to true if the return values depends on 6331 signed overflow being undefined. Otherwise we do not change 6332 *STRICT_OVERFLOW_P. */ 6333 6334 static tree 6335 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type, 6336 bool *strict_overflow_p) 6337 { 6338 /* To avoid exponential search depth, refuse to allow recursion past 6339 three levels. Beyond that (1) it's highly unlikely that we'll find 6340 something interesting and (2) we've probably processed it before 6341 when we built the inner expression. 
*/ 6342 6343 static int depth; 6344 tree ret; 6345 6346 if (depth > 3) 6347 return NULL; 6348 6349 depth++; 6350 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p); 6351 depth--; 6352 6353 return ret; 6354 } 6355 6356 static tree 6357 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, 6358 bool *strict_overflow_p) 6359 { 6360 tree type = TREE_TYPE (t); 6361 enum tree_code tcode = TREE_CODE (t); 6362 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type)) 6363 > GET_MODE_SIZE (TYPE_MODE (type))) 6364 ? wide_type : type); 6365 tree t1, t2; 6366 int same_p = tcode == code; 6367 tree op0 = NULL_TREE, op1 = NULL_TREE; 6368 bool sub_strict_overflow_p; 6369 6370 /* Don't deal with constants of zero here; they confuse the code below. */ 6371 if (integer_zerop (c)) 6372 return NULL_TREE; 6373 6374 if (TREE_CODE_CLASS (tcode) == tcc_unary) 6375 op0 = TREE_OPERAND (t, 0); 6376 6377 if (TREE_CODE_CLASS (tcode) == tcc_binary) 6378 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1); 6379 6380 /* Note that we need not handle conditional operations here since fold 6381 already handles those cases. So just do arithmetic here. */ 6382 switch (tcode) 6383 { 6384 case INTEGER_CST: 6385 /* For a constant, we can always simplify if we are a multiply 6386 or (for divide and modulus) if it is a multiple of our constant. */ 6387 if (code == MULT_EXPR 6388 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0))) 6389 return const_binop (code, fold_convert (ctype, t), 6390 fold_convert (ctype, c), 0); 6391 break; 6392 6393 CASE_CONVERT: case NON_LVALUE_EXPR: 6394 /* If op0 is an expression ... */ 6395 if ((COMPARISON_CLASS_P (op0) 6396 || UNARY_CLASS_P (op0) 6397 || BINARY_CLASS_P (op0) 6398 || VL_EXP_CLASS_P (op0) 6399 || EXPRESSION_CLASS_P (op0)) 6400 /* ... and has wrapping overflow, and its type is smaller 6401 than ctype, then we cannot pass through as widening. */ 6402 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)) 6403 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE 6404 && TYPE_IS_SIZETYPE (TREE_TYPE (op0))) 6405 && (TYPE_PRECISION (ctype) 6406 > TYPE_PRECISION (TREE_TYPE (op0)))) 6407 /* ... or this is a truncation (t is narrower than op0), 6408 then we cannot pass through this narrowing. */ 6409 || (TYPE_PRECISION (type) 6410 < TYPE_PRECISION (TREE_TYPE (op0))) 6411 /* ... or signedness changes for division or modulus, 6412 then we cannot pass through this conversion. */ 6413 || (code != MULT_EXPR 6414 && (TYPE_UNSIGNED (ctype) 6415 != TYPE_UNSIGNED (TREE_TYPE (op0)))) 6416 /* ... or has undefined overflow while the converted to 6417 type has not, we cannot do the operation in the inner type 6418 as that would introduce undefined overflow. */ 6419 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)) 6420 && !TYPE_OVERFLOW_UNDEFINED (type)))) 6421 break; 6422 6423 /* Pass the constant down and see if we can make a simplification. If 6424 we can, replace this expression with the inner simplification for 6425 possible later conversion to our or some other type. */ 6426 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 6427 && TREE_CODE (t2) == INTEGER_CST 6428 && !TREE_OVERFLOW (t2) 6429 && (0 != (t1 = extract_muldiv (op0, t2, code, 6430 code == MULT_EXPR 6431 ? ctype : NULL_TREE, 6432 strict_overflow_p)))) 6433 return t1; 6434 break; 6435 6436 case ABS_EXPR: 6437 /* If widening the type changes it from signed to unsigned, then we 6438 must avoid building ABS_EXPR itself as unsigned. 
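   (Illustrative, editorial addition: when ABS (x) * 4 is rewritten in
   an unsigned wider type, the ABS_EXPR itself is still built in the
   signed variant of that wider type and only then converted.)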
*/ 6439 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type)) 6440 { 6441 tree cstype = (*signed_type_for) (ctype); 6442 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p)) 6443 != 0) 6444 { 6445 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1)); 6446 return fold_convert (ctype, t1); 6447 } 6448 break; 6449 } 6450 /* If the constant is negative, we cannot simplify this. */ 6451 if (tree_int_cst_sgn (c) == -1) 6452 break; 6453 /* FALLTHROUGH */ 6454 case NEGATE_EXPR: 6455 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) 6456 != 0) 6457 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); 6458 break; 6459 6460 case MIN_EXPR: case MAX_EXPR: 6461 /* If widening the type changes the signedness, then we can't perform 6462 this optimization as that changes the result. */ 6463 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type)) 6464 break; 6465 6466 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */ 6467 sub_strict_overflow_p = false; 6468 if ((t1 = extract_muldiv (op0, c, code, wide_type, 6469 &sub_strict_overflow_p)) != 0 6470 && (t2 = extract_muldiv (op1, c, code, wide_type, 6471 &sub_strict_overflow_p)) != 0) 6472 { 6473 if (tree_int_cst_sgn (c) < 0) 6474 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR); 6475 if (sub_strict_overflow_p) 6476 *strict_overflow_p = true; 6477 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 6478 fold_convert (ctype, t2)); 6479 } 6480 break; 6481 6482 case LSHIFT_EXPR: case RSHIFT_EXPR: 6483 /* If the second operand is constant, this is a multiplication 6484 or floor division, by a power of two, so we can treat it that 6485 way unless the multiplier or divisor overflows. Signed 6486 left-shift overflow is implementation-defined rather than 6487 undefined in C90, so do not convert signed left shift into 6488 multiplication. */ 6489 if (TREE_CODE (op1) == INTEGER_CST 6490 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) 6491 /* const_binop may not detect overflow correctly, 6492 so check for it explicitly here. */ 6493 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1) 6494 && TREE_INT_CST_HIGH (op1) == 0 6495 && 0 != (t1 = fold_convert (ctype, 6496 const_binop (LSHIFT_EXPR, 6497 size_one_node, 6498 op1, 0))) 6499 && !TREE_OVERFLOW (t1)) 6500 return extract_muldiv (build2 (tcode == LSHIFT_EXPR 6501 ? MULT_EXPR : FLOOR_DIV_EXPR, 6502 ctype, 6503 fold_convert (ctype, op0), 6504 t1), 6505 c, code, wide_type, strict_overflow_p); 6506 break; 6507 6508 case PLUS_EXPR: case MINUS_EXPR: 6509 /* See if we can eliminate the operation on both sides. If we can, we 6510 can return a new PLUS or MINUS. If we can't, the only remaining 6511 cases where we can do anything are if the second operand is a 6512 constant. */ 6513 sub_strict_overflow_p = false; 6514 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); 6515 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); 6516 if (t1 != 0 && t2 != 0 6517 && (code == MULT_EXPR 6518 /* If not multiplication, we can only do this if both operands 6519 are divisible by c. */ 6520 || (multiple_of_p (ctype, op0, c) 6521 && multiple_of_p (ctype, op1, c)))) 6522 { 6523 if (sub_strict_overflow_p) 6524 *strict_overflow_p = true; 6525 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 6526 fold_convert (ctype, t2)); 6527 } 6528 6529 /* If this was a subtraction, negate OP1 and set it to be an addition. 6530 This simplifies the logic below. 
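   E.g. (added illustration): (X - 5) * 4 is processed as X + -5
   multiplied by 4, and the distributive case below then produces
   X * 4 + -20.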
*/
6531       if (tcode == MINUS_EXPR)
6532         {
6533           tcode = PLUS_EXPR, op1 = negate_expr (op1);
6534           /* If OP1 was not easily negatable, the constant may be OP0.  */
6535           if (TREE_CODE (op0) == INTEGER_CST)
6536             {
6537               tree tem = op0;
6538               op0 = op1;
6539               op1 = tem;
6540               tem = t1;
6541               t1 = t2;
6542               t2 = tem;
6543             }
6544         }
6545
6546       if (TREE_CODE (op1) != INTEGER_CST)
6547         break;
6548
6549       /* If either OP1 or C is negative, this optimization is not safe for
6550          some of the division and remainder types while for others we need
6551          to change the code.  */
6552       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6553         {
6554           if (code == CEIL_DIV_EXPR)
6555             code = FLOOR_DIV_EXPR;
6556           else if (code == FLOOR_DIV_EXPR)
6557             code = CEIL_DIV_EXPR;
6558           else if (code != MULT_EXPR
6559                    && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6560             break;
6561         }
6562
6563       /* If it's a multiply or a division/modulus operation of a multiple
6564          of our constant, do the operation and verify it doesn't overflow.  */
6565       if (code == MULT_EXPR
6566           || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6567         {
6568           op1 = const_binop (code, fold_convert (ctype, op1),
6569                              fold_convert (ctype, c), 0);
6570           /* We allow the constant to overflow with wrapping semantics.  */
6571           if (op1 == 0
6572               || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6573             break;
6574         }
6575       else
6576         break;
6577
6578       /* If we have an unsigned type that is not a sizetype, we cannot widen
6579          the operation since it will change the result if the original
6580          computation overflowed.  */
6581       if (TYPE_UNSIGNED (ctype)
6582           && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6583           && ctype != type)
6584         break;
6585
6586       /* If we were able to eliminate our operation from the first side,
6587          apply our operation to the second side and reform the PLUS.  */
6588       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6589         return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6590
6591       /* The last case is if we are a multiply.  In that case, we can
6592          apply the distributive law to commute the multiply and addition
6593          if the multiplication of the constants doesn't overflow.  */
6594       if (code == MULT_EXPR)
6595         return fold_build2 (tcode, ctype,
6596                             fold_build2 (code, ctype,
6597                                          fold_convert (ctype, op0),
6598                                          fold_convert (ctype, c)),
6599                             op1);
6600
6601       break;
6602
6603     case MULT_EXPR:
6604       /* We have a special case here if we are doing something like
6605          (C * 8) % 4 since we know that's zero.  */
6606       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6607            || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6608           /* If the multiplication can overflow we cannot optimize this.
6609              ??? Until we can properly mark individual operations as
6610              not overflowing we need to treat sizetype specially here as
6611              stor-layout relies on this optimization to make
6612              DECL_FIELD_BIT_OFFSET always a constant.  */
6613           && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6614               || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6615                   && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6616           && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6617           && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6618         {
6619           *strict_overflow_p = true;
6620           return omit_one_operand (type, integer_zero_node, op0);
6621         }
6622
6623       /* ... fall through ...  */
6624
6625     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6626     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6627       /* If we can extract our operation from the LHS, do so and return a
6628          new operation.
Likewise for the RHS from a MULT_EXPR.  Otherwise,
6629          do something only if the second operand is a constant.  */
6630       if (same_p
6631           && (t1 = extract_muldiv (op0, c, code, wide_type,
6632                                    strict_overflow_p)) != 0)
6633         return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6634                             fold_convert (ctype, op1));
6635       else if (tcode == MULT_EXPR && code == MULT_EXPR
6636                && (t1 = extract_muldiv (op1, c, code, wide_type,
6637                                         strict_overflow_p)) != 0)
6638         return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6639                             fold_convert (ctype, t1));
6640       else if (TREE_CODE (op1) != INTEGER_CST)
6641         return 0;
6642
6643       /* If these are the same operation types, we can associate them
6644          assuming no overflow.  */
6645       if (tcode == code
6646           && 0 != (t1 = int_const_binop (MULT_EXPR,
6647                                          fold_convert (ctype, op1),
6648                                          fold_convert (ctype, c), 1))
6649           && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6650                                                TREE_INT_CST_HIGH (t1),
6651                                                (TYPE_UNSIGNED (ctype)
6652                                                 && tcode != MULT_EXPR) ? -1 : 1,
6653                                                TREE_OVERFLOW (t1)))
6654           && !TREE_OVERFLOW (t1))
6655         return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6656
6657       /* If these operations "cancel" each other, we have the main
6658          optimizations of this pass, which occur when either constant is a
6659          multiple of the other, in which case we replace this with an
6660          operation of either CODE or TCODE.
6661
6662          If we have an unsigned type that is not a sizetype, we cannot do
6663          this since it will change the result if the original computation
6664          overflowed.  */
6665       if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6666            || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6667           && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6668               || (tcode == MULT_EXPR
6669                   && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6670                   && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6671                   && code != MULT_EXPR)))
6672         {
6673           if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6674             {
6675               if (TYPE_OVERFLOW_UNDEFINED (ctype))
6676                 *strict_overflow_p = true;
6677               return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6678                                   fold_convert (ctype,
6679                                                 const_binop (TRUNC_DIV_EXPR,
6680                                                              op1, c, 0)));
6681             }
6682           else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6683             {
6684               if (TYPE_OVERFLOW_UNDEFINED (ctype))
6685                 *strict_overflow_p = true;
6686               return fold_build2 (code, ctype, fold_convert (ctype, op0),
6687                                   fold_convert (ctype,
6688                                                 const_binop (TRUNC_DIV_EXPR,
6689                                                              c, op1, 0)));
6690             }
6691         }
6692       break;
6693
6694     default:
6695       break;
6696     }
6697
6698   return 0;
6699 }
6700
6701 /* Return a node which has the indicated constant VALUE (either 0 or
6702    1), and is of the indicated TYPE.  */
6703
6704 tree
6705 constant_boolean_node (int value, tree type)
6706 {
6707   if (type == integer_type_node)
6708     return value ? integer_one_node : integer_zero_node;
6709   else if (type == boolean_type_node)
6710     return value ? boolean_true_node : boolean_false_node;
6711   else
6712     return build_int_cst (type, value);
6713 }
6714
6715
6716 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6717    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6718    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6719    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6720    COND is the first argument to CODE; otherwise (as in the example
6721    given here), it is the second argument.  TYPE is the type of the
6722    original expression.  Return NULL_TREE if no simplification is
6723    possible.
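   For instance (added illustration): 10 + (b ? 1 : 2) becomes
   b ? 11 : 12, and 10 + (x < y) becomes (x < y) ? 11 : 10.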
*/
6724
6725 static tree
6726 fold_binary_op_with_conditional_arg (location_t loc,
6727                                      enum tree_code code,
6728                                      tree type, tree op0, tree op1,
6729                                      tree cond, tree arg, int cond_first_p)
6730 {
6731   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6732   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6733   tree test, true_value, false_value;
6734   tree lhs = NULL_TREE;
6735   tree rhs = NULL_TREE;
6736
6737   /* This transformation is only worthwhile if we don't have to wrap
6738      arg in a SAVE_EXPR, and the operation can be simplified on at least
6739      one of the branches once it is pushed inside the COND_EXPR.  */
6740   if (!TREE_CONSTANT (arg))
6741     return NULL_TREE;
6742
6743   if (TREE_CODE (cond) == COND_EXPR)
6744     {
6745       test = TREE_OPERAND (cond, 0);
6746       true_value = TREE_OPERAND (cond, 1);
6747       false_value = TREE_OPERAND (cond, 2);
6748       /* If this operand is a throw expression, then it does not make
6749          sense to try to perform a logical or arithmetic operation
6750          involving it.  */
6751       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6752         lhs = true_value;
6753       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6754         rhs = false_value;
6755     }
6756   else
6757     {
6758       tree testtype = TREE_TYPE (cond);
6759       test = cond;
6760       true_value = constant_boolean_node (true, testtype);
6761       false_value = constant_boolean_node (false, testtype);
6762     }
6763
6764   arg = fold_convert_loc (loc, arg_type, arg);
6765   if (lhs == 0)
6766     {
6767       true_value = fold_convert_loc (loc, cond_type, true_value);
6768       if (cond_first_p)
6769         lhs = fold_build2_loc (loc, code, type, true_value, arg);
6770       else
6771         lhs = fold_build2_loc (loc, code, type, arg, true_value);
6772     }
6773   if (rhs == 0)
6774     {
6775       false_value = fold_convert_loc (loc, cond_type, false_value);
6776       if (cond_first_p)
6777         rhs = fold_build2_loc (loc, code, type, false_value, arg);
6778       else
6779         rhs = fold_build2_loc (loc, code, type, arg, false_value);
6780     }
6781
6782   test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6783   return fold_convert_loc (loc, type, test);
6784 }
6785
6786
6787 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6788
6789    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6790    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6791    ADDEND is the same as X.
6792
6793    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6794    and finite.  The problematic cases are when X is zero, and its mode
6795    has signed zeros.  In the case of rounding towards -infinity,
6796    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6797    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6798
6799 bool
6800 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6801 {
6802   if (!real_zerop (addend))
6803     return false;
6804
6805   /* Don't allow the fold with -fsignaling-nans.  */
6806   if (HONOR_SNANS (TYPE_MODE (type)))
6807     return false;
6808
6809   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6810   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6811     return true;
6812
6813   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6814   if (TREE_CODE (addend) == REAL_CST
6815       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6816     negate = !negate;
6817
6818   /* The mode has signed zeros, and we have to honor their sign.
6819      In this situation, there is only one case we can return true for.
6820      X - 0 is the same as X unless rounding towards -infinity is
6821      supported.
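   A standalone sketch of the hazard (editorial addition, assuming C99
   fenv.h and IEEE arithmetic):

     #include <fenv.h>
     double f (double x)   // called with x == +0.0
     {
       fesetround (FE_DOWNWARD);
       return x - 0.0;     // -0.0 under round-to-minus-infinity
     }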
*/ 6822 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)); 6823 } 6824 6825 /* Subroutine of fold() that checks comparisons of built-in math 6826 functions against real constants. 6827 6828 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison 6829 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE 6830 is the type of the result and ARG0 and ARG1 are the operands of the 6831 comparison. ARG1 must be a TREE_REAL_CST. 6832 6833 The function returns the constant folded tree if a simplification 6834 can be made, and NULL_TREE otherwise. */ 6835 6836 static tree 6837 fold_mathfn_compare (location_t loc, 6838 enum built_in_function fcode, enum tree_code code, 6839 tree type, tree arg0, tree arg1) 6840 { 6841 REAL_VALUE_TYPE c; 6842 6843 if (BUILTIN_SQRT_P (fcode)) 6844 { 6845 tree arg = CALL_EXPR_ARG (arg0, 0); 6846 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); 6847 6848 c = TREE_REAL_CST (arg1); 6849 if (REAL_VALUE_NEGATIVE (c)) 6850 { 6851 /* sqrt(x) < y is always false, if y is negative. */ 6852 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) 6853 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6854 6855 /* sqrt(x) > y is always true, if y is negative and we 6856 don't care about NaNs, i.e. negative values of x. */ 6857 if (code == NE_EXPR || !HONOR_NANS (mode)) 6858 return omit_one_operand_loc (loc, type, integer_one_node, arg); 6859 6860 /* sqrt(x) > y is the same as x >= 0, if y is negative. */ 6861 return fold_build2_loc (loc, GE_EXPR, type, arg, 6862 build_real (TREE_TYPE (arg), dconst0)); 6863 } 6864 else if (code == GT_EXPR || code == GE_EXPR) 6865 { 6866 REAL_VALUE_TYPE c2; 6867 6868 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6869 real_convert (&c2, mode, &c2); 6870 6871 if (REAL_VALUE_ISINF (c2)) 6872 { 6873 /* sqrt(x) > y is x == +Inf, when y is very large. */ 6874 if (HONOR_INFINITIES (mode)) 6875 return fold_build2_loc (loc, EQ_EXPR, type, arg, 6876 build_real (TREE_TYPE (arg), c2)); 6877 6878 /* sqrt(x) > y is always false, when y is very large 6879 and we don't care about infinities. */ 6880 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6881 } 6882 6883 /* sqrt(x) > c is the same as x > c*c. */ 6884 return fold_build2_loc (loc, code, type, arg, 6885 build_real (TREE_TYPE (arg), c2)); 6886 } 6887 else if (code == LT_EXPR || code == LE_EXPR) 6888 { 6889 REAL_VALUE_TYPE c2; 6890 6891 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6892 real_convert (&c2, mode, &c2); 6893 6894 if (REAL_VALUE_ISINF (c2)) 6895 { 6896 /* sqrt(x) < y is always true, when y is a very large 6897 value and we don't care about NaNs or Infinities. */ 6898 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode)) 6899 return omit_one_operand_loc (loc, type, integer_one_node, arg); 6900 6901 /* sqrt(x) < y is x != +Inf when y is very large and we 6902 don't care about NaNs. */ 6903 if (! HONOR_NANS (mode)) 6904 return fold_build2_loc (loc, NE_EXPR, type, arg, 6905 build_real (TREE_TYPE (arg), c2)); 6906 6907 /* sqrt(x) < y is x >= 0 when y is very large and we 6908 don't care about Infinities. */ 6909 if (! HONOR_INFINITIES (mode)) 6910 return fold_build2_loc (loc, GE_EXPR, type, arg, 6911 build_real (TREE_TYPE (arg), dconst0)); 6912 6913 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. 
*/
6914           if (lang_hooks.decls.global_bindings_p () != 0
6915               || CONTAINS_PLACEHOLDER_P (arg))
6916             return NULL_TREE;
6917
6918           arg = save_expr (arg);
6919           return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6920                                   fold_build2_loc (loc, GE_EXPR, type, arg,
6921                                                    build_real (TREE_TYPE (arg),
6922                                                                dconst0)),
6923                                   fold_build2_loc (loc, NE_EXPR, type, arg,
6924                                                    build_real (TREE_TYPE (arg),
6925                                                                c2)));
6926         }
6927
6928       /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6929       if (! HONOR_NANS (mode))
6930         return fold_build2_loc (loc, code, type, arg,
6931                                 build_real (TREE_TYPE (arg), c2));
6932
6933       /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6934       if (lang_hooks.decls.global_bindings_p () == 0
6935           && ! CONTAINS_PLACEHOLDER_P (arg))
6936         {
6937           arg = save_expr (arg);
6938           return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6939                                   fold_build2_loc (loc, GE_EXPR, type, arg,
6940                                                    build_real (TREE_TYPE (arg),
6941                                                                dconst0)),
6942                                   fold_build2_loc (loc, code, type, arg,
6943                                                    build_real (TREE_TYPE (arg),
6944                                                                c2)));
6945         }
6946     }
6947   }
6948
6949   return NULL_TREE;
6950 }
6951
6952 /* Subroutine of fold() that optimizes comparisons against Infinities,
6953    either +Inf or -Inf.
6954
6955    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6956    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6957    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6958
6959    The function returns the constant folded tree if a simplification
6960    can be made, and NULL_TREE otherwise.  */
6961
6962 static tree
6963 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6964                   tree arg0, tree arg1)
6965 {
6966   enum machine_mode mode;
6967   REAL_VALUE_TYPE max;
6968   tree temp;
6969   bool neg;
6970
6971   mode = TYPE_MODE (TREE_TYPE (arg0));
6972
6973   /* For negative infinity swap the sense of the comparison.  */
6974   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6975   if (neg)
6976     code = swap_tree_comparison (code);
6977
6978   switch (code)
6979     {
6980     case GT_EXPR:
6981       /* x > +Inf is always false, if we ignore sNaNs.  */
6982       if (HONOR_SNANS (mode))
6983         return NULL_TREE;
6984       return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6985
6986     case LE_EXPR:
6987       /* x <= +Inf is always true, if we don't care about NaNs.  */
6988       if (! HONOR_NANS (mode))
6989         return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6990
6991       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6992       if (lang_hooks.decls.global_bindings_p () == 0
6993           && ! CONTAINS_PLACEHOLDER_P (arg0))
6994         {
6995           arg0 = save_expr (arg0);
6996           return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6997         }
6998       break;
6999
7000     case EQ_EXPR:
7001     case GE_EXPR:
7002       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
7003       real_maxval (&max, neg, mode);
7004       return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7005                               arg0, build_real (TREE_TYPE (arg0), max));
7006
7007     case LT_EXPR:
7008       /* x < +Inf is always equal to x <= DBL_MAX.  */
7009       real_maxval (&max, neg, mode);
7010       return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7011                               arg0, build_real (TREE_TYPE (arg0), max));
7012
7013     case NE_EXPR:
7014       /* x != +Inf is always equal to !(x > DBL_MAX).  */
7015       real_maxval (&max, neg, mode);
7016       if (! HONOR_NANS (mode))
7017         return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7018                                 arg0, build_real (TREE_TYPE (arg0), max));
7019
7020       temp = fold_build2_loc (loc, neg ?
LT_EXPR : GT_EXPR, type,
7021                               arg0, build_real (TREE_TYPE (arg0), max));
7022       return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7023
7024     default:
7025       break;
7026     }
7027
7028   return NULL_TREE;
7029 }
7030
7031 /* Subroutine of fold() that optimizes comparisons of a division by
7032    a nonzero integer constant against an integer constant, i.e.
7033    X/C1 op C2.
7034
7035    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7036    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
7037    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
7038
7039    The function returns the constant folded tree if a simplification
7040    can be made, and NULL_TREE otherwise.  */
7041
7042 static tree
7043 fold_div_compare (location_t loc,
7044                   enum tree_code code, tree type, tree arg0, tree arg1)
7045 {
7046   tree prod, tmp, hi, lo;
7047   tree arg00 = TREE_OPERAND (arg0, 0);
7048   tree arg01 = TREE_OPERAND (arg0, 1);
7049   unsigned HOST_WIDE_INT lpart;
7050   HOST_WIDE_INT hpart;
7051   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7052   bool neg_overflow;
7053   int overflow;
7054
7055   /* We have to do this the hard way to detect unsigned overflow.
7056      prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
7057   overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7058                                    TREE_INT_CST_HIGH (arg01),
7059                                    TREE_INT_CST_LOW (arg1),
7060                                    TREE_INT_CST_HIGH (arg1),
7061                                    &lpart, &hpart, unsigned_p);
7062   prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7063                                 -1, overflow);
7064   neg_overflow = false;
7065
7066   if (unsigned_p)
7067     {
7068       tmp = int_const_binop (MINUS_EXPR, arg01,
7069                              build_int_cst (TREE_TYPE (arg01), 1), 0);
7070       lo = prod;
7071
7072       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
7073       overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7074                                        TREE_INT_CST_HIGH (prod),
7075                                        TREE_INT_CST_LOW (tmp),
7076                                        TREE_INT_CST_HIGH (tmp),
7077                                        &lpart, &hpart, unsigned_p);
7078       hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7079                                   -1, overflow | TREE_OVERFLOW (prod));
7080     }
7081   else if (tree_int_cst_sgn (arg01) >= 0)
7082     {
7083       tmp = int_const_binop (MINUS_EXPR, arg01,
7084                              build_int_cst (TREE_TYPE (arg01), 1), 0);
7085       switch (tree_int_cst_sgn (arg1))
7086         {
7087         case -1:
7088           neg_overflow = true;
7089           lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7090           hi = prod;
7091           break;
7092
7093         case 0:
7094           lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7095           hi = tmp;
7096           break;
7097
7098         case 1:
7099           hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7100           lo = prod;
7101           break;
7102
7103         default:
7104           gcc_unreachable ();
7105         }
7106     }
7107   else
7108     {
7109       /* A negative divisor reverses the relational operators.
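   E.g. (added illustration): X / -2 > 3 holds exactly when
   X / 2 < -3 does, so the bounds are computed for the swapped
   comparison.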
*/
7110       code = swap_tree_comparison (code);
7111
7112       tmp = int_const_binop (PLUS_EXPR, arg01,
7113                              build_int_cst (TREE_TYPE (arg01), 1), 0);
7114       switch (tree_int_cst_sgn (arg1))
7115         {
7116         case -1:
7117           hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7118           lo = prod;
7119           break;
7120
7121         case 0:
7122           hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7123           lo = tmp;
7124           break;
7125
7126         case 1:
7127           neg_overflow = true;
7128           lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7129           hi = prod;
7130           break;
7131
7132         default:
7133           gcc_unreachable ();
7134         }
7135     }
7136
7137   switch (code)
7138     {
7139     case EQ_EXPR:
7140       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7141         return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7142       if (TREE_OVERFLOW (hi))
7143         return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7144       if (TREE_OVERFLOW (lo))
7145         return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7146       return build_range_check (loc, type, arg00, 1, lo, hi);
7147
7148     case NE_EXPR:
7149       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7150         return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7151       if (TREE_OVERFLOW (hi))
7152         return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7153       if (TREE_OVERFLOW (lo))
7154         return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7155       return build_range_check (loc, type, arg00, 0, lo, hi);
7156
7157     case LT_EXPR:
7158       if (TREE_OVERFLOW (lo))
7159         {
7160           tmp = neg_overflow ? integer_zero_node : integer_one_node;
7161           return omit_one_operand_loc (loc, type, tmp, arg00);
7162         }
7163       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7164
7165     case LE_EXPR:
7166       if (TREE_OVERFLOW (hi))
7167         {
7168           tmp = neg_overflow ? integer_zero_node : integer_one_node;
7169           return omit_one_operand_loc (loc, type, tmp, arg00);
7170         }
7171       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7172
7173     case GT_EXPR:
7174       if (TREE_OVERFLOW (hi))
7175         {
7176           tmp = neg_overflow ? integer_one_node : integer_zero_node;
7177           return omit_one_operand_loc (loc, type, tmp, arg00);
7178         }
7179       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7180
7181     case GE_EXPR:
7182       if (TREE_OVERFLOW (lo))
7183         {
7184           tmp = neg_overflow ? integer_one_node : integer_zero_node;
7185           return omit_one_operand_loc (loc, type, tmp, arg00);
7186         }
7187       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7188
7189     default:
7190       break;
7191     }
7192
7193   return NULL_TREE;
7194 }
7195
7196
7197 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7198    equality/inequality test, then return a simplified form of the test
7199    using a sign test.  Otherwise return NULL.  TYPE is the desired
7200    result type.  */
7201
7202 static tree
7203 fold_single_bit_test_into_sign_test (location_t loc,
7204                                      enum tree_code code, tree arg0, tree arg1,
7205                                      tree result_type)
7206 {
7207   /* If this is testing a single bit, we can optimize the test.  */
7208   if ((code == NE_EXPR || code == EQ_EXPR)
7209       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7210       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7211     {
7212       /* If we have (A & C) != 0 where C is the sign bit of A, convert
7213          this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
7214       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7215
7216       if (arg00 != NULL_TREE
7217           /* This is only a win if casting to a signed type is cheap,
7218              i.e. when arg00's type is not a partial mode.
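   E.g. (added illustration): with 32-bit int,
   (x & 0x80000000u) != 0 becomes (int) x < 0 and
   (x & 0x80000000u) == 0 becomes (int) x >= 0.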
*/ 7219 && TYPE_PRECISION (TREE_TYPE (arg00)) 7220 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00)))) 7221 { 7222 tree stype = signed_type_for (TREE_TYPE (arg00)); 7223 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 7224 result_type, 7225 fold_convert_loc (loc, stype, arg00), 7226 build_int_cst (stype, 0)); 7227 } 7228 } 7229 7230 return NULL_TREE; 7231 } 7232 7233 /* If CODE with arguments ARG0 and ARG1 represents a single bit 7234 equality/inequality test, then return a simplified form of 7235 the test using shifts and logical operations. Otherwise return 7236 NULL. TYPE is the desired result type. */ 7237 7238 tree 7239 fold_single_bit_test (location_t loc, enum tree_code code, 7240 tree arg0, tree arg1, tree result_type) 7241 { 7242 /* If this is testing a single bit, we can optimize the test. */ 7243 if ((code == NE_EXPR || code == EQ_EXPR) 7244 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 7245 && integer_pow2p (TREE_OPERAND (arg0, 1))) 7246 { 7247 tree inner = TREE_OPERAND (arg0, 0); 7248 tree type = TREE_TYPE (arg0); 7249 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); 7250 enum machine_mode operand_mode = TYPE_MODE (type); 7251 int ops_unsigned; 7252 tree signed_type, unsigned_type, intermediate_type; 7253 tree tem, one; 7254 7255 /* First, see if we can fold the single bit test into a sign-bit 7256 test. */ 7257 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, 7258 result_type); 7259 if (tem) 7260 return tem; 7261 7262 /* Otherwise we have (A & C) != 0 where C is a single bit, 7263 convert that into ((A >> C2) & 1). Where C2 = log2(C). 7264 Similarly for (A & C) == 0. */ 7265 7266 /* If INNER is a right shift of a constant and it plus BITNUM does 7267 not overflow, adjust BITNUM and INNER. */ 7268 if (TREE_CODE (inner) == RSHIFT_EXPR 7269 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST 7270 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 7271 && bitnum < TYPE_PRECISION (type) 7272 && 0 > compare_tree_int (TREE_OPERAND (inner, 1), 7273 bitnum - TYPE_PRECISION (type))) 7274 { 7275 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); 7276 inner = TREE_OPERAND (inner, 0); 7277 } 7278 7279 /* If we are going to be able to omit the AND below, we must do our 7280 operations as unsigned. If we must use the AND, we have a choice. 7281 Normally unsigned is faster, but for some machines signed is. */ 7282 #ifdef LOAD_EXTEND_OP 7283 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND 7284 && !flag_syntax_only) ? 0 : 1; 7285 #else 7286 ops_unsigned = 1; 7287 #endif 7288 7289 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0); 7290 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1); 7291 intermediate_type = ops_unsigned ? unsigned_type : signed_type; 7292 inner = fold_convert_loc (loc, intermediate_type, inner); 7293 7294 if (bitnum != 0) 7295 inner = build2 (RSHIFT_EXPR, intermediate_type, 7296 inner, size_int (bitnum)); 7297 7298 one = build_int_cst (intermediate_type, 1); 7299 7300 if (code == EQ_EXPR) 7301 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one); 7302 7303 /* Put the AND last so it can combine with more things. */ 7304 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one); 7305 7306 /* Make sure to return the proper type. 
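The net effect is that, e.g., (X & 8) != 0 becomes ((X >> 3) & 1) and (X & 8) == 0 becomes (((X >> 3) ^ 1) & 1), both computed in intermediate_type and converted to the desired result_type only here.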
*/ 7307 inner = fold_convert_loc (loc, result_type, inner); 7308 7309 return inner; 7310 } 7311 return NULL_TREE; 7312 } 7313 7314 /* Check whether we are allowed to reorder operands arg0 and arg1, 7315 such that the evaluation of arg1 occurs before arg0. */ 7316 7317 static bool 7318 reorder_operands_p (const_tree arg0, const_tree arg1) 7319 { 7320 if (! flag_evaluation_order) 7321 return true; 7322 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1)) 7323 return true; 7324 return ! TREE_SIDE_EFFECTS (arg0) 7325 && ! TREE_SIDE_EFFECTS (arg1); 7326 } 7327 7328 /* Test whether it is preferable to swap two operands, ARG0 and 7329 ARG1, for example because ARG0 is an integer constant and ARG1 7330 isn't. If REORDER is true, only recommend swapping if we can 7331 evaluate the operands in reverse order. */ 7332 7333 bool 7334 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder) 7335 { 7336 STRIP_SIGN_NOPS (arg0); 7337 STRIP_SIGN_NOPS (arg1); 7338 7339 if (TREE_CODE (arg1) == INTEGER_CST) 7340 return 0; 7341 if (TREE_CODE (arg0) == INTEGER_CST) 7342 return 1; 7343 7344 if (TREE_CODE (arg1) == REAL_CST) 7345 return 0; 7346 if (TREE_CODE (arg0) == REAL_CST) 7347 return 1; 7348 7349 if (TREE_CODE (arg1) == FIXED_CST) 7350 return 0; 7351 if (TREE_CODE (arg0) == FIXED_CST) 7352 return 1; 7353 7354 if (TREE_CODE (arg1) == COMPLEX_CST) 7355 return 0; 7356 if (TREE_CODE (arg0) == COMPLEX_CST) 7357 return 1; 7358 7359 if (TREE_CONSTANT (arg1)) 7360 return 0; 7361 if (TREE_CONSTANT (arg0)) 7362 return 1; 7363 7364 if (optimize_function_for_size_p (cfun)) 7365 return 0; 7366 7367 if (reorder && flag_evaluation_order 7368 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) 7369 return 0; 7370 7371 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form 7372 for commutative and comparison operators. Ensuring a canonical 7373 form allows the optimizers to find additional redundancies without 7374 having to explicitly check for both orderings. */ 7375 if (TREE_CODE (arg0) == SSA_NAME 7376 && TREE_CODE (arg1) == SSA_NAME 7377 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) 7378 return 1; 7379 7380 /* Put SSA_NAMEs last. */ 7381 if (TREE_CODE (arg1) == SSA_NAME) 7382 return 0; 7383 if (TREE_CODE (arg0) == SSA_NAME) 7384 return 1; 7385 7386 /* Put variables last. */ 7387 if (DECL_P (arg1)) 7388 return 0; 7389 if (DECL_P (arg0)) 7390 return 1; 7391 7392 return 0; 7393 } 7394 7395 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where 7396 ARG0 is extended to a wider type. */ 7397 7398 static tree 7399 fold_widened_comparison (location_t loc, enum tree_code code, 7400 tree type, tree arg0, tree arg1) 7401 { 7402 tree arg0_unw = get_unwidened (arg0, NULL_TREE); 7403 tree arg1_unw; 7404 tree shorter_type, outer_type; 7405 tree min, max; 7406 bool above, below; 7407 7408 if (arg0_unw == arg0) 7409 return NULL_TREE; 7410 shorter_type = TREE_TYPE (arg0_unw); 7411 7412 #ifdef HAVE_canonicalize_funcptr_for_compare 7413 /* Disable this optimization if we're casting a function pointer 7414 type on targets that require function pointer canonicalization. */ 7415 if (HAVE_canonicalize_funcptr_for_compare 7416 && TREE_CODE (shorter_type) == POINTER_TYPE 7417 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE) 7418 return NULL_TREE; 7419 #endif 7420 7421 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type)) 7422 return NULL_TREE; 7423 7424 arg1_unw = get_unwidened (arg1, NULL_TREE); 7425 7426 /* If possible, express the comparison in the shorter mode.
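E.g. for a signed char C, (int) C == 100 becomes C == 100 carried out in the narrower type, since the constant fits into signed char.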
*/ 7427 if ((code == EQ_EXPR || code == NE_EXPR 7428 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type)) 7429 && (TREE_TYPE (arg1_unw) == shorter_type 7430 || ((TYPE_PRECISION (shorter_type) 7431 >= TYPE_PRECISION (TREE_TYPE (arg1_unw))) 7432 && (TYPE_UNSIGNED (shorter_type) 7433 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw)))) 7434 || (TREE_CODE (arg1_unw) == INTEGER_CST 7435 && (TREE_CODE (shorter_type) == INTEGER_TYPE 7436 || TREE_CODE (shorter_type) == BOOLEAN_TYPE) 7437 && int_fits_type_p (arg1_unw, shorter_type)))) 7438 return fold_build2_loc (loc, code, type, arg0_unw, 7439 fold_convert_loc (loc, shorter_type, arg1_unw)); 7440 7441 if (TREE_CODE (arg1_unw) != INTEGER_CST 7442 || TREE_CODE (shorter_type) != INTEGER_TYPE 7443 || !int_fits_type_p (arg1_unw, shorter_type)) 7444 return NULL_TREE; 7445 7446 /* If we are comparing with the integer that does not fit into the range 7447 of the shorter type, the result is known. */ 7448 outer_type = TREE_TYPE (arg1_unw); 7449 min = lower_bound_in_type (outer_type, shorter_type); 7450 max = upper_bound_in_type (outer_type, shorter_type); 7451 7452 above = integer_nonzerop (fold_relational_const (LT_EXPR, type, 7453 max, arg1_unw)); 7454 below = integer_nonzerop (fold_relational_const (LT_EXPR, type, 7455 arg1_unw, min)); 7456 7457 switch (code) 7458 { 7459 case EQ_EXPR: 7460 if (above || below) 7461 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7462 break; 7463 7464 case NE_EXPR: 7465 if (above || below) 7466 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7467 break; 7468 7469 case LT_EXPR: 7470 case LE_EXPR: 7471 if (above) 7472 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7473 else if (below) 7474 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7475 7476 case GT_EXPR: 7477 case GE_EXPR: 7478 if (above) 7479 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7480 else if (below) 7481 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7482 7483 default: 7484 break; 7485 } 7486 7487 return NULL_TREE; 7488 } 7489 7490 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for 7491 ARG0 just the signedness is changed. */ 7492 7493 static tree 7494 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type, 7495 tree arg0, tree arg1) 7496 { 7497 tree arg0_inner; 7498 tree inner_type, outer_type; 7499 7500 if (!CONVERT_EXPR_P (arg0)) 7501 return NULL_TREE; 7502 7503 outer_type = TREE_TYPE (arg0); 7504 arg0_inner = TREE_OPERAND (arg0, 0); 7505 inner_type = TREE_TYPE (arg0_inner); 7506 7507 #ifdef HAVE_canonicalize_funcptr_for_compare 7508 /* Disable this optimization if we're casting a function pointer 7509 type on targets that require function pointer canonicalization. 
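(E.g. targets on which a function pointer may really point to a function descriptor and has to be canonicalized before it can be compared.)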
*/ 7510 if (HAVE_canonicalize_funcptr_for_compare 7511 && TREE_CODE (inner_type) == POINTER_TYPE 7512 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE) 7513 return NULL_TREE; 7514 #endif 7515 7516 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) 7517 return NULL_TREE; 7518 7519 if (TREE_CODE (arg1) != INTEGER_CST 7520 && !(CONVERT_EXPR_P (arg1) 7521 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)) 7522 return NULL_TREE; 7523 7524 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type) 7525 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type)) 7526 && code != NE_EXPR 7527 && code != EQ_EXPR) 7528 return NULL_TREE; 7529 7530 if (TREE_CODE (arg1) == INTEGER_CST) 7531 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1), 7532 TREE_INT_CST_HIGH (arg1), 0, 7533 TREE_OVERFLOW (arg1)); 7534 else 7535 arg1 = fold_convert_loc (loc, inner_type, arg1); 7536 7537 return fold_build2_loc (loc, code, type, arg0_inner, arg1); 7538 } 7539 7540 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is 7541 the step of the array. Reconstructs s and delta in the case of s * 7542 delta being an integer constant (and thus already folded). ADDR is 7543 the address. OP1 is the multiplicative expression. If the 7544 function succeeds, the new address expression is returned. 7545 Otherwise NULL_TREE is returned. LOC is the location of the 7546 resulting expression. */ 7547 7548 static tree 7549 try_move_mult_to_index (location_t loc, tree addr, tree op1) 7550 { 7551 tree s, delta, step; 7552 tree ref = TREE_OPERAND (addr, 0), pref; 7553 tree ret, pos; 7554 tree itype; 7555 bool mdim = false; 7556 7557 /* Strip the nops that might be added when converting op1 to sizetype. */ 7558 STRIP_NOPS (op1); 7559 7560 /* Canonicalize op1 into a possibly non-constant delta 7561 and an INTEGER_CST s. */ 7562 if (TREE_CODE (op1) == MULT_EXPR) 7563 { 7564 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1); 7565 7566 STRIP_NOPS (arg0); 7567 STRIP_NOPS (arg1); 7568 7569 if (TREE_CODE (arg0) == INTEGER_CST) 7570 { 7571 s = arg0; 7572 delta = arg1; 7573 } 7574 else if (TREE_CODE (arg1) == INTEGER_CST) 7575 { 7576 s = arg1; 7577 delta = arg0; 7578 } 7579 else 7580 return NULL_TREE; 7581 } 7582 else if (TREE_CODE (op1) == INTEGER_CST) 7583 { 7584 delta = op1; 7585 s = NULL_TREE; 7586 } 7587 else 7588 { 7589 /* Pretend we have delta * 1. */ 7590 delta = op1; 7591 s = integer_one_node; 7592 } 7593 7594 for (;; ref = TREE_OPERAND (ref, 0)) 7595 { 7596 if (TREE_CODE (ref) == ARRAY_REF) 7597 { 7598 tree domain; 7599 7600 /* Remember if this was a multi-dimensional array. */ 7601 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF) 7602 mdim = true; 7603 7604 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); 7605 if (! domain) 7606 continue; 7607 itype = TREE_TYPE (domain); 7608 7609 step = array_ref_element_size (ref); 7610 if (TREE_CODE (step) != INTEGER_CST) 7611 continue; 7612 7613 if (s) 7614 { 7615 if (! tree_int_cst_equal (step, s)) 7616 continue; 7617 } 7618 else 7619 { 7620 /* See if delta is a multiple of step. */ 7621 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step); 7622 if (! tmp) 7623 continue; 7624 delta = tmp; 7625 } 7626 7627 /* Only fold here if we can verify we do not overflow one 7628 dimension of a multi-dimensional array.
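E.g. for int a[5][10], &a[i][j] p+ 4 * k (with 4-byte elements) is rewritten as &a[i][j + k] only when j and k are constants and j + k does not exceed the domain bound 9.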
*/ 7629 if (mdim) 7630 { 7631 tree tmp; 7632 7633 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST 7634 || !TYPE_MAX_VALUE (domain) 7635 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST) 7636 continue; 7637 7638 tmp = fold_binary_loc (loc, PLUS_EXPR, itype, 7639 fold_convert_loc (loc, itype, 7640 TREE_OPERAND (ref, 1)), 7641 fold_convert_loc (loc, itype, delta)); 7642 if (!tmp 7643 || TREE_CODE (tmp) != INTEGER_CST 7644 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp)) 7645 continue; 7646 } 7647 7648 break; 7649 } 7650 else 7651 mdim = false; 7652 7653 if (!handled_component_p (ref)) 7654 return NULL_TREE; 7655 } 7656 7657 /* We found the suitable array reference. So copy everything up to it, 7658 and replace the index. */ 7659 7660 pref = TREE_OPERAND (addr, 0); 7661 ret = copy_node (pref); 7662 SET_EXPR_LOCATION (ret, loc); 7663 pos = ret; 7664 7665 while (pref != ref) 7666 { 7667 pref = TREE_OPERAND (pref, 0); 7668 TREE_OPERAND (pos, 0) = copy_node (pref); 7669 pos = TREE_OPERAND (pos, 0); 7670 } 7671 7672 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype, 7673 fold_convert_loc (loc, itype, 7674 TREE_OPERAND (pos, 1)), 7675 fold_convert_loc (loc, itype, delta)); 7676 7677 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret); 7678 } 7679 7680 7681 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y 7682 means A >= Y && A != MAX, but in this case we know that 7683 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */ 7684 7685 static tree 7686 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound) 7687 { 7688 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; 7689 7690 if (TREE_CODE (bound) == LT_EXPR) 7691 a = TREE_OPERAND (bound, 0); 7692 else if (TREE_CODE (bound) == GT_EXPR) 7693 a = TREE_OPERAND (bound, 1); 7694 else 7695 return NULL_TREE; 7696 7697 typea = TREE_TYPE (a); 7698 if (!INTEGRAL_TYPE_P (typea) 7699 && !POINTER_TYPE_P (typea)) 7700 return NULL_TREE; 7701 7702 if (TREE_CODE (ineq) == LT_EXPR) 7703 { 7704 a1 = TREE_OPERAND (ineq, 1); 7705 y = TREE_OPERAND (ineq, 0); 7706 } 7707 else if (TREE_CODE (ineq) == GT_EXPR) 7708 { 7709 a1 = TREE_OPERAND (ineq, 0); 7710 y = TREE_OPERAND (ineq, 1); 7711 } 7712 else 7713 return NULL_TREE; 7714 7715 if (TREE_TYPE (a1) != typea) 7716 return NULL_TREE; 7717 7718 if (POINTER_TYPE_P (typea)) 7719 { 7720 /* Convert the pointer types into integer before taking the difference. */ 7721 tree ta = fold_convert_loc (loc, ssizetype, a); 7722 tree ta1 = fold_convert_loc (loc, ssizetype, a1); 7723 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta); 7724 } 7725 else 7726 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a); 7727 7728 if (!diff || !integer_onep (diff)) 7729 return NULL_TREE; 7730 7731 return fold_build2_loc (loc, GE_EXPR, type, a, y); 7732 } 7733 7734 /* Fold a sum or difference of at least one multiplication. 7735 Returns the folded tree or NULL if no simplification could be made. */ 7736 7737 static tree 7738 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type, 7739 tree arg0, tree arg1) 7740 { 7741 tree arg00, arg01, arg10, arg11; 7742 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same; 7743 7744 /* (A * C) +- (B * C) -> (A+-B) * C. 7745 (A * C) +- A -> A * (C+-1). 7746 We are most concerned about the case where C is a constant, 7747 but other combinations show up during loop reduction. Since 7748 it is not difficult, try all four possibilities. 
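E.g. A * C + B * C -> (A + B) * C and X * 4 + X -> X * 5, and the power-of-two case further down rewrites i * 12 + j * 4 as (i * 3 + j) * 4, saving one multiplication in row-major address arithmetic.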
*/ 7749 7750 if (TREE_CODE (arg0) == MULT_EXPR) 7751 { 7752 arg00 = TREE_OPERAND (arg0, 0); 7753 arg01 = TREE_OPERAND (arg0, 1); 7754 } 7755 else if (TREE_CODE (arg0) == INTEGER_CST) 7756 { 7757 arg00 = build_one_cst (type); 7758 arg01 = arg0; 7759 } 7760 else 7761 { 7762 /* We cannot generate constant 1 for fract. */ 7763 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7764 return NULL_TREE; 7765 arg00 = arg0; 7766 arg01 = build_one_cst (type); 7767 } 7768 if (TREE_CODE (arg1) == MULT_EXPR) 7769 { 7770 arg10 = TREE_OPERAND (arg1, 0); 7771 arg11 = TREE_OPERAND (arg1, 1); 7772 } 7773 else if (TREE_CODE (arg1) == INTEGER_CST) 7774 { 7775 arg10 = build_one_cst (type); 7776 /* As we canonicalize A - 2 to A + -2 get rid of that sign for 7777 the purpose of this canonicalization. */ 7778 if (TREE_INT_CST_HIGH (arg1) == -1 7779 && negate_expr_p (arg1) 7780 && code == PLUS_EXPR) 7781 { 7782 arg11 = negate_expr (arg1); 7783 code = MINUS_EXPR; 7784 } 7785 else 7786 arg11 = arg1; 7787 } 7788 else 7789 { 7790 /* We cannot generate constant 1 for fract. */ 7791 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7792 return NULL_TREE; 7793 arg10 = arg1; 7794 arg11 = build_one_cst (type); 7795 } 7796 same = NULL_TREE; 7797 7798 if (operand_equal_p (arg01, arg11, 0)) 7799 same = arg01, alt0 = arg00, alt1 = arg10; 7800 else if (operand_equal_p (arg00, arg10, 0)) 7801 same = arg00, alt0 = arg01, alt1 = arg11; 7802 else if (operand_equal_p (arg00, arg11, 0)) 7803 same = arg00, alt0 = arg01, alt1 = arg10; 7804 else if (operand_equal_p (arg01, arg10, 0)) 7805 same = arg01, alt0 = arg00, alt1 = arg11; 7806 7807 /* No identical multiplicands; see if we can find a common 7808 power-of-two factor in non-power-of-two multiplies. This 7809 can help in multi-dimensional array access. */ 7810 else if (host_integerp (arg01, 0) 7811 && host_integerp (arg11, 0)) 7812 { 7813 HOST_WIDE_INT int01, int11, tmp; 7814 bool swap = false; 7815 tree maybe_same; 7816 int01 = TREE_INT_CST_LOW (arg01); 7817 int11 = TREE_INT_CST_LOW (arg11); 7818 7819 /* Move min of absolute values to int11. */ 7820 if ((int01 >= 0 ? int01 : -int01) 7821 < (int11 >= 0 ? int11 : -int11)) 7822 { 7823 tmp = int01, int01 = int11, int11 = tmp; 7824 alt0 = arg00, arg00 = arg10, arg10 = alt0; 7825 maybe_same = arg01; 7826 swap = true; 7827 } 7828 else 7829 maybe_same = arg11; 7830 7831 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0 7832 /* The remainder should not be a constant, otherwise we 7833 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has 7834 increased the number of multiplications necessary. */ 7835 && TREE_CODE (arg10) != INTEGER_CST) 7836 { 7837 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00, 7838 build_int_cst (TREE_TYPE (arg00), 7839 int01 / int11)); 7840 alt1 = arg10; 7841 same = maybe_same; 7842 if (swap) 7843 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same; 7844 } 7845 } 7846 7847 if (same) 7848 return fold_build2_loc (loc, MULT_EXPR, type, 7849 fold_build2_loc (loc, code, type, 7850 fold_convert_loc (loc, type, alt0), 7851 fold_convert_loc (loc, type, alt1)), 7852 fold_convert_loc (loc, type, same)); 7853 7854 return NULL_TREE; 7855 } 7856 7857 /* Subroutine of native_encode_expr. Encode the INTEGER_CST 7858 specified by EXPR into the buffer PTR of length LEN bytes. 7859 Return the number of bytes placed in the buffer, or zero 7860 upon failure. 
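For example, a 32-bit INTEGER_CST of value 0x01020304 is emitted as the bytes 04 03 02 01 on a little-endian target and as 01 02 03 04 on a big-endian one.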
*/ 7861 7862 static int 7863 native_encode_int (const_tree expr, unsigned char *ptr, int len) 7864 { 7865 tree type = TREE_TYPE (expr); 7866 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7867 int byte, offset, word, words; 7868 unsigned char value; 7869 7870 if (total_bytes > len) 7871 return 0; 7872 words = total_bytes / UNITS_PER_WORD; 7873 7874 for (byte = 0; byte < total_bytes; byte++) 7875 { 7876 int bitpos = byte * BITS_PER_UNIT; 7877 if (bitpos < HOST_BITS_PER_WIDE_INT) 7878 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos); 7879 else 7880 value = (unsigned char) (TREE_INT_CST_HIGH (expr) 7881 >> (bitpos - HOST_BITS_PER_WIDE_INT)); 7882 7883 if (total_bytes > UNITS_PER_WORD) 7884 { 7885 word = byte / UNITS_PER_WORD; 7886 if (WORDS_BIG_ENDIAN) 7887 word = (words - 1) - word; 7888 offset = word * UNITS_PER_WORD; 7889 if (BYTES_BIG_ENDIAN) 7890 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7891 else 7892 offset += byte % UNITS_PER_WORD; 7893 } 7894 else 7895 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7896 ptr[offset] = value; 7897 } 7898 return total_bytes; 7899 } 7900 7901 7902 /* Subroutine of native_encode_expr. Encode the REAL_CST 7903 specified by EXPR into the buffer PTR of length LEN bytes. 7904 Return the number of bytes placed in the buffer, or zero 7905 upon failure. */ 7906 7907 static int 7908 native_encode_real (const_tree expr, unsigned char *ptr, int len) 7909 { 7910 tree type = TREE_TYPE (expr); 7911 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7912 int byte, offset, word, words, bitpos; 7913 unsigned char value; 7914 7915 /* There are always 32 bits in each long, no matter the size of 7916 the host's long. We handle floating point representations with 7917 up to 192 bits. */ 7918 long tmp[6]; 7919 7920 if (total_bytes > len) 7921 return 0; 7922 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 7923 7924 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type)); 7925 7926 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7927 bitpos += BITS_PER_UNIT) 7928 { 7929 byte = (bitpos / BITS_PER_UNIT) & 3; 7930 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31)); 7931 7932 if (UNITS_PER_WORD < 4) 7933 { 7934 word = byte / UNITS_PER_WORD; 7935 if (WORDS_BIG_ENDIAN) 7936 word = (words - 1) - word; 7937 offset = word * UNITS_PER_WORD; 7938 if (BYTES_BIG_ENDIAN) 7939 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7940 else 7941 offset += byte % UNITS_PER_WORD; 7942 } 7943 else 7944 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7945 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value; 7946 } 7947 return total_bytes; 7948 } 7949 7950 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST 7951 specified by EXPR into the buffer PTR of length LEN bytes. 7952 Return the number of bytes placed in the buffer, or zero 7953 upon failure. */ 7954 7955 static int 7956 native_encode_complex (const_tree expr, unsigned char *ptr, int len) 7957 { 7958 int rsize, isize; 7959 tree part; 7960 7961 part = TREE_REALPART (expr); 7962 rsize = native_encode_expr (part, ptr, len); 7963 if (rsize == 0) 7964 return 0; 7965 part = TREE_IMAGPART (expr); 7966 isize = native_encode_expr (part, ptr+rsize, len-rsize); 7967 if (isize != rsize) 7968 return 0; 7969 return rsize + isize; 7970 } 7971 7972 7973 /* Subroutine of native_encode_expr. Encode the VECTOR_CST 7974 specified by EXPR into the buffer PTR of length LEN bytes. 7975 Return the number of bytes placed in the buffer, or zero 7976 upon failure.
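Each element is encoded in turn via native_encode_expr; elements missing from a shortened constant list are emitted as zero bytes.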
*/ 7977 7978 static int 7979 native_encode_vector (const_tree expr, unsigned char *ptr, int len) 7980 { 7981 int i, size, offset, count; 7982 tree itype, elem, elements; 7983 7984 offset = 0; 7985 elements = TREE_VECTOR_CST_ELTS (expr); 7986 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)); 7987 itype = TREE_TYPE (TREE_TYPE (expr)); 7988 size = GET_MODE_SIZE (TYPE_MODE (itype)); 7989 for (i = 0; i < count; i++) 7990 { 7991 if (elements) 7992 { 7993 elem = TREE_VALUE (elements); 7994 elements = TREE_CHAIN (elements); 7995 } 7996 else 7997 elem = NULL_TREE; 7998 7999 if (elem) 8000 { 8001 if (native_encode_expr (elem, ptr+offset, len-offset) != size) 8002 return 0; 8003 } 8004 else 8005 { 8006 if (offset + size > len) 8007 return 0; 8008 memset (ptr+offset, 0, size); 8009 } 8010 offset += size; 8011 } 8012 return offset; 8013 } 8014 8015 8016 /* Subroutine of native_encode_expr. Encode the STRING_CST 8017 specified by EXPR into the buffer PTR of length LEN bytes. 8018 Return the number of bytes placed in the buffer, or zero 8019 upon failure. */ 8020 8021 static int 8022 native_encode_string (const_tree expr, unsigned char *ptr, int len) 8023 { 8024 tree type = TREE_TYPE (expr); 8025 HOST_WIDE_INT total_bytes; 8026 8027 if (TREE_CODE (type) != ARRAY_TYPE 8028 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE 8029 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT 8030 || !host_integerp (TYPE_SIZE_UNIT (type), 0)) 8031 return 0; 8032 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0); 8033 if (total_bytes > len) 8034 return 0; 8035 if (TREE_STRING_LENGTH (expr) < total_bytes) 8036 { 8037 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr)); 8038 memset (ptr + TREE_STRING_LENGTH (expr), 0, 8039 total_bytes - TREE_STRING_LENGTH (expr)); 8040 } 8041 else 8042 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes); 8043 return total_bytes; 8044 } 8045 8046 8047 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, 8048 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the 8049 buffer PTR of length LEN bytes. Return the number of bytes 8050 placed in the buffer, or zero upon failure. */ 8051 8052 int 8053 native_encode_expr (const_tree expr, unsigned char *ptr, int len) 8054 { 8055 switch (TREE_CODE (expr)) 8056 { 8057 case INTEGER_CST: 8058 return native_encode_int (expr, ptr, len); 8059 8060 case REAL_CST: 8061 return native_encode_real (expr, ptr, len); 8062 8063 case COMPLEX_CST: 8064 return native_encode_complex (expr, ptr, len); 8065 8066 case VECTOR_CST: 8067 return native_encode_vector (expr, ptr, len); 8068 8069 case STRING_CST: 8070 return native_encode_string (expr, ptr, len); 8071 8072 default: 8073 return 0; 8074 } 8075 } 8076 8077 8078 /* Subroutine of native_interpret_expr. Interpret the contents of 8079 the buffer PTR of length LEN as an INTEGER_CST of type TYPE. 8080 If the buffer cannot be interpreted, return NULL_TREE. 
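This is the inverse of native_encode_int: e.g. the little-endian bytes 04 03 02 01 are interpreted as the 32-bit value 0x01020304.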
*/ 8081 8082 static tree 8083 native_interpret_int (tree type, const unsigned char *ptr, int len) 8084 { 8085 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 8086 int byte, offset, word, words; 8087 unsigned char value; 8088 unsigned HOST_WIDE_INT lo = 0; 8089 HOST_WIDE_INT hi = 0; 8090 8091 if (total_bytes > len) 8092 return NULL_TREE; 8093 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) 8094 return NULL_TREE; 8095 words = total_bytes / UNITS_PER_WORD; 8096 8097 for (byte = 0; byte < total_bytes; byte++) 8098 { 8099 int bitpos = byte * BITS_PER_UNIT; 8100 if (total_bytes > UNITS_PER_WORD) 8101 { 8102 word = byte / UNITS_PER_WORD; 8103 if (WORDS_BIG_ENDIAN) 8104 word = (words - 1) - word; 8105 offset = word * UNITS_PER_WORD; 8106 if (BYTES_BIG_ENDIAN) 8107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 8108 else 8109 offset += byte % UNITS_PER_WORD; 8110 } 8111 else 8112 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 8113 value = ptr[offset]; 8114 8115 if (bitpos < HOST_BITS_PER_WIDE_INT) 8116 lo |= (unsigned HOST_WIDE_INT) value << bitpos; 8117 else 8118 hi |= (unsigned HOST_WIDE_INT) value 8119 << (bitpos - HOST_BITS_PER_WIDE_INT); 8120 } 8121 8122 return build_int_cst_wide_type (type, lo, hi); 8123 } 8124 8125 8126 /* Subroutine of native_interpret_expr. Interpret the contents of 8127 the buffer PTR of length LEN as a REAL_CST of type TYPE. 8128 If the buffer cannot be interpreted, return NULL_TREE. */ 8129 8130 static tree 8131 native_interpret_real (tree type, const unsigned char *ptr, int len) 8132 { 8133 enum machine_mode mode = TYPE_MODE (type); 8134 int total_bytes = GET_MODE_SIZE (mode); 8135 int byte, offset, word, words, bitpos; 8136 unsigned char value; 8137 /* There are always 32 bits in each long, no matter the size of 8138 the host's long. We handle floating point representations with 8139 up to 192 bits. */ 8140 REAL_VALUE_TYPE r; 8141 long tmp[6]; 8142 8143 total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 8144 if (total_bytes > len || total_bytes > 24) 8145 return NULL_TREE; 8146 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 8147 8148 memset (tmp, 0, sizeof (tmp)); 8149 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 8150 bitpos += BITS_PER_UNIT) 8151 { 8152 byte = (bitpos / BITS_PER_UNIT) & 3; 8153 if (UNITS_PER_WORD < 4) 8154 { 8155 word = byte / UNITS_PER_WORD; 8156 if (WORDS_BIG_ENDIAN) 8157 word = (words - 1) - word; 8158 offset = word * UNITS_PER_WORD; 8159 if (BYTES_BIG_ENDIAN) 8160 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 8161 else 8162 offset += byte % UNITS_PER_WORD; 8163 } 8164 else 8165 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 8166 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)]; 8167 8168 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); 8169 } 8170 8171 real_from_target (&r, tmp, mode); 8172 return build_real (type, r); 8173 } 8174 8175 8176 /* Subroutine of native_interpret_expr. Interpret the contents of 8177 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE. 8178 If the buffer cannot be interpreted, return NULL_TREE.
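The layout must match native_encode_complex: the real part occupies the first half of the buffer, the imaginary part the second half.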
*/ 8179 8180 static tree 8181 native_interpret_complex (tree type, const unsigned char *ptr, int len) 8182 { 8183 tree etype, rpart, ipart; 8184 int size; 8185 8186 etype = TREE_TYPE (type); 8187 size = GET_MODE_SIZE (TYPE_MODE (etype)); 8188 if (size * 2 > len) 8189 return NULL_TREE; 8190 rpart = native_interpret_expr (etype, ptr, size); 8191 if (!rpart) 8192 return NULL_TREE; 8193 ipart = native_interpret_expr (etype, ptr+size, size); 8194 if (!ipart) 8195 return NULL_TREE; 8196 return build_complex (type, rpart, ipart); 8197 } 8198 8199 8200 /* Subroutine of native_interpret_expr. Interpret the contents of 8201 the buffer PTR of length LEN as a VECTOR_CST of type TYPE. 8202 If the buffer cannot be interpreted, return NULL_TREE. */ 8203 8204 static tree 8205 native_interpret_vector (tree type, const unsigned char *ptr, int len) 8206 { 8207 tree etype, elem, elements; 8208 int i, size, count; 8209 8210 etype = TREE_TYPE (type); 8211 size = GET_MODE_SIZE (TYPE_MODE (etype)); 8212 count = TYPE_VECTOR_SUBPARTS (type); 8213 if (size * count > len) 8214 return NULL_TREE; 8215 8216 elements = NULL_TREE; 8217 for (i = count - 1; i >= 0; i--) 8218 { 8219 elem = native_interpret_expr (etype, ptr+(i*size), size); 8220 if (!elem) 8221 return NULL_TREE; 8222 elements = tree_cons (NULL_TREE, elem, elements); 8223 } 8224 return build_vector (type, elements); 8225 } 8226 8227 8228 /* Subroutine of fold_view_convert_expr. Interpret the contents of 8229 the buffer PTR of length LEN as a constant of type TYPE. For 8230 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P 8231 we return a REAL_CST, etc... If the buffer cannot be interpreted, 8232 return NULL_TREE. */ 8233 8234 tree 8235 native_interpret_expr (tree type, const unsigned char *ptr, int len) 8236 { 8237 switch (TREE_CODE (type)) 8238 { 8239 case INTEGER_TYPE: 8240 case ENUMERAL_TYPE: 8241 case BOOLEAN_TYPE: 8242 return native_interpret_int (type, ptr, len); 8243 8244 case REAL_TYPE: 8245 return native_interpret_real (type, ptr, len); 8246 8247 case COMPLEX_TYPE: 8248 return native_interpret_complex (type, ptr, len); 8249 8250 case VECTOR_TYPE: 8251 return native_interpret_vector (type, ptr, len); 8252 8253 default: 8254 return NULL_TREE; 8255 } 8256 } 8257 8258 8259 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type 8260 TYPE at compile-time. If we're unable to perform the conversion 8261 return NULL_TREE. */ 8262 8263 static tree 8264 fold_view_convert_expr (tree type, tree expr) 8265 { 8266 /* We support up to 512-bit values (for V8DFmode). */ 8267 unsigned char buffer[64]; 8268 int len; 8269 8270 /* Check that the host and target are sane. */ 8271 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8) 8272 return NULL_TREE; 8273 8274 len = native_encode_expr (expr, buffer, sizeof (buffer)); 8275 if (len == 0) 8276 return NULL_TREE; 8277 8278 return native_interpret_expr (type, buffer, len); 8279 } 8280 8281 /* Build an expression for the address of T. Folds away INDIRECT_REF 8282 to avoid confusing the gimplify process. */ 8283 8284 tree 8285 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) 8286 { 8287 /* The size of the object is not relevant when talking about its address. 
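Hence a WITH_SIZE_EXPR wrapper is stripped before taking the address; similarly, &*P below folds back to the pointer P.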
*/ 8288 if (TREE_CODE (t) == WITH_SIZE_EXPR) 8289 t = TREE_OPERAND (t, 0); 8290 8291 /* Note: doesn't apply to ALIGN_INDIRECT_REF */ 8292 if (TREE_CODE (t) == INDIRECT_REF 8293 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF) 8294 { 8295 t = TREE_OPERAND (t, 0); 8296 8297 if (TREE_TYPE (t) != ptrtype) 8298 { 8299 t = build1 (NOP_EXPR, ptrtype, t); 8300 SET_EXPR_LOCATION (t, loc); 8301 } 8302 } 8303 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR) 8304 { 8305 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0)); 8306 8307 if (TREE_TYPE (t) != ptrtype) 8308 t = fold_convert_loc (loc, ptrtype, t); 8309 } 8310 else 8311 { 8312 t = build1 (ADDR_EXPR, ptrtype, t); 8313 SET_EXPR_LOCATION (t, loc); 8314 } 8315 8316 return t; 8317 } 8318 8319 /* Build an expression for the address of T. */ 8320 8321 tree 8322 build_fold_addr_expr_loc (location_t loc, tree t) 8323 { 8324 tree ptrtype = build_pointer_type (TREE_TYPE (t)); 8325 8326 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype); 8327 } 8328 8329 /* Fold a unary expression of code CODE and type TYPE with operand 8330 OP0. Return the folded expression if folding is successful. 8331 Otherwise, return NULL_TREE. */ 8332 8333 tree 8334 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) 8335 { 8336 tree tem; 8337 tree arg0; 8338 enum tree_code_class kind = TREE_CODE_CLASS (code); 8339 8340 gcc_assert (IS_EXPR_CODE_CLASS (kind) 8341 && TREE_CODE_LENGTH (code) == 1); 8342 8343 arg0 = op0; 8344 if (arg0) 8345 { 8346 if (CONVERT_EXPR_CODE_P (code) 8347 || code == FLOAT_EXPR || code == ABS_EXPR) 8348 { 8349 /* Don't use STRIP_NOPS, because signedness of argument type 8350 matters. */ 8351 STRIP_SIGN_NOPS (arg0); 8352 } 8353 else 8354 { 8355 /* Strip any conversions that don't change the mode. This 8356 is safe for every expression, except for a comparison 8357 expression because its signedness is derived from its 8358 operands. 8359 8360 Note that this is done as an internal manipulation within 8361 the constant folder, in order to find the simplest 8362 representation of the arguments so that their form can be 8363 studied. In any case, the appropriate type conversions 8364 should be put back in the tree that will get out of the 8365 constant folder. */ 8366 STRIP_NOPS (arg0); 8367 } 8368 } 8369 8370 if (TREE_CODE_CLASS (code) == tcc_unary) 8371 { 8372 if (TREE_CODE (arg0) == COMPOUND_EXPR) 8373 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 8374 fold_build1_loc (loc, code, type, 8375 fold_convert_loc (loc, TREE_TYPE (op0), 8376 TREE_OPERAND (arg0, 1)))); 8377 else if (TREE_CODE (arg0) == COND_EXPR) 8378 { 8379 tree arg01 = TREE_OPERAND (arg0, 1); 8380 tree arg02 = TREE_OPERAND (arg0, 2); 8381 if (! VOID_TYPE_P (TREE_TYPE (arg01))) 8382 arg01 = fold_build1_loc (loc, code, type, 8383 fold_convert_loc (loc, 8384 TREE_TYPE (op0), arg01)); 8385 if (! VOID_TYPE_P (TREE_TYPE (arg02))) 8386 arg02 = fold_build1_loc (loc, code, type, 8387 fold_convert_loc (loc, 8388 TREE_TYPE (op0), arg02)); 8389 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0), 8390 arg01, arg02); 8391 8392 /* If this was a conversion, and all we did was to move it 8393 inside the COND_EXPR, bring it back out. But leave it if 8394 it is a conversion from integer to integer and the 8395 result precision is no wider than a word since such a 8396 conversion is cheap and may be optimized away by combine, 8397 while it couldn't if it were outside the COND_EXPR.
Then return 8398 so we don't get into an infinite recursion loop taking the 8399 conversion out and then back in. */ 8400 8401 if ((CONVERT_EXPR_CODE_P (code) 8402 || code == NON_LVALUE_EXPR) 8403 && TREE_CODE (tem) == COND_EXPR 8404 && TREE_CODE (TREE_OPERAND (tem, 1)) == code 8405 && TREE_CODE (TREE_OPERAND (tem, 2)) == code 8406 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1)) 8407 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2)) 8408 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)) 8409 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0))) 8410 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 8411 && (INTEGRAL_TYPE_P 8412 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)))) 8413 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD) 8414 || flag_syntax_only)) 8415 { 8416 tem = build1 (code, type, 8417 build3 (COND_EXPR, 8418 TREE_TYPE (TREE_OPERAND 8419 (TREE_OPERAND (tem, 1), 0)), 8420 TREE_OPERAND (tem, 0), 8421 TREE_OPERAND (TREE_OPERAND (tem, 1), 0), 8422 TREE_OPERAND (TREE_OPERAND (tem, 2), 0))); 8423 SET_EXPR_LOCATION (tem, loc); 8424 } 8425 return tem; 8426 } 8427 else if (COMPARISON_CLASS_P (arg0)) 8428 { 8429 if (TREE_CODE (type) == BOOLEAN_TYPE) 8430 { 8431 arg0 = copy_node (arg0); 8432 TREE_TYPE (arg0) = type; 8433 return arg0; 8434 } 8435 else if (TREE_CODE (type) != INTEGER_TYPE) 8436 return fold_build3_loc (loc, COND_EXPR, type, arg0, 8437 fold_build1_loc (loc, code, type, 8438 integer_one_node), 8439 fold_build1_loc (loc, code, type, 8440 integer_zero_node)); 8441 } 8442 } 8443 8444 switch (code) 8445 { 8446 case PAREN_EXPR: 8447 /* Re-association barriers around constants and other re-association 8448 barriers can be removed. */ 8449 if (CONSTANT_CLASS_P (op0) 8450 || TREE_CODE (op0) == PAREN_EXPR) 8451 return fold_convert_loc (loc, type, op0); 8452 return NULL_TREE; 8453 8454 CASE_CONVERT: 8455 case FLOAT_EXPR: 8456 case FIX_TRUNC_EXPR: 8457 if (TREE_TYPE (op0) == type) 8458 return op0; 8459 8460 /* If we have (type) (a CMP b) and type is an integral type, return 8461 new expression involving the new type. */ 8462 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type)) 8463 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0), 8464 TREE_OPERAND (op0, 1)); 8465 8466 /* Handle cases of two conversions in a row. 
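E.g. for an unsigned char C, (int) (unsigned int) C folds to the single conversion (int) C, since the intermediate widening changes neither the value nor the signedness of the result.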
*/ 8467 if (CONVERT_EXPR_P (op0)) 8468 { 8469 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0)); 8470 tree inter_type = TREE_TYPE (op0); 8471 int inside_int = INTEGRAL_TYPE_P (inside_type); 8472 int inside_ptr = POINTER_TYPE_P (inside_type); 8473 int inside_float = FLOAT_TYPE_P (inside_type); 8474 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE; 8475 unsigned int inside_prec = TYPE_PRECISION (inside_type); 8476 int inside_unsignedp = TYPE_UNSIGNED (inside_type); 8477 int inter_int = INTEGRAL_TYPE_P (inter_type); 8478 int inter_ptr = POINTER_TYPE_P (inter_type); 8479 int inter_float = FLOAT_TYPE_P (inter_type); 8480 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE; 8481 unsigned int inter_prec = TYPE_PRECISION (inter_type); 8482 int inter_unsignedp = TYPE_UNSIGNED (inter_type); 8483 int final_int = INTEGRAL_TYPE_P (type); 8484 int final_ptr = POINTER_TYPE_P (type); 8485 int final_float = FLOAT_TYPE_P (type); 8486 int final_vec = TREE_CODE (type) == VECTOR_TYPE; 8487 unsigned int final_prec = TYPE_PRECISION (type); 8488 int final_unsignedp = TYPE_UNSIGNED (type); 8489 8490 /* In addition to the cases of two conversions in a row 8491 handled below, if we are converting something to its own 8492 type via an object of identical or wider precision, neither 8493 conversion is needed. */ 8494 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type) 8495 && (((inter_int || inter_ptr) && final_int) 8496 || (inter_float && final_float)) 8497 && inter_prec >= final_prec) 8498 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 8499 8500 /* Likewise, if the intermediate and initial types are either both 8501 float or both integer, we don't need the middle conversion if the 8502 former is wider than the latter and doesn't change the signedness 8503 (for integers). Avoid this if the final type is a pointer since 8504 then we sometimes need the middle conversion. Likewise if the 8505 final type has a precision not equal to the size of its mode. */ 8506 if (((inter_int && inside_int) 8507 || (inter_float && inside_float) 8508 || (inter_vec && inside_vec)) 8509 && inter_prec >= inside_prec 8510 && (inter_float || inter_vec 8511 || inter_unsignedp == inside_unsignedp) 8512 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 8513 && TYPE_MODE (type) == TYPE_MODE (inter_type)) 8514 && ! final_ptr 8515 && (! final_vec || inter_prec == inside_prec)) 8516 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 8517 8518 /* If we have a sign-extension of a zero-extended value, we can 8519 replace that by a single zero-extension. */ 8520 if (inside_int && inter_int && final_int 8521 && inside_prec < inter_prec && inter_prec < final_prec 8522 && inside_unsignedp && !inter_unsignedp) 8523 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 8524 8525 /* Two conversions in a row are not needed unless: 8526 - some conversion is floating-point (overstrict for now), or 8527 - some conversion is a vector (overstrict for now), or 8528 - the intermediate type is narrower than both initial and 8529 final, or 8530 - the intermediate type and innermost type differ in signedness, 8531 and the outermost type is wider than the intermediate, or 8532 - the initial type is a pointer type and the precisions of the 8533 intermediate and final types differ, or 8534 - the final type is a pointer type and the precisions of the 8535 initial and intermediate types differ. */ 8536 if (! inside_float && ! inter_float && ! final_float 8537 && ! inside_vec && ! inter_vec && ! 
final_vec 8538 && (inter_prec >= inside_prec || inter_prec >= final_prec) 8539 && ! (inside_int && inter_int 8540 && inter_unsignedp != inside_unsignedp 8541 && inter_prec < final_prec) 8542 && ((inter_unsignedp && inter_prec > inside_prec) 8543 == (final_unsignedp && final_prec > inter_prec)) 8544 && ! (inside_ptr && inter_prec != final_prec) 8545 && ! (final_ptr && inside_prec != inter_prec) 8546 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 8547 && TYPE_MODE (type) == TYPE_MODE (inter_type))) 8548 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 8549 } 8550 8551 /* Handle (T *)&A.B.C for A being of type T and B and C 8552 living at offset zero. This occurs frequently in 8553 C++ upcasting and then accessing the base. */ 8554 if (TREE_CODE (op0) == ADDR_EXPR 8555 && POINTER_TYPE_P (type) 8556 && handled_component_p (TREE_OPERAND (op0, 0))) 8557 { 8558 HOST_WIDE_INT bitsize, bitpos; 8559 tree offset; 8560 enum machine_mode mode; 8561 int unsignedp, volatilep; 8562 tree base = TREE_OPERAND (op0, 0); 8563 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 8564 &mode, &unsignedp, &volatilep, false); 8565 /* If the reference was to a (constant) zero offset, we can use 8566 the address of the base if it has the same base type 8567 as the result type. */ 8568 if (! offset && bitpos == 0 8569 && TYPE_MAIN_VARIANT (TREE_TYPE (type)) 8570 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 8571 return fold_convert_loc (loc, type, 8572 build_fold_addr_expr_loc (loc, base)); 8573 } 8574 8575 if (TREE_CODE (op0) == MODIFY_EXPR 8576 && TREE_CONSTANT (TREE_OPERAND (op0, 1)) 8577 /* Detect assigning a bitfield. */ 8578 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF 8579 && DECL_BIT_FIELD 8580 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1)))) 8581 { 8582 /* Don't leave an assignment inside a conversion 8583 unless assigning a bitfield. */ 8584 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1)); 8585 /* First do the assignment, then return converted constant. */ 8586 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); 8587 TREE_NO_WARNING (tem) = 1; 8588 TREE_USED (tem) = 1; 8589 SET_EXPR_LOCATION (tem, loc); 8590 return tem; 8591 } 8592 8593 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer 8594 constant (if x has signed type, the sign bit cannot be set 8595 in c). This folds extension into the BIT_AND_EXPR. 8596 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they 8597 very likely don't have maximal range for their precision and this 8598 transformation effectively doesn't preserve non-maximal ranges.
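E.g. for a signed char X, (unsigned int) (X & 0x7f) becomes (unsigned int) X & 0x7f, as the mask already clears the sign bit.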
*/ 8599 if (TREE_CODE (type) == INTEGER_TYPE 8600 && TREE_CODE (op0) == BIT_AND_EXPR 8601 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST) 8602 { 8603 tree and_expr = op0; 8604 tree and0 = TREE_OPERAND (and_expr, 0); 8605 tree and1 = TREE_OPERAND (and_expr, 1); 8606 int change = 0; 8607 8608 if (TYPE_UNSIGNED (TREE_TYPE (and_expr)) 8609 || (TYPE_PRECISION (type) 8610 <= TYPE_PRECISION (TREE_TYPE (and_expr)))) 8611 change = 1; 8612 else if (TYPE_PRECISION (TREE_TYPE (and1)) 8613 <= HOST_BITS_PER_WIDE_INT 8614 && host_integerp (and1, 1)) 8615 { 8616 unsigned HOST_WIDE_INT cst; 8617 8618 cst = tree_low_cst (and1, 1); 8619 cst &= (HOST_WIDE_INT) -1 8620 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1); 8621 change = (cst == 0); 8622 #ifdef LOAD_EXTEND_OP 8623 if (change 8624 && !flag_syntax_only 8625 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0))) 8626 == ZERO_EXTEND)) 8627 { 8628 tree uns = unsigned_type_for (TREE_TYPE (and0)); 8629 and0 = fold_convert_loc (loc, uns, and0); 8630 and1 = fold_convert_loc (loc, uns, and1); 8631 } 8632 #endif 8633 } 8634 if (change) 8635 { 8636 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1), 8637 TREE_INT_CST_HIGH (and1), 0, 8638 TREE_OVERFLOW (and1)); 8639 return fold_build2_loc (loc, BIT_AND_EXPR, type, 8640 fold_convert_loc (loc, type, and0), tem); 8641 } 8642 } 8643 8644 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, 8645 when one of the new casts will fold away. Conservatively we assume 8646 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */ 8647 if (POINTER_TYPE_P (type) 8648 && TREE_CODE (arg0) == POINTER_PLUS_EXPR 8649 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8650 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR 8651 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR)) 8652 { 8653 tree arg00 = TREE_OPERAND (arg0, 0); 8654 tree arg01 = TREE_OPERAND (arg0, 1); 8655 8656 return fold_build2_loc (loc, 8657 TREE_CODE (arg0), type, 8658 fold_convert_loc (loc, type, arg00), 8659 fold_convert_loc (loc, sizetype, arg01)); 8660 } 8661 8662 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types 8663 of the same precision, and X is an integer type not narrower than 8664 types T1 or T2, i.e. the cast (T2)X isn't an extension. */ 8665 if (INTEGRAL_TYPE_P (type) 8666 && TREE_CODE (op0) == BIT_NOT_EXPR 8667 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8668 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0)) 8669 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 8670 { 8671 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0); 8672 if (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 8673 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem))) 8674 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 8675 fold_convert_loc (loc, type, tem)); 8676 } 8677 8678 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the 8679 type of X and Y (integer types only). */ 8680 if (INTEGRAL_TYPE_P (type) 8681 && TREE_CODE (op0) == MULT_EXPR 8682 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8683 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))) 8684 { 8685 /* Be careful not to introduce new overflows. 
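E.g. when narrowing (short) (X * Y) from int operands, the multiplication is performed as (unsigned short) X * (unsigned short) Y and only the result is converted back, so no new signed overflow is introduced.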
*/ 8686 tree mult_type; 8687 if (TYPE_OVERFLOW_WRAPS (type)) 8688 mult_type = type; 8689 else 8690 mult_type = unsigned_type_for (type); 8691 8692 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0))) 8693 { 8694 tem = fold_build2_loc (loc, MULT_EXPR, mult_type, 8695 fold_convert_loc (loc, mult_type, 8696 TREE_OPERAND (op0, 0)), 8697 fold_convert_loc (loc, mult_type, 8698 TREE_OPERAND (op0, 1))); 8699 return fold_convert_loc (loc, type, tem); 8700 } 8701 } 8702 8703 tem = fold_convert_const (code, type, op0); 8704 return tem ? tem : NULL_TREE; 8705 8706 case ADDR_SPACE_CONVERT_EXPR: 8707 if (integer_zerop (arg0)) 8708 return fold_convert_const (code, type, arg0); 8709 return NULL_TREE; 8710 8711 case FIXED_CONVERT_EXPR: 8712 tem = fold_convert_const (code, type, arg0); 8713 return tem ? tem : NULL_TREE; 8714 8715 case VIEW_CONVERT_EXPR: 8716 if (TREE_TYPE (op0) == type) 8717 return op0; 8718 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) 8719 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, 8720 type, TREE_OPERAND (op0, 0)); 8721 8722 /* For integral conversions with the same precision or pointer 8723 conversions use a NOP_EXPR instead. */ 8724 if ((INTEGRAL_TYPE_P (type) 8725 || POINTER_TYPE_P (type)) 8726 && (INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8727 || POINTER_TYPE_P (TREE_TYPE (op0))) 8728 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 8729 return fold_convert_loc (loc, type, op0); 8730 8731 /* Strip inner integral conversions that do not change the precision. */ 8732 if (CONVERT_EXPR_P (op0) 8733 && (INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8734 || POINTER_TYPE_P (TREE_TYPE (op0))) 8735 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))) 8736 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))) 8737 && (TYPE_PRECISION (TREE_TYPE (op0)) 8738 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0))))) 8739 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, 8740 type, TREE_OPERAND (op0, 0)); 8741 8742 return fold_view_convert_expr (type, op0); 8743 8744 case NEGATE_EXPR: 8745 tem = fold_negate_expr (loc, arg0); 8746 if (tem) 8747 return fold_convert_loc (loc, type, tem); 8748 return NULL_TREE; 8749 8750 case ABS_EXPR: 8751 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) 8752 return fold_abs_const (arg0, type); 8753 else if (TREE_CODE (arg0) == NEGATE_EXPR) 8754 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0)); 8755 /* Convert fabs((double)float) into (double)fabsf(float). */ 8756 else if (TREE_CODE (arg0) == NOP_EXPR 8757 && TREE_CODE (type) == REAL_TYPE) 8758 { 8759 tree targ0 = strip_float_extensions (arg0); 8760 if (targ0 != arg0) 8761 return fold_convert_loc (loc, type, 8762 fold_build1_loc (loc, ABS_EXPR, 8763 TREE_TYPE (targ0), 8764 targ0)); 8765 } 8766 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */ 8767 else if (TREE_CODE (arg0) == ABS_EXPR) 8768 return arg0; 8769 else if (tree_expr_nonnegative_p (arg0)) 8770 return arg0; 8771 8772 /* Strip sign ops from argument. 
*/ 8773 if (TREE_CODE (type) == REAL_TYPE) 8774 { 8775 tem = fold_strip_sign_ops (arg0); 8776 if (tem) 8777 return fold_build1_loc (loc, ABS_EXPR, type, 8778 fold_convert_loc (loc, type, tem)); 8779 } 8780 return NULL_TREE; 8781 8782 case CONJ_EXPR: 8783 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8784 return fold_convert_loc (loc, type, arg0); 8785 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8786 { 8787 tree itype = TREE_TYPE (type); 8788 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0)); 8789 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1)); 8790 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, 8791 negate_expr (ipart)); 8792 } 8793 if (TREE_CODE (arg0) == COMPLEX_CST) 8794 { 8795 tree itype = TREE_TYPE (type); 8796 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0)); 8797 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0)); 8798 return build_complex (type, rpart, negate_expr (ipart)); 8799 } 8800 if (TREE_CODE (arg0) == CONJ_EXPR) 8801 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 8802 return NULL_TREE; 8803 8804 case BIT_NOT_EXPR: 8805 if (TREE_CODE (arg0) == INTEGER_CST) 8806 return fold_not_const (arg0, type); 8807 else if (TREE_CODE (arg0) == BIT_NOT_EXPR) 8808 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 8809 /* Convert ~ (-A) to A - 1. */ 8810 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR) 8811 return fold_build2_loc (loc, MINUS_EXPR, type, 8812 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)), 8813 build_int_cst (type, 1)); 8814 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */ 8815 else if (INTEGRAL_TYPE_P (type) 8816 && ((TREE_CODE (arg0) == MINUS_EXPR 8817 && integer_onep (TREE_OPERAND (arg0, 1))) 8818 || (TREE_CODE (arg0) == PLUS_EXPR 8819 && integer_all_onesp (TREE_OPERAND (arg0, 1))))) 8820 return fold_build1_loc (loc, NEGATE_EXPR, type, 8821 fold_convert_loc (loc, type, 8822 TREE_OPERAND (arg0, 0))); 8823 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */ 8824 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8825 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8826 fold_convert_loc (loc, type, 8827 TREE_OPERAND (arg0, 0))))) 8828 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem, 8829 fold_convert_loc (loc, type, 8830 TREE_OPERAND (arg0, 1))); 8831 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8832 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8833 fold_convert_loc (loc, type, 8834 TREE_OPERAND (arg0, 1))))) 8835 return fold_build2_loc (loc, BIT_XOR_EXPR, type, 8836 fold_convert_loc (loc, type, 8837 TREE_OPERAND (arg0, 0)), tem); 8838 /* Perform BIT_NOT_EXPR on each element individually. */ 8839 else if (TREE_CODE (arg0) == VECTOR_CST) 8840 { 8841 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE; 8842 int count = TYPE_VECTOR_SUBPARTS (type), i; 8843 8844 for (i = 0; i < count; i++) 8845 { 8846 if (elements) 8847 { 8848 elem = TREE_VALUE (elements); 8849 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem); 8850 if (elem == NULL_TREE) 8851 break; 8852 elements = TREE_CHAIN (elements); 8853 } 8854 else 8855 elem = build_int_cst (TREE_TYPE (type), -1); 8856 list = tree_cons (NULL_TREE, elem, list); 8857 } 8858 if (i == count) 8859 return build_vector (type, nreverse (list)); 8860 } 8861 8862 return NULL_TREE; 8863 8864 case TRUTH_NOT_EXPR: 8865 /* The argument to invert_truthvalue must have Boolean type. 
*/ 8866 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 8867 arg0 = fold_convert_loc (loc, boolean_type_node, arg0); 8868 8869 /* Note that the operand of this must be an int 8870 and its values must be 0 or 1. 8871 ("true" is a fixed value perhaps depending on the language, 8872 but we don't handle values other than 1 correctly yet.) */ 8873 tem = fold_truth_not_expr (loc, arg0); 8874 if (!tem) 8875 return NULL_TREE; 8876 return fold_convert_loc (loc, type, tem); 8877 8878 case REALPART_EXPR: 8879 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8880 return fold_convert_loc (loc, type, arg0); 8881 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8882 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0), 8883 TREE_OPERAND (arg0, 1)); 8884 if (TREE_CODE (arg0) == COMPLEX_CST) 8885 return fold_convert_loc (loc, type, TREE_REALPART (arg0)); 8886 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8887 { 8888 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8889 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8890 fold_build1_loc (loc, REALPART_EXPR, itype, 8891 TREE_OPERAND (arg0, 0)), 8892 fold_build1_loc (loc, REALPART_EXPR, itype, 8893 TREE_OPERAND (arg0, 1))); 8894 return fold_convert_loc (loc, type, tem); 8895 } 8896 if (TREE_CODE (arg0) == CONJ_EXPR) 8897 { 8898 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8899 tem = fold_build1_loc (loc, REALPART_EXPR, itype, 8900 TREE_OPERAND (arg0, 0)); 8901 return fold_convert_loc (loc, type, tem); 8902 } 8903 if (TREE_CODE (arg0) == CALL_EXPR) 8904 { 8905 tree fn = get_callee_fndecl (arg0); 8906 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8907 switch (DECL_FUNCTION_CODE (fn)) 8908 { 8909 CASE_FLT_FN (BUILT_IN_CEXPI): 8910 fn = mathfn_built_in (type, BUILT_IN_COS); 8911 if (fn) 8912 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8913 break; 8914 8915 default: 8916 break; 8917 } 8918 } 8919 return NULL_TREE; 8920 8921 case IMAGPART_EXPR: 8922 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8923 return fold_convert_loc (loc, type, integer_zero_node); 8924 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8925 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1), 8926 TREE_OPERAND (arg0, 0)); 8927 if (TREE_CODE (arg0) == COMPLEX_CST) 8928 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0)); 8929 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8930 { 8931 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8932 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8933 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8934 TREE_OPERAND (arg0, 0)), 8935 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8936 TREE_OPERAND (arg0, 1))); 8937 return fold_convert_loc (loc, type, tem); 8938 } 8939 if (TREE_CODE (arg0) == CONJ_EXPR) 8940 { 8941 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8942 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); 8943 return fold_convert_loc (loc, type, negate_expr (tem)); 8944 } 8945 if (TREE_CODE (arg0) == CALL_EXPR) 8946 { 8947 tree fn = get_callee_fndecl (arg0); 8948 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8949 switch (DECL_FUNCTION_CODE (fn)) 8950 { 8951 CASE_FLT_FN (BUILT_IN_CEXPI): 8952 fn = mathfn_built_in (type, BUILT_IN_SIN); 8953 if (fn) 8954 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8955 break; 8956 8957 default: 8958 break; 8959 } 8960 } 8961 return NULL_TREE; 8962 8963 case INDIRECT_REF: 8964 /* Fold *&X to X if X is an lvalue. 
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}


/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}

/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
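/* Illustrative sketch (not part of the original sources): fold_minmax
   implements the absorption laws of min/max, e.g. for ints a and b

     MIN (MAX (a, b), b)  ==>  b
     MAX (a, MIN (a, b))  ==>  a

   omit_one_operand_loc is used rather than returning the operand
   directly so that side effects in the dropped operand are still
   evaluated.  */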
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
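/* Illustrative sketch (not part of the original sources): for signed
   int x, where signed overflow is undefined, the helper above rewrites
   e.g.

     x + 2 > y  ==>  x + 1 >= y
     3 <= y     ==>  2 < y        (then swapped to y > 2)

   each step shrinking the magnitude of the constant by one, which
   drives distinct but equivalent comparisons towards a single
   canonical form.  */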
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
                            bitpos / BITS_PER_UNIT, 0,
                            &total_low, &total_high,
                            true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
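/* Illustrative sketch (not part of the original sources): the predicate
   above answers conservatively.  Given

     struct s { int a; int x; } *p;

   the address &p->x is base p plus a small constant byte offset that
   stays inside the pointed-to object, so pointer_may_wrap_p returns
   false; any offset it cannot prove in range yields true.  */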
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary instead.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                             TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C1 +- C2"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }
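  /* Illustrative sketch (not part of the original sources): with signed
     int x and undefined signed overflow,

       x + 10 < 30           ==>  x < 20
       x + 2 < INT_MIN + 1   ==>  false

     the second case follows from the INT_MIN canonicalization above:
     since x + 2 cannot underflow, the comparison is already decided.  */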
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert_loc (loc, signed_size_type_node,
                                            offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert_loc (loc, signed_size_type_node,
                                            offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
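  /* Illustrative sketch (not part of the original sources): given
     int a[10], the decomposition above reduces

       &a[2] == &a[2]  ==>  true    (same base, same bit position)
       &a[2] <  &a[6]  ==>  true    (same base, bitpos0 < bitpos1)
       &x == &y        ==>  false   (distinct local decls)

     where the ordering folds additionally rely on pointer overflow
     being undefined.  */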
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc,
                                                   TREE_CODE (arg1), TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }
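  /* Illustrative sketch (not part of the original sources): with signed
     ints x and y and undefined signed overflow,

       x + 8 < y + 10  ==>  x < y + 2

     the combined constant 2 is smaller in absolute value than both 8
     and 10, so the rewrite is performed under the usual assumption
     that the original expression did not overflow.  */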
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
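  /* Illustrative sketch (not part of the original sources): for signed
     int x the multiplication elimination above gives

       x * 4 > 0    ==>  x > 0
       x * -4 > 0   ==>  x < 0    (a negative factor flips the sense)

     which is only valid because signed multiplication overflow is
     treated as undefined here.  */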
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
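  /* Illustrative sketch (not part of the original sources): for an
     integer i and a double d that may be NaN,

       i == i  ==>  true         d <  d  ==>  false  (false for NaN too)
       d >= d  ==>  d == d       d != d  ==>  (not folded; the NaN test)

     so IEEE NaN semantics block exactly the folds that would change
     behavior.  */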
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
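  /* Illustrative sketch (not part of the original sources): to fold
     (a > b) == 0, the expression is evaluated for the three orderings
     of a and b:

       a > b:  (1) == 0 -> 0     a == b:  (0) == 0 -> 1     a < b:  (0) == 0 -> 1

     giving the mask 4*0 + 2*1 + 1 == 3, i.e. LE_EXPR, so the whole
     expression folds to a <= b.  */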
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}


/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          fold_convert_loc (loc, itype, integer_zero_node));
}
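/* Illustrative sketch (not part of the original sources): for
   z = a + b*i we have conj(z) = a - b*i, hence

     z * conj(z) = (a*a + b*b) + 0*i

   fold_mult_zconjz builds exactly that COMPLEX_EXPR, wrapping the
   parts in save_expr so a side-effecting z is evaluated only once.  */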
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This
   condition guarantees that P and N have the same least significant
   log2(M) bits.  N is not otherwise constrained.  In particular, N is
   not normalized to 0 <= N < M as is common.  In general, the precise
   value of P is unknown.  M is chosen as large as possible such that
   constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr)
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
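/* Illustrative sketch (not part of the original sources): for a
   16-byte-aligned char buf[64], the expression &buf[0] + i * 4 yields
   modulus M == 4 (the alignment 16 capped by the greatest power-of-2
   divisor of the multiplier 4) and residue N == 0, i.e. the pointer
   value is known to be 0 mod 4 even though its exact value is not.  */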
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
          goto fold_binary_exit;
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
          goto fold_binary_exit;
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }
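  /* Illustrative sketch (not part of the original sources): the
     truth-value conversion above turns e.g.

       (a < b) & (c < d)   ==>  (a < b) && (c < d)
       (a < b) == (c < d)  ==>  !((a < b) ^ (c < d))

     because on single-bit truth values the bitwise and logical
     operators agree, and the TRUTH_* forms expose more folding
     opportunities downstream.  */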
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                    TREE_TYPE (arg00),
                                                    arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
         of the array.  Loop optimizers sometimes produce this kind of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc, sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
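      /* Illustrative sketch (not part of the original sources): for
         int a[16] and indices i1, i2 the reassociations above give

           (p +p 4) +p 8      ==>  p +p 12
           &a[i1] +p 4 * i2   ==>  &a[i1 + i2]   (4 == sizeof (int) here)

         keeping pointer arithmetic in the canonical PTR +p OFFSET
         shape.  */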
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
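      /* Illustrative sketch (not part of the original sources): the
         X + (X / CST) * -CST fold recovers a remainder, e.g. for int x

           x + (x / 8) * -8  ==>  x % 8

         since x == (x / 8) * 8 + x % 8 by the definition of truncating
         division.  */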
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
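      /* Illustrative sketch (not part of the original sources): the
         disjoint-mask rewrite above turns e.g.

           (x & 0xf0) + (y & 0x0f)  ==>  (x & 0xf0) | (y & 0x0f)

         which is safe because no carries can propagate between the two
         operands when the constant masks share no bits.  */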
 bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2 (LROTATE_EXPR,
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                              TREE_OPERAND (arg0, 0),
                              code0 == LSHIFT_EXPR
                              ? tree01 : tree11);
                SET_EXPR_LOCATION (tem, loc);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
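      /* Illustrative sketch (not part of the original sources): for a
         32-bit unsigned int x the patterns above recognize

           (x << 5) + (x >> 27)        ==>  x lrotate 5
           (x << n) + (x >> (32 - n))  ==>  x lrotate n

         i.e. the classic portable spelling of a rotate, valid because
         x is unsigned and the shift counts sum to the precision.  */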
 associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }

      return NULL_TREE;
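      /* Illustrative sketch (not part of the original sources): the
         reassociation above rewrites e.g., for unsigned x and y,

           (x + 3) + (y + 5)  ==>  (x + y) + 8

         by splitting each operand into variable, constant and literal
         parts, recombining the groups, and summing the results; for
         wrapping unsigned types this is always valid, while for types
         with undefined overflow it is restricted as noted.  */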
Rather than remember where the parentheses were, we
10613 don't associate floats at all, unless the user has specified
10614 -fassociative-math.
10615 Also, we need to make sure the type is not saturating. */
10616
10617 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10618 && !TYPE_SATURATING (type))
10619 {
10620 tree var0, con0, lit0, minus_lit0;
10621 tree var1, con1, lit1, minus_lit1;
10622 bool ok = true;
10623
10624 /* Split both trees into variables, constants, and literals. Then
10625 associate each group together, the constants with literals,
10626 then the result with variables. This increases the chances of
10627 literals being recombined later and of generating relocatable
10628 expressions for the sum of a constant and literal. */
10629 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10630 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10631 code == MINUS_EXPR);
10632
10633 /* With undefined overflow we can only associate constants
10634 with one variable. */
10635 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10636 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10637 && var0 && var1)
10638 {
10639 tree tmp0 = var0;
10640 tree tmp1 = var1;
10641
10642 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10643 tmp0 = TREE_OPERAND (tmp0, 0);
10644 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10645 tmp1 = TREE_OPERAND (tmp1, 0);
10646 /* The only case we can still associate with two variables
10647 is if they are the same, modulo negation. */
10648 if (!operand_equal_p (tmp0, tmp1, 0))
10649 ok = false;
10650 }
10651
10652 /* Only do something if we found more than two objects. Otherwise,
10653 nothing has changed and we risk infinite recursion. */
10654 if (ok
10655 && (2 < ((var0 != 0) + (var1 != 0)
10656 + (con0 != 0) + (con1 != 0)
10657 + (lit0 != 0) + (lit1 != 0)
10658 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10659 {
10660 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10661 if (code == MINUS_EXPR)
10662 code = PLUS_EXPR;
10663
10664 var0 = associate_trees (loc, var0, var1, code, type);
10665 con0 = associate_trees (loc, con0, con1, code, type);
10666 lit0 = associate_trees (loc, lit0, lit1, code, type);
10667 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10668
10669 /* Preserve the MINUS_EXPR if the negative part of the literal is
10670 greater than the positive part. Otherwise, the multiplicative
10671 folding code (i.e. extract_muldiv) may be fooled when
10672 unsigned constants are subtracted, like in the following
10673 example: ((X*2 + 4) - 8U)/2.
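Folding 4 - 8U into the wrapped unsigned constant and then letting
extract_muldiv distribute the division would give, e.g. on a 32-bit
unsigned type, X + 0x7FFFFFFE instead of the correct X - 2.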
*/ 10674 if (minus_lit0 && lit0) 10675 { 10676 if (TREE_CODE (lit0) == INTEGER_CST 10677 && TREE_CODE (minus_lit0) == INTEGER_CST 10678 && tree_int_cst_lt (lit0, minus_lit0)) 10679 { 10680 minus_lit0 = associate_trees (loc, minus_lit0, lit0, 10681 MINUS_EXPR, type); 10682 lit0 = 0; 10683 } 10684 else 10685 { 10686 lit0 = associate_trees (loc, lit0, minus_lit0, 10687 MINUS_EXPR, type); 10688 minus_lit0 = 0; 10689 } 10690 } 10691 if (minus_lit0) 10692 { 10693 if (con0 == 0) 10694 return 10695 fold_convert_loc (loc, type, 10696 associate_trees (loc, var0, minus_lit0, 10697 MINUS_EXPR, type)); 10698 else 10699 { 10700 con0 = associate_trees (loc, con0, minus_lit0, 10701 MINUS_EXPR, type); 10702 return 10703 fold_convert_loc (loc, type, 10704 associate_trees (loc, var0, con0, 10705 PLUS_EXPR, type)); 10706 } 10707 } 10708 10709 con0 = associate_trees (loc, con0, lit0, code, type); 10710 return 10711 fold_convert_loc (loc, type, associate_trees (loc, var0, con0, 10712 code, type)); 10713 } 10714 } 10715 10716 return NULL_TREE; 10717 10718 case MINUS_EXPR: 10719 /* Pointer simplifications for subtraction, simple reassociations. */ 10720 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0))) 10721 { 10722 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */ 10723 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR 10724 && TREE_CODE (arg1) == POINTER_PLUS_EXPR) 10725 { 10726 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10727 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10728 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 10729 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 10730 return fold_build2_loc (loc, PLUS_EXPR, type, 10731 fold_build2_loc (loc, MINUS_EXPR, type, 10732 arg00, arg10), 10733 fold_build2_loc (loc, MINUS_EXPR, type, 10734 arg01, arg11)); 10735 } 10736 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */ 10737 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 10738 { 10739 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10740 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10741 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00, 10742 fold_convert_loc (loc, type, arg1)); 10743 if (tmp) 10744 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01); 10745 } 10746 } 10747 /* A - (-B) -> A + B */ 10748 if (TREE_CODE (arg1) == NEGATE_EXPR) 10749 return fold_build2_loc (loc, PLUS_EXPR, type, op0, 10750 fold_convert_loc (loc, type, 10751 TREE_OPERAND (arg1, 0))); 10752 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ 10753 if (TREE_CODE (arg0) == NEGATE_EXPR 10754 && (FLOAT_TYPE_P (type) 10755 || INTEGRAL_TYPE_P (type)) 10756 && negate_expr_p (arg1) 10757 && reorder_operands_p (arg0, arg1)) 10758 return fold_build2_loc (loc, MINUS_EXPR, type, 10759 fold_convert_loc (loc, type, 10760 negate_expr (arg1)), 10761 fold_convert_loc (loc, type, 10762 TREE_OPERAND (arg0, 0))); 10763 /* Convert -A - 1 to ~A. */ 10764 if (INTEGRAL_TYPE_P (type) 10765 && TREE_CODE (arg0) == NEGATE_EXPR 10766 && integer_onep (arg1) 10767 && !TYPE_OVERFLOW_TRAPS (type)) 10768 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 10769 fold_convert_loc (loc, type, 10770 TREE_OPERAND (arg0, 0))); 10771 10772 /* Convert -1 - A to ~A. */ 10773 if (INTEGRAL_TYPE_P (type) 10774 && integer_all_onesp (arg0)) 10775 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1); 10776 10777 10778 /* X - (X / CST) * CST is X % CST. 
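For example, with X == 7 and CST == 4, 7 - (7/4)*4 == 7 - 4 == 3,
which is exactly 7 % 4.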
*/ 10779 if (INTEGRAL_TYPE_P (type) 10780 && TREE_CODE (arg1) == MULT_EXPR 10781 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR 10782 && operand_equal_p (arg0, 10783 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0) 10784 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1), 10785 TREE_OPERAND (arg1, 1), 0)) 10786 return 10787 fold_convert_loc (loc, type, 10788 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0), 10789 arg0, TREE_OPERAND (arg1, 1))); 10790 10791 if (! FLOAT_TYPE_P (type)) 10792 { 10793 if (integer_zerop (arg0)) 10794 return negate_expr (fold_convert_loc (loc, type, arg1)); 10795 if (integer_zerop (arg1)) 10796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10797 10798 /* Fold A - (A & B) into ~B & A. */ 10799 if (!TREE_SIDE_EFFECTS (arg0) 10800 && TREE_CODE (arg1) == BIT_AND_EXPR) 10801 { 10802 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)) 10803 { 10804 tree arg10 = fold_convert_loc (loc, type, 10805 TREE_OPERAND (arg1, 0)); 10806 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10807 fold_build1_loc (loc, BIT_NOT_EXPR, 10808 type, arg10), 10809 fold_convert_loc (loc, type, arg0)); 10810 } 10811 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10812 { 10813 tree arg11 = fold_convert_loc (loc, 10814 type, TREE_OPERAND (arg1, 1)); 10815 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10816 fold_build1_loc (loc, BIT_NOT_EXPR, 10817 type, arg11), 10818 fold_convert_loc (loc, type, arg0)); 10819 } 10820 } 10821 10822 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is 10823 any power of 2 minus 1. */ 10824 if (TREE_CODE (arg0) == BIT_AND_EXPR 10825 && TREE_CODE (arg1) == BIT_AND_EXPR 10826 && operand_equal_p (TREE_OPERAND (arg0, 0), 10827 TREE_OPERAND (arg1, 0), 0)) 10828 { 10829 tree mask0 = TREE_OPERAND (arg0, 1); 10830 tree mask1 = TREE_OPERAND (arg1, 1); 10831 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0); 10832 10833 if (operand_equal_p (tem, mask1, 0)) 10834 { 10835 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type, 10836 TREE_OPERAND (arg0, 0), mask1); 10837 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1); 10838 } 10839 } 10840 } 10841 10842 /* See if ARG1 is zero and X - ARG1 reduces to X. */ 10843 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1)) 10844 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10845 10846 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether 10847 ARG0 is zero and X + ARG0 reduces to X, since that would mean 10848 (-ARG1 + ARG0) reduces to -ARG1. */ 10849 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) 10850 return negate_expr (fold_convert_loc (loc, type, arg1)); 10851 10852 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to 10853 __complex__ ( x, -y ). This is not the same for SNaNs or if 10854 signed zeros are involved. 
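(With y == +0.0 the original imaginary part 0.0 - y is +0.0, whereas
the folded -y is -0.0, hence the signed-zero restriction.)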
*/ 10855 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 10856 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10857 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10858 { 10859 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10860 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); 10861 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); 10862 bool arg0rz = false, arg0iz = false; 10863 if ((arg0r && (arg0rz = real_zerop (arg0r))) 10864 || (arg0i && (arg0iz = real_zerop (arg0i)))) 10865 { 10866 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); 10867 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); 10868 if (arg0rz && arg1i && real_zerop (arg1i)) 10869 { 10870 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10871 arg1r ? arg1r 10872 : build1 (REALPART_EXPR, rtype, arg1)); 10873 tree ip = arg0i ? arg0i 10874 : build1 (IMAGPART_EXPR, rtype, arg0); 10875 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10876 } 10877 else if (arg0iz && arg1r && real_zerop (arg1r)) 10878 { 10879 tree rp = arg0r ? arg0r 10880 : build1 (REALPART_EXPR, rtype, arg0); 10881 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10882 arg1i ? arg1i 10883 : build1 (IMAGPART_EXPR, rtype, arg1)); 10884 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10885 } 10886 } 10887 } 10888 10889 /* Fold &x - &x. This can happen from &x.foo - &x. 10890 This is unsafe for certain floats even in non-IEEE formats. 10891 In IEEE, it is unsafe because it does wrong for NaNs. 10892 Also note that operand_equal_p is always false if an operand 10893 is volatile. */ 10894 10895 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type))) 10896 && operand_equal_p (arg0, arg1, 0)) 10897 return fold_convert_loc (loc, type, integer_zero_node); 10898 10899 /* A - B -> A + (-B) if B is easily negatable. */ 10900 if (negate_expr_p (arg1) 10901 && ((FLOAT_TYPE_P (type) 10902 /* Avoid this transformation if B is a positive REAL_CST. */ 10903 && (TREE_CODE (arg1) != REAL_CST 10904 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) 10905 || INTEGRAL_TYPE_P (type))) 10906 return fold_build2_loc (loc, PLUS_EXPR, type, 10907 fold_convert_loc (loc, type, arg0), 10908 fold_convert_loc (loc, type, 10909 negate_expr (arg1))); 10910 10911 /* Try folding difference of addresses. */ 10912 { 10913 HOST_WIDE_INT diff; 10914 10915 if ((TREE_CODE (arg0) == ADDR_EXPR 10916 || TREE_CODE (arg1) == ADDR_EXPR) 10917 && ptr_difference_const (arg0, arg1, &diff)) 10918 return build_int_cst_type (type, diff); 10919 } 10920 10921 /* Fold &a[i] - &a[j] to i-j. 
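More precisely, the byte difference of the two addresses is
(i - j) * sizeof (a[0]), so the code below multiplies the index
difference by the element size.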
*/ 10922 if (TREE_CODE (arg0) == ADDR_EXPR 10923 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF 10924 && TREE_CODE (arg1) == ADDR_EXPR 10925 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF) 10926 { 10927 tree aref0 = TREE_OPERAND (arg0, 0); 10928 tree aref1 = TREE_OPERAND (arg1, 0); 10929 if (operand_equal_p (TREE_OPERAND (aref0, 0), 10930 TREE_OPERAND (aref1, 0), 0)) 10931 { 10932 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1)); 10933 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1)); 10934 tree esz = array_ref_element_size (aref0); 10935 tree diff = build2 (MINUS_EXPR, type, op0, op1); 10936 return fold_build2_loc (loc, MULT_EXPR, type, diff, 10937 fold_convert_loc (loc, type, esz)); 10938 10939 } 10940 } 10941 10942 if (FLOAT_TYPE_P (type) 10943 && flag_unsafe_math_optimizations 10944 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 10945 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 10946 && (tem = distribute_real_division (loc, code, type, arg0, arg1))) 10947 return tem; 10948 10949 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the 10950 same or one. Make sure type is not saturating. 10951 fold_plusminus_mult_expr will re-associate. */ 10952 if ((TREE_CODE (arg0) == MULT_EXPR 10953 || TREE_CODE (arg1) == MULT_EXPR) 10954 && !TYPE_SATURATING (type) 10955 && (!FLOAT_TYPE_P (type) || flag_associative_math)) 10956 { 10957 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); 10958 if (tem) 10959 return tem; 10960 } 10961 10962 goto associate; 10963 10964 case MULT_EXPR: 10965 /* (-A) * (-B) -> A * B */ 10966 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 10967 return fold_build2_loc (loc, MULT_EXPR, type, 10968 fold_convert_loc (loc, type, 10969 TREE_OPERAND (arg0, 0)), 10970 fold_convert_loc (loc, type, 10971 negate_expr (arg1))); 10972 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 10973 return fold_build2_loc (loc, MULT_EXPR, type, 10974 fold_convert_loc (loc, type, 10975 negate_expr (arg0)), 10976 fold_convert_loc (loc, type, 10977 TREE_OPERAND (arg1, 0))); 10978 10979 if (! FLOAT_TYPE_P (type)) 10980 { 10981 if (integer_zerop (arg1)) 10982 return omit_one_operand_loc (loc, type, arg1, arg0); 10983 if (integer_onep (arg1)) 10984 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10985 /* Transform x * -1 into -x. Make sure to do the negation 10986 on the original operand with conversions not stripped 10987 because we can only strip non-sign-changing conversions. */ 10988 if (integer_all_onesp (arg1)) 10989 return fold_convert_loc (loc, type, negate_expr (op0)); 10990 /* Transform x * -C into -x * C if x is easily negatable. 
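For example, (-y) * -5 becomes y * 5, saving a negation.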
*/
10991 if (TREE_CODE (arg1) == INTEGER_CST
10992 && tree_int_cst_sgn (arg1) == -1
10993 && negate_expr_p (arg0)
10994 && (tem = negate_expr (arg1)) != arg1
10995 && !TREE_OVERFLOW (tem))
10996 return fold_build2_loc (loc, MULT_EXPR, type,
10997 fold_convert_loc (loc, type,
10998 negate_expr (arg0)),
10999 tem);
11000
11001 /* (a * (1 << b)) is (a << b) */
11002 if (TREE_CODE (arg1) == LSHIFT_EXPR
11003 && integer_onep (TREE_OPERAND (arg1, 0)))
11004 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11005 TREE_OPERAND (arg1, 1));
11006 if (TREE_CODE (arg0) == LSHIFT_EXPR
11007 && integer_onep (TREE_OPERAND (arg0, 0)))
11008 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11009 TREE_OPERAND (arg0, 1));
11010
11011 /* (A + A) * C -> A * 2 * C */
11012 if (TREE_CODE (arg0) == PLUS_EXPR
11013 && TREE_CODE (arg1) == INTEGER_CST
11014 && operand_equal_p (TREE_OPERAND (arg0, 0),
11015 TREE_OPERAND (arg0, 1), 0))
11016 return fold_build2_loc (loc, MULT_EXPR, type,
11017 omit_one_operand_loc (loc, type,
11018 TREE_OPERAND (arg0, 0),
11019 TREE_OPERAND (arg0, 1)),
11020 fold_build2_loc (loc, MULT_EXPR, type,
11021 build_int_cst (type, 2), arg1));
11022
11023 strict_overflow_p = false;
11024 if (TREE_CODE (arg1) == INTEGER_CST
11025 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11026 &strict_overflow_p)))
11027 {
11028 if (strict_overflow_p)
11029 fold_overflow_warning (("assuming signed overflow does not "
11030 "occur when simplifying "
11031 "multiplication"),
11032 WARN_STRICT_OVERFLOW_MISC);
11033 return fold_convert_loc (loc, type, tem);
11034 }
11035
11036 /* Optimize z * conj(z) for integer complex numbers. */
11037 if (TREE_CODE (arg0) == CONJ_EXPR
11038 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11039 return fold_mult_zconjz (loc, type, arg1);
11040 if (TREE_CODE (arg1) == CONJ_EXPR
11041 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11042 return fold_mult_zconjz (loc, type, arg0);
11043 }
11044 else
11045 {
11046 /* Maybe fold x * 0 to 0. The expressions aren't the same
11047 when x is NaN, since x * 0 is also NaN. Nor are they the
11048 same in modes with signed zeros, since multiplying a
11049 negative value by 0 gives -0, not +0. */
11050 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11051 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11052 && real_zerop (arg1))
11053 return omit_one_operand_loc (loc, type, arg1, arg0);
11054 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11055 Likewise for complex arithmetic with signed zeros. */
11056 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11057 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11058 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11059 && real_onep (arg1))
11060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11061
11062 /* Transform x * -1.0 into -x. */
11063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11064 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11065 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11066 && real_minus_onep (arg1))
11067 return fold_convert_loc (loc, type, negate_expr (arg0));
11068
11069 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11070 the result for floating point types due to rounding, so it is applied
11071 only if -fassociative-math was specified.
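For example, (1.0/x) * 3.0 becomes 3.0/x.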
*/ 11072 if (flag_associative_math 11073 && TREE_CODE (arg0) == RDIV_EXPR 11074 && TREE_CODE (arg1) == REAL_CST 11075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST) 11076 { 11077 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0), 11078 arg1, 0); 11079 if (tem) 11080 return fold_build2_loc (loc, RDIV_EXPR, type, tem, 11081 TREE_OPERAND (arg0, 1)); 11082 } 11083 11084 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */ 11085 if (operand_equal_p (arg0, arg1, 0)) 11086 { 11087 tree tem = fold_strip_sign_ops (arg0); 11088 if (tem != NULL_TREE) 11089 { 11090 tem = fold_convert_loc (loc, type, tem); 11091 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem); 11092 } 11093 } 11094 11095 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z). 11096 This is not the same for NaNs or if signed zeros are 11097 involved. */ 11098 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 11099 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 11100 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 11101 && TREE_CODE (arg1) == COMPLEX_CST 11102 && real_zerop (TREE_REALPART (arg1))) 11103 { 11104 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 11105 if (real_onep (TREE_IMAGPART (arg1))) 11106 return 11107 fold_build2_loc (loc, COMPLEX_EXPR, type, 11108 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR, 11109 rtype, arg0)), 11110 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0)); 11111 else if (real_minus_onep (TREE_IMAGPART (arg1))) 11112 return 11113 fold_build2_loc (loc, COMPLEX_EXPR, type, 11114 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0), 11115 negate_expr (fold_build1_loc (loc, REALPART_EXPR, 11116 rtype, arg0))); 11117 } 11118 11119 /* Optimize z * conj(z) for floating point complex numbers. 11120 Guarded by flag_unsafe_math_optimizations as non-finite 11121 imaginary components don't produce scalar results. */ 11122 if (flag_unsafe_math_optimizations 11123 && TREE_CODE (arg0) == CONJ_EXPR 11124 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11125 return fold_mult_zconjz (loc, type, arg1); 11126 if (flag_unsafe_math_optimizations 11127 && TREE_CODE (arg1) == CONJ_EXPR 11128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11129 return fold_mult_zconjz (loc, type, arg0); 11130 11131 if (flag_unsafe_math_optimizations) 11132 { 11133 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 11134 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 11135 11136 /* Optimizations of root(...)*root(...). */ 11137 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0)) 11138 { 11139 tree rootfn, arg; 11140 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11141 tree arg10 = CALL_EXPR_ARG (arg1, 0); 11142 11143 /* Optimize sqrt(x)*sqrt(x) as x. */ 11144 if (BUILTIN_SQRT_P (fcode0) 11145 && operand_equal_p (arg00, arg10, 0) 11146 && ! HONOR_SNANS (TYPE_MODE (type))) 11147 return arg00; 11148 11149 /* Optimize root(x)*root(y) as root(x*y). */ 11150 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11151 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10); 11152 return build_call_expr_loc (loc, rootfn, 1, arg); 11153 } 11154 11155 /* Optimize expN(x)*expN(y) as expN(x+y). */ 11156 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) 11157 { 11158 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11159 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 11160 CALL_EXPR_ARG (arg0, 0), 11161 CALL_EXPR_ARG (arg1, 0)); 11162 return build_call_expr_loc (loc, expfn, 1, arg); 11163 } 11164 11165 /* Optimizations of pow(...)*pow(...). 
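Two cases are handled below: equal exponents combine the bases,
and equal bases add the exponents.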
*/ 11166 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW) 11167 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF) 11168 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL)) 11169 { 11170 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11171 tree arg01 = CALL_EXPR_ARG (arg0, 1); 11172 tree arg10 = CALL_EXPR_ARG (arg1, 0); 11173 tree arg11 = CALL_EXPR_ARG (arg1, 1); 11174 11175 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */ 11176 if (operand_equal_p (arg01, arg11, 0)) 11177 { 11178 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11179 tree arg = fold_build2_loc (loc, MULT_EXPR, type, 11180 arg00, arg10); 11181 return build_call_expr_loc (loc, powfn, 2, arg, arg01); 11182 } 11183 11184 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */ 11185 if (operand_equal_p (arg00, arg10, 0)) 11186 { 11187 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11188 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 11189 arg01, arg11); 11190 return build_call_expr_loc (loc, powfn, 2, arg00, arg); 11191 } 11192 } 11193 11194 /* Optimize tan(x)*cos(x) as sin(x). */ 11195 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS) 11196 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF) 11197 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL) 11198 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN) 11199 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF) 11200 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL)) 11201 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11202 CALL_EXPR_ARG (arg1, 0), 0)) 11203 { 11204 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); 11205 11206 if (sinfn != NULL_TREE) 11207 return build_call_expr_loc (loc, sinfn, 1, 11208 CALL_EXPR_ARG (arg0, 0)); 11209 } 11210 11211 /* Optimize x*pow(x,c) as pow(x,c+1). */ 11212 if (fcode1 == BUILT_IN_POW 11213 || fcode1 == BUILT_IN_POWF 11214 || fcode1 == BUILT_IN_POWL) 11215 { 11216 tree arg10 = CALL_EXPR_ARG (arg1, 0); 11217 tree arg11 = CALL_EXPR_ARG (arg1, 1); 11218 if (TREE_CODE (arg11) == REAL_CST 11219 && !TREE_OVERFLOW (arg11) 11220 && operand_equal_p (arg0, arg10, 0)) 11221 { 11222 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11223 REAL_VALUE_TYPE c; 11224 tree arg; 11225 11226 c = TREE_REAL_CST (arg11); 11227 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 11228 arg = build_real (type, c); 11229 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 11230 } 11231 } 11232 11233 /* Optimize pow(x,c)*x as pow(x,c+1). */ 11234 if (fcode0 == BUILT_IN_POW 11235 || fcode0 == BUILT_IN_POWF 11236 || fcode0 == BUILT_IN_POWL) 11237 { 11238 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11239 tree arg01 = CALL_EXPR_ARG (arg0, 1); 11240 if (TREE_CODE (arg01) == REAL_CST 11241 && !TREE_OVERFLOW (arg01) 11242 && operand_equal_p (arg1, arg00, 0)) 11243 { 11244 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11245 REAL_VALUE_TYPE c; 11246 tree arg; 11247 11248 c = TREE_REAL_CST (arg01); 11249 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 11250 arg = build_real (type, c); 11251 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 11252 } 11253 } 11254 11255 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. 
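The point is presumably canonicalization when optimizing for speed:
e.g. (x*x)*(x*x) can then match the pow(x,y)*pow(x,z) rule above
and fold to pow (x, 4.0).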
*/ 11256 if (optimize_function_for_speed_p (cfun) 11257 && operand_equal_p (arg0, arg1, 0)) 11258 { 11259 tree powfn = mathfn_built_in (type, BUILT_IN_POW); 11260 11261 if (powfn) 11262 { 11263 tree arg = build_real (type, dconst2); 11264 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 11265 } 11266 } 11267 } 11268 } 11269 goto associate; 11270 11271 case BIT_IOR_EXPR: 11272 bit_ior: 11273 if (integer_all_onesp (arg1)) 11274 return omit_one_operand_loc (loc, type, arg1, arg0); 11275 if (integer_zerop (arg1)) 11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11277 if (operand_equal_p (arg0, arg1, 0)) 11278 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11279 11280 /* ~X | X is -1. */ 11281 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11283 { 11284 t1 = fold_convert_loc (loc, type, integer_zero_node); 11285 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11286 return omit_one_operand_loc (loc, type, t1, arg1); 11287 } 11288 11289 /* X | ~X is -1. */ 11290 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11291 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11292 { 11293 t1 = fold_convert_loc (loc, type, integer_zero_node); 11294 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11295 return omit_one_operand_loc (loc, type, t1, arg0); 11296 } 11297 11298 /* Canonicalize (X & C1) | C2. */ 11299 if (TREE_CODE (arg0) == BIT_AND_EXPR 11300 && TREE_CODE (arg1) == INTEGER_CST 11301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11302 { 11303 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi; 11304 int width = TYPE_PRECISION (type), w; 11305 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)); 11306 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); 11307 hi2 = TREE_INT_CST_HIGH (arg1); 11308 lo2 = TREE_INT_CST_LOW (arg1); 11309 11310 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */ 11311 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1) 11312 return omit_one_operand_loc (loc, type, arg1, 11313 TREE_OPERAND (arg0, 0)); 11314 11315 if (width > HOST_BITS_PER_WIDE_INT) 11316 { 11317 mhi = (unsigned HOST_WIDE_INT) -1 11318 >> (2 * HOST_BITS_PER_WIDE_INT - width); 11319 mlo = -1; 11320 } 11321 else 11322 { 11323 mhi = 0; 11324 mlo = (unsigned HOST_WIDE_INT) -1 11325 >> (HOST_BITS_PER_WIDE_INT - width); 11326 } 11327 11328 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ 11329 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0) 11330 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11331 TREE_OPERAND (arg0, 0), arg1); 11332 11333 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2, 11334 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some 11335 mode which allows further optimizations. */ 11336 hi1 &= mhi; 11337 lo1 &= mlo; 11338 hi2 &= mhi; 11339 lo2 &= mlo; 11340 hi3 = hi1 & ~hi2; 11341 lo3 = lo1 & ~lo2; 11342 for (w = BITS_PER_UNIT; 11343 w <= width && w <= HOST_BITS_PER_WIDE_INT; 11344 w <<= 1) 11345 { 11346 unsigned HOST_WIDE_INT mask 11347 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w); 11348 if (((lo1 | lo2) & mask) == mask 11349 && (lo1 & ~mask) == 0 && hi1 == 0) 11350 { 11351 hi3 = 0; 11352 lo3 = mask; 11353 break; 11354 } 11355 } 11356 if (hi3 != hi1 || lo3 != lo1) 11357 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11358 fold_build2_loc (loc, BIT_AND_EXPR, type, 11359 TREE_OPERAND (arg0, 0), 11360 build_int_cst_wide (type, 11361 lo3, hi3)), 11362 arg1); 11363 } 11364 11365 /* (X & Y) | Y is (X, Y). 
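Here (X, Y) denotes the result of omit_one_operand: X is retained
only for its side effects (via a COMPOUND_EXPR when needed) and the
value is Y.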
*/ 11366 if (TREE_CODE (arg0) == BIT_AND_EXPR 11367 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11368 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); 11369 /* (X & Y) | X is (Y, X). */ 11370 if (TREE_CODE (arg0) == BIT_AND_EXPR 11371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11372 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11373 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); 11374 /* X | (X & Y) is (Y, X). */ 11375 if (TREE_CODE (arg1) == BIT_AND_EXPR 11376 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 11377 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 11378 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); 11379 /* X | (Y & X) is (Y, X). */ 11380 if (TREE_CODE (arg1) == BIT_AND_EXPR 11381 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11382 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11383 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); 11384 11385 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11386 if (t1 != NULL_TREE) 11387 return t1; 11388 11389 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))). 11390 11391 This results in more efficient code for machines without a NAND 11392 instruction. Combine will canonicalize to the first form 11393 which will allow use of NAND instructions provided by the 11394 backend if they exist. */ 11395 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11396 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11397 { 11398 return 11399 fold_build1_loc (loc, BIT_NOT_EXPR, type, 11400 build2 (BIT_AND_EXPR, type, 11401 fold_convert_loc (loc, type, 11402 TREE_OPERAND (arg0, 0)), 11403 fold_convert_loc (loc, type, 11404 TREE_OPERAND (arg1, 0)))); 11405 } 11406 11407 /* See if this can be simplified into a rotate first. If that 11408 is unsuccessful continue in the association code. */ 11409 goto bit_rotate; 11410 11411 case BIT_XOR_EXPR: 11412 if (integer_zerop (arg1)) 11413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11414 if (integer_all_onesp (arg1)) 11415 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0); 11416 if (operand_equal_p (arg0, arg1, 0)) 11417 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11418 11419 /* ~X ^ X is -1. */ 11420 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11422 { 11423 t1 = fold_convert_loc (loc, type, integer_zero_node); 11424 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11425 return omit_one_operand_loc (loc, type, t1, arg1); 11426 } 11427 11428 /* X ^ ~X is -1. */ 11429 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11431 { 11432 t1 = fold_convert_loc (loc, type, integer_zero_node); 11433 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11434 return omit_one_operand_loc (loc, type, t1, arg0); 11435 } 11436 11437 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing 11438 with a constant, and the two constants have no bits in common, 11439 we should treat this as a BIT_IOR_EXPR since this may produce more 11440 simplifications. 
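For example, with disjoint masks (X & 0xf0) ^ (Y & 0x0f) has the
same value as (X & 0xf0) | (Y & 0x0f).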
*/ 11441 if (TREE_CODE (arg0) == BIT_AND_EXPR 11442 && TREE_CODE (arg1) == BIT_AND_EXPR 11443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 11444 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 11445 && integer_zerop (const_binop (BIT_AND_EXPR, 11446 TREE_OPERAND (arg0, 1), 11447 TREE_OPERAND (arg1, 1), 0))) 11448 { 11449 code = BIT_IOR_EXPR; 11450 goto bit_ior; 11451 } 11452 11453 /* (X | Y) ^ X -> Y & ~ X*/ 11454 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11455 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11456 { 11457 tree t2 = TREE_OPERAND (arg0, 1); 11458 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11459 arg1); 11460 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11461 fold_convert_loc (loc, type, t2), 11462 fold_convert_loc (loc, type, t1)); 11463 return t1; 11464 } 11465 11466 /* (Y | X) ^ X -> Y & ~ X*/ 11467 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11468 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11469 { 11470 tree t2 = TREE_OPERAND (arg0, 0); 11471 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11472 arg1); 11473 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11474 fold_convert_loc (loc, type, t2), 11475 fold_convert_loc (loc, type, t1)); 11476 return t1; 11477 } 11478 11479 /* X ^ (X | Y) -> Y & ~ X*/ 11480 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11481 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) 11482 { 11483 tree t2 = TREE_OPERAND (arg1, 1); 11484 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11485 arg0); 11486 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11487 fold_convert_loc (loc, type, t2), 11488 fold_convert_loc (loc, type, t1)); 11489 return t1; 11490 } 11491 11492 /* X ^ (Y | X) -> Y & ~ X*/ 11493 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11494 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) 11495 { 11496 tree t2 = TREE_OPERAND (arg1, 0); 11497 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11498 arg0); 11499 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11500 fold_convert_loc (loc, type, t2), 11501 fold_convert_loc (loc, type, t1)); 11502 return t1; 11503 } 11504 11505 /* Convert ~X ^ ~Y to X ^ Y. */ 11506 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11507 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11508 return fold_build2_loc (loc, code, type, 11509 fold_convert_loc (loc, type, 11510 TREE_OPERAND (arg0, 0)), 11511 fold_convert_loc (loc, type, 11512 TREE_OPERAND (arg1, 0))); 11513 11514 /* Convert ~X ^ C to X ^ ~C. */ 11515 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11516 && TREE_CODE (arg1) == INTEGER_CST) 11517 return fold_build2_loc (loc, code, type, 11518 fold_convert_loc (loc, type, 11519 TREE_OPERAND (arg0, 0)), 11520 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1)); 11521 11522 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */ 11523 if (TREE_CODE (arg0) == BIT_AND_EXPR 11524 && integer_onep (TREE_OPERAND (arg0, 1)) 11525 && integer_onep (arg1)) 11526 return fold_build2_loc (loc, EQ_EXPR, type, arg0, 11527 build_int_cst (TREE_TYPE (arg0), 0)); 11528 11529 /* Fold (X & Y) ^ Y as ~X & Y. */ 11530 if (TREE_CODE (arg0) == BIT_AND_EXPR 11531 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11532 { 11533 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11534 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11535 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11536 fold_convert_loc (loc, type, arg1)); 11537 } 11538 /* Fold (X & Y) ^ X as ~Y & X. 
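The identity holds bit by bit: where X is 0 both sides are 0;
where X is 1, (1 & Y) ^ 1 equals ~Y & 1.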
*/ 11539 if (TREE_CODE (arg0) == BIT_AND_EXPR 11540 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11541 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11542 { 11543 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11544 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11545 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11546 fold_convert_loc (loc, type, arg1)); 11547 } 11548 /* Fold X ^ (X & Y) as X & ~Y. */ 11549 if (TREE_CODE (arg1) == BIT_AND_EXPR 11550 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11551 { 11552 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11553 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11554 fold_convert_loc (loc, type, arg0), 11555 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11556 } 11557 /* Fold X ^ (Y & X) as ~Y & X. */ 11558 if (TREE_CODE (arg1) == BIT_AND_EXPR 11559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11560 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11561 { 11562 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11563 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11564 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11565 fold_convert_loc (loc, type, arg0)); 11566 } 11567 11568 /* See if this can be simplified into a rotate first. If that 11569 is unsuccessful continue in the association code. */ 11570 goto bit_rotate; 11571 11572 case BIT_AND_EXPR: 11573 if (integer_all_onesp (arg1)) 11574 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11575 if (integer_zerop (arg1)) 11576 return omit_one_operand_loc (loc, type, arg1, arg0); 11577 if (operand_equal_p (arg0, arg1, 0)) 11578 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11579 11580 /* ~X & X is always zero. */ 11581 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11582 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11583 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 11584 11585 /* X & ~X is always zero. */ 11586 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11587 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11588 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11589 11590 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */ 11591 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11592 && TREE_CODE (arg1) == INTEGER_CST 11593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11594 { 11595 tree tmp1 = fold_convert_loc (loc, type, arg1); 11596 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11597 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11598 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1); 11599 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1); 11600 return 11601 fold_convert_loc (loc, type, 11602 fold_build2_loc (loc, BIT_IOR_EXPR, 11603 type, tmp2, tmp3)); 11604 } 11605 11606 /* (X | Y) & Y is (X, Y). */ 11607 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11608 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11609 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); 11610 /* (X | Y) & X is (Y, X). */ 11611 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11612 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11613 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11614 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); 11615 /* X & (X | Y) is (Y, X). 
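This is the absorption law; Y is kept only for its side effects.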
*/ 11616 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11617 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 11618 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 11619 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); 11620 /* X & (Y | X) is (Y, X). */ 11621 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11622 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11623 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11624 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); 11625 11626 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */ 11627 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11628 && integer_onep (TREE_OPERAND (arg0, 1)) 11629 && integer_onep (arg1)) 11630 { 11631 tem = TREE_OPERAND (arg0, 0); 11632 return fold_build2_loc (loc, EQ_EXPR, type, 11633 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, 11634 build_int_cst (TREE_TYPE (tem), 1)), 11635 build_int_cst (TREE_TYPE (tem), 0)); 11636 } 11637 /* Fold ~X & 1 as (X & 1) == 0. */ 11638 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11639 && integer_onep (arg1)) 11640 { 11641 tem = TREE_OPERAND (arg0, 0); 11642 return fold_build2_loc (loc, EQ_EXPR, type, 11643 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, 11644 build_int_cst (TREE_TYPE (tem), 1)), 11645 build_int_cst (TREE_TYPE (tem), 0)); 11646 } 11647 11648 /* Fold (X ^ Y) & Y as ~X & Y. */ 11649 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11650 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11651 { 11652 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11653 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11654 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11655 fold_convert_loc (loc, type, arg1)); 11656 } 11657 /* Fold (X ^ Y) & X as ~Y & X. */ 11658 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11659 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11660 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11661 { 11662 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11663 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11664 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11665 fold_convert_loc (loc, type, arg1)); 11666 } 11667 /* Fold X & (X ^ Y) as X & ~Y. */ 11668 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11669 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11670 { 11671 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11672 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11673 fold_convert_loc (loc, type, arg0), 11674 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11675 } 11676 /* Fold X & (Y ^ X) as ~Y & X. */ 11677 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11678 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11679 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11680 { 11681 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11682 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11683 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11684 fold_convert_loc (loc, type, arg0)); 11685 } 11686 11687 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11688 if (t1 != NULL_TREE) 11689 return t1; 11690 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. 
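The mask 0377 (255) covers every bit of an 8-bit unsigned char, so
the BIT_AND is a no-op; the check below generalizes this to any
narrow unsigned inner type.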
*/ 11691 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR 11692 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 11693 { 11694 unsigned int prec 11695 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))); 11696 11697 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT 11698 && (~TREE_INT_CST_LOW (arg1) 11699 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0) 11700 return 11701 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11702 } 11703 11704 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))). 11705 11706 This results in more efficient code for machines without a NOR 11707 instruction. Combine will canonicalize to the first form 11708 which will allow use of NOR instructions provided by the 11709 backend if they exist. */ 11710 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11711 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11712 { 11713 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 11714 build2 (BIT_IOR_EXPR, type, 11715 fold_convert_loc (loc, type, 11716 TREE_OPERAND (arg0, 0)), 11717 fold_convert_loc (loc, type, 11718 TREE_OPERAND (arg1, 0)))); 11719 } 11720 11721 /* If arg0 is derived from the address of an object or function, we may 11722 be able to fold this expression using the object or function's 11723 alignment. */ 11724 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1)) 11725 { 11726 unsigned HOST_WIDE_INT modulus, residue; 11727 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1); 11728 11729 modulus = get_pointer_modulus_and_residue (arg0, &residue, 11730 integer_onep (arg1)); 11731 11732 /* This works because modulus is a power of 2. If this weren't the 11733 case, we'd have to replace it by its greatest power-of-2 11734 divisor: modulus & -modulus. */ 11735 if (low < modulus) 11736 return build_int_cst (type, residue & low); 11737 } 11738 11739 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)) 11740 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)) 11741 if the new mask might be further optimized. */ 11742 if ((TREE_CODE (arg0) == LSHIFT_EXPR 11743 || TREE_CODE (arg0) == RSHIFT_EXPR) 11744 && host_integerp (TREE_OPERAND (arg0, 1), 1) 11745 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1))) 11746 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) 11747 < TYPE_PRECISION (TREE_TYPE (arg0)) 11748 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT 11749 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0) 11750 { 11751 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1); 11752 unsigned HOST_WIDE_INT mask 11753 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1))); 11754 unsigned HOST_WIDE_INT newmask, zerobits = 0; 11755 tree shift_type = TREE_TYPE (arg0); 11756 11757 if (TREE_CODE (arg0) == LSHIFT_EXPR) 11758 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1); 11759 else if (TREE_CODE (arg0) == RSHIFT_EXPR 11760 && TYPE_PRECISION (TREE_TYPE (arg0)) 11761 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0)))) 11762 { 11763 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0)); 11764 tree arg00 = TREE_OPERAND (arg0, 0); 11765 /* See if more bits can be proven as zero because of 11766 zero extension. 
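E.g. if an 8-bit unsigned value was zero-extended and is then
shifted right by 4, bits 4 and above of the result are known to
be zero.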
*/ 11767 if (TREE_CODE (arg00) == NOP_EXPR 11768 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0)))) 11769 { 11770 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0)); 11771 if (TYPE_PRECISION (inner_type) 11772 == GET_MODE_BITSIZE (TYPE_MODE (inner_type)) 11773 && TYPE_PRECISION (inner_type) < prec) 11774 { 11775 prec = TYPE_PRECISION (inner_type); 11776 /* See if we can shorten the right shift. */ 11777 if (shiftc < prec) 11778 shift_type = inner_type; 11779 } 11780 } 11781 zerobits = ~(unsigned HOST_WIDE_INT) 0; 11782 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc; 11783 zerobits <<= prec - shiftc; 11784 /* For arithmetic shift if sign bit could be set, zerobits 11785 can contain actually sign bits, so no transformation is 11786 possible, unless MASK masks them all away. In that 11787 case the shift needs to be converted into logical shift. */ 11788 if (!TYPE_UNSIGNED (TREE_TYPE (arg0)) 11789 && prec == TYPE_PRECISION (TREE_TYPE (arg0))) 11790 { 11791 if ((mask & zerobits) == 0) 11792 shift_type = unsigned_type_for (TREE_TYPE (arg0)); 11793 else 11794 zerobits = 0; 11795 } 11796 } 11797 11798 /* ((X << 16) & 0xff00) is (X, 0). */ 11799 if ((mask & zerobits) == mask) 11800 return omit_one_operand_loc (loc, type, 11801 build_int_cst (type, 0), arg0); 11802 11803 newmask = mask | zerobits; 11804 if (newmask != mask && (newmask & (newmask + 1)) == 0) 11805 { 11806 unsigned int prec; 11807 11808 /* Only do the transformation if NEWMASK is some integer 11809 mode's mask. */ 11810 for (prec = BITS_PER_UNIT; 11811 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1) 11812 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1) 11813 break; 11814 if (prec < HOST_BITS_PER_WIDE_INT 11815 || newmask == ~(unsigned HOST_WIDE_INT) 0) 11816 { 11817 tree newmaskt; 11818 11819 if (shift_type != TREE_TYPE (arg0)) 11820 { 11821 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type, 11822 fold_convert_loc (loc, shift_type, 11823 TREE_OPERAND (arg0, 0)), 11824 TREE_OPERAND (arg0, 1)); 11825 tem = fold_convert_loc (loc, type, tem); 11826 } 11827 else 11828 tem = op0; 11829 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask); 11830 if (!tree_int_cst_equal (newmaskt, arg1)) 11831 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt); 11832 } 11833 } 11834 } 11835 11836 goto associate; 11837 11838 case RDIV_EXPR: 11839 /* Don't touch a floating-point divide by zero unless the mode 11840 of the constant can represent infinity. */ 11841 if (TREE_CODE (arg1) == REAL_CST 11842 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))) 11843 && real_zerop (arg1)) 11844 return NULL_TREE; 11845 11846 /* Optimize A / A to 1.0 if we don't care about 11847 NaNs or Infinities. Skip the transformation 11848 for non-real operands. */ 11849 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0)) 11850 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 11851 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0))) 11852 && operand_equal_p (arg0, arg1, 0)) 11853 { 11854 tree r = build_real (TREE_TYPE (arg0), dconst1); 11855 11856 return omit_two_operands_loc (loc, type, r, arg0, arg1); 11857 } 11858 11859 /* The complex version of the above A / A optimization. */ 11860 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 11861 && operand_equal_p (arg0, arg1, 0)) 11862 { 11863 tree elem_type = TREE_TYPE (TREE_TYPE (arg0)); 11864 if (! HONOR_NANS (TYPE_MODE (elem_type)) 11865 && ! HONOR_INFINITIES (TYPE_MODE (elem_type))) 11866 { 11867 tree r = build_real (elem_type, dconst1); 11868 /* omit_two_operands will call fold_convert for us. 
*/ 11869 return omit_two_operands_loc (loc, type, r, arg0, arg1); 11870 } 11871 } 11872 11873 /* (-A) / (-B) -> A / B */ 11874 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 11875 return fold_build2_loc (loc, RDIV_EXPR, type, 11876 TREE_OPERAND (arg0, 0), 11877 negate_expr (arg1)); 11878 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 11879 return fold_build2_loc (loc, RDIV_EXPR, type, 11880 negate_expr (arg0), 11881 TREE_OPERAND (arg1, 0)); 11882 11883 /* In IEEE floating point, x/1 is not equivalent to x for snans. */ 11884 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 11885 && real_onep (arg1)) 11886 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11887 11888 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */ 11889 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 11890 && real_minus_onep (arg1)) 11891 return non_lvalue_loc (loc, fold_convert_loc (loc, type, 11892 negate_expr (arg0))); 11893 11894 /* If ARG1 is a constant, we can convert this to a multiply by the 11895 reciprocal. This does not have the same rounding properties, 11896 so only do this if -freciprocal-math. We can actually 11897 always safely do it if ARG1 is a power of two, but it's hard to 11898 tell if it is or not in a portable manner. */ 11899 if (TREE_CODE (arg1) == REAL_CST) 11900 { 11901 if (flag_reciprocal_math 11902 && 0 != (tem = const_binop (code, build_real (type, dconst1), 11903 arg1, 0))) 11904 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem); 11905 /* Find the reciprocal if optimizing and the result is exact. */ 11906 if (optimize) 11907 { 11908 REAL_VALUE_TYPE r; 11909 r = TREE_REAL_CST (arg1); 11910 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r)) 11911 { 11912 tem = build_real (type, r); 11913 return fold_build2_loc (loc, MULT_EXPR, type, 11914 fold_convert_loc (loc, type, arg0), tem); 11915 } 11916 } 11917 } 11918 /* Convert A/B/C to A/(B*C). */ 11919 if (flag_reciprocal_math 11920 && TREE_CODE (arg0) == RDIV_EXPR) 11921 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), 11922 fold_build2_loc (loc, MULT_EXPR, type, 11923 TREE_OPERAND (arg0, 1), arg1)); 11924 11925 /* Convert A/(B/C) to (A/B)*C. */ 11926 if (flag_reciprocal_math 11927 && TREE_CODE (arg1) == RDIV_EXPR) 11928 return fold_build2_loc (loc, MULT_EXPR, type, 11929 fold_build2_loc (loc, RDIV_EXPR, type, arg0, 11930 TREE_OPERAND (arg1, 0)), 11931 TREE_OPERAND (arg1, 1)); 11932 11933 /* Convert C1/(X*C2) into (C1/C2)/X. */ 11934 if (flag_reciprocal_math 11935 && TREE_CODE (arg1) == MULT_EXPR 11936 && TREE_CODE (arg0) == REAL_CST 11937 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 11938 { 11939 tree tem = const_binop (RDIV_EXPR, arg0, 11940 TREE_OPERAND (arg1, 1), 0); 11941 if (tem) 11942 return fold_build2_loc (loc, RDIV_EXPR, type, tem, 11943 TREE_OPERAND (arg1, 0)); 11944 } 11945 11946 if (flag_unsafe_math_optimizations) 11947 { 11948 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 11949 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 11950 11951 /* Optimize sin(x)/cos(x) as tan(x). 
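This and the following trigonometric rewrites can change results
in the last few ulps, which is why the whole block is guarded by
flag_unsafe_math_optimizations.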
*/ 11952 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS) 11953 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF) 11954 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL)) 11955 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11956 CALL_EXPR_ARG (arg1, 0), 0)) 11957 { 11958 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11959 11960 if (tanfn != NULL_TREE) 11961 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0)); 11962 } 11963 11964 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */ 11965 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN) 11966 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF) 11967 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL)) 11968 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11969 CALL_EXPR_ARG (arg1, 0), 0)) 11970 { 11971 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11972 11973 if (tanfn != NULL_TREE) 11974 { 11975 tree tmp = build_call_expr_loc (loc, tanfn, 1, 11976 CALL_EXPR_ARG (arg0, 0)); 11977 return fold_build2_loc (loc, RDIV_EXPR, type, 11978 build_real (type, dconst1), tmp); 11979 } 11980 } 11981 11982 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about 11983 NaNs or Infinities. */ 11984 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN) 11985 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF) 11986 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL))) 11987 { 11988 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11989 tree arg01 = CALL_EXPR_ARG (arg1, 0); 11990 11991 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 11992 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 11993 && operand_equal_p (arg00, arg01, 0)) 11994 { 11995 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 11996 11997 if (cosfn != NULL_TREE) 11998 return build_call_expr_loc (loc, cosfn, 1, arg00); 11999 } 12000 } 12001 12002 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about 12003 NaNs or Infinities. */ 12004 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN) 12005 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF) 12006 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL))) 12007 { 12008 tree arg00 = CALL_EXPR_ARG (arg0, 0); 12009 tree arg01 = CALL_EXPR_ARG (arg1, 0); 12010 12011 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 12012 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 12013 && operand_equal_p (arg00, arg01, 0)) 12014 { 12015 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 12016 12017 if (cosfn != NULL_TREE) 12018 { 12019 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00); 12020 return fold_build2_loc (loc, RDIV_EXPR, type, 12021 build_real (type, dconst1), 12022 tmp); 12023 } 12024 } 12025 } 12026 12027 /* Optimize pow(x,c)/x as pow(x,c-1). */ 12028 if (fcode0 == BUILT_IN_POW 12029 || fcode0 == BUILT_IN_POWF 12030 || fcode0 == BUILT_IN_POWL) 12031 { 12032 tree arg00 = CALL_EXPR_ARG (arg0, 0); 12033 tree arg01 = CALL_EXPR_ARG (arg0, 1); 12034 if (TREE_CODE (arg01) == REAL_CST 12035 && !TREE_OVERFLOW (arg01) 12036 && operand_equal_p (arg1, arg00, 0)) 12037 { 12038 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 12039 REAL_VALUE_TYPE c; 12040 tree arg; 12041 12042 c = TREE_REAL_CST (arg01); 12043 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); 12044 arg = build_real (type, c); 12045 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 12046 } 12047 } 12048 12049 /* Optimize a/root(b/c) into a*root(c/b). 
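For positive operands 1/sqrt(b/c) == sqrt(c/b), and likewise
for cbrt.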
*/ 12050 if (BUILTIN_ROOT_P (fcode1)) 12051 { 12052 tree rootarg = CALL_EXPR_ARG (arg1, 0); 12053 12054 if (TREE_CODE (rootarg) == RDIV_EXPR) 12055 { 12056 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 12057 tree b = TREE_OPERAND (rootarg, 0); 12058 tree c = TREE_OPERAND (rootarg, 1); 12059 12060 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b); 12061 12062 tmp = build_call_expr_loc (loc, rootfn, 1, tmp); 12063 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp); 12064 } 12065 } 12066 12067 /* Optimize x/expN(y) into x*expN(-y). */ 12068 if (BUILTIN_EXPONENT_P (fcode1)) 12069 { 12070 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 12071 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0)); 12072 arg1 = build_call_expr_loc (loc, 12073 expfn, 1, 12074 fold_convert_loc (loc, type, arg)); 12075 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 12076 } 12077 12078 /* Optimize x/pow(y,z) into x*pow(y,-z). */ 12079 if (fcode1 == BUILT_IN_POW 12080 || fcode1 == BUILT_IN_POWF 12081 || fcode1 == BUILT_IN_POWL) 12082 { 12083 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 12084 tree arg10 = CALL_EXPR_ARG (arg1, 0); 12085 tree arg11 = CALL_EXPR_ARG (arg1, 1); 12086 tree neg11 = fold_convert_loc (loc, type, 12087 negate_expr (arg11)); 12088 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11); 12089 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 12090 } 12091 } 12092 return NULL_TREE; 12093 12094 case TRUNC_DIV_EXPR: 12095 case FLOOR_DIV_EXPR: 12096 /* Simplify A / (B << N) where A and B are positive and B is 12097 a power of 2, to A >> (N + log2(B)). */ 12098 strict_overflow_p = false; 12099 if (TREE_CODE (arg1) == LSHIFT_EXPR 12100 && (TYPE_UNSIGNED (type) 12101 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) 12102 { 12103 tree sval = TREE_OPERAND (arg1, 0); 12104 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0) 12105 { 12106 tree sh_cnt = TREE_OPERAND (arg1, 1); 12107 unsigned long pow2; 12108 12109 if (TREE_INT_CST_LOW (sval)) 12110 pow2 = exact_log2 (TREE_INT_CST_LOW (sval)); 12111 else 12112 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval)) 12113 + HOST_BITS_PER_WIDE_INT; 12114 12115 if (strict_overflow_p) 12116 fold_overflow_warning (("assuming signed overflow does not " 12117 "occur when simplifying A / (B << N)"), 12118 WARN_STRICT_OVERFLOW_MISC); 12119 12120 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt), 12121 sh_cnt, build_int_cst (NULL_TREE, pow2)); 12122 return fold_build2_loc (loc, RSHIFT_EXPR, type, 12123 fold_convert_loc (loc, type, arg0), sh_cnt); 12124 } 12125 } 12126 12127 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as 12128 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */ 12129 if (INTEGRAL_TYPE_P (type) 12130 && TYPE_UNSIGNED (type) 12131 && code == FLOOR_DIV_EXPR) 12132 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1); 12133 12134 /* Fall thru */ 12135 12136 case ROUND_DIV_EXPR: 12137 case CEIL_DIV_EXPR: 12138 case EXACT_DIV_EXPR: 12139 if (integer_onep (arg1)) 12140 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12141 if (integer_zerop (arg1)) 12142 return NULL_TREE; 12143 /* X / -1 is -X. */ 12144 if (!TYPE_UNSIGNED (type) 12145 && TREE_CODE (arg1) == INTEGER_CST 12146 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 12147 && TREE_INT_CST_HIGH (arg1) == -1) 12148 return fold_convert_loc (loc, type, negate_expr (arg0)); 12149 12150 /* Convert -A / -B to A / B when the type is signed and overflow is 12151 undefined. 
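The restriction matters because negating INT_MIN overflows; with
undefined overflow we may assume that case away, and
fold_overflow_warning below lets -Wstrict-overflow report the
assumption.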
*/
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).
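   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: X % (power of two)
   as a bit mask.  The helper name and the constant 8 are invented for
   illustration; the identity holds for any unsigned (or non-negative)
   X.  */
#if 0
#include <assert.h>
static void
check_mod_pow2 (unsigned int x)
{
  /* 8 == 1 << 3, so x % 8 is just the low three bits of x.  */
  assert (x % 8u == (x & 7u));
}
#endif
/* End of sketch; the original code resumes below.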
*/ 12243 if (TREE_CODE (arg1) == LSHIFT_EXPR) 12244 c = TREE_OPERAND (arg1, 0); 12245 12246 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) 12247 { 12248 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, 12249 build_int_cst (TREE_TYPE (arg1), 1)); 12250 if (strict_overflow_p) 12251 fold_overflow_warning (("assuming signed overflow does not " 12252 "occur when simplifying " 12253 "X % (power of two)"), 12254 WARN_STRICT_OVERFLOW_MISC); 12255 return fold_build2_loc (loc, BIT_AND_EXPR, type, 12256 fold_convert_loc (loc, type, arg0), 12257 fold_convert_loc (loc, type, mask)); 12258 } 12259 } 12260 12261 /* X % -C is the same as X % C. */ 12262 if (code == TRUNC_MOD_EXPR 12263 && !TYPE_UNSIGNED (type) 12264 && TREE_CODE (arg1) == INTEGER_CST 12265 && !TREE_OVERFLOW (arg1) 12266 && TREE_INT_CST_HIGH (arg1) < 0 12267 && !TYPE_OVERFLOW_TRAPS (type) 12268 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */ 12269 && !sign_bit_p (arg1, arg1)) 12270 return fold_build2_loc (loc, code, type, 12271 fold_convert_loc (loc, type, arg0), 12272 fold_convert_loc (loc, type, 12273 negate_expr (arg1))); 12274 12275 /* X % -Y is the same as X % Y. */ 12276 if (code == TRUNC_MOD_EXPR 12277 && !TYPE_UNSIGNED (type) 12278 && TREE_CODE (arg1) == NEGATE_EXPR 12279 && !TYPE_OVERFLOW_TRAPS (type)) 12280 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0), 12281 fold_convert_loc (loc, type, 12282 TREE_OPERAND (arg1, 0))); 12283 12284 if (TREE_CODE (arg1) == INTEGER_CST 12285 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 12286 &strict_overflow_p))) 12287 { 12288 if (strict_overflow_p) 12289 fold_overflow_warning (("assuming signed overflow does not occur " 12290 "when simplifying modulus"), 12291 WARN_STRICT_OVERFLOW_MISC); 12292 return fold_convert_loc (loc, type, tem); 12293 } 12294 12295 return NULL_TREE; 12296 12297 case LROTATE_EXPR: 12298 case RROTATE_EXPR: 12299 if (integer_all_onesp (arg0)) 12300 return omit_one_operand_loc (loc, type, arg0, arg1); 12301 goto shift; 12302 12303 case RSHIFT_EXPR: 12304 /* Optimize -1 >> x for arithmetic right shifts. */ 12305 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type) 12306 && tree_expr_nonnegative_p (arg1)) 12307 return omit_one_operand_loc (loc, type, arg0, arg1); 12308 /* ... fall through ... */ 12309 12310 case LSHIFT_EXPR: 12311 shift: 12312 if (integer_zerop (arg1)) 12313 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12314 if (integer_zerop (arg0)) 12315 return omit_one_operand_loc (loc, type, arg0, arg1); 12316 12317 /* Since negative shift count is not well-defined, 12318 don't try to compute it in the compiler. */ 12319 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0) 12320 return NULL_TREE; 12321 12322 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */ 12323 if (TREE_CODE (op0) == code && host_integerp (arg1, false) 12324 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 12325 && host_integerp (TREE_OPERAND (arg0, 1), false) 12326 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 12327 { 12328 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) 12329 + TREE_INT_CST_LOW (arg1)); 12330 12331 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2 12332 being well defined. 
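   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: two shifts by
   constants combine into one as long as the summed count stays below
   the type's precision.  The helper name and counts are invented.  */
#if 0
#include <assert.h>
static void
check_shift_merge (unsigned int a)
{
  /* 3 + 4 is well below the width of unsigned int, so this is exact.  */
  assert (((a << 3) << 4) == (a << 7));
}
#endif
/* End of sketch; the original code resumes below.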
*/ 12333 if (low >= TYPE_PRECISION (type)) 12334 { 12335 if (code == LROTATE_EXPR || code == RROTATE_EXPR) 12336 low = low % TYPE_PRECISION (type); 12337 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) 12338 return omit_one_operand_loc (loc, type, build_int_cst (type, 0), 12339 TREE_OPERAND (arg0, 0)); 12340 else 12341 low = TYPE_PRECISION (type) - 1; 12342 } 12343 12344 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12345 build_int_cst (type, low)); 12346 } 12347 12348 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c 12349 into x & ((unsigned)-1 >> c) for unsigned types. */ 12350 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR) 12351 || (TYPE_UNSIGNED (type) 12352 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR)) 12353 && host_integerp (arg1, false) 12354 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 12355 && host_integerp (TREE_OPERAND (arg0, 1), false) 12356 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 12357 { 12358 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); 12359 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1); 12360 tree lshift; 12361 tree arg00; 12362 12363 if (low0 == low1) 12364 { 12365 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 12366 12367 lshift = build_int_cst (type, -1); 12368 lshift = int_const_binop (code, lshift, arg1, 0); 12369 12370 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift); 12371 } 12372 } 12373 12374 /* Rewrite an LROTATE_EXPR by a constant into an 12375 RROTATE_EXPR by a new constant. */ 12376 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST) 12377 { 12378 tree tem = build_int_cst (TREE_TYPE (arg1), 12379 TYPE_PRECISION (type)); 12380 tem = const_binop (MINUS_EXPR, tem, arg1, 0); 12381 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem); 12382 } 12383 12384 /* If we have a rotate of a bit operation with the rotate count and 12385 the second operand of the bit operation both constant, 12386 permute the two operations. */ 12387 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12388 && (TREE_CODE (arg0) == BIT_AND_EXPR 12389 || TREE_CODE (arg0) == BIT_IOR_EXPR 12390 || TREE_CODE (arg0) == BIT_XOR_EXPR) 12391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12392 return fold_build2_loc (loc, TREE_CODE (arg0), type, 12393 fold_build2_loc (loc, code, type, 12394 TREE_OPERAND (arg0, 0), arg1), 12395 fold_build2_loc (loc, code, type, 12396 TREE_OPERAND (arg0, 1), arg1)); 12397 12398 /* Two consecutive rotates adding up to the precision of the 12399 type can be ignored. */ 12400 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12401 && TREE_CODE (arg0) == RROTATE_EXPR 12402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12403 && TREE_INT_CST_HIGH (arg1) == 0 12404 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0 12405 && ((TREE_INT_CST_LOW (arg1) 12406 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))) 12407 == (unsigned int) TYPE_PRECISION (type))) 12408 return TREE_OPERAND (arg0, 0); 12409 12410 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1) 12411 (X & C2) >> C1 into (X >> C1) & (C2 >> C1) 12412 if the latter can be further optimized. 
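   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: shifting a masked
   value equals masking the shifted value with the shifted mask.  The
   helper name and constants are invented for illustration.  */
#if 0
#include <assert.h>
static void
check_mask_shift (unsigned int x)
{
  assert (((x & 0xffu) << 4) == ((x << 4) & (0xffu << 4)));
}
#endif
/* End of sketch; the original code resumes below.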
*/ 12413 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR) 12414 && TREE_CODE (arg0) == BIT_AND_EXPR 12415 && TREE_CODE (arg1) == INTEGER_CST 12416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12417 { 12418 tree mask = fold_build2_loc (loc, code, type, 12419 fold_convert_loc (loc, type, 12420 TREE_OPERAND (arg0, 1)), 12421 arg1); 12422 tree shift = fold_build2_loc (loc, code, type, 12423 fold_convert_loc (loc, type, 12424 TREE_OPERAND (arg0, 0)), 12425 arg1); 12426 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask); 12427 if (tem) 12428 return tem; 12429 } 12430 12431 return NULL_TREE; 12432 12433 case MIN_EXPR: 12434 if (operand_equal_p (arg0, arg1, 0)) 12435 return omit_one_operand_loc (loc, type, arg0, arg1); 12436 if (INTEGRAL_TYPE_P (type) 12437 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST)) 12438 return omit_one_operand_loc (loc, type, arg1, arg0); 12439 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1); 12440 if (tem) 12441 return tem; 12442 goto associate; 12443 12444 case MAX_EXPR: 12445 if (operand_equal_p (arg0, arg1, 0)) 12446 return omit_one_operand_loc (loc, type, arg0, arg1); 12447 if (INTEGRAL_TYPE_P (type) 12448 && TYPE_MAX_VALUE (type) 12449 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST)) 12450 return omit_one_operand_loc (loc, type, arg1, arg0); 12451 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1); 12452 if (tem) 12453 return tem; 12454 goto associate; 12455 12456 case TRUTH_ANDIF_EXPR: 12457 /* Note that the operands of this must be ints 12458 and their values must be 0 or 1. 12459 ("true" is a fixed value perhaps depending on the language.) */ 12460 /* If first arg is constant zero, return it. */ 12461 if (integer_zerop (arg0)) 12462 return fold_convert_loc (loc, type, arg0); 12463 case TRUTH_AND_EXPR: 12464 /* If either arg is constant true, drop it. */ 12465 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12466 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12467 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1) 12468 /* Preserve sequence points. */ 12469 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12470 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12471 /* If second arg is constant zero, result is zero, but first arg 12472 must be evaluated. */ 12473 if (integer_zerop (arg1)) 12474 return omit_one_operand_loc (loc, type, arg1, arg0); 12475 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR 12476 case will be handled here. */ 12477 if (integer_zerop (arg0)) 12478 return omit_one_operand_loc (loc, type, arg0, arg1); 12479 12480 /* !X && X is always false. */ 12481 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12482 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12483 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 12484 /* X && !X is always false. */ 12485 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12486 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12487 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12488 12489 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y 12490 means A >= Y && A != MAX, but in this case we know that 12491 A < X <= MAX. 
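   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: the non-sharp form
   of a strict integer inequality.  The helper name is invented; the
   identity needs a < INT_MAX so that a + 1 cannot overflow.  */
#if 0
#include <assert.h>
static void
check_nonsharp_ineq (int a, int y)
{
  /* For integers (with a + 1 not overflowing),
     a + 1 > y is exactly a >= y.  */
  assert ((a + 1 > y) == (a >= y));
}
#endif
/* End of sketch; the original code resumes below.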
*/

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (loc, code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && !
integer_zerop (arg0)) 12575 return fold_convert_loc (loc, type, arg0); 12576 case TRUTH_OR_EXPR: 12577 /* If either arg is constant zero, drop it. */ 12578 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0)) 12579 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12580 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1) 12581 /* Preserve sequence points. */ 12582 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12583 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12584 /* If second arg is constant true, result is true, but we must 12585 evaluate first arg. */ 12586 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)) 12587 return omit_one_operand_loc (loc, type, arg1, arg0); 12588 /* Likewise for first arg, but note this only occurs here for 12589 TRUTH_OR_EXPR. */ 12590 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12591 return omit_one_operand_loc (loc, type, arg0, arg1); 12592 12593 /* !X || X is always true. */ 12594 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12595 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12596 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12597 /* X || !X is always true. */ 12598 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12599 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12600 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12601 12602 goto truth_andor; 12603 12604 case TRUTH_XOR_EXPR: 12605 /* If the second arg is constant zero, drop it. */ 12606 if (integer_zerop (arg1)) 12607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12608 /* If the second arg is constant true, this is a logical inversion. */ 12609 if (integer_onep (arg1)) 12610 { 12611 /* Only call invert_truthvalue if operand is a truth value. */ 12612 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 12613 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0); 12614 else 12615 tem = invert_truthvalue_loc (loc, arg0); 12616 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 12617 } 12618 /* Identical arguments cancel to zero. */ 12619 if (operand_equal_p (arg0, arg1, 0)) 12620 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12621 12622 /* !X ^ X is always true. */ 12623 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12624 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12625 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12626 12627 /* X ^ !X is always true. */ 12628 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12630 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12631 12632 return NULL_TREE; 12633 12634 case EQ_EXPR: 12635 case NE_EXPR: 12636 tem = fold_comparison (loc, code, type, op0, op1); 12637 if (tem != NULL_TREE) 12638 return tem; 12639 12640 /* bool_var != 0 becomes bool_var. */ 12641 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12642 && code == NE_EXPR) 12643 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12644 12645 /* bool_var == 1 becomes bool_var. */ 12646 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12647 && code == EQ_EXPR) 12648 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12649 12650 /* bool_var != 1 becomes !bool_var. 
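   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: the boolean
   rewrites above, checked over the two boolean values.  The helper
   name is invented.  */
#if 0
#include <assert.h>
#include <stdbool.h>
static void
check_bool_compare (bool b)
{
  assert ((b != 1) == !b);   /* bool_var != 1  ->  !bool_var */
  assert ((b == 0) == !b);   /* bool_var == 0  ->  !bool_var */
}
#endif
/* End of sketch; the original code resumes below.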
*/ 12651 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12652 && code == NE_EXPR) 12653 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, 12654 fold_convert_loc (loc, type, arg0)); 12655 12656 /* bool_var == 0 becomes !bool_var. */ 12657 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12658 && code == EQ_EXPR) 12659 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, 12660 fold_convert_loc (loc, type, arg0)); 12661 12662 /* !exp != 0 becomes !exp */ 12663 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1) 12664 && code == NE_EXPR) 12665 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12666 12667 /* If this is an equality comparison of the address of two non-weak, 12668 unaliased symbols neither of which are extern (since we do not 12669 have access to attributes for externs), then we know the result. */ 12670 if (TREE_CODE (arg0) == ADDR_EXPR 12671 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) 12672 && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) 12673 && ! lookup_attribute ("alias", 12674 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0))) 12675 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0)) 12676 && TREE_CODE (arg1) == ADDR_EXPR 12677 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0)) 12678 && ! DECL_WEAK (TREE_OPERAND (arg1, 0)) 12679 && ! lookup_attribute ("alias", 12680 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0))) 12681 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0))) 12682 { 12683 /* We know that we're looking at the address of two 12684 non-weak, unaliased, static _DECL nodes. 12685 12686 It is both wasteful and incorrect to call operand_equal_p 12687 to compare the two ADDR_EXPR nodes. It is wasteful in that 12688 all we need to do is test pointer equality for the arguments 12689 to the two ADDR_EXPR nodes. It is incorrect to use 12690 operand_equal_p as that function is NOT equivalent to a 12691 C equality test. It can in fact return false for two 12692 objects which would test as equal using the C equality 12693 operator. */ 12694 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0); 12695 return constant_boolean_node (equal 12696 ? code == EQ_EXPR : code != EQ_EXPR, 12697 type); 12698 } 12699 12700 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or 12701 a MINUS_EXPR of a constant, we can convert it into a comparison with 12702 a revised constant as long as no overflow occurs. */ 12703 if (TREE_CODE (arg1) == INTEGER_CST 12704 && (TREE_CODE (arg0) == PLUS_EXPR 12705 || TREE_CODE (arg0) == MINUS_EXPR) 12706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12707 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 12708 ? MINUS_EXPR : PLUS_EXPR, 12709 fold_convert_loc (loc, TREE_TYPE (arg0), 12710 arg1), 12711 TREE_OPERAND (arg0, 1), 0)) 12712 && !TREE_OVERFLOW (tem)) 12713 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 12714 12715 /* Similarly for a NEGATE_EXPR. */ 12716 if (TREE_CODE (arg0) == NEGATE_EXPR 12717 && TREE_CODE (arg1) == INTEGER_CST 12718 && 0 != (tem = negate_expr (arg1)) 12719 && TREE_CODE (tem) == INTEGER_CST 12720 && !TREE_OVERFLOW (tem)) 12721 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 12722 12723 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). 
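   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: XOR-ing both sides
   of an equality by the same constant preserves it, which justifies the
   rewrite above.  The helper name and constants are invented.  */
#if 0
#include <assert.h>
static void
check_xor_eq (unsigned int x)
{
  /* x ^ 0xf0 == 0x0f exactly when x == (0xf0 ^ 0x0f).  */
  assert (((x ^ 0xf0u) == 0x0fu) == (x == (0xf0u ^ 0x0fu)));
}
#endif
/* End of sketch; the original code resumes below.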
*/ 12724 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12725 && TREE_CODE (arg1) == INTEGER_CST 12726 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12727 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12728 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0), 12729 fold_convert_loc (loc, 12730 TREE_TYPE (arg0), 12731 arg1), 12732 TREE_OPERAND (arg0, 1))); 12733 12734 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */ 12735 if ((TREE_CODE (arg0) == PLUS_EXPR 12736 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 12737 || TREE_CODE (arg0) == MINUS_EXPR) 12738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 12739 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 12740 || POINTER_TYPE_P (TREE_TYPE (arg0)))) 12741 { 12742 tree val = TREE_OPERAND (arg0, 1); 12743 return omit_two_operands_loc (loc, type, 12744 fold_build2_loc (loc, code, type, 12745 val, 12746 build_int_cst (TREE_TYPE (val), 12747 0)), 12748 TREE_OPERAND (arg0, 0), arg1); 12749 } 12750 12751 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */ 12752 if (TREE_CODE (arg0) == MINUS_EXPR 12753 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST 12754 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0) 12755 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1) 12756 { 12757 return omit_two_operands_loc (loc, type, 12758 code == NE_EXPR 12759 ? boolean_true_node : boolean_false_node, 12760 TREE_OPERAND (arg0, 1), arg1); 12761 } 12762 12763 /* If we have X - Y == 0, we can convert that to X == Y and similarly 12764 for !=. Don't do this for ordered comparisons due to overflow. */ 12765 if (TREE_CODE (arg0) == MINUS_EXPR 12766 && integer_zerop (arg1)) 12767 return fold_build2_loc (loc, code, type, 12768 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 12769 12770 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */ 12771 if (TREE_CODE (arg0) == ABS_EXPR 12772 && (integer_zerop (arg1) || real_zerop (arg1))) 12773 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1); 12774 12775 /* If this is an EQ or NE comparison with zero and ARG0 is 12776 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require 12777 two operations, but the latter can be done in one less insn 12778 on machines that have only two-operand insns or on which a 12779 constant cannot be the first operand. 
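   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: both single-bit
   tests pick out the same bit of BAR.  The helper name is invented;
   FOO must be a valid shift count for unsigned int.  */
#if 0
#include <assert.h>
static void
check_single_bit_test (unsigned int bar, unsigned int foo)
{
  assert ((((1u << foo) & bar) != 0) == (((bar >> foo) & 1u) != 0));
}
#endif
/* End of sketch; the original code resumes below.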
*/ 12780 if (TREE_CODE (arg0) == BIT_AND_EXPR 12781 && integer_zerop (arg1)) 12782 { 12783 tree arg00 = TREE_OPERAND (arg0, 0); 12784 tree arg01 = TREE_OPERAND (arg0, 1); 12785 if (TREE_CODE (arg00) == LSHIFT_EXPR 12786 && integer_onep (TREE_OPERAND (arg00, 0))) 12787 { 12788 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00), 12789 arg01, TREE_OPERAND (arg00, 1)); 12790 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12791 build_int_cst (TREE_TYPE (arg0), 1)); 12792 return fold_build2_loc (loc, code, type, 12793 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12794 arg1); 12795 } 12796 else if (TREE_CODE (arg01) == LSHIFT_EXPR 12797 && integer_onep (TREE_OPERAND (arg01, 0))) 12798 { 12799 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01), 12800 arg00, TREE_OPERAND (arg01, 1)); 12801 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12802 build_int_cst (TREE_TYPE (arg0), 1)); 12803 return fold_build2_loc (loc, code, type, 12804 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12805 arg1); 12806 } 12807 } 12808 12809 /* If this is an NE or EQ comparison of zero against the result of a 12810 signed MOD operation whose second operand is a power of 2, make 12811 the MOD operation unsigned since it is simpler and equivalent. */ 12812 if (integer_zerop (arg1) 12813 && !TYPE_UNSIGNED (TREE_TYPE (arg0)) 12814 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR 12815 || TREE_CODE (arg0) == CEIL_MOD_EXPR 12816 || TREE_CODE (arg0) == FLOOR_MOD_EXPR 12817 || TREE_CODE (arg0) == ROUND_MOD_EXPR) 12818 && integer_pow2p (TREE_OPERAND (arg0, 1))) 12819 { 12820 tree newtype = unsigned_type_for (TREE_TYPE (arg0)); 12821 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype, 12822 fold_convert_loc (loc, newtype, 12823 TREE_OPERAND (arg0, 0)), 12824 fold_convert_loc (loc, newtype, 12825 TREE_OPERAND (arg0, 1))); 12826 12827 return fold_build2_loc (loc, code, type, newmod, 12828 fold_convert_loc (loc, newtype, arg1)); 12829 } 12830 12831 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where 12832 C1 is a valid shift constant, and C2 is a power of two, i.e. 12833 a single bit. */ 12834 if (TREE_CODE (arg0) == BIT_AND_EXPR 12835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR 12836 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)) 12837 == INTEGER_CST 12838 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12839 && integer_zerop (arg1)) 12840 { 12841 tree itype = TREE_TYPE (arg0); 12842 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype); 12843 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1); 12844 12845 /* Check for a valid shift count. */ 12846 if (TREE_INT_CST_HIGH (arg001) == 0 12847 && TREE_INT_CST_LOW (arg001) < prec) 12848 { 12849 tree arg01 = TREE_OPERAND (arg0, 1); 12850 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 12851 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01); 12852 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0 12853 can be rewritten as (X & (C2 << C1)) != 0. */ 12854 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) 12855 { 12856 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001); 12857 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem); 12858 return fold_build2_loc (loc, code, type, tem, arg1); 12859 } 12860 /* Otherwise, for signed (arithmetic) shifts, 12861 ((X >> C1) & C2) != 0 is rewritten as X < 0, and 12862 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */ 12863 else if (!TYPE_UNSIGNED (itype)) 12864 return fold_build2_loc (loc, code == EQ_EXPR ? 
GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.
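   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: strlen(ptr) == 0
   exactly when the first character is the terminating NUL, which is
   what the rewrite below relies on.  The helper name is invented.  */
#if 0
#include <assert.h>
#include <string.h>
static void
check_strlen_zero (const char *ptr)
{
  assert ((strlen (ptr) == 0) == (*ptr == '\0'));
}
#endif
/* End of sketch; the original code resumes below.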
*/ 12948 if (TREE_CODE (arg0) == CALL_EXPR 12949 && integer_zerop (arg1)) 12950 { 12951 tree fndecl = get_callee_fndecl (arg0); 12952 12953 if (fndecl 12954 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 12955 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN 12956 && call_expr_nargs (arg0) == 1 12957 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) 12958 { 12959 tree iref = build_fold_indirect_ref_loc (loc, 12960 CALL_EXPR_ARG (arg0, 0)); 12961 return fold_build2_loc (loc, code, type, iref, 12962 build_int_cst (TREE_TYPE (iref), 0)); 12963 } 12964 } 12965 12966 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width 12967 of X. Similarly fold (X >> C) == 0 into X >= 0. */ 12968 if (TREE_CODE (arg0) == RSHIFT_EXPR 12969 && integer_zerop (arg1) 12970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12971 { 12972 tree arg00 = TREE_OPERAND (arg0, 0); 12973 tree arg01 = TREE_OPERAND (arg0, 1); 12974 tree itype = TREE_TYPE (arg00); 12975 if (TREE_INT_CST_HIGH (arg01) == 0 12976 && TREE_INT_CST_LOW (arg01) 12977 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1)) 12978 { 12979 if (TYPE_UNSIGNED (itype)) 12980 { 12981 itype = signed_type_for (itype); 12982 arg00 = fold_convert_loc (loc, itype, arg00); 12983 } 12984 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 12985 type, arg00, build_int_cst (itype, 0)); 12986 } 12987 } 12988 12989 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */ 12990 if (integer_zerop (arg1) 12991 && TREE_CODE (arg0) == BIT_XOR_EXPR) 12992 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12993 TREE_OPERAND (arg0, 1)); 12994 12995 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */ 12996 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12997 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 12998 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12999 build_int_cst (TREE_TYPE (arg1), 0)); 13000 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */ 13001 if (TREE_CODE (arg0) == BIT_XOR_EXPR 13002 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 13003 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 13004 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1), 13005 build_int_cst (TREE_TYPE (arg1), 0)); 13006 13007 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */ 13008 if (TREE_CODE (arg0) == BIT_XOR_EXPR 13009 && TREE_CODE (arg1) == INTEGER_CST 13010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 13011 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 13012 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1), 13013 TREE_OPERAND (arg0, 1), arg1)); 13014 13015 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into 13016 (X & C) == 0 when C is a single bit. */ 13017 if (TREE_CODE (arg0) == BIT_AND_EXPR 13018 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR 13019 && integer_zerop (arg1) 13020 && integer_pow2p (TREE_OPERAND (arg0, 1))) 13021 { 13022 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 13023 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0), 13024 TREE_OPERAND (arg0, 1)); 13025 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, 13026 type, tem, arg1); 13027 } 13028 13029 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the 13030 constant C is a power of two, i.e. a single bit. 
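   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: with C a single
   bit, X & C is either 0 or C, so XOR-ing the result with C flips the
   two outcomes.  The helper name and the constant 8 are invented.  */
#if 0
#include <assert.h>
static void
check_and_xor_single_bit (unsigned int x)
{
  /* ((x & 8) ^ 8) == 0  exactly when  (x & 8) != 0.  */
  assert ((((x & 8u) ^ 8u) == 0) == ((x & 8u) != 0));
}
#endif
/* End of sketch; the original code resumes below.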
*/
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                             arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                            TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg1, 0));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg10),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg11),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg10),
                                             arg00),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg11),
                                             arg00),
                                build_int_cst (itype, 0));
        }

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
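   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: XOR with a common
   value is injective, so it can be cancelled on both sides of an
   equality.  The helper name is invented.  */
#if 0
#include <assert.h>
static void
check_xor_cancel (unsigned int x, unsigned int y, unsigned int z)
{
  assert (((x ^ z) == (y ^ z)) == (x == y));
}
#endif
/* Continuing the original note: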
13130 operand_equal_p guarantees no side-effects so we don't need 13131 to use omit_one_operand on Z. */ 13132 if (operand_equal_p (arg01, arg11, 0)) 13133 return fold_build2_loc (loc, code, type, arg00, arg10); 13134 if (operand_equal_p (arg01, arg10, 0)) 13135 return fold_build2_loc (loc, code, type, arg00, arg11); 13136 if (operand_equal_p (arg00, arg11, 0)) 13137 return fold_build2_loc (loc, code, type, arg01, arg10); 13138 if (operand_equal_p (arg00, arg10, 0)) 13139 return fold_build2_loc (loc, code, type, arg01, arg11); 13140 13141 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */ 13142 if (TREE_CODE (arg01) == INTEGER_CST 13143 && TREE_CODE (arg11) == INTEGER_CST) 13144 return fold_build2_loc (loc, code, type, 13145 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, 13146 fold_build2_loc (loc, 13147 BIT_XOR_EXPR, itype, 13148 arg01, arg11)), 13149 arg10); 13150 } 13151 13152 /* Attempt to simplify equality/inequality comparisons of complex 13153 values. Only lower the comparison if the result is known or 13154 can be simplified to a single scalar comparison. */ 13155 if ((TREE_CODE (arg0) == COMPLEX_EXPR 13156 || TREE_CODE (arg0) == COMPLEX_CST) 13157 && (TREE_CODE (arg1) == COMPLEX_EXPR 13158 || TREE_CODE (arg1) == COMPLEX_CST)) 13159 { 13160 tree real0, imag0, real1, imag1; 13161 tree rcond, icond; 13162 13163 if (TREE_CODE (arg0) == COMPLEX_EXPR) 13164 { 13165 real0 = TREE_OPERAND (arg0, 0); 13166 imag0 = TREE_OPERAND (arg0, 1); 13167 } 13168 else 13169 { 13170 real0 = TREE_REALPART (arg0); 13171 imag0 = TREE_IMAGPART (arg0); 13172 } 13173 13174 if (TREE_CODE (arg1) == COMPLEX_EXPR) 13175 { 13176 real1 = TREE_OPERAND (arg1, 0); 13177 imag1 = TREE_OPERAND (arg1, 1); 13178 } 13179 else 13180 { 13181 real1 = TREE_REALPART (arg1); 13182 imag1 = TREE_IMAGPART (arg1); 13183 } 13184 13185 rcond = fold_binary_loc (loc, code, type, real0, real1); 13186 if (rcond && TREE_CODE (rcond) == INTEGER_CST) 13187 { 13188 if (integer_zerop (rcond)) 13189 { 13190 if (code == EQ_EXPR) 13191 return omit_two_operands_loc (loc, type, boolean_false_node, 13192 imag0, imag1); 13193 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1); 13194 } 13195 else 13196 { 13197 if (code == NE_EXPR) 13198 return omit_two_operands_loc (loc, type, boolean_true_node, 13199 imag0, imag1); 13200 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1); 13201 } 13202 } 13203 13204 icond = fold_binary_loc (loc, code, type, imag0, imag1); 13205 if (icond && TREE_CODE (icond) == INTEGER_CST) 13206 { 13207 if (integer_zerop (icond)) 13208 { 13209 if (code == EQ_EXPR) 13210 return omit_two_operands_loc (loc, type, boolean_false_node, 13211 real0, real1); 13212 return fold_build2_loc (loc, NE_EXPR, type, real0, real1); 13213 } 13214 else 13215 { 13216 if (code == NE_EXPR) 13217 return omit_two_operands_loc (loc, type, boolean_true_node, 13218 real0, real1); 13219 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1); 13220 } 13221 } 13222 } 13223 13224 return NULL_TREE; 13225 13226 case LT_EXPR: 13227 case GT_EXPR: 13228 case LE_EXPR: 13229 case GE_EXPR: 13230 tem = fold_comparison (loc, code, type, op0, op1); 13231 if (tem != NULL_TREE) 13232 return tem; 13233 13234 /* Transform comparisons of the form X +- C CMP X. 
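   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: the flavor of
   rewrite performed below, stated for a concrete constant.  The helper
   name is invented; the identities assume the additions do not
   overflow, which is exactly the TYPE_OVERFLOW_UNDEFINED premise the
   code checks for.  */
#if 0
#include <assert.h>
static void
check_x_plus_c_cmp_x (int x)
{
  /* Valid whenever x + 1 does not overflow (x < INT_MAX).  */
  assert (x + 1 > x);         /* (X + c) > X  ->  true  */
  assert (!(x + 1 <= x));     /* (X + c) <= X ->  false */
}
#endif
/* End of sketch; the original code resumes below.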
*/ 13235 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 13236 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 13237 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 13238 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))) 13239 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 13240 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))))) 13241 { 13242 tree arg01 = TREE_OPERAND (arg0, 1); 13243 enum tree_code code0 = TREE_CODE (arg0); 13244 int is_positive; 13245 13246 if (TREE_CODE (arg01) == REAL_CST) 13247 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1; 13248 else 13249 is_positive = tree_int_cst_sgn (arg01); 13250 13251 /* (X - c) > X becomes false. */ 13252 if (code == GT_EXPR 13253 && ((code0 == MINUS_EXPR && is_positive >= 0) 13254 || (code0 == PLUS_EXPR && is_positive <= 0))) 13255 { 13256 if (TREE_CODE (arg01) == INTEGER_CST 13257 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13258 fold_overflow_warning (("assuming signed overflow does not " 13259 "occur when assuming that (X - c) > X " 13260 "is always false"), 13261 WARN_STRICT_OVERFLOW_ALL); 13262 return constant_boolean_node (0, type); 13263 } 13264 13265 /* Likewise (X + c) < X becomes false. */ 13266 if (code == LT_EXPR 13267 && ((code0 == PLUS_EXPR && is_positive >= 0) 13268 || (code0 == MINUS_EXPR && is_positive <= 0))) 13269 { 13270 if (TREE_CODE (arg01) == INTEGER_CST 13271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13272 fold_overflow_warning (("assuming signed overflow does not " 13273 "occur when assuming that " 13274 "(X + c) < X is always false"), 13275 WARN_STRICT_OVERFLOW_ALL); 13276 return constant_boolean_node (0, type); 13277 } 13278 13279 /* Convert (X - c) <= X to true. */ 13280 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 13281 && code == LE_EXPR 13282 && ((code0 == MINUS_EXPR && is_positive >= 0) 13283 || (code0 == PLUS_EXPR && is_positive <= 0))) 13284 { 13285 if (TREE_CODE (arg01) == INTEGER_CST 13286 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13287 fold_overflow_warning (("assuming signed overflow does not " 13288 "occur when assuming that " 13289 "(X - c) <= X is always true"), 13290 WARN_STRICT_OVERFLOW_ALL); 13291 return constant_boolean_node (1, type); 13292 } 13293 13294 /* Convert (X + c) >= X to true. */ 13295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 13296 && code == GE_EXPR 13297 && ((code0 == PLUS_EXPR && is_positive >= 0) 13298 || (code0 == MINUS_EXPR && is_positive <= 0))) 13299 { 13300 if (TREE_CODE (arg01) == INTEGER_CST 13301 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13302 fold_overflow_warning (("assuming signed overflow does not " 13303 "occur when assuming that " 13304 "(X + c) >= X is always true"), 13305 WARN_STRICT_OVERFLOW_ALL); 13306 return constant_boolean_node (1, type); 13307 } 13308 13309 if (TREE_CODE (arg01) == INTEGER_CST) 13310 { 13311 /* Convert X + c > X and X - c < X to true for integers. 
*/ 13312 if (code == GT_EXPR 13313 && ((code0 == PLUS_EXPR && is_positive > 0) 13314 || (code0 == MINUS_EXPR && is_positive < 0))) 13315 { 13316 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13317 fold_overflow_warning (("assuming signed overflow does " 13318 "not occur when assuming that " 13319 "(X + c) > X is always true"), 13320 WARN_STRICT_OVERFLOW_ALL); 13321 return constant_boolean_node (1, type); 13322 } 13323 13324 if (code == LT_EXPR 13325 && ((code0 == MINUS_EXPR && is_positive > 0) 13326 || (code0 == PLUS_EXPR && is_positive < 0))) 13327 { 13328 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13329 fold_overflow_warning (("assuming signed overflow does " 13330 "not occur when assuming that " 13331 "(X - c) < X is always true"), 13332 WARN_STRICT_OVERFLOW_ALL); 13333 return constant_boolean_node (1, type); 13334 } 13335 13336 /* Convert X + c <= X and X - c >= X to false for integers. */ 13337 if (code == LE_EXPR 13338 && ((code0 == PLUS_EXPR && is_positive > 0) 13339 || (code0 == MINUS_EXPR && is_positive < 0))) 13340 { 13341 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13342 fold_overflow_warning (("assuming signed overflow does " 13343 "not occur when assuming that " 13344 "(X + c) <= X is always false"), 13345 WARN_STRICT_OVERFLOW_ALL); 13346 return constant_boolean_node (0, type); 13347 } 13348 13349 if (code == GE_EXPR 13350 && ((code0 == MINUS_EXPR && is_positive > 0) 13351 || (code0 == PLUS_EXPR && is_positive < 0))) 13352 { 13353 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13354 fold_overflow_warning (("assuming signed overflow does " 13355 "not occur when assuming that " 13356 "(X - c) >= X is always false"), 13357 WARN_STRICT_OVERFLOW_ALL); 13358 return constant_boolean_node (0, type); 13359 } 13360 } 13361 } 13362 13363 /* Comparisons with the highest or lowest possible integer of 13364 the specified precision will have known values. 
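   (An illustrative sketch follows.)  */

/* Editor's sketch, not part of the original source: comparing against
   the type's extreme values has a known result, e.g. an unsigned value
   can never exceed UINT_MAX.  The helper name is invented.  */
#if 0
#include <assert.h>
#include <limits.h>
static void
check_limit_compare (unsigned int x)
{
  assert (x <= UINT_MAX);      /* X <= max is always true.   */
  assert (!(x > UINT_MAX));    /* X > max is always false.   */
}
#endif
/* End of sketch; the original code resumes below.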
*/ 13365 { 13366 tree arg1_type = TREE_TYPE (arg1); 13367 unsigned int width = TYPE_PRECISION (arg1_type); 13368 13369 if (TREE_CODE (arg1) == INTEGER_CST 13370 && width <= 2 * HOST_BITS_PER_WIDE_INT 13371 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type))) 13372 { 13373 HOST_WIDE_INT signed_max_hi; 13374 unsigned HOST_WIDE_INT signed_max_lo; 13375 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo; 13376 13377 if (width <= HOST_BITS_PER_WIDE_INT) 13378 { 13379 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 13380 - 1; 13381 signed_max_hi = 0; 13382 max_hi = 0; 13383 13384 if (TYPE_UNSIGNED (arg1_type)) 13385 { 13386 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 13387 min_lo = 0; 13388 min_hi = 0; 13389 } 13390 else 13391 { 13392 max_lo = signed_max_lo; 13393 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 13394 min_hi = -1; 13395 } 13396 } 13397 else 13398 { 13399 width -= HOST_BITS_PER_WIDE_INT; 13400 signed_max_lo = -1; 13401 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 13402 - 1; 13403 max_lo = -1; 13404 min_lo = 0; 13405 13406 if (TYPE_UNSIGNED (arg1_type)) 13407 { 13408 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 13409 min_hi = 0; 13410 } 13411 else 13412 { 13413 max_hi = signed_max_hi; 13414 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 13415 } 13416 } 13417 13418 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi 13419 && TREE_INT_CST_LOW (arg1) == max_lo) 13420 switch (code) 13421 { 13422 case GT_EXPR: 13423 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13424 13425 case GE_EXPR: 13426 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); 13427 13428 case LE_EXPR: 13429 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13430 13431 case LT_EXPR: 13432 return fold_build2_loc (loc, NE_EXPR, type, op0, op1); 13433 13434 /* The GE_EXPR and LT_EXPR cases above are not normally 13435 reached because of previous transformations. 
*/

              default:
                break;
              }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                      fold_convert_loc (loc,
                                                        TREE_TYPE (arg1), arg0),
                                      arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        arg0),
                                      arg1);
                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                      fold_convert_loc (loc,
                                                        TREE_TYPE (arg1), arg0),
                                      arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        arg0),
                                      arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                        code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                        type, fold_convert_loc (loc, st, arg0),
                                        build_int_cst (st, 0));
                  }
              }
          }
      }

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && !
TREE_SIDE_EFFECTS (arg0) 13538 && (0 != (tem = negate_expr (arg1))) 13539 && TREE_CODE (tem) == INTEGER_CST 13540 && !TREE_OVERFLOW (tem)) 13541 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13542 build2 (GE_EXPR, type, 13543 TREE_OPERAND (arg0, 0), tem), 13544 build2 (LE_EXPR, type, 13545 TREE_OPERAND (arg0, 0), arg1)); 13546 13547 /* Convert ABS_EXPR<x> >= 0 to true. */ 13548 strict_overflow_p = false; 13549 if (code == GE_EXPR 13550 && (integer_zerop (arg1) 13551 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 13552 && real_zerop (arg1))) 13553 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13554 { 13555 if (strict_overflow_p) 13556 fold_overflow_warning (("assuming signed overflow does not occur " 13557 "when simplifying comparison of " 13558 "absolute value and zero"), 13559 WARN_STRICT_OVERFLOW_CONDITIONAL); 13560 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13561 } 13562 13563 /* Convert ABS_EXPR<x> < 0 to false. */ 13564 strict_overflow_p = false; 13565 if (code == LT_EXPR 13566 && (integer_zerop (arg1) || real_zerop (arg1)) 13567 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13568 { 13569 if (strict_overflow_p) 13570 fold_overflow_warning (("assuming signed overflow does not occur " 13571 "when simplifying comparison of " 13572 "absolute value and zero"), 13573 WARN_STRICT_OVERFLOW_CONDITIONAL); 13574 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13575 } 13576 13577 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 13578 and similarly for >= into !=. */ 13579 if ((code == LT_EXPR || code == GE_EXPR) 13580 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13581 && TREE_CODE (arg1) == LSHIFT_EXPR 13582 && integer_onep (TREE_OPERAND (arg1, 0))) 13583 { 13584 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13585 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 13586 TREE_OPERAND (arg1, 1)), 13587 build_int_cst (TREE_TYPE (arg0), 0)); 13588 goto fold_binary_exit; 13589 } 13590 13591 if ((code == LT_EXPR || code == GE_EXPR) 13592 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13593 && CONVERT_EXPR_P (arg1) 13594 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR 13595 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0))) 13596 { 13597 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13598 fold_convert_loc (loc, TREE_TYPE (arg0), 13599 build2 (RSHIFT_EXPR, 13600 TREE_TYPE (arg0), arg0, 13601 TREE_OPERAND (TREE_OPERAND (arg1, 0), 13602 1))), 13603 build_int_cst (TREE_TYPE (arg0), 0)); 13604 goto fold_binary_exit; 13605 } 13606 13607 return NULL_TREE; 13608 13609 case UNORDERED_EXPR: 13610 case ORDERED_EXPR: 13611 case UNLT_EXPR: 13612 case UNLE_EXPR: 13613 case UNGT_EXPR: 13614 case UNGE_EXPR: 13615 case UNEQ_EXPR: 13616 case LTGT_EXPR: 13617 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 13618 { 13619 t1 = fold_relational_const (code, type, arg0, arg1); 13620 if (t1 != NULL_TREE) 13621 return t1; 13622 } 13623 13624 /* If the first operand is NaN, the result is constant. */ 13625 if (TREE_CODE (arg0) == REAL_CST 13626 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)) 13627 && (code != LTGT_EXPR || ! flag_trapping_math)) 13628 { 13629 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13630 ? integer_zero_node 13631 : integer_one_node; 13632 return omit_one_operand_loc (loc, type, t1, arg1); 13633 } 13634 13635 /* If the second operand is NaN, the result is constant. */ 13636 if (TREE_CODE (arg1) == REAL_CST 13637 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 13638 && (code != LTGT_EXPR || ! 
flag_trapping_math)) 13639 { 13640 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13641 ? integer_zero_node 13642 : integer_one_node; 13643 return omit_one_operand_loc (loc, type, t1, arg0); 13644 } 13645 13646 /* Simplify unordered comparison of something with itself. */ 13647 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR) 13648 && operand_equal_p (arg0, arg1, 0)) 13649 return constant_boolean_node (1, type); 13650 13651 if (code == LTGT_EXPR 13652 && !flag_trapping_math 13653 && operand_equal_p (arg0, arg1, 0)) 13654 return constant_boolean_node (0, type); 13655 13656 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 13657 { 13658 tree targ0 = strip_float_extensions (arg0); 13659 tree targ1 = strip_float_extensions (arg1); 13660 tree newtype = TREE_TYPE (targ0); 13661 13662 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 13663 newtype = TREE_TYPE (targ1); 13664 13665 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 13666 return fold_build2_loc (loc, code, type, 13667 fold_convert_loc (loc, newtype, targ0), 13668 fold_convert_loc (loc, newtype, targ1)); 13669 } 13670 13671 return NULL_TREE; 13672 13673 case COMPOUND_EXPR: 13674 /* When pedantic, a compound expression can be neither an lvalue 13675 nor an integer constant expression. */ 13676 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1)) 13677 return NULL_TREE; 13678 /* Don't let (0, 0) be null pointer constant. */ 13679 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1) 13680 : fold_convert_loc (loc, type, arg1); 13681 return pedantic_non_lvalue_loc (loc, tem); 13682 13683 case COMPLEX_EXPR: 13684 if ((TREE_CODE (arg0) == REAL_CST 13685 && TREE_CODE (arg1) == REAL_CST) 13686 || (TREE_CODE (arg0) == INTEGER_CST 13687 && TREE_CODE (arg1) == INTEGER_CST)) 13688 return build_complex (type, arg0, arg1); 13689 return NULL_TREE; 13690 13691 case ASSERT_EXPR: 13692 /* An ASSERT_EXPR should never be passed to fold_binary. */ 13693 gcc_unreachable (); 13694 13695 default: 13696 return NULL_TREE; 13697 } /* switch (code) */ 13698 fold_binary_exit: 13699 protected_set_expr_location (tem, loc); 13700 return tem; 13701 } 13702 13703 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is 13704 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees 13705 of GOTO_EXPR. */ 13706 13707 static tree 13708 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 13709 { 13710 switch (TREE_CODE (*tp)) 13711 { 13712 case LABEL_EXPR: 13713 return *tp; 13714 13715 case GOTO_EXPR: 13716 *walk_subtrees = 0; 13717 13718 /* ... fall through ... */ 13719 13720 default: 13721 return NULL_TREE; 13722 } 13723 } 13724 13725 /* Return whether the sub-tree ST contains a label which is accessible from 13726 outside the sub-tree. */ 13727 13728 static bool 13729 contains_label_p (tree st) 13730 { 13731 return 13732 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE); 13733 } 13734 13735 /* Fold a ternary expression of code CODE and type TYPE with operands 13736 OP0, OP1, and OP2. Return the folded expression if folding is 13737 successful. Otherwise, return NULL_TREE. 
*/

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away the operand that contains a label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
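      /* For instance, fold_cond_expr_with_comparison can turn
         A > B ? A : B into MAX_EXPR <A, B>, and A >= 0 ? A : -A into
         ABS_EXPR <A>, when signed zeros do not matter for the type.  */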
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = fold_truth_not_expr (loc, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                          invert_truthvalue_loc (loc, arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has a wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization must be done in A's
             unsigned type; if they are all 1, in A's signed type;
             otherwise it can't be done.
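             For example, if A is a 32-bit int and <sign bit of A> is a
             64-bit constant, 0x0000000080000000 has all-zero upper bits
             (fold in unsigned int), while 0xffffffff80000000 has all-one
             upper bits (fold in int).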
*/ 13883 if (TYPE_PRECISION (TREE_TYPE (tem)) 13884 < TYPE_PRECISION (TREE_TYPE (arg1)) 13885 && TYPE_PRECISION (TREE_TYPE (tem)) 13886 < TYPE_PRECISION (type)) 13887 { 13888 unsigned HOST_WIDE_INT mask_lo; 13889 HOST_WIDE_INT mask_hi; 13890 int inner_width, outer_width; 13891 tree tem_type; 13892 13893 inner_width = TYPE_PRECISION (TREE_TYPE (tem)); 13894 outer_width = TYPE_PRECISION (TREE_TYPE (arg1)); 13895 if (outer_width > TYPE_PRECISION (type)) 13896 outer_width = TYPE_PRECISION (type); 13897 13898 if (outer_width > HOST_BITS_PER_WIDE_INT) 13899 { 13900 mask_hi = ((unsigned HOST_WIDE_INT) -1 13901 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width)); 13902 mask_lo = -1; 13903 } 13904 else 13905 { 13906 mask_hi = 0; 13907 mask_lo = ((unsigned HOST_WIDE_INT) -1 13908 >> (HOST_BITS_PER_WIDE_INT - outer_width)); 13909 } 13910 if (inner_width > HOST_BITS_PER_WIDE_INT) 13911 { 13912 mask_hi &= ~((unsigned HOST_WIDE_INT) -1 13913 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 13914 mask_lo = 0; 13915 } 13916 else 13917 mask_lo &= ~((unsigned HOST_WIDE_INT) -1 13918 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 13919 13920 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi 13921 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo) 13922 { 13923 tem_type = signed_type_for (TREE_TYPE (tem)); 13924 tem = fold_convert_loc (loc, tem_type, tem); 13925 } 13926 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0 13927 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0) 13928 { 13929 tem_type = unsigned_type_for (TREE_TYPE (tem)); 13930 tem = fold_convert_loc (loc, tem_type, tem); 13931 } 13932 else 13933 tem = NULL; 13934 } 13935 13936 if (tem) 13937 return 13938 fold_convert_loc (loc, type, 13939 fold_build2_loc (loc, BIT_AND_EXPR, 13940 TREE_TYPE (tem), tem, 13941 fold_convert_loc (loc, 13942 TREE_TYPE (tem), 13943 arg1))); 13944 } 13945 13946 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was 13947 already handled above. */ 13948 if (TREE_CODE (arg0) == BIT_AND_EXPR 13949 && integer_onep (TREE_OPERAND (arg0, 1)) 13950 && integer_zerop (op2) 13951 && integer_pow2p (arg1)) 13952 { 13953 tree tem = TREE_OPERAND (arg0, 0); 13954 STRIP_NOPS (tem); 13955 if (TREE_CODE (tem) == RSHIFT_EXPR 13956 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST 13957 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) == 13958 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))) 13959 return fold_build2_loc (loc, BIT_AND_EXPR, type, 13960 TREE_OPERAND (tem, 0), arg1); 13961 } 13962 13963 /* A & N ? N : 0 is simply A & N if N is a power of two. This 13964 is probably obsolete because the first operand should be a 13965 truth value (that's why we have the two cases above), but let's 13966 leave it in until we can confirm this for all front-ends. */ 13967 if (integer_zerop (op2) 13968 && TREE_CODE (arg0) == NE_EXPR 13969 && integer_zerop (TREE_OPERAND (arg0, 1)) 13970 && integer_pow2p (arg1) 13971 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 13972 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 13973 arg1, OEP_ONLY_CONST)) 13974 return pedantic_non_lvalue_loc (loc, 13975 fold_convert_loc (loc, type, 13976 TREE_OPERAND (arg0, 0))); 13977 13978 /* Convert A ? B : 0 into A && B if A and B are truth values. */ 13979 if (integer_zerop (op2) 13980 && truth_value_p (TREE_CODE (arg0)) 13981 && truth_value_p (TREE_CODE (arg1))) 13982 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13983 fold_convert_loc (loc, type, arg0), 13984 arg1); 13985 13986 /* Convert A ? B : 1 into !A || B if A and B are truth values. 
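         This is only done when A can be cheaply inverted; see the
         fold_truth_not_expr check below.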
*/ 13987 if (integer_onep (op2) 13988 && truth_value_p (TREE_CODE (arg0)) 13989 && truth_value_p (TREE_CODE (arg1))) 13990 { 13991 /* Only perform transformation if ARG0 is easily inverted. */ 13992 tem = fold_truth_not_expr (loc, arg0); 13993 if (tem) 13994 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 13995 fold_convert_loc (loc, type, tem), 13996 arg1); 13997 } 13998 13999 /* Convert A ? 0 : B into !A && B if A and B are truth values. */ 14000 if (integer_zerop (arg1) 14001 && truth_value_p (TREE_CODE (arg0)) 14002 && truth_value_p (TREE_CODE (op2))) 14003 { 14004 /* Only perform transformation if ARG0 is easily inverted. */ 14005 tem = fold_truth_not_expr (loc, arg0); 14006 if (tem) 14007 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 14008 fold_convert_loc (loc, type, tem), 14009 op2); 14010 } 14011 14012 /* Convert A ? 1 : B into A || B if A and B are truth values. */ 14013 if (integer_onep (arg1) 14014 && truth_value_p (TREE_CODE (arg0)) 14015 && truth_value_p (TREE_CODE (op2))) 14016 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 14017 fold_convert_loc (loc, type, arg0), 14018 op2); 14019 14020 return NULL_TREE; 14021 14022 case CALL_EXPR: 14023 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses 14024 of fold_ternary on them. */ 14025 gcc_unreachable (); 14026 14027 case BIT_FIELD_REF: 14028 if ((TREE_CODE (arg0) == VECTOR_CST 14029 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0))) 14030 && type == TREE_TYPE (TREE_TYPE (arg0))) 14031 { 14032 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1); 14033 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1); 14034 14035 if (width != 0 14036 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1 14037 && (idx % width) == 0 14038 && (idx = idx / width) 14039 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) 14040 { 14041 tree elements = NULL_TREE; 14042 14043 if (TREE_CODE (arg0) == VECTOR_CST) 14044 elements = TREE_VECTOR_CST_ELTS (arg0); 14045 else 14046 { 14047 unsigned HOST_WIDE_INT idx; 14048 tree value; 14049 14050 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value) 14051 elements = tree_cons (NULL_TREE, value, elements); 14052 } 14053 while (idx-- > 0 && elements) 14054 elements = TREE_CHAIN (elements); 14055 if (elements) 14056 return TREE_VALUE (elements); 14057 else 14058 return fold_convert_loc (loc, type, integer_zero_node); 14059 } 14060 } 14061 14062 /* A bit-field-ref that referenced the full argument can be stripped. */ 14063 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 14064 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1) 14065 && integer_zerop (op2)) 14066 return fold_convert_loc (loc, type, arg0); 14067 14068 return NULL_TREE; 14069 14070 default: 14071 return NULL_TREE; 14072 } /* switch (code) */ 14073 } 14074 14075 /* Perform constant folding and related simplification of EXPR. 14076 The related simplifications include x*1 => x, x*0 => 0, etc., 14077 and application of the associative law. 14078 NOP_EXPR conversions may be removed freely (as long as we 14079 are careful not to change the type of the overall expression). 14080 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR, 14081 but we can constant-fold them if they have constant operands. 
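   For example, a FIX_TRUNC_EXPR applied to the real constant 1.9
   folds to the integer constant 1, while the same conversion applied
   to a variable operand is left in place.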
*/ 14082 14083 #ifdef ENABLE_FOLD_CHECKING 14084 # define fold(x) fold_1 (x) 14085 static tree fold_1 (tree); 14086 static 14087 #endif 14088 tree 14089 fold (tree expr) 14090 { 14091 const tree t = expr; 14092 enum tree_code code = TREE_CODE (t); 14093 enum tree_code_class kind = TREE_CODE_CLASS (code); 14094 tree tem; 14095 location_t loc = EXPR_LOCATION (expr); 14096 14097 /* Return right away if a constant. */ 14098 if (kind == tcc_constant) 14099 return t; 14100 14101 /* CALL_EXPR-like objects with variable numbers of operands are 14102 treated specially. */ 14103 if (kind == tcc_vl_exp) 14104 { 14105 if (code == CALL_EXPR) 14106 { 14107 tem = fold_call_expr (loc, expr, false); 14108 return tem ? tem : expr; 14109 } 14110 return expr; 14111 } 14112 14113 if (IS_EXPR_CODE_CLASS (kind)) 14114 { 14115 tree type = TREE_TYPE (t); 14116 tree op0, op1, op2; 14117 14118 switch (TREE_CODE_LENGTH (code)) 14119 { 14120 case 1: 14121 op0 = TREE_OPERAND (t, 0); 14122 tem = fold_unary_loc (loc, code, type, op0); 14123 return tem ? tem : expr; 14124 case 2: 14125 op0 = TREE_OPERAND (t, 0); 14126 op1 = TREE_OPERAND (t, 1); 14127 tem = fold_binary_loc (loc, code, type, op0, op1); 14128 return tem ? tem : expr; 14129 case 3: 14130 op0 = TREE_OPERAND (t, 0); 14131 op1 = TREE_OPERAND (t, 1); 14132 op2 = TREE_OPERAND (t, 2); 14133 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14134 return tem ? tem : expr; 14135 default: 14136 break; 14137 } 14138 } 14139 14140 switch (code) 14141 { 14142 case ARRAY_REF: 14143 { 14144 tree op0 = TREE_OPERAND (t, 0); 14145 tree op1 = TREE_OPERAND (t, 1); 14146 14147 if (TREE_CODE (op1) == INTEGER_CST 14148 && TREE_CODE (op0) == CONSTRUCTOR 14149 && ! type_contains_placeholder_p (TREE_TYPE (op0))) 14150 { 14151 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0); 14152 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts); 14153 unsigned HOST_WIDE_INT begin = 0; 14154 14155 /* Find a matching index by means of a binary search. */ 14156 while (begin != end) 14157 { 14158 unsigned HOST_WIDE_INT middle = (begin + end) / 2; 14159 tree index = VEC_index (constructor_elt, elts, middle)->index; 14160 14161 if (TREE_CODE (index) == INTEGER_CST 14162 && tree_int_cst_lt (index, op1)) 14163 begin = middle + 1; 14164 else if (TREE_CODE (index) == INTEGER_CST 14165 && tree_int_cst_lt (op1, index)) 14166 end = middle; 14167 else if (TREE_CODE (index) == RANGE_EXPR 14168 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1)) 14169 begin = middle + 1; 14170 else if (TREE_CODE (index) == RANGE_EXPR 14171 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0))) 14172 end = middle; 14173 else 14174 return VEC_index (constructor_elt, elts, middle)->value; 14175 } 14176 } 14177 14178 return t; 14179 } 14180 14181 case CONST_DECL: 14182 return fold (DECL_INITIAL (t)); 14183 14184 default: 14185 return t; 14186 } /* switch (code) */ 14187 } 14188 14189 #ifdef ENABLE_FOLD_CHECKING 14190 #undef fold 14191 14192 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t); 14193 static void fold_check_failed (const_tree, const_tree); 14194 void print_fold_checksum (const_tree); 14195 14196 /* When --enable-checking=fold, compute a digest of expr before 14197 and after actual fold call to see if fold did not accidentally 14198 change original expr. 
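   The digest is an MD5 checksum taken by fold_checksum_tree over every
   node reachable from EXPR; if the digests before and after the call
   differ, fold_check_failed reports an internal compiler error.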
*/ 14199 14200 tree 14201 fold (tree expr) 14202 { 14203 tree ret; 14204 struct md5_ctx ctx; 14205 unsigned char checksum_before[16], checksum_after[16]; 14206 htab_t ht; 14207 14208 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14209 md5_init_ctx (&ctx); 14210 fold_checksum_tree (expr, &ctx, ht); 14211 md5_finish_ctx (&ctx, checksum_before); 14212 htab_empty (ht); 14213 14214 ret = fold_1 (expr); 14215 14216 md5_init_ctx (&ctx); 14217 fold_checksum_tree (expr, &ctx, ht); 14218 md5_finish_ctx (&ctx, checksum_after); 14219 htab_delete (ht); 14220 14221 if (memcmp (checksum_before, checksum_after, 16)) 14222 fold_check_failed (expr, ret); 14223 14224 return ret; 14225 } 14226 14227 void 14228 print_fold_checksum (const_tree expr) 14229 { 14230 struct md5_ctx ctx; 14231 unsigned char checksum[16], cnt; 14232 htab_t ht; 14233 14234 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14235 md5_init_ctx (&ctx); 14236 fold_checksum_tree (expr, &ctx, ht); 14237 md5_finish_ctx (&ctx, checksum); 14238 htab_delete (ht); 14239 for (cnt = 0; cnt < 16; ++cnt) 14240 fprintf (stderr, "%02x", checksum[cnt]); 14241 putc ('\n', stderr); 14242 } 14243 14244 static void 14245 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED) 14246 { 14247 internal_error ("fold check: original tree changed by fold"); 14248 } 14249 14250 static void 14251 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht) 14252 { 14253 const void **slot; 14254 enum tree_code code; 14255 union tree_node buf; 14256 int i, len; 14257 14258 recursive_label: 14259 14260 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree) 14261 <= sizeof (struct tree_function_decl)) 14262 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl)); 14263 if (expr == NULL) 14264 return; 14265 slot = (const void **) htab_find_slot (ht, expr, INSERT); 14266 if (*slot != NULL) 14267 return; 14268 *slot = expr; 14269 code = TREE_CODE (expr); 14270 if (TREE_CODE_CLASS (code) == tcc_declaration 14271 && DECL_ASSEMBLER_NAME_SET_P (expr)) 14272 { 14273 /* Allow DECL_ASSEMBLER_NAME to be modified. */ 14274 memcpy ((char *) &buf, expr, tree_size (expr)); 14275 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL); 14276 expr = (tree) &buf; 14277 } 14278 else if (TREE_CODE_CLASS (code) == tcc_type 14279 && (TYPE_POINTER_TO (expr) 14280 || TYPE_REFERENCE_TO (expr) 14281 || TYPE_CACHED_VALUES_P (expr) 14282 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) 14283 || TYPE_NEXT_VARIANT (expr))) 14284 { 14285 /* Allow these fields to be modified. 
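         They merely cache derived information, so fold may update them
         without changing the meaning of the type; we therefore checksum
         a copy of the node with these fields cleared.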
*/ 14286 tree tmp; 14287 memcpy ((char *) &buf, expr, tree_size (expr)); 14288 expr = tmp = (tree) &buf; 14289 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0; 14290 TYPE_POINTER_TO (tmp) = NULL; 14291 TYPE_REFERENCE_TO (tmp) = NULL; 14292 TYPE_NEXT_VARIANT (tmp) = NULL; 14293 if (TYPE_CACHED_VALUES_P (tmp)) 14294 { 14295 TYPE_CACHED_VALUES_P (tmp) = 0; 14296 TYPE_CACHED_VALUES (tmp) = NULL; 14297 } 14298 } 14299 md5_process_bytes (expr, tree_size (expr), ctx); 14300 fold_checksum_tree (TREE_TYPE (expr), ctx, ht); 14301 if (TREE_CODE_CLASS (code) != tcc_type 14302 && TREE_CODE_CLASS (code) != tcc_declaration 14303 && code != TREE_LIST 14304 && code != SSA_NAME) 14305 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht); 14306 switch (TREE_CODE_CLASS (code)) 14307 { 14308 case tcc_constant: 14309 switch (code) 14310 { 14311 case STRING_CST: 14312 md5_process_bytes (TREE_STRING_POINTER (expr), 14313 TREE_STRING_LENGTH (expr), ctx); 14314 break; 14315 case COMPLEX_CST: 14316 fold_checksum_tree (TREE_REALPART (expr), ctx, ht); 14317 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht); 14318 break; 14319 case VECTOR_CST: 14320 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht); 14321 break; 14322 default: 14323 break; 14324 } 14325 break; 14326 case tcc_exceptional: 14327 switch (code) 14328 { 14329 case TREE_LIST: 14330 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht); 14331 fold_checksum_tree (TREE_VALUE (expr), ctx, ht); 14332 expr = TREE_CHAIN (expr); 14333 goto recursive_label; 14334 break; 14335 case TREE_VEC: 14336 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i) 14337 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht); 14338 break; 14339 default: 14340 break; 14341 } 14342 break; 14343 case tcc_expression: 14344 case tcc_reference: 14345 case tcc_comparison: 14346 case tcc_unary: 14347 case tcc_binary: 14348 case tcc_statement: 14349 case tcc_vl_exp: 14350 len = TREE_OPERAND_LENGTH (expr); 14351 for (i = 0; i < len; ++i) 14352 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht); 14353 break; 14354 case tcc_declaration: 14355 fold_checksum_tree (DECL_NAME (expr), ctx, ht); 14356 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht); 14357 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON)) 14358 { 14359 fold_checksum_tree (DECL_SIZE (expr), ctx, ht); 14360 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht); 14361 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht); 14362 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht); 14363 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht); 14364 } 14365 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS)) 14366 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht); 14367 14368 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON)) 14369 { 14370 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht); 14371 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht); 14372 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht); 14373 } 14374 break; 14375 case tcc_type: 14376 if (TREE_CODE (expr) == ENUMERAL_TYPE) 14377 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht); 14378 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht); 14379 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht); 14380 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht); 14381 fold_checksum_tree (TYPE_NAME (expr), ctx, ht); 14382 if (INTEGRAL_TYPE_P (expr) 14383 || SCALAR_FLOAT_TYPE_P (expr)) 14384 { 14385 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht); 14386 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht); 14387 } 14388 fold_checksum_tree 
(TYPE_MAIN_VARIANT (expr), ctx, ht); 14389 if (TREE_CODE (expr) == RECORD_TYPE 14390 || TREE_CODE (expr) == UNION_TYPE 14391 || TREE_CODE (expr) == QUAL_UNION_TYPE) 14392 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht); 14393 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht); 14394 break; 14395 default: 14396 break; 14397 } 14398 } 14399 14400 /* Helper function for outputting the checksum of a tree T. When 14401 debugging with gdb, you can "define mynext" to be "next" followed 14402 by "call debug_fold_checksum (op0)", then just trace down till the 14403 outputs differ. */ 14404 14405 void 14406 debug_fold_checksum (const_tree t) 14407 { 14408 int i; 14409 unsigned char checksum[16]; 14410 struct md5_ctx ctx; 14411 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14412 14413 md5_init_ctx (&ctx); 14414 fold_checksum_tree (t, &ctx, ht); 14415 md5_finish_ctx (&ctx, checksum); 14416 htab_empty (ht); 14417 14418 for (i = 0; i < 16; i++) 14419 fprintf (stderr, "%d ", checksum[i]); 14420 14421 fprintf (stderr, "\n"); 14422 } 14423 14424 #endif 14425 14426 /* Fold a unary tree expression with code CODE of type TYPE with an 14427 operand OP0. LOC is the location of the resulting expression. 14428 Return a folded expression if successful. Otherwise, return a tree 14429 expression with code CODE of type TYPE with an operand OP0. */ 14430 14431 tree 14432 fold_build1_stat_loc (location_t loc, 14433 enum tree_code code, tree type, tree op0 MEM_STAT_DECL) 14434 { 14435 tree tem; 14436 #ifdef ENABLE_FOLD_CHECKING 14437 unsigned char checksum_before[16], checksum_after[16]; 14438 struct md5_ctx ctx; 14439 htab_t ht; 14440 14441 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14442 md5_init_ctx (&ctx); 14443 fold_checksum_tree (op0, &ctx, ht); 14444 md5_finish_ctx (&ctx, checksum_before); 14445 htab_empty (ht); 14446 #endif 14447 14448 tem = fold_unary_loc (loc, code, type, op0); 14449 if (!tem) 14450 { 14451 tem = build1_stat (code, type, op0 PASS_MEM_STAT); 14452 SET_EXPR_LOCATION (tem, loc); 14453 } 14454 14455 #ifdef ENABLE_FOLD_CHECKING 14456 md5_init_ctx (&ctx); 14457 fold_checksum_tree (op0, &ctx, ht); 14458 md5_finish_ctx (&ctx, checksum_after); 14459 htab_delete (ht); 14460 14461 if (memcmp (checksum_before, checksum_after, 16)) 14462 fold_check_failed (op0, tem); 14463 #endif 14464 return tem; 14465 } 14466 14467 /* Fold a binary tree expression with code CODE of type TYPE with 14468 operands OP0 and OP1. LOC is the location of the resulting 14469 expression. Return a folded expression if successful. Otherwise, 14470 return a tree expression with code CODE of type TYPE with operands 14471 OP0 and OP1. 
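   For example, folding a PLUS_EXPR of an integer X and the constant 0
   can simply return X, so no new tree node is allocated in that case.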
*/ 14472 14473 tree 14474 fold_build2_stat_loc (location_t loc, 14475 enum tree_code code, tree type, tree op0, tree op1 14476 MEM_STAT_DECL) 14477 { 14478 tree tem; 14479 #ifdef ENABLE_FOLD_CHECKING 14480 unsigned char checksum_before_op0[16], 14481 checksum_before_op1[16], 14482 checksum_after_op0[16], 14483 checksum_after_op1[16]; 14484 struct md5_ctx ctx; 14485 htab_t ht; 14486 14487 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14488 md5_init_ctx (&ctx); 14489 fold_checksum_tree (op0, &ctx, ht); 14490 md5_finish_ctx (&ctx, checksum_before_op0); 14491 htab_empty (ht); 14492 14493 md5_init_ctx (&ctx); 14494 fold_checksum_tree (op1, &ctx, ht); 14495 md5_finish_ctx (&ctx, checksum_before_op1); 14496 htab_empty (ht); 14497 #endif 14498 14499 tem = fold_binary_loc (loc, code, type, op0, op1); 14500 if (!tem) 14501 { 14502 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); 14503 SET_EXPR_LOCATION (tem, loc); 14504 } 14505 14506 #ifdef ENABLE_FOLD_CHECKING 14507 md5_init_ctx (&ctx); 14508 fold_checksum_tree (op0, &ctx, ht); 14509 md5_finish_ctx (&ctx, checksum_after_op0); 14510 htab_empty (ht); 14511 14512 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14513 fold_check_failed (op0, tem); 14514 14515 md5_init_ctx (&ctx); 14516 fold_checksum_tree (op1, &ctx, ht); 14517 md5_finish_ctx (&ctx, checksum_after_op1); 14518 htab_delete (ht); 14519 14520 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14521 fold_check_failed (op1, tem); 14522 #endif 14523 return tem; 14524 } 14525 14526 /* Fold a ternary tree expression with code CODE of type TYPE with 14527 operands OP0, OP1, and OP2. Return a folded expression if 14528 successful. Otherwise, return a tree expression with code CODE of 14529 type TYPE with operands OP0, OP1, and OP2. 
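   Note that CALL_EXPRs are no longer ternary and are rejected here;
   they must go through fold_build_call_array_loc below instead.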
*/ 14530 14531 tree 14532 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, 14533 tree op0, tree op1, tree op2 MEM_STAT_DECL) 14534 { 14535 tree tem; 14536 #ifdef ENABLE_FOLD_CHECKING 14537 unsigned char checksum_before_op0[16], 14538 checksum_before_op1[16], 14539 checksum_before_op2[16], 14540 checksum_after_op0[16], 14541 checksum_after_op1[16], 14542 checksum_after_op2[16]; 14543 struct md5_ctx ctx; 14544 htab_t ht; 14545 14546 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14547 md5_init_ctx (&ctx); 14548 fold_checksum_tree (op0, &ctx, ht); 14549 md5_finish_ctx (&ctx, checksum_before_op0); 14550 htab_empty (ht); 14551 14552 md5_init_ctx (&ctx); 14553 fold_checksum_tree (op1, &ctx, ht); 14554 md5_finish_ctx (&ctx, checksum_before_op1); 14555 htab_empty (ht); 14556 14557 md5_init_ctx (&ctx); 14558 fold_checksum_tree (op2, &ctx, ht); 14559 md5_finish_ctx (&ctx, checksum_before_op2); 14560 htab_empty (ht); 14561 #endif 14562 14563 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 14564 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14565 if (!tem) 14566 { 14567 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); 14568 SET_EXPR_LOCATION (tem, loc); 14569 } 14570 14571 #ifdef ENABLE_FOLD_CHECKING 14572 md5_init_ctx (&ctx); 14573 fold_checksum_tree (op0, &ctx, ht); 14574 md5_finish_ctx (&ctx, checksum_after_op0); 14575 htab_empty (ht); 14576 14577 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14578 fold_check_failed (op0, tem); 14579 14580 md5_init_ctx (&ctx); 14581 fold_checksum_tree (op1, &ctx, ht); 14582 md5_finish_ctx (&ctx, checksum_after_op1); 14583 htab_empty (ht); 14584 14585 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14586 fold_check_failed (op1, tem); 14587 14588 md5_init_ctx (&ctx); 14589 fold_checksum_tree (op2, &ctx, ht); 14590 md5_finish_ctx (&ctx, checksum_after_op2); 14591 htab_delete (ht); 14592 14593 if (memcmp (checksum_before_op2, checksum_after_op2, 16)) 14594 fold_check_failed (op2, tem); 14595 #endif 14596 return tem; 14597 } 14598 14599 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS 14600 arguments in ARGARRAY, and a null static chain. 14601 Return a folded expression if successful. Otherwise, return a CALL_EXPR 14602 of type TYPE from the given operands as constructed by build_call_array. 
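   For example, a call to a built-in such as fabs with a constant
   argument can fold directly to a constant result.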
*/ 14603 14604 tree 14605 fold_build_call_array_loc (location_t loc, tree type, tree fn, 14606 int nargs, tree *argarray) 14607 { 14608 tree tem; 14609 #ifdef ENABLE_FOLD_CHECKING 14610 unsigned char checksum_before_fn[16], 14611 checksum_before_arglist[16], 14612 checksum_after_fn[16], 14613 checksum_after_arglist[16]; 14614 struct md5_ctx ctx; 14615 htab_t ht; 14616 int i; 14617 14618 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14619 md5_init_ctx (&ctx); 14620 fold_checksum_tree (fn, &ctx, ht); 14621 md5_finish_ctx (&ctx, checksum_before_fn); 14622 htab_empty (ht); 14623 14624 md5_init_ctx (&ctx); 14625 for (i = 0; i < nargs; i++) 14626 fold_checksum_tree (argarray[i], &ctx, ht); 14627 md5_finish_ctx (&ctx, checksum_before_arglist); 14628 htab_empty (ht); 14629 #endif 14630 14631 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray); 14632 14633 #ifdef ENABLE_FOLD_CHECKING 14634 md5_init_ctx (&ctx); 14635 fold_checksum_tree (fn, &ctx, ht); 14636 md5_finish_ctx (&ctx, checksum_after_fn); 14637 htab_empty (ht); 14638 14639 if (memcmp (checksum_before_fn, checksum_after_fn, 16)) 14640 fold_check_failed (fn, tem); 14641 14642 md5_init_ctx (&ctx); 14643 for (i = 0; i < nargs; i++) 14644 fold_checksum_tree (argarray[i], &ctx, ht); 14645 md5_finish_ctx (&ctx, checksum_after_arglist); 14646 htab_delete (ht); 14647 14648 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16)) 14649 fold_check_failed (NULL_TREE, tem); 14650 #endif 14651 return tem; 14652 } 14653 14654 /* Perform constant folding and related simplification of initializer 14655 expression EXPR. These behave identically to "fold_buildN" but ignore 14656 potential run-time traps and exceptions that fold must preserve. */ 14657 14658 #define START_FOLD_INIT \ 14659 int saved_signaling_nans = flag_signaling_nans;\ 14660 int saved_trapping_math = flag_trapping_math;\ 14661 int saved_rounding_math = flag_rounding_math;\ 14662 int saved_trapv = flag_trapv;\ 14663 int saved_folding_initializer = folding_initializer;\ 14664 flag_signaling_nans = 0;\ 14665 flag_trapping_math = 0;\ 14666 flag_rounding_math = 0;\ 14667 flag_trapv = 0;\ 14668 folding_initializer = 1; 14669 14670 #define END_FOLD_INIT \ 14671 flag_signaling_nans = saved_signaling_nans;\ 14672 flag_trapping_math = saved_trapping_math;\ 14673 flag_rounding_math = saved_rounding_math;\ 14674 flag_trapv = saved_trapv;\ 14675 folding_initializer = saved_folding_initializer; 14676 14677 tree 14678 fold_build1_initializer_loc (location_t loc, enum tree_code code, 14679 tree type, tree op) 14680 { 14681 tree result; 14682 START_FOLD_INIT; 14683 14684 result = fold_build1_loc (loc, code, type, op); 14685 14686 END_FOLD_INIT; 14687 return result; 14688 } 14689 14690 tree 14691 fold_build2_initializer_loc (location_t loc, enum tree_code code, 14692 tree type, tree op0, tree op1) 14693 { 14694 tree result; 14695 START_FOLD_INIT; 14696 14697 result = fold_build2_loc (loc, code, type, op0, op1); 14698 14699 END_FOLD_INIT; 14700 return result; 14701 } 14702 14703 tree 14704 fold_build3_initializer_loc (location_t loc, enum tree_code code, 14705 tree type, tree op0, tree op1, tree op2) 14706 { 14707 tree result; 14708 START_FOLD_INIT; 14709 14710 result = fold_build3_loc (loc, code, type, op0, op1, op2); 14711 14712 END_FOLD_INIT; 14713 return result; 14714 } 14715 14716 tree 14717 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn, 14718 int nargs, tree *argarray) 14719 { 14720 tree result; 14721 START_FOLD_INIT; 14722 14723 result 
    = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ...
*/ 14827 14828 case SAVE_EXPR: 14829 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); 14830 14831 case INTEGER_CST: 14832 if (TREE_CODE (bottom) != INTEGER_CST 14833 || integer_zerop (bottom) 14834 || (TYPE_UNSIGNED (type) 14835 && (tree_int_cst_sgn (top) < 0 14836 || tree_int_cst_sgn (bottom) < 0))) 14837 return 0; 14838 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, 14839 top, bottom, 0)); 14840 14841 default: 14842 return 0; 14843 } 14844 } 14845 14846 /* Return true if CODE or TYPE is known to be non-negative. */ 14847 14848 static bool 14849 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type) 14850 { 14851 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type)) 14852 && truth_value_p (code)) 14853 /* Truth values evaluate to 0 or 1, which is nonnegative unless we 14854 have a signed:1 type (where the value is -1 and 0). */ 14855 return true; 14856 return false; 14857 } 14858 14859 /* Return true if (CODE OP0) is known to be non-negative. If the return 14860 value is based on the assumption that signed overflow is undefined, 14861 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14862 *STRICT_OVERFLOW_P. */ 14863 14864 bool 14865 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, 14866 bool *strict_overflow_p) 14867 { 14868 if (TYPE_UNSIGNED (type)) 14869 return true; 14870 14871 switch (code) 14872 { 14873 case ABS_EXPR: 14874 /* We can't return 1 if flag_wrapv is set because 14875 ABS_EXPR<INT_MIN> = INT_MIN. */ 14876 if (!INTEGRAL_TYPE_P (type)) 14877 return true; 14878 if (TYPE_OVERFLOW_UNDEFINED (type)) 14879 { 14880 *strict_overflow_p = true; 14881 return true; 14882 } 14883 break; 14884 14885 case NON_LVALUE_EXPR: 14886 case FLOAT_EXPR: 14887 case FIX_TRUNC_EXPR: 14888 return tree_expr_nonnegative_warnv_p (op0, 14889 strict_overflow_p); 14890 14891 case NOP_EXPR: 14892 { 14893 tree inner_type = TREE_TYPE (op0); 14894 tree outer_type = type; 14895 14896 if (TREE_CODE (outer_type) == REAL_TYPE) 14897 { 14898 if (TREE_CODE (inner_type) == REAL_TYPE) 14899 return tree_expr_nonnegative_warnv_p (op0, 14900 strict_overflow_p); 14901 if (TREE_CODE (inner_type) == INTEGER_TYPE) 14902 { 14903 if (TYPE_UNSIGNED (inner_type)) 14904 return true; 14905 return tree_expr_nonnegative_warnv_p (op0, 14906 strict_overflow_p); 14907 } 14908 } 14909 else if (TREE_CODE (outer_type) == INTEGER_TYPE) 14910 { 14911 if (TREE_CODE (inner_type) == REAL_TYPE) 14912 return tree_expr_nonnegative_warnv_p (op0, 14913 strict_overflow_p); 14914 if (TREE_CODE (inner_type) == INTEGER_TYPE) 14915 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type) 14916 && TYPE_UNSIGNED (inner_type); 14917 } 14918 } 14919 break; 14920 14921 default: 14922 return tree_simple_nonnegative_warnv_p (code, type); 14923 } 14924 14925 /* We don't know sign of `t', so be conservative and return false. */ 14926 return false; 14927 } 14928 14929 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return 14930 value is based on the assumption that signed overflow is undefined, 14931 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14932 *STRICT_OVERFLOW_P. 
*/

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits narrower than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is less than the
         precision of the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ?
tree_int_cst_min_precision (op1, /*unsignedp=*/true) 15012 : TYPE_PRECISION (inner1); 15013 15014 return precision0 + precision1 < TYPE_PRECISION (type); 15015 } 15016 } 15017 return false; 15018 15019 case BIT_AND_EXPR: 15020 case MAX_EXPR: 15021 return (tree_expr_nonnegative_warnv_p (op0, 15022 strict_overflow_p) 15023 || tree_expr_nonnegative_warnv_p (op1, 15024 strict_overflow_p)); 15025 15026 case BIT_IOR_EXPR: 15027 case BIT_XOR_EXPR: 15028 case MIN_EXPR: 15029 case RDIV_EXPR: 15030 case TRUNC_DIV_EXPR: 15031 case CEIL_DIV_EXPR: 15032 case FLOOR_DIV_EXPR: 15033 case ROUND_DIV_EXPR: 15034 return (tree_expr_nonnegative_warnv_p (op0, 15035 strict_overflow_p) 15036 && tree_expr_nonnegative_warnv_p (op1, 15037 strict_overflow_p)); 15038 15039 case TRUNC_MOD_EXPR: 15040 case CEIL_MOD_EXPR: 15041 case FLOOR_MOD_EXPR: 15042 case ROUND_MOD_EXPR: 15043 return tree_expr_nonnegative_warnv_p (op0, 15044 strict_overflow_p); 15045 default: 15046 return tree_simple_nonnegative_warnv_p (code, type); 15047 } 15048 15049 /* We don't know sign of `t', so be conservative and return false. */ 15050 return false; 15051 } 15052 15053 /* Return true if T is known to be non-negative. If the return 15054 value is based on the assumption that signed overflow is undefined, 15055 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15056 *STRICT_OVERFLOW_P. */ 15057 15058 bool 15059 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15060 { 15061 if (TYPE_UNSIGNED (TREE_TYPE (t))) 15062 return true; 15063 15064 switch (TREE_CODE (t)) 15065 { 15066 case INTEGER_CST: 15067 return tree_int_cst_sgn (t) >= 0; 15068 15069 case REAL_CST: 15070 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 15071 15072 case FIXED_CST: 15073 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t)); 15074 15075 case COND_EXPR: 15076 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15077 strict_overflow_p) 15078 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2), 15079 strict_overflow_p)); 15080 default: 15081 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 15082 TREE_TYPE (t)); 15083 } 15084 /* We don't know sign of `t', so be conservative and return false. */ 15085 return false; 15086 } 15087 15088 /* Return true if T is known to be non-negative. If the return 15089 value is based on the assumption that signed overflow is undefined, 15090 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15091 *STRICT_OVERFLOW_P. */ 15092 15093 bool 15094 tree_call_nonnegative_warnv_p (tree type, tree fndecl, 15095 tree arg0, tree arg1, bool *strict_overflow_p) 15096 { 15097 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 15098 switch (DECL_FUNCTION_CODE (fndecl)) 15099 { 15100 CASE_FLT_FN (BUILT_IN_ACOS): 15101 CASE_FLT_FN (BUILT_IN_ACOSH): 15102 CASE_FLT_FN (BUILT_IN_CABS): 15103 CASE_FLT_FN (BUILT_IN_COSH): 15104 CASE_FLT_FN (BUILT_IN_ERFC): 15105 CASE_FLT_FN (BUILT_IN_EXP): 15106 CASE_FLT_FN (BUILT_IN_EXP10): 15107 CASE_FLT_FN (BUILT_IN_EXP2): 15108 CASE_FLT_FN (BUILT_IN_FABS): 15109 CASE_FLT_FN (BUILT_IN_FDIM): 15110 CASE_FLT_FN (BUILT_IN_HYPOT): 15111 CASE_FLT_FN (BUILT_IN_POW10): 15112 CASE_INT_FN (BUILT_IN_FFS): 15113 CASE_INT_FN (BUILT_IN_PARITY): 15114 CASE_INT_FN (BUILT_IN_POPCOUNT): 15115 case BUILT_IN_BSWAP32: 15116 case BUILT_IN_BSWAP64: 15117 /* Always true. */ 15118 return true; 15119 15120 CASE_FLT_FN (BUILT_IN_SQRT): 15121 /* sqrt(-0.0) is -0.0. 
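           Thus the result is known to be non-negative only when signed
           zeros are not honored or when the argument itself is known
           to be non-negative.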
*/ 15122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 15123 return true; 15124 return tree_expr_nonnegative_warnv_p (arg0, 15125 strict_overflow_p); 15126 15127 CASE_FLT_FN (BUILT_IN_ASINH): 15128 CASE_FLT_FN (BUILT_IN_ATAN): 15129 CASE_FLT_FN (BUILT_IN_ATANH): 15130 CASE_FLT_FN (BUILT_IN_CBRT): 15131 CASE_FLT_FN (BUILT_IN_CEIL): 15132 CASE_FLT_FN (BUILT_IN_ERF): 15133 CASE_FLT_FN (BUILT_IN_EXPM1): 15134 CASE_FLT_FN (BUILT_IN_FLOOR): 15135 CASE_FLT_FN (BUILT_IN_FMOD): 15136 CASE_FLT_FN (BUILT_IN_FREXP): 15137 CASE_FLT_FN (BUILT_IN_LCEIL): 15138 CASE_FLT_FN (BUILT_IN_LDEXP): 15139 CASE_FLT_FN (BUILT_IN_LFLOOR): 15140 CASE_FLT_FN (BUILT_IN_LLCEIL): 15141 CASE_FLT_FN (BUILT_IN_LLFLOOR): 15142 CASE_FLT_FN (BUILT_IN_LLRINT): 15143 CASE_FLT_FN (BUILT_IN_LLROUND): 15144 CASE_FLT_FN (BUILT_IN_LRINT): 15145 CASE_FLT_FN (BUILT_IN_LROUND): 15146 CASE_FLT_FN (BUILT_IN_MODF): 15147 CASE_FLT_FN (BUILT_IN_NEARBYINT): 15148 CASE_FLT_FN (BUILT_IN_RINT): 15149 CASE_FLT_FN (BUILT_IN_ROUND): 15150 CASE_FLT_FN (BUILT_IN_SCALB): 15151 CASE_FLT_FN (BUILT_IN_SCALBLN): 15152 CASE_FLT_FN (BUILT_IN_SCALBN): 15153 CASE_FLT_FN (BUILT_IN_SIGNBIT): 15154 CASE_FLT_FN (BUILT_IN_SIGNIFICAND): 15155 CASE_FLT_FN (BUILT_IN_SINH): 15156 CASE_FLT_FN (BUILT_IN_TANH): 15157 CASE_FLT_FN (BUILT_IN_TRUNC): 15158 /* True if the 1st argument is nonnegative. */ 15159 return tree_expr_nonnegative_warnv_p (arg0, 15160 strict_overflow_p); 15161 15162 CASE_FLT_FN (BUILT_IN_FMAX): 15163 /* True if the 1st OR 2nd arguments are nonnegative. */ 15164 return (tree_expr_nonnegative_warnv_p (arg0, 15165 strict_overflow_p) 15166 || (tree_expr_nonnegative_warnv_p (arg1, 15167 strict_overflow_p))); 15168 15169 CASE_FLT_FN (BUILT_IN_FMIN): 15170 /* True if the 1st AND 2nd arguments are nonnegative. */ 15171 return (tree_expr_nonnegative_warnv_p (arg0, 15172 strict_overflow_p) 15173 && (tree_expr_nonnegative_warnv_p (arg1, 15174 strict_overflow_p))); 15175 15176 CASE_FLT_FN (BUILT_IN_COPYSIGN): 15177 /* True if the 2nd argument is nonnegative. */ 15178 return tree_expr_nonnegative_warnv_p (arg1, 15179 strict_overflow_p); 15180 15181 CASE_FLT_FN (BUILT_IN_POWI): 15182 /* True if the 1st argument is nonnegative or the second 15183 argument is an even integer. */ 15184 if (TREE_CODE (arg1) == INTEGER_CST 15185 && (TREE_INT_CST_LOW (arg1) & 1) == 0) 15186 return true; 15187 return tree_expr_nonnegative_warnv_p (arg0, 15188 strict_overflow_p); 15189 15190 CASE_FLT_FN (BUILT_IN_POW): 15191 /* True if the 1st argument is nonnegative or the second 15192 argument is an even integer valued real. */ 15193 if (TREE_CODE (arg1) == REAL_CST) 15194 { 15195 REAL_VALUE_TYPE c; 15196 HOST_WIDE_INT n; 15197 15198 c = TREE_REAL_CST (arg1); 15199 n = real_to_integer (&c); 15200 if ((n & 1) == 0) 15201 { 15202 REAL_VALUE_TYPE cint; 15203 real_from_integer (&cint, VOIDmode, n, 15204 n < 0 ? -1 : 0, 0); 15205 if (real_identical (&c, &cint)) 15206 return true; 15207 } 15208 } 15209 return tree_expr_nonnegative_warnv_p (arg0, 15210 strict_overflow_p); 15211 15212 default: 15213 break; 15214 } 15215 return tree_simple_nonnegative_warnv_p (CALL_EXPR, 15216 type); 15217 } 15218 15219 /* Return true if T is known to be non-negative. If the return 15220 value is based on the assumption that signed overflow is undefined, 15221 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15222 *STRICT_OVERFLOW_P. 
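   This handles the codes that don't fit the unary/binary/single
   classification, such as TARGET_EXPR and CALL_EXPR; for a TARGET_EXPR
   we look at the value that is finally stored into the temporary slot.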
*/ 15223 15224 bool 15225 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15226 { 15227 enum tree_code code = TREE_CODE (t); 15228 if (TYPE_UNSIGNED (TREE_TYPE (t))) 15229 return true; 15230 15231 switch (code) 15232 { 15233 case TARGET_EXPR: 15234 { 15235 tree temp = TARGET_EXPR_SLOT (t); 15236 t = TARGET_EXPR_INITIAL (t); 15237 15238 /* If the initializer is non-void, then it's a normal expression 15239 that will be assigned to the slot. */ 15240 if (!VOID_TYPE_P (t)) 15241 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p); 15242 15243 /* Otherwise, the initializer sets the slot in some way. One common 15244 way is an assignment statement at the end of the initializer. */ 15245 while (1) 15246 { 15247 if (TREE_CODE (t) == BIND_EXPR) 15248 t = expr_last (BIND_EXPR_BODY (t)); 15249 else if (TREE_CODE (t) == TRY_FINALLY_EXPR 15250 || TREE_CODE (t) == TRY_CATCH_EXPR) 15251 t = expr_last (TREE_OPERAND (t, 0)); 15252 else if (TREE_CODE (t) == STATEMENT_LIST) 15253 t = expr_last (t); 15254 else 15255 break; 15256 } 15257 if (TREE_CODE (t) == MODIFY_EXPR 15258 && TREE_OPERAND (t, 0) == temp) 15259 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15260 strict_overflow_p); 15261 15262 return false; 15263 } 15264 15265 case CALL_EXPR: 15266 { 15267 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE; 15268 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE; 15269 15270 return tree_call_nonnegative_warnv_p (TREE_TYPE (t), 15271 get_callee_fndecl (t), 15272 arg0, 15273 arg1, 15274 strict_overflow_p); 15275 } 15276 case COMPOUND_EXPR: 15277 case MODIFY_EXPR: 15278 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15279 strict_overflow_p); 15280 case BIND_EXPR: 15281 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)), 15282 strict_overflow_p); 15283 case SAVE_EXPR: 15284 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 15285 strict_overflow_p); 15286 15287 default: 15288 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 15289 TREE_TYPE (t)); 15290 } 15291 15292 /* We don't know sign of `t', so be conservative and return false. */ 15293 return false; 15294 } 15295 15296 /* Return true if T is known to be non-negative. If the return 15297 value is based on the assumption that signed overflow is undefined, 15298 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15299 *STRICT_OVERFLOW_P. 
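   This is the main entry point: it dispatches on the class of T's code
   to tree_unary_nonnegative_warnv_p, tree_binary_nonnegative_warnv_p
   and tree_single_nonnegative_warnv_p above.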
*/ 15300 15301 bool 15302 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15303 { 15304 enum tree_code code; 15305 if (t == error_mark_node) 15306 return false; 15307 15308 code = TREE_CODE (t); 15309 switch (TREE_CODE_CLASS (code)) 15310 { 15311 case tcc_binary: 15312 case tcc_comparison: 15313 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15314 TREE_TYPE (t), 15315 TREE_OPERAND (t, 0), 15316 TREE_OPERAND (t, 1), 15317 strict_overflow_p); 15318 15319 case tcc_unary: 15320 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15321 TREE_TYPE (t), 15322 TREE_OPERAND (t, 0), 15323 strict_overflow_p); 15324 15325 case tcc_constant: 15326 case tcc_declaration: 15327 case tcc_reference: 15328 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15329 15330 default: 15331 break; 15332 } 15333 15334 switch (code) 15335 { 15336 case TRUTH_AND_EXPR: 15337 case TRUTH_OR_EXPR: 15338 case TRUTH_XOR_EXPR: 15339 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15340 TREE_TYPE (t), 15341 TREE_OPERAND (t, 0), 15342 TREE_OPERAND (t, 1), 15343 strict_overflow_p); 15344 case TRUTH_NOT_EXPR: 15345 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15346 TREE_TYPE (t), 15347 TREE_OPERAND (t, 0), 15348 strict_overflow_p); 15349 15350 case COND_EXPR: 15351 case CONSTRUCTOR: 15352 case OBJ_TYPE_REF: 15353 case ASSERT_EXPR: 15354 case ADDR_EXPR: 15355 case WITH_SIZE_EXPR: 15356 case SSA_NAME: 15357 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15358 15359 default: 15360 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p); 15361 } 15362 } 15363 15364 /* Return true if `t' is known to be non-negative. Handle warnings 15365 about undefined signed overflow. */ 15366 15367 bool 15368 tree_expr_nonnegative_p (tree t) 15369 { 15370 bool ret, strict_overflow_p; 15371 15372 strict_overflow_p = false; 15373 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p); 15374 if (strict_overflow_p) 15375 fold_overflow_warning (("assuming signed overflow does not occur when " 15376 "determining that expression is always " 15377 "non-negative"), 15378 WARN_STRICT_OVERFLOW_MISC); 15379 return ret; 15380 } 15381 15382 15383 /* Return true when (CODE OP0) is an address and is known to be nonzero. 15384 For floating point we further ensure that T is not denormal. 15385 Similar logic is present in nonzero_address in rtlanal.h. 15386 15387 If the return value is based on the assumption that signed overflow 15388 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15389 change *STRICT_OVERFLOW_P. */ 15390 15391 bool 15392 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0, 15393 bool *strict_overflow_p) 15394 { 15395 switch (code) 15396 { 15397 case ABS_EXPR: 15398 return tree_expr_nonzero_warnv_p (op0, 15399 strict_overflow_p); 15400 15401 case NOP_EXPR: 15402 { 15403 tree inner_type = TREE_TYPE (op0); 15404 tree outer_type = type; 15405 15406 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type) 15407 && tree_expr_nonzero_warnv_p (op0, 15408 strict_overflow_p)); 15409 } 15410 break; 15411 15412 case NON_LVALUE_EXPR: 15413 return tree_expr_nonzero_warnv_p (op0, 15414 strict_overflow_p); 15415 15416 default: 15417 break; 15418 } 15419 15420 return false; 15421 } 15422 15423 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero. 15424 For floating point we further ensure that T is not denormal. 15425 Similar logic is present in nonzero_address in rtlanal.h. 
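   For example, the sum of two non-negative operands, at least one of
   which is nonzero, is itself nonzero when signed overflow is
   undefined.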

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard to say
             anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          /* MIN yields one of its operands, so if both operands are
             nonzero the minimum is nonzero as well.  Without this
             return the two queries above were computed for nothing.  */
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.
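   For example, a nonzero INTEGER_CST qualifies, as does the address
   of a non-weak declaration when -fdelete-null-pointer-checks is in
   effect.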
/* Return true when the simple expression T is known to be nonzero.
   For an ADDR_EXPR this means the base object cannot reside at address
   zero.  Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
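/* Editorial usage sketch -- illustration only, not part of GCC.  For
   the ADDR_EXPR case above, the address of an ordinary definition is
   known to be nonzero, while a weak declaration may resolve to address
   zero.  With hypothetical source-level declarations:

     int defined_var;                              -- &defined_var != 0
     extern int weak_var __attribute__ ((weak));   -- &weak_var may be 0

   tree_single_nonzero_warnv_p returns true only for the former.  */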
/* Return true when T is known to be nonzero.  Similar logic is present
   in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is known to be nonzero.  Handle warnings about
   undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return the
   constant.  If the expression cannot be simplified to a constant,
   then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
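/* Editorial usage sketch -- illustration only, not part of GCC:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);

   Here `sum' is the INTEGER_CST 5.  Had either operand been a
   non-constant tree, fold_binary would not have produced a
   TREE_CONSTANT result and NULL_TREE would be returned instead.  */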
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return the
   constant.  If the expression cannot be simplified to a constant,
   then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
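/* Editorial note with a usage sketch -- illustration only, not part of
   GCC.  In the INTEGER_CST case above, negating the most negative value
   of a signed type wraps back to itself, so neg_double reports overflow
   and force_fit_type_double marks the result:

     tree m = TYPE_MIN_VALUE (integer_type_node);
     tree n = fold_negate_const (m, integer_type_node);
     -- TREE_OVERFLOW (n) is set, since -INT_MIN is not representable.

   For unsigned types the overflow flag is deliberately not set.  */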
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
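/* Editorial note -- illustration only, not part of GCC.  Integer
   constants are stored as a (low, high) pair of HOST_WIDE_INTs, so
   fold_not_const complements both halves and lets force_fit_type_double
   truncate the result to TYPE's precision.  E.g. for a 32-bit unsigned
   type:

     tree five = build_int_cst (unsigned_type_node, 5);
     tree n = fold_not_const (five, unsigned_type_node);
     -- n is the INTEGER_CST 0xfffffffa.  */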
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return has side effects, and likewise for the right-hand side of
     any MODIFY_EXPR inside the return.  If either has no side effects,
     we don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left-hand side of the MODIFY_EXPR because it
     should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
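/* Editorial note with a usage sketch -- illustration only, not part of
   GCC.  The NaN handling above means an ordered comparison against a
   NaN folds to false only when folding cannot hide a trap.  With a
   hypothetical REAL_CST NaN `nanv' (construction elided):

     fold_relational_const (LT_EXPR, boolean_type_node, nanv, nanv);
     -- yields boolean_false_node with -fno-trapping-math, but
     -- NULL_TREE (no folding) when flag_trapping_math is set, since
     -- LT on a NaN would raise an invalid-operation exception.  */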
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          /* Valid element indices are 0 .. TYPE_VECTOR_SUBPARTS - 1,
             so the bound check must be strict.  */
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type,
                                    TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
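/* Editorial note -- illustration only, not part of GCC.  Examples of
   the rewrites performed above, written in source syntax:

     *(float *)&v4sf_var      => BIT_FIELD_REF <v4sf_var, 32, 0>
     ((float *)&v4sf_var)[1]  => BIT_FIELD_REF <v4sf_var, 32, 32>
     ((double *)&cd_var)[1]   => __imag__ cd_var
     *(int *)&int_arr         => int_arr[0]

   where v4sf_var, cd_var and int_arr are hypothetical variables of
   vector, _Complex double and array-of-int type respectively.  */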
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
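/* Editorial usage sketch -- illustration only, not part of GCC.  When
   the value of an expression is discarded, only the side-effecting
   parts need to survive.  For a hypothetical statement

     (void) (x + foo ());

   the PLUS_EXPR's first operand has no side effects, so the loop above
   reduces the whole expression to the CALL_EXPR foo ().  An expression
   with no side effects at all reduces to integer_zero_node.  */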
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
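/* Editorial note with a usage sketch -- illustration only, not part of
   GCC.  For a power-of-two divisor the non-constant path above emits
   the familiar alignment idiom; with a hypothetical sizetype tree SIZE:

     round_up_loc (loc, size, 8)    =>  (size + 7) & -8
     round_down_loc (loc, size, 8)  =>  size & -8

   For other divisors the CEIL_DIV_EXPR/FLOOR_DIV_EXPR followed by
   MULT_EXPR pair is used instead.  */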
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
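/* Editorial usage sketch -- illustration only, not part of GCC.  Given
   hypothetical trees E1 = &a[3] and E2 = &a[1] for an array of 4-byte
   ints, both addresses share the core &a, so:

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       -- diff == 8, i.e. (3 - 1) * sizeof (int)

   If the offsets do not fold to a constant difference, the function
   returns false and *DIFF is left unspecified.  */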
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
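/* Editorial usage sketch -- illustration only, not part of GCC.  A
   caller that only cares about the magnitude of a value, such as the
   folder for fabs, can discard sign operations in its argument; for a
   hypothetical tree EXP representing (-x) * y:

     tree stripped = fold_strip_sign_ops (exp);
     -- stripped is x * y, since fabs ((-x) * y) == fabs (x * y)

   A NULL_TREE result means no sign operation could be removed.  */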