1 /* Expand builtin functions. 2 Copyright (C) 1988-2013 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify it under 7 the terms of the GNU General Public License as published by the Free 8 Software Foundation; either version 3, or (at your option) any later 9 version. 10 11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12 WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 #include "config.h" 21 #include "system.h" 22 #include "coretypes.h" 23 #include "tm.h" 24 #include "machmode.h" 25 #include "rtl.h" 26 #include "tree.h" 27 #include "realmpfr.h" 28 #include "gimple.h" 29 #include "flags.h" 30 #include "regs.h" 31 #include "hard-reg-set.h" 32 #include "except.h" 33 #include "function.h" 34 #include "insn-config.h" 35 #include "expr.h" 36 #include "optabs.h" 37 #include "libfuncs.h" 38 #include "recog.h" 39 #include "output.h" 40 #include "typeclass.h" 41 #include "predict.h" 42 #include "tm_p.h" 43 #include "target.h" 44 #include "langhooks.h" 45 #include "basic-block.h" 46 #include "tree-mudflap.h" 47 #include "tree-flow.h" 48 #include "value-prof.h" 49 #include "diagnostic-core.h" 50 #include "builtins.h" 51 52 53 #ifndef PAD_VARARGS_DOWN 54 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN 55 #endif 56 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t)); 57 58 struct target_builtins default_target_builtins; 59 #if SWITCHABLE_TARGET 60 struct target_builtins *this_target_builtins = &default_target_builtins; 61 #endif 62 63 /* Define the names of the builtin function types and codes. */ 64 const char *const built_in_class_names[4] 65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"}; 66 67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X, 68 const char * built_in_names[(int) END_BUILTINS] = 69 { 70 #include "builtins.def" 71 }; 72 #undef DEF_BUILTIN 73 74 /* Setup an array of _DECL trees, make sure each element is 75 initialized to NULL_TREE. 
*/ 76 builtin_info_type builtin_info; 77 78 static const char *c_getstr (tree); 79 static rtx c_readstr (const char *, enum machine_mode); 80 static int target_char_cast (tree, char *); 81 static rtx get_memory_rtx (tree, tree); 82 static int apply_args_size (void); 83 static int apply_result_size (void); 84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) 85 static rtx result_vector (int, rtx); 86 #endif 87 static void expand_builtin_update_setjmp_buf (rtx); 88 static void expand_builtin_prefetch (tree); 89 static rtx expand_builtin_apply_args (void); 90 static rtx expand_builtin_apply_args_1 (void); 91 static rtx expand_builtin_apply (rtx, rtx, rtx); 92 static void expand_builtin_return (rtx); 93 static enum type_class type_to_class (tree); 94 static rtx expand_builtin_classify_type (tree); 95 static void expand_errno_check (tree, rtx); 96 static rtx expand_builtin_mathfn (tree, rtx, rtx); 97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx); 98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx); 99 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx); 100 static rtx expand_builtin_interclass_mathfn (tree, rtx); 101 static rtx expand_builtin_sincos (tree); 102 static rtx expand_builtin_cexpi (tree, rtx); 103 static rtx expand_builtin_int_roundingfn (tree, rtx); 104 static rtx expand_builtin_int_roundingfn_2 (tree, rtx); 105 static rtx expand_builtin_next_arg (void); 106 static rtx expand_builtin_va_start (tree); 107 static rtx expand_builtin_va_end (tree); 108 static rtx expand_builtin_va_copy (tree); 109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode); 110 static rtx expand_builtin_strcmp (tree, rtx); 111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode); 112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode); 113 static rtx expand_builtin_memcpy (tree, rtx); 114 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode); 115 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, 116 enum machine_mode, int); 117 static rtx expand_builtin_strcpy (tree, rtx); 118 static rtx expand_builtin_strcpy_args (tree, tree, rtx); 119 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode); 120 static rtx expand_builtin_strncpy (tree, rtx); 121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode); 122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode); 123 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree); 124 static rtx expand_builtin_bzero (tree); 125 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode); 126 static rtx expand_builtin_alloca (tree, bool); 127 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab); 128 static rtx expand_builtin_frame_address (tree, tree); 129 static tree stabilize_va_list_loc (location_t, tree, int); 130 static rtx expand_builtin_expect (tree, rtx); 131 static tree fold_builtin_constant_p (tree); 132 static tree fold_builtin_expect (location_t, tree, tree); 133 static tree fold_builtin_classify_type (tree); 134 static tree fold_builtin_strlen (location_t, tree, tree); 135 static tree fold_builtin_inf (location_t, tree, int); 136 static tree fold_builtin_nan (tree, tree, int); 137 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...); 138 static bool validate_arg (const_tree, enum tree_code code); 139 static bool integer_valued_real_p (tree); 140 static tree fold_trunc_transparent_mathfn (location_t, tree, tree); 141 static bool readonly_data_expr 
(tree); 142 static rtx expand_builtin_fabs (tree, rtx, rtx); 143 static rtx expand_builtin_signbit (tree, rtx); 144 static tree fold_builtin_sqrt (location_t, tree, tree); 145 static tree fold_builtin_cbrt (location_t, tree, tree); 146 static tree fold_builtin_pow (location_t, tree, tree, tree, tree); 147 static tree fold_builtin_powi (location_t, tree, tree, tree, tree); 148 static tree fold_builtin_cos (location_t, tree, tree, tree); 149 static tree fold_builtin_cosh (location_t, tree, tree, tree); 150 static tree fold_builtin_tan (tree, tree); 151 static tree fold_builtin_trunc (location_t, tree, tree); 152 static tree fold_builtin_floor (location_t, tree, tree); 153 static tree fold_builtin_ceil (location_t, tree, tree); 154 static tree fold_builtin_round (location_t, tree, tree); 155 static tree fold_builtin_int_roundingfn (location_t, tree, tree); 156 static tree fold_builtin_bitop (tree, tree); 157 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int); 158 static tree fold_builtin_strchr (location_t, tree, tree, tree); 159 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree); 160 static tree fold_builtin_memcmp (location_t, tree, tree, tree); 161 static tree fold_builtin_strcmp (location_t, tree, tree); 162 static tree fold_builtin_strncmp (location_t, tree, tree, tree); 163 static tree fold_builtin_signbit (location_t, tree, tree); 164 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree); 165 static tree fold_builtin_isascii (location_t, tree); 166 static tree fold_builtin_toascii (location_t, tree); 167 static tree fold_builtin_isdigit (location_t, tree); 168 static tree fold_builtin_fabs (location_t, tree, tree); 169 static tree fold_builtin_abs (location_t, tree, tree); 170 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code, 171 enum tree_code); 172 static tree fold_builtin_n (location_t, tree, tree *, int, bool); 173 static tree fold_builtin_0 (location_t, tree, bool); 174 static tree fold_builtin_1 (location_t, tree, tree, bool); 175 static tree fold_builtin_2 (location_t, tree, tree, tree, bool); 176 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool); 177 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool); 178 static tree fold_builtin_varargs (location_t, tree, tree, bool); 179 180 static tree fold_builtin_strpbrk (location_t, tree, tree, tree); 181 static tree fold_builtin_strstr (location_t, tree, tree, tree); 182 static tree fold_builtin_strrchr (location_t, tree, tree, tree); 183 static tree fold_builtin_strncat (location_t, tree, tree, tree); 184 static tree fold_builtin_strspn (location_t, tree, tree); 185 static tree fold_builtin_strcspn (location_t, tree, tree); 186 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int); 187 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int); 188 189 static rtx expand_builtin_object_size (tree); 190 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode, 191 enum built_in_function); 192 static void maybe_emit_chk_warning (tree, enum built_in_function); 193 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function); 194 static void maybe_emit_free_warning (tree); 195 static tree fold_builtin_object_size (tree, tree); 196 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree); 197 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree); 198 static tree fold_builtin_sprintf_chk (location_t, 
tree, enum built_in_function); 199 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function); 200 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool, 201 enum built_in_function); 202 static bool init_target_chars (void); 203 204 static unsigned HOST_WIDE_INT target_newline; 205 static unsigned HOST_WIDE_INT target_percent; 206 static unsigned HOST_WIDE_INT target_c; 207 static unsigned HOST_WIDE_INT target_s; 208 static char target_percent_c[3]; 209 static char target_percent_s[3]; 210 static char target_percent_s_newline[4]; 211 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t), 212 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool); 213 static tree do_mpfr_arg2 (tree, tree, tree, 214 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t)); 215 static tree do_mpfr_arg3 (tree, tree, tree, tree, 216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t)); 217 static tree do_mpfr_sincos (tree, tree, tree); 218 static tree do_mpfr_bessel_n (tree, tree, tree, 219 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t), 220 const REAL_VALUE_TYPE *, bool); 221 static tree do_mpfr_remquo (tree, tree, tree); 222 static tree do_mpfr_lgamma_r (tree, tree, tree); 223 static void expand_builtin_sync_synchronize (void); 224 225 /* Return true if NAME starts with __builtin_ or __sync_. */ 226 227 static bool 228 is_builtin_name (const char *name) 229 { 230 if (strncmp (name, "__builtin_", 10) == 0) 231 return true; 232 if (strncmp (name, "__sync_", 7) == 0) 233 return true; 234 if (strncmp (name, "__atomic_", 9) == 0) 235 return true; 236 return false; 237 } 238 239 240 /* Return true if DECL is a function symbol representing a built-in. */ 241 242 bool 243 is_builtin_fn (tree decl) 244 { 245 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl); 246 } 247 248 249 /* Return true if NODE should be considered for inline expansion regardless 250 of the optimization level. This means whenever a function is invoked with 251 its "internal" name, which normally contains the prefix "__builtin". */ 252 253 static bool 254 called_as_built_in (tree node) 255 { 256 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since 257 we want the name used to call the function, not the name it 258 will have. */ 259 const char *name = IDENTIFIER_POINTER (DECL_NAME (node)); 260 return is_builtin_name (name); 261 } 262 263 /* Compute values M and N such that M divides (address of EXP - N) and such 264 that N < M. If these numbers can be determined, store M in alignp and N in 265 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to 266 *alignp and any bit-offset to *bitposp. 267 268 Note that the address (and thus the alignment) computed here is based 269 on the address to which a symbol resolves, whereas DECL_ALIGN is based 270 on the address at which an object is actually located. These two 271 addresses are not always the same. For example, on ARM targets, 272 the address &foo of a Thumb function foo() has the lowest bit set, 273 whereas foo() itself starts on an even address. 274 275 If ADDR_P is true we are taking the address of the memory reference EXP 276 and thus cannot rely on the access taking place. 
*/ 277 278 static bool 279 get_object_alignment_2 (tree exp, unsigned int *alignp, 280 unsigned HOST_WIDE_INT *bitposp, bool addr_p) 281 { 282 HOST_WIDE_INT bitsize, bitpos; 283 tree offset; 284 enum machine_mode mode; 285 int unsignedp, volatilep; 286 unsigned int inner, align = BITS_PER_UNIT; 287 bool known_alignment = false; 288 289 /* Get the innermost object and the constant (bitpos) and possibly 290 variable (offset) offset of the access. */ 291 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, 292 &mode, &unsignedp, &volatilep, true); 293 294 /* Extract alignment information from the innermost object and 295 possibly adjust bitpos and offset. */ 296 if (TREE_CODE (exp) == FUNCTION_DECL) 297 { 298 /* Function addresses can encode extra information besides their 299 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION 300 allows the low bit to be used as a virtual bit, we know 301 that the address itself must be at least 2-byte aligned. */ 302 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn) 303 align = 2 * BITS_PER_UNIT; 304 } 305 else if (TREE_CODE (exp) == LABEL_DECL) 306 ; 307 else if (TREE_CODE (exp) == CONST_DECL) 308 { 309 /* The alignment of a CONST_DECL is determined by its initializer. */ 310 exp = DECL_INITIAL (exp); 311 align = TYPE_ALIGN (TREE_TYPE (exp)); 312 #ifdef CONSTANT_ALIGNMENT 313 if (CONSTANT_CLASS_P (exp)) 314 align = (unsigned) CONSTANT_ALIGNMENT (exp, align); 315 #endif 316 known_alignment = true; 317 } 318 else if (DECL_P (exp)) 319 { 320 align = DECL_ALIGN (exp); 321 known_alignment = true; 322 } 323 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR) 324 { 325 align = TYPE_ALIGN (TREE_TYPE (exp)); 326 } 327 else if (TREE_CODE (exp) == INDIRECT_REF 328 || TREE_CODE (exp) == MEM_REF 329 || TREE_CODE (exp) == TARGET_MEM_REF) 330 { 331 tree addr = TREE_OPERAND (exp, 0); 332 unsigned ptr_align; 333 unsigned HOST_WIDE_INT ptr_bitpos; 334 335 if (TREE_CODE (addr) == BIT_AND_EXPR 336 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST) 337 { 338 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)) 339 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))); 340 align *= BITS_PER_UNIT; 341 addr = TREE_OPERAND (addr, 0); 342 } 343 344 known_alignment 345 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos); 346 align = MAX (ptr_align, align); 347 348 /* The alignment of the pointer operand in a TARGET_MEM_REF 349 has to take the variable offset parts into account. */ 350 if (TREE_CODE (exp) == TARGET_MEM_REF) 351 { 352 if (TMR_INDEX (exp)) 353 { 354 unsigned HOST_WIDE_INT step = 1; 355 if (TMR_STEP (exp)) 356 step = TREE_INT_CST_LOW (TMR_STEP (exp)); 357 align = MIN (align, (step & -step) * BITS_PER_UNIT); 358 } 359 if (TMR_INDEX2 (exp)) 360 align = BITS_PER_UNIT; 361 known_alignment = false; 362 } 363 364 /* When EXP is an actual memory reference then we can use 365 TYPE_ALIGN of a pointer indirection to derive alignment. 366 Do so only if get_pointer_alignment_1 did not reveal absolute 367 alignment knowledge and if using that alignment would 368 improve the situation. */ 369 if (!addr_p && !known_alignment 370 && TYPE_ALIGN (TREE_TYPE (exp)) > align) 371 align = TYPE_ALIGN (TREE_TYPE (exp)); 372 else 373 { 374 /* Else adjust bitpos accordingly. 
*/ 375 bitpos += ptr_bitpos; 376 if (TREE_CODE (exp) == MEM_REF 377 || TREE_CODE (exp) == TARGET_MEM_REF) 378 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT; 379 } 380 } 381 else if (TREE_CODE (exp) == STRING_CST) 382 { 383 /* STRING_CST are the only constant objects we allow to be not 384 wrapped inside a CONST_DECL. */ 385 align = TYPE_ALIGN (TREE_TYPE (exp)); 386 #ifdef CONSTANT_ALIGNMENT 387 if (CONSTANT_CLASS_P (exp)) 388 align = (unsigned) CONSTANT_ALIGNMENT (exp, align); 389 #endif 390 known_alignment = true; 391 } 392 393 /* If there is a non-constant offset part extract the maximum 394 alignment that can prevail. */ 395 inner = ~0U; 396 while (offset) 397 { 398 tree next_offset; 399 400 if (TREE_CODE (offset) == PLUS_EXPR) 401 { 402 next_offset = TREE_OPERAND (offset, 0); 403 offset = TREE_OPERAND (offset, 1); 404 } 405 else 406 next_offset = NULL; 407 if (host_integerp (offset, 1)) 408 { 409 /* Any overflow in calculating offset_bits won't change 410 the alignment. */ 411 unsigned offset_bits 412 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT); 413 414 if (offset_bits) 415 inner = MIN (inner, (offset_bits & -offset_bits)); 416 } 417 else if (TREE_CODE (offset) == MULT_EXPR 418 && host_integerp (TREE_OPERAND (offset, 1), 1)) 419 { 420 /* Any overflow in calculating offset_factor won't change 421 the alignment. */ 422 unsigned offset_factor 423 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1) 424 * BITS_PER_UNIT); 425 426 if (offset_factor) 427 inner = MIN (inner, (offset_factor & -offset_factor)); 428 } 429 else 430 { 431 inner = MIN (inner, BITS_PER_UNIT); 432 break; 433 } 434 offset = next_offset; 435 } 436 /* Alignment is innermost object alignment adjusted by the constant 437 and non-constant offset parts. */ 438 align = MIN (align, inner); 439 440 *alignp = align; 441 *bitposp = bitpos & (*alignp - 1); 442 return known_alignment; 443 } 444 445 /* For a memory reference expression EXP compute values M and N such that M 446 divides (&EXP - N) and such that N < M. If these numbers can be determined, 447 store M in alignp and N in *BITPOSP and return true. Otherwise return false 448 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */ 449 450 bool 451 get_object_alignment_1 (tree exp, unsigned int *alignp, 452 unsigned HOST_WIDE_INT *bitposp) 453 { 454 return get_object_alignment_2 (exp, alignp, bitposp, false); 455 } 456 457 /* Return the alignment in bits of EXP, an object. */ 458 459 unsigned int 460 get_object_alignment (tree exp) 461 { 462 unsigned HOST_WIDE_INT bitpos = 0; 463 unsigned int align; 464 465 get_object_alignment_1 (exp, &align, &bitpos); 466 467 /* align and bitpos now specify known low bits of the pointer. 468 ptr & (align - 1) == bitpos. */ 469 470 if (bitpos != 0) 471 align = (bitpos & -bitpos); 472 return align; 473 } 474 475 /* For a pointer valued expression EXP compute values M and N such that M 476 divides (EXP - N) and such that N < M. If these numbers can be determined, 477 store M in alignp and N in *BITPOSP and return true. Return false if 478 the results are just a conservative approximation. 479 480 If EXP is not a pointer, false is returned too. 
*/ 481 482 bool 483 get_pointer_alignment_1 (tree exp, unsigned int *alignp, 484 unsigned HOST_WIDE_INT *bitposp) 485 { 486 STRIP_NOPS (exp); 487 488 if (TREE_CODE (exp) == ADDR_EXPR) 489 return get_object_alignment_2 (TREE_OPERAND (exp, 0), 490 alignp, bitposp, true); 491 else if (TREE_CODE (exp) == SSA_NAME 492 && POINTER_TYPE_P (TREE_TYPE (exp))) 493 { 494 unsigned int ptr_align, ptr_misalign; 495 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp); 496 497 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign)) 498 { 499 *bitposp = ptr_misalign * BITS_PER_UNIT; 500 *alignp = ptr_align * BITS_PER_UNIT; 501 /* We cannot really tell whether this result is an approximation. */ 502 return true; 503 } 504 else 505 { 506 *bitposp = 0; 507 *alignp = BITS_PER_UNIT; 508 return false; 509 } 510 } 511 else if (TREE_CODE (exp) == INTEGER_CST) 512 { 513 *alignp = BIGGEST_ALIGNMENT; 514 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT) 515 & (BIGGEST_ALIGNMENT - 1)); 516 return true; 517 } 518 519 *bitposp = 0; 520 *alignp = BITS_PER_UNIT; 521 return false; 522 } 523 524 /* Return the alignment in bits of EXP, a pointer valued expression. 525 The alignment returned is, by default, the alignment of the thing that 526 EXP points to. If it is not a POINTER_TYPE, 0 is returned. 527 528 Otherwise, look at the expression to see if we can do better, i.e., if the 529 expression is actually pointing at an object whose alignment is tighter. */ 530 531 unsigned int 532 get_pointer_alignment (tree exp) 533 { 534 unsigned HOST_WIDE_INT bitpos = 0; 535 unsigned int align; 536 537 get_pointer_alignment_1 (exp, &align, &bitpos); 538 539 /* align and bitpos now specify known low bits of the pointer. 540 ptr & (align - 1) == bitpos. */ 541 542 if (bitpos != 0) 543 align = (bitpos & -bitpos); 544 545 return align; 546 } 547 548 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right 549 way, because it could contain a zero byte in the middle. 550 TREE_STRING_LENGTH is the size of the character array, not the string. 551 552 ONLY_VALUE should be nonzero if the result is not going to be emitted 553 into the instruction stream and zero if it is going to be expanded. 554 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3 555 is returned, otherwise NULL, since 556 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not 557 evaluate the side-effects. 558 559 The value returned is of type `ssizetype'. 560 561 Unfortunately, string_constant can't access the values of const char 562 arrays with initializers, so neither can we do so here. 
*/ 563 564 tree 565 c_strlen (tree src, int only_value) 566 { 567 tree offset_node; 568 HOST_WIDE_INT offset; 569 int max; 570 const char *ptr; 571 location_t loc; 572 573 STRIP_NOPS (src); 574 if (TREE_CODE (src) == COND_EXPR 575 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) 576 { 577 tree len1, len2; 578 579 len1 = c_strlen (TREE_OPERAND (src, 1), only_value); 580 len2 = c_strlen (TREE_OPERAND (src, 2), only_value); 581 if (tree_int_cst_equal (len1, len2)) 582 return len1; 583 } 584 585 if (TREE_CODE (src) == COMPOUND_EXPR 586 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) 587 return c_strlen (TREE_OPERAND (src, 1), only_value); 588 589 loc = EXPR_LOC_OR_HERE (src); 590 591 src = string_constant (src, &offset_node); 592 if (src == 0) 593 return NULL_TREE; 594 595 max = TREE_STRING_LENGTH (src) - 1; 596 ptr = TREE_STRING_POINTER (src); 597 598 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) 599 { 600 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't 601 compute the offset to the following null if we don't know where to 602 start searching for it. */ 603 int i; 604 605 for (i = 0; i < max; i++) 606 if (ptr[i] == 0) 607 return NULL_TREE; 608 609 /* We don't know the starting offset, but we do know that the string 610 has no internal zero bytes. We can assume that the offset falls 611 within the bounds of the string; otherwise, the programmer deserves 612 what he gets. Subtract the offset from the length of the string, 613 and return that. This would perhaps not be valid if we were dealing 614 with named arrays in addition to literal string constants. */ 615 616 return size_diffop_loc (loc, size_int (max), offset_node); 617 } 618 619 /* We have a known offset into the string. Start searching there for 620 a null character if we can represent it as a single HOST_WIDE_INT. */ 621 if (offset_node == 0) 622 offset = 0; 623 else if (! host_integerp (offset_node, 0)) 624 offset = -1; 625 else 626 offset = tree_low_cst (offset_node, 0); 627 628 /* If the offset is known to be out of bounds, warn, and call strlen at 629 runtime. */ 630 if (offset < 0 || offset > max) 631 { 632 /* Suppress multiple warnings for propagated constant strings. */ 633 if (! TREE_NO_WARNING (src)) 634 { 635 warning_at (loc, 0, "offset outside bounds of constant string"); 636 TREE_NO_WARNING (src) = 1; 637 } 638 return NULL_TREE; 639 } 640 641 /* Use strlen to search for the first zero byte. Since any strings 642 constructed with build_string will have nulls appended, we win even 643 if we get handed something like (char[4])"abcd". 644 645 Since OFFSET is our starting index into the string, no further 646 calculation is needed. */ 647 return ssize_int (strlen (ptr + offset)); 648 } 649 650 /* Return a char pointer for a C string if it is a string constant 651 or sum of string constant and integer constant. */ 652 653 static const char * 654 c_getstr (tree src) 655 { 656 tree offset_node; 657 658 src = string_constant (src, &offset_node); 659 if (src == 0) 660 return 0; 661 662 if (offset_node == 0) 663 return TREE_STRING_POINTER (src); 664 else if (!host_integerp (offset_node, 1) 665 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0) 666 return 0; 667 668 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1); 669 } 670 671 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading 672 GET_MODE_BITSIZE (MODE) bits from string constant STR. 
*/ 673 674 static rtx 675 c_readstr (const char *str, enum machine_mode mode) 676 { 677 HOST_WIDE_INT c[2]; 678 HOST_WIDE_INT ch; 679 unsigned int i, j; 680 681 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT); 682 683 c[0] = 0; 684 c[1] = 0; 685 ch = 1; 686 for (i = 0; i < GET_MODE_SIZE (mode); i++) 687 { 688 j = i; 689 if (WORDS_BIG_ENDIAN) 690 j = GET_MODE_SIZE (mode) - i - 1; 691 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN 692 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD) 693 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1; 694 j *= BITS_PER_UNIT; 695 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT); 696 697 if (ch) 698 ch = (unsigned char) str[i]; 699 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT); 700 } 701 return immed_double_const (c[0], c[1], mode); 702 } 703 704 /* Cast a target constant CST to target CHAR and if that value fits into 705 host char type, return zero and put that value into variable pointed to by 706 P. */ 707 708 static int 709 target_char_cast (tree cst, char *p) 710 { 711 unsigned HOST_WIDE_INT val, hostval; 712 713 if (TREE_CODE (cst) != INTEGER_CST 714 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT) 715 return 1; 716 717 val = TREE_INT_CST_LOW (cst); 718 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT) 719 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1; 720 721 hostval = val; 722 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT) 723 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1; 724 725 if (val != hostval) 726 return 1; 727 728 *p = hostval; 729 return 0; 730 } 731 732 /* Similar to save_expr, but assumes that arbitrary code is not executed 733 in between the multiple evaluations. In particular, we assume that a 734 non-addressable local variable will not be modified. */ 735 736 static tree 737 builtin_save_expr (tree exp) 738 { 739 if (TREE_CODE (exp) == SSA_NAME 740 || (TREE_ADDRESSABLE (exp) == 0 741 && (TREE_CODE (exp) == PARM_DECL 742 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))) 743 return exp; 744 745 return save_expr (exp); 746 } 747 748 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT 749 times to get the address of either a higher stack frame, or a return 750 address located within it (depending on FNDECL_CODE). */ 751 752 static rtx 753 expand_builtin_return_addr (enum built_in_function fndecl_code, int count) 754 { 755 int i; 756 757 #ifdef INITIAL_FRAME_ADDRESS_RTX 758 rtx tem = INITIAL_FRAME_ADDRESS_RTX; 759 #else 760 rtx tem; 761 762 /* For a zero count with __builtin_return_address, we don't care what 763 frame address we return, because target-specific definitions will 764 override us. Therefore frame pointer elimination is OK, and using 765 the soft frame pointer is OK. 766 767 For a nonzero count, or a zero count with __builtin_frame_address, 768 we require a stable offset from the current frame pointer to the 769 previous one, so we must use the hard frame pointer, and 770 we must disable frame pointer elimination. */ 771 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS) 772 tem = frame_pointer_rtx; 773 else 774 { 775 tem = hard_frame_pointer_rtx; 776 777 /* Tell reload not to eliminate the frame pointer. */ 778 crtl->accesses_prior_frames = 1; 779 } 780 #endif 781 782 /* Some machines need special handling before we can access 783 arbitrary frames. For example, on the SPARC, we must first flush 784 all register windows to the stack. 
*/ 785 #ifdef SETUP_FRAME_ADDRESSES 786 if (count > 0) 787 SETUP_FRAME_ADDRESSES (); 788 #endif 789 790 /* On the SPARC, the return address is not in the frame, it is in a 791 register. There is no way to access it off of the current frame 792 pointer, but it can be accessed off the previous frame pointer by 793 reading the value from the register window save area. */ 794 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME 795 if (fndecl_code == BUILT_IN_RETURN_ADDRESS) 796 count--; 797 #endif 798 799 /* Scan back COUNT frames to the specified frame. */ 800 for (i = 0; i < count; i++) 801 { 802 /* Assume the dynamic chain pointer is in the word that the 803 frame address points to, unless otherwise specified. */ 804 #ifdef DYNAMIC_CHAIN_ADDRESS 805 tem = DYNAMIC_CHAIN_ADDRESS (tem); 806 #endif 807 tem = memory_address (Pmode, tem); 808 tem = gen_frame_mem (Pmode, tem); 809 tem = copy_to_reg (tem); 810 } 811 812 /* For __builtin_frame_address, return what we've got. But, on 813 the SPARC for example, we may have to add a bias. */ 814 if (fndecl_code == BUILT_IN_FRAME_ADDRESS) 815 #ifdef FRAME_ADDR_RTX 816 return FRAME_ADDR_RTX (tem); 817 #else 818 return tem; 819 #endif 820 821 /* For __builtin_return_address, get the return address from that frame. */ 822 #ifdef RETURN_ADDR_RTX 823 tem = RETURN_ADDR_RTX (count, tem); 824 #else 825 tem = memory_address (Pmode, 826 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode))); 827 tem = gen_frame_mem (Pmode, tem); 828 #endif 829 return tem; 830 } 831 832 /* Alias set used for setjmp buffer. */ 833 static alias_set_type setjmp_alias_set = -1; 834 835 /* Construct the leading half of a __builtin_setjmp call. Control will 836 return to RECEIVER_LABEL. This is also called directly by the SJLJ 837 exception handling code. */ 838 839 void 840 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label) 841 { 842 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); 843 rtx stack_save; 844 rtx mem; 845 846 if (setjmp_alias_set == -1) 847 setjmp_alias_set = new_alias_set (); 848 849 buf_addr = convert_memory_address (Pmode, buf_addr); 850 851 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX)); 852 853 /* We store the frame pointer and the address of receiver_label in 854 the buffer and use the rest of it for the stack save area, which 855 is machine-dependent. */ 856 857 mem = gen_rtx_MEM (Pmode, buf_addr); 858 set_mem_alias_set (mem, setjmp_alias_set); 859 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ()); 860 861 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr, 862 GET_MODE_SIZE (Pmode))), 863 set_mem_alias_set (mem, setjmp_alias_set); 864 865 emit_move_insn (validize_mem (mem), 866 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label))); 867 868 stack_save = gen_rtx_MEM (sa_mode, 869 plus_constant (Pmode, buf_addr, 870 2 * GET_MODE_SIZE (Pmode))); 871 set_mem_alias_set (stack_save, setjmp_alias_set); 872 emit_stack_save (SAVE_NONLOCAL, &stack_save); 873 874 /* If there is further processing to do, do it. */ 875 #ifdef HAVE_builtin_setjmp_setup 876 if (HAVE_builtin_setjmp_setup) 877 emit_insn (gen_builtin_setjmp_setup (buf_addr)); 878 #endif 879 880 /* We have a nonlocal label. */ 881 cfun->has_nonlocal_label = 1; 882 } 883 884 /* Construct the trailing part of a __builtin_setjmp call. This is 885 also called directly by the SJLJ exception handling code. 
*/ 886 887 void 888 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED) 889 { 890 rtx chain; 891 892 /* Clobber the FP when we get here, so we have to make sure it's 893 marked as used by this function. */ 894 emit_use (hard_frame_pointer_rtx); 895 896 /* Mark the static chain as clobbered here so life information 897 doesn't get messed up for it. */ 898 chain = targetm.calls.static_chain (current_function_decl, true); 899 if (chain && REG_P (chain)) 900 emit_clobber (chain); 901 902 /* Now put in the code to restore the frame pointer, and argument 903 pointer, if needed. */ 904 #ifdef HAVE_nonlocal_goto 905 if (! HAVE_nonlocal_goto) 906 #endif 907 { 908 /* First adjust our frame pointer to its actual value. It was 909 previously set to the start of the virtual area corresponding to 910 the stacked variables when we branched here and now needs to be 911 adjusted to the actual hardware fp value. 912 913 Assignments to virtual registers are converted by 914 instantiate_virtual_regs into the corresponding assignment 915 to the underlying register (fp in this case) that makes 916 the original assignment true. 917 So the following insn will actually be decrementing fp by 918 STARTING_FRAME_OFFSET. */ 919 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx); 920 921 /* Restoring the frame pointer also modifies the hard frame pointer. 922 Mark it used (so that the previous assignment remains live once 923 the frame pointer is eliminated) and clobbered (to represent the 924 implicit update from the assignment). */ 925 emit_use (hard_frame_pointer_rtx); 926 emit_clobber (hard_frame_pointer_rtx); 927 } 928 929 #if !HARD_FRAME_POINTER_IS_ARG_POINTER 930 if (fixed_regs[ARG_POINTER_REGNUM]) 931 { 932 #ifdef ELIMINABLE_REGS 933 size_t i; 934 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS; 935 936 for (i = 0; i < ARRAY_SIZE (elim_regs); i++) 937 if (elim_regs[i].from == ARG_POINTER_REGNUM 938 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) 939 break; 940 941 if (i == ARRAY_SIZE (elim_regs)) 942 #endif 943 { 944 /* Now restore our arg pointer from the address at which it 945 was saved in our stack frame. */ 946 emit_move_insn (crtl->args.internal_arg_pointer, 947 copy_to_reg (get_arg_pointer_save_area ())); 948 } 949 } 950 #endif 951 952 #ifdef HAVE_builtin_setjmp_receiver 953 if (HAVE_builtin_setjmp_receiver) 954 emit_insn (gen_builtin_setjmp_receiver (receiver_label)); 955 else 956 #endif 957 #ifdef HAVE_nonlocal_goto_receiver 958 if (HAVE_nonlocal_goto_receiver) 959 emit_insn (gen_nonlocal_goto_receiver ()); 960 else 961 #endif 962 { /* Nothing */ } 963 964 /* We must not allow the code we just generated to be reordered by 965 scheduling. Specifically, the update of the frame pointer must 966 happen immediately, not later. */ 967 emit_insn (gen_blockage ()); 968 } 969 970 /* __builtin_longjmp is passed a pointer to an array of five words (not 971 all will be used on all machines). It operates similarly to the C 972 library function of the same name, but is more efficient. Much of 973 the code below is copied from the handling of non-local gotos. 
*/ 974 975 static void 976 expand_builtin_longjmp (rtx buf_addr, rtx value) 977 { 978 rtx fp, lab, stack, insn, last; 979 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); 980 981 /* DRAP is needed for stack realign if longjmp is expanded to current 982 function */ 983 if (SUPPORTS_STACK_ALIGNMENT) 984 crtl->need_drap = true; 985 986 if (setjmp_alias_set == -1) 987 setjmp_alias_set = new_alias_set (); 988 989 buf_addr = convert_memory_address (Pmode, buf_addr); 990 991 buf_addr = force_reg (Pmode, buf_addr); 992 993 /* We require that the user must pass a second argument of 1, because 994 that is what builtin_setjmp will return. */ 995 gcc_assert (value == const1_rtx); 996 997 last = get_last_insn (); 998 #ifdef HAVE_builtin_longjmp 999 if (HAVE_builtin_longjmp) 1000 emit_insn (gen_builtin_longjmp (buf_addr)); 1001 else 1002 #endif 1003 { 1004 fp = gen_rtx_MEM (Pmode, buf_addr); 1005 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr, 1006 GET_MODE_SIZE (Pmode))); 1007 1008 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr, 1009 2 * GET_MODE_SIZE (Pmode))); 1010 set_mem_alias_set (fp, setjmp_alias_set); 1011 set_mem_alias_set (lab, setjmp_alias_set); 1012 set_mem_alias_set (stack, setjmp_alias_set); 1013 1014 /* Pick up FP, label, and SP from the block and jump. This code is 1015 from expand_goto in stmt.c; see there for detailed comments. */ 1016 #ifdef HAVE_nonlocal_goto 1017 if (HAVE_nonlocal_goto) 1018 /* We have to pass a value to the nonlocal_goto pattern that will 1019 get copied into the static_chain pointer, but it does not matter 1020 what that value is, because builtin_setjmp does not use it. */ 1021 emit_insn (gen_nonlocal_goto (value, lab, stack, fp)); 1022 else 1023 #endif 1024 { 1025 lab = copy_to_reg (lab); 1026 1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))); 1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx)); 1029 1030 emit_move_insn (hard_frame_pointer_rtx, fp); 1031 emit_stack_restore (SAVE_NONLOCAL, stack); 1032 1033 emit_use (hard_frame_pointer_rtx); 1034 emit_use (stack_pointer_rtx); 1035 emit_indirect_jump (lab); 1036 } 1037 } 1038 1039 /* Search backwards and mark the jump insn as a non-local goto. 1040 Note that this precludes the use of __builtin_longjmp to a 1041 __builtin_setjmp target in the same function. However, we've 1042 already cautioned the user that these functions are for 1043 internal exception handling use only. */ 1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) 1045 { 1046 gcc_assert (insn != last); 1047 1048 if (JUMP_P (insn)) 1049 { 1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx); 1051 break; 1052 } 1053 else if (CALL_P (insn)) 1054 break; 1055 } 1056 } 1057 1058 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label 1059 and the address of the save area. */ 1060 1061 static rtx 1062 expand_builtin_nonlocal_goto (tree exp) 1063 { 1064 tree t_label, t_save_area; 1065 rtx r_label, r_save_area, r_fp, r_sp, insn; 1066 1067 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) 1068 return NULL_RTX; 1069 1070 t_label = CALL_EXPR_ARG (exp, 0); 1071 t_save_area = CALL_EXPR_ARG (exp, 1); 1072 1073 r_label = expand_normal (t_label); 1074 r_label = convert_memory_address (Pmode, r_label); 1075 r_save_area = expand_normal (t_save_area); 1076 r_save_area = convert_memory_address (Pmode, r_save_area); 1077 /* Copy the address of the save location to a register just in case it was 1078 based on the frame pointer. 
*/ 1079 r_save_area = copy_to_reg (r_save_area); 1080 r_fp = gen_rtx_MEM (Pmode, r_save_area); 1081 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL), 1082 plus_constant (Pmode, r_save_area, 1083 GET_MODE_SIZE (Pmode))); 1084 1085 crtl->has_nonlocal_goto = 1; 1086 1087 #ifdef HAVE_nonlocal_goto 1088 /* ??? We no longer need to pass the static chain value, afaik. */ 1089 if (HAVE_nonlocal_goto) 1090 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp)); 1091 else 1092 #endif 1093 { 1094 r_label = copy_to_reg (r_label); 1095 1096 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))); 1097 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx)); 1098 1099 /* Restore frame pointer for containing function. */ 1100 emit_move_insn (hard_frame_pointer_rtx, r_fp); 1101 emit_stack_restore (SAVE_NONLOCAL, r_sp); 1102 1103 /* USE of hard_frame_pointer_rtx added for consistency; 1104 not clear if really needed. */ 1105 emit_use (hard_frame_pointer_rtx); 1106 emit_use (stack_pointer_rtx); 1107 1108 /* If the architecture is using a GP register, we must 1109 conservatively assume that the target function makes use of it. 1110 The prologue of functions with nonlocal gotos must therefore 1111 initialize the GP register to the appropriate value, and we 1112 must then make sure that this value is live at the point 1113 of the jump. (Note that this doesn't necessarily apply 1114 to targets with a nonlocal_goto pattern; they are free 1115 to implement it in their own way. Note also that this is 1116 a no-op if the GP register is a global invariant.) */ 1117 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM 1118 && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) 1119 emit_use (pic_offset_table_rtx); 1120 1121 emit_indirect_jump (r_label); 1122 } 1123 1124 /* Search backwards to the jump insn and mark it as a 1125 non-local goto. */ 1126 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) 1127 { 1128 if (JUMP_P (insn)) 1129 { 1130 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx); 1131 break; 1132 } 1133 else if (CALL_P (insn)) 1134 break; 1135 } 1136 1137 return const0_rtx; 1138 } 1139 1140 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words 1141 (not all will be used on all machines) that was passed to __builtin_setjmp. 1142 It updates the stack pointer in that block to correspond to the current 1143 stack pointer. */ 1144 1145 static void 1146 expand_builtin_update_setjmp_buf (rtx buf_addr) 1147 { 1148 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); 1149 rtx stack_save 1150 = gen_rtx_MEM (sa_mode, 1151 memory_address 1152 (sa_mode, 1153 plus_constant (Pmode, buf_addr, 1154 2 * GET_MODE_SIZE (Pmode)))); 1155 1156 emit_stack_save (SAVE_NONLOCAL, &stack_save); 1157 } 1158 1159 /* Expand a call to __builtin_prefetch. For a target that does not support 1160 data prefetch, evaluate the memory address argument in case it has side 1161 effects. */ 1162 1163 static void 1164 expand_builtin_prefetch (tree exp) 1165 { 1166 tree arg0, arg1, arg2; 1167 int nargs; 1168 rtx op0, op1, op2; 1169 1170 if (!validate_arglist (exp, POINTER_TYPE, 0)) 1171 return; 1172 1173 arg0 = CALL_EXPR_ARG (exp, 0); 1174 1175 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to 1176 zero (read) and argument 2 (locality) defaults to 3 (high degree of 1177 locality). 
*/ 1178 nargs = call_expr_nargs (exp); 1179 if (nargs > 1) 1180 arg1 = CALL_EXPR_ARG (exp, 1); 1181 else 1182 arg1 = integer_zero_node; 1183 if (nargs > 2) 1184 arg2 = CALL_EXPR_ARG (exp, 2); 1185 else 1186 arg2 = integer_three_node; 1187 1188 /* Argument 0 is an address. */ 1189 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL); 1190 1191 /* Argument 1 (read/write flag) must be a compile-time constant int. */ 1192 if (TREE_CODE (arg1) != INTEGER_CST) 1193 { 1194 error ("second argument to %<__builtin_prefetch%> must be a constant"); 1195 arg1 = integer_zero_node; 1196 } 1197 op1 = expand_normal (arg1); 1198 /* Argument 1 must be either zero or one. */ 1199 if (INTVAL (op1) != 0 && INTVAL (op1) != 1) 1200 { 1201 warning (0, "invalid second argument to %<__builtin_prefetch%>;" 1202 " using zero"); 1203 op1 = const0_rtx; 1204 } 1205 1206 /* Argument 2 (locality) must be a compile-time constant int. */ 1207 if (TREE_CODE (arg2) != INTEGER_CST) 1208 { 1209 error ("third argument to %<__builtin_prefetch%> must be a constant"); 1210 arg2 = integer_zero_node; 1211 } 1212 op2 = expand_normal (arg2); 1213 /* Argument 2 must be 0, 1, 2, or 3. */ 1214 if (INTVAL (op2) < 0 || INTVAL (op2) > 3) 1215 { 1216 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero"); 1217 op2 = const0_rtx; 1218 } 1219 1220 #ifdef HAVE_prefetch 1221 if (HAVE_prefetch) 1222 { 1223 struct expand_operand ops[3]; 1224 1225 create_address_operand (&ops[0], op0); 1226 create_integer_operand (&ops[1], INTVAL (op1)); 1227 create_integer_operand (&ops[2], INTVAL (op2)); 1228 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops)) 1229 return; 1230 } 1231 #endif 1232 1233 /* Don't do anything with direct references to volatile memory, but 1234 generate code to handle other side effects. */ 1235 if (!MEM_P (op0) && side_effects_p (op0)) 1236 emit_insn (op0); 1237 } 1238 1239 /* Get a MEM rtx for expression EXP which is the address of an operand 1240 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is 1241 the maximum length of the block of memory that might be accessed or 1242 NULL if unknown. */ 1243 1244 static rtx 1245 get_memory_rtx (tree exp, tree len) 1246 { 1247 tree orig_exp = exp; 1248 rtx addr, mem; 1249 1250 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived 1251 from its expression, for expr->a.b only <variable>.a.b is recorded. */ 1252 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp)) 1253 exp = TREE_OPERAND (exp, 0); 1254 1255 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL); 1256 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr)); 1257 1258 /* Get an expression we can use to find the attributes to assign to MEM. 1259 First remove any nops. */ 1260 while (CONVERT_EXPR_P (exp) 1261 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))) 1262 exp = TREE_OPERAND (exp, 0); 1263 1264 /* Build a MEM_REF representing the whole accessed area as a byte blob, 1265 (as builtin stringops may alias with anything). */ 1266 exp = fold_build2 (MEM_REF, 1267 build_array_type (char_type_node, 1268 build_range_type (sizetype, 1269 size_one_node, len)), 1270 exp, build_int_cst (ptr_type_node, 0)); 1271 1272 /* If the MEM_REF has no acceptable address, try to get the base object 1273 from the original address we got, and build an all-aliasing 1274 unknown-sized access to that one. 
*/ 1275 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0))) 1276 set_mem_attributes (mem, exp, 0); 1277 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR 1278 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0), 1279 0)))) 1280 { 1281 exp = build_fold_addr_expr (exp); 1282 exp = fold_build2 (MEM_REF, 1283 build_array_type (char_type_node, 1284 build_range_type (sizetype, 1285 size_zero_node, 1286 NULL)), 1287 exp, build_int_cst (ptr_type_node, 0)); 1288 set_mem_attributes (mem, exp, 0); 1289 } 1290 set_mem_alias_set (mem, 0); 1291 return mem; 1292 } 1293 1294 /* Built-in functions to perform an untyped call and return. */ 1295 1296 #define apply_args_mode \ 1297 (this_target_builtins->x_apply_args_mode) 1298 #define apply_result_mode \ 1299 (this_target_builtins->x_apply_result_mode) 1300 1301 /* Return the size required for the block returned by __builtin_apply_args, 1302 and initialize apply_args_mode. */ 1303 1304 static int 1305 apply_args_size (void) 1306 { 1307 static int size = -1; 1308 int align; 1309 unsigned int regno; 1310 enum machine_mode mode; 1311 1312 /* The values computed by this function never change. */ 1313 if (size < 0) 1314 { 1315 /* The first value is the incoming arg-pointer. */ 1316 size = GET_MODE_SIZE (Pmode); 1317 1318 /* The second value is the structure value address unless this is 1319 passed as an "invisible" first argument. */ 1320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) 1321 size += GET_MODE_SIZE (Pmode); 1322 1323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 1324 if (FUNCTION_ARG_REGNO_P (regno)) 1325 { 1326 mode = targetm.calls.get_raw_arg_mode (regno); 1327 1328 gcc_assert (mode != VOIDmode); 1329 1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; 1331 if (size % align != 0) 1332 size = CEIL (size, align) * align; 1333 size += GET_MODE_SIZE (mode); 1334 apply_args_mode[regno] = mode; 1335 } 1336 else 1337 { 1338 apply_args_mode[regno] = VOIDmode; 1339 } 1340 } 1341 return size; 1342 } 1343 1344 /* Return the size required for the block returned by __builtin_apply, 1345 and initialize apply_result_mode. */ 1346 1347 static int 1348 apply_result_size (void) 1349 { 1350 static int size = -1; 1351 int align, regno; 1352 enum machine_mode mode; 1353 1354 /* The values computed by this function never change. */ 1355 if (size < 0) 1356 { 1357 size = 0; 1358 1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 1360 if (targetm.calls.function_value_regno_p (regno)) 1361 { 1362 mode = targetm.calls.get_raw_result_mode (regno); 1363 1364 gcc_assert (mode != VOIDmode); 1365 1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; 1367 if (size % align != 0) 1368 size = CEIL (size, align) * align; 1369 size += GET_MODE_SIZE (mode); 1370 apply_result_mode[regno] = mode; 1371 } 1372 else 1373 apply_result_mode[regno] = VOIDmode; 1374 1375 /* Allow targets that use untyped_call and untyped_return to override 1376 the size so that machine-specific information can be stored here. */ 1377 #ifdef APPLY_RESULT_SIZE 1378 size = APPLY_RESULT_SIZE; 1379 #endif 1380 } 1381 return size; 1382 } 1383 1384 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) 1385 /* Create a vector describing the result block RESULT. If SAVEP is true, 1386 the result block is used to save the values; otherwise it is used to 1387 restore the values. 
*/ 1388 1389 static rtx 1390 result_vector (int savep, rtx result) 1391 { 1392 int regno, size, align, nelts; 1393 enum machine_mode mode; 1394 rtx reg, mem; 1395 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER); 1396 1397 size = nelts = 0; 1398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 1399 if ((mode = apply_result_mode[regno]) != VOIDmode) 1400 { 1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; 1402 if (size % align != 0) 1403 size = CEIL (size, align) * align; 1404 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); 1405 mem = adjust_address (result, mode, size); 1406 savevec[nelts++] = (savep 1407 ? gen_rtx_SET (VOIDmode, mem, reg) 1408 : gen_rtx_SET (VOIDmode, reg, mem)); 1409 size += GET_MODE_SIZE (mode); 1410 } 1411 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); 1412 } 1413 #endif /* HAVE_untyped_call or HAVE_untyped_return */ 1414 1415 /* Save the state required to perform an untyped call with the same 1416 arguments as were passed to the current function. */ 1417 1418 static rtx 1419 expand_builtin_apply_args_1 (void) 1420 { 1421 rtx registers, tem; 1422 int size, align, regno; 1423 enum machine_mode mode; 1424 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1); 1425 1426 /* Create a block where the arg-pointer, structure value address, 1427 and argument registers can be saved. */ 1428 registers = assign_stack_local (BLKmode, apply_args_size (), -1); 1429 1430 /* Walk past the arg-pointer and structure value address. */ 1431 size = GET_MODE_SIZE (Pmode); 1432 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) 1433 size += GET_MODE_SIZE (Pmode); 1434 1435 /* Save each register used in calling a function to the block. */ 1436 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 1437 if ((mode = apply_args_mode[regno]) != VOIDmode) 1438 { 1439 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; 1440 if (size % align != 0) 1441 size = CEIL (size, align) * align; 1442 1443 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)); 1444 1445 emit_move_insn (adjust_address (registers, mode, size), tem); 1446 size += GET_MODE_SIZE (mode); 1447 } 1448 1449 /* Save the arg pointer to the block. */ 1450 tem = copy_to_reg (crtl->args.internal_arg_pointer); 1451 #ifdef STACK_GROWS_DOWNWARD 1452 /* We need the pointer as the caller actually passed them to us, not 1453 as we might have pretended they were passed. Make sure it's a valid 1454 operand, as emit_move_insn isn't expected to handle a PLUS. */ 1455 tem 1456 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size), 1457 NULL_RTX); 1458 #endif 1459 emit_move_insn (adjust_address (registers, Pmode, 0), tem); 1460 1461 size = GET_MODE_SIZE (Pmode); 1462 1463 /* Save the structure value address unless this is passed as an 1464 "invisible" first argument. */ 1465 if (struct_incoming_value) 1466 { 1467 emit_move_insn (adjust_address (registers, Pmode, size), 1468 copy_to_reg (struct_incoming_value)); 1469 size += GET_MODE_SIZE (Pmode); 1470 } 1471 1472 /* Return the address of the block. */ 1473 return copy_addr_to_reg (XEXP (registers, 0)); 1474 } 1475 1476 /* __builtin_apply_args returns block of memory allocated on 1477 the stack into which is stored the arg pointer, structure 1478 value address, static chain, and all the registers that might 1479 possibly be used in performing a function call. The code is 1480 moved to the start of the function so the incoming values are 1481 saved. 
*/

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}

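/* For reference, a minimal (hypothetical) use of the three builtins
   expanded above; TARGET_FN and the argument-block size 64 are made up
   for illustration:

     double
     forwarder (double x)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply_args snapshots the incoming registers,
   expand_builtin_apply replays them for the untyped call, and
   expand_builtin_return reloads the saved return registers.  */
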
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}

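/* Illustrative only: with the classification above,
   __builtin_classify_type (1.0) evaluates to real_type_class and
   __builtin_classify_type (&x) to pointer_type_class, while a call
   with no arguments yields no_type_class.  */
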
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F; \
  fcodel = BUILT_IN_MATHFN##L; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R; \
  fcodel = BUILT_IN_MATHFN##L_R; break;

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1 (), but always use the implicit builtin
   declarations.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

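/* Illustrative only: mathfn_built_in (float_type_node, BUILT_IN_SIN)
   maps to BUILT_IN_SINF and returns its decl if the implicit
   declaration is available; for a type whose main variant is none of
   double, float or long double it returns NULL_TREE.  */
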
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL_RTX, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}

/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

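/* Schematic view of the errno handling above for sqrt with -fmath-errno
   in effect and an argument not known to be nonnegative:

     result = [sqrt insn] (op0)
     if (result == result) goto lab    ... ordered compare, not a NaN
     sqrt (op0)                        ... library call sets errno to EDOM
   lab:

   With -fno-math-errno, or a provably nonnegative argument, only the
   bare insn is emitted.  */
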
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

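/* Illustrative only: on a target whose fma_optab has a DFmode handler,
   the expansion above turns

     double r = __builtin_fma (a, b, c);

   into a single fused multiply-add insn; when the optab is missing the
   caller emits a normal call to fma instead.  */
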
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if the sincos insn is available, otherwise fall back
     to the sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}

/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and produce an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}

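/* Illustrative only: __builtin_isinf (x) reaches
   expand_builtin_interclass_mathfn; given an isinf_optab handler for
   the mode of X it becomes one insn yielding the integer result, and
   otherwise NULL_RTX is returned and the caller emits a normal
   library call.  */
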
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}

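/* Illustrative only: with a sincos_optab handler for the argument's
   mode,

     sincos (x, &s, &c);

   expands to a single two-output insn feeding the two stores, rather
   than separate sin and cos calls.  */
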
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}

/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets lacking full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}

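/* Illustrative only: __builtin_lfloor (x) first tries lfloor_optab;
   failing that, the code above rewrites it as roughly

     (long) floor (x)

   via a floor call plus expand_fix, manufacturing a "floor" decl by
   hand on targets without the C99 fallback.  */
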
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0, since for
         non-C99 targets (int) round (x) should never be transformed into
         BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least
         some C99 functions.  This should result in the best user
         experience for targets lacking full C99 support.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}

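/* Illustrative only: __builtin_powi (x, n) is always emitted here as a
   libgcc call through the powi_optab libfunc (e.g. __powidf2 for
   double); small constant exponents are typically expanded to multiply
   sequences by earlier tree-level passes rather than here.  */
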
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}

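/* Illustrative only: for DATA pointing at "abcdef", OFFSET 2 and a
   4-byte MODE, the callback above returns the constant that c_readstr
   builds from the bytes "cdef" in target byte order, which
   store_by_pieces can then store with one word-sized move.  */
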
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC is not a pointer type, don't do this
         operation in-line.  */
      if (src_align == 0)
        return NULL_RTX;

      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
                                &expected_align, &expected_size);

      if (expected_align < dest_align)
        expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                         CALL_EXPR_TAILCALL (exp)
                                         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                         expected_align, expected_size);

      if (dest_addr == 0)
        {
          dest_addr = force_operand (XEXP (dest_mem, 0), target);
          dest_addr = convert_memory_address (ptr_mode, dest_addr);
        }
      return dest_addr;
    }
}

/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1);
    }
}

/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}

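/* Illustrative only: when the result of mempcpy (d, s, n) is unused the
   helper above degrades it to plain memcpy; with a small constant N it
   can instead expand via store_by_pieces or move_by_pieces and hand
   back the RTL for D + N directly, with no library call.  */
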
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          enum machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !host_integerp (len, 1)
              || !can_store_by_pieces (tree_low_cst (len, 1),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_low_cst (len, 1),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}

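/* Illustrative only: strncpy (buf, "ab", 8) passes the constant len and
   src checks above, so where store_by_pieces is usable it stores the
   two characters plus six trailing NULs directly,
   builtin_strncpy_read_str supplying const0_rtx past the string's
   end.  */
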
*/ 3451 3452 rtx 3453 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, 3454 enum machine_mode mode) 3455 { 3456 const char *c = (const char *) data; 3457 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode)); 3458 3459 memset (p, *c, GET_MODE_SIZE (mode)); 3460 3461 return c_readstr (p, mode); 3462 } 3463 3464 /* Callback routine for store_by_pieces. Return the RTL of a register 3465 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned 3466 char value given in the RTL register data. For example, if mode is 3467 4 bytes wide, return the RTL for 0x01010101*data. */ 3468 3469 static rtx 3470 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, 3471 enum machine_mode mode) 3472 { 3473 rtx target, coeff; 3474 size_t size; 3475 char *p; 3476 3477 size = GET_MODE_SIZE (mode); 3478 if (size == 1) 3479 return (rtx) data; 3480 3481 p = XALLOCAVEC (char, size); 3482 memset (p, 1, size); 3483 coeff = c_readstr (p, mode); 3484 3485 target = convert_to_mode (mode, (rtx) data, 1); 3486 target = expand_mult (mode, target, coeff, NULL_RTX, 1); 3487 return force_reg (mode, target); 3488 } 3489 3490 /* Expand expression EXP, which is a call to the memset builtin. Return 3491 NULL_RTX if we failed the caller should emit a normal call, otherwise 3492 try to get the result in TARGET, if convenient (and in mode MODE if that's 3493 convenient). */ 3494 3495 static rtx 3496 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode) 3497 { 3498 if (!validate_arglist (exp, 3499 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)) 3500 return NULL_RTX; 3501 else 3502 { 3503 tree dest = CALL_EXPR_ARG (exp, 0); 3504 tree val = CALL_EXPR_ARG (exp, 1); 3505 tree len = CALL_EXPR_ARG (exp, 2); 3506 return expand_builtin_memset_args (dest, val, len, target, mode, exp); 3507 } 3508 } 3509 3510 /* Helper function to do the actual work for expand_builtin_memset. The 3511 arguments to the builtin_memset call DEST, VAL, and LEN are broken out 3512 so that this can also be called without constructing an actual CALL_EXPR. 3513 The other arguments and return value are the same as for 3514 expand_builtin_memset. */ 3515 3516 static rtx 3517 expand_builtin_memset_args (tree dest, tree val, tree len, 3518 rtx target, enum machine_mode mode, tree orig_exp) 3519 { 3520 tree fndecl, fn; 3521 enum built_in_function fcode; 3522 enum machine_mode val_mode; 3523 char c; 3524 unsigned int dest_align; 3525 rtx dest_mem, dest_addr, len_rtx; 3526 HOST_WIDE_INT expected_size = -1; 3527 unsigned int expected_align = 0; 3528 3529 dest_align = get_pointer_alignment (dest); 3530 3531 /* If DEST is not a pointer type, don't do this operation in-line. */ 3532 if (dest_align == 0) 3533 return NULL_RTX; 3534 3535 if (currently_expanding_gimple_stmt) 3536 stringop_block_profile (currently_expanding_gimple_stmt, 3537 &expected_align, &expected_size); 3538 3539 if (expected_align < dest_align) 3540 expected_align = dest_align; 3541 3542 /* If the LEN parameter is zero, return DEST. */ 3543 if (integer_zerop (len)) 3544 { 3545 /* Evaluate and ignore VAL in case it has side-effects. */ 3546 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL); 3547 return expand_expr (dest, target, mode, EXPAND_NORMAL); 3548 } 3549 3550 /* Stabilize the arguments in case we fail. 
*/ 3551 dest = builtin_save_expr (dest); 3552 val = builtin_save_expr (val); 3553 len = builtin_save_expr (len); 3554 3555 len_rtx = expand_normal (len); 3556 dest_mem = get_memory_rtx (dest, len); 3557 val_mode = TYPE_MODE (unsigned_char_type_node); 3558 3559 if (TREE_CODE (val) != INTEGER_CST) 3560 { 3561 rtx val_rtx; 3562 3563 val_rtx = expand_normal (val); 3564 val_rtx = convert_to_mode (val_mode, val_rtx, 0); 3565 3566 /* Assume that we can memset by pieces if we can store 3567 * the coefficients by pieces (in the required modes). 3568 * We can't pass builtin_memset_gen_str as that emits RTL. */ 3569 c = 1; 3570 if (host_integerp (len, 1) 3571 && can_store_by_pieces (tree_low_cst (len, 1), 3572 builtin_memset_read_str, &c, dest_align, 3573 true)) 3574 { 3575 val_rtx = force_reg (val_mode, val_rtx); 3576 store_by_pieces (dest_mem, tree_low_cst (len, 1), 3577 builtin_memset_gen_str, val_rtx, dest_align, 3578 true, 0); 3579 } 3580 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx, 3581 dest_align, expected_align, 3582 expected_size)) 3583 goto do_libcall; 3584 3585 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); 3586 dest_mem = convert_memory_address (ptr_mode, dest_mem); 3587 return dest_mem; 3588 } 3589 3590 if (target_char_cast (val, &c)) 3591 goto do_libcall; 3592 3593 if (c) 3594 { 3595 if (host_integerp (len, 1) 3596 && can_store_by_pieces (tree_low_cst (len, 1), 3597 builtin_memset_read_str, &c, dest_align, 3598 true)) 3599 store_by_pieces (dest_mem, tree_low_cst (len, 1), 3600 builtin_memset_read_str, &c, dest_align, true, 0); 3601 else if (!set_storage_via_setmem (dest_mem, len_rtx, 3602 gen_int_mode (c, val_mode), 3603 dest_align, expected_align, 3604 expected_size)) 3605 goto do_libcall; 3606 3607 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); 3608 dest_mem = convert_memory_address (ptr_mode, dest_mem); 3609 return dest_mem; 3610 } 3611 3612 set_mem_align (dest_mem, dest_align); 3613 dest_addr = clear_storage_hints (dest_mem, len_rtx, 3614 CALL_EXPR_TAILCALL (orig_exp) 3615 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL, 3616 expected_align, expected_size); 3617 3618 if (dest_addr == 0) 3619 { 3620 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); 3621 dest_addr = convert_memory_address (ptr_mode, dest_addr); 3622 } 3623 3624 return dest_addr; 3625 3626 do_libcall: 3627 fndecl = get_callee_fndecl (orig_exp); 3628 fcode = DECL_FUNCTION_CODE (fndecl); 3629 if (fcode == BUILT_IN_MEMSET) 3630 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3, 3631 dest, val, len); 3632 else if (fcode == BUILT_IN_BZERO) 3633 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2, 3634 dest, len); 3635 else 3636 gcc_unreachable (); 3637 gcc_assert (TREE_CODE (fn) == CALL_EXPR); 3638 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp); 3639 return expand_call (fn, target, target == const0_rtx); 3640 } 3641 3642 /* Expand expression EXP, which is a call to the bzero builtin. Return 3643 NULL_RTX if we failed the caller should emit a normal call. */ 3644 3645 static rtx 3646 expand_builtin_bzero (tree exp) 3647 { 3648 tree dest, size; 3649 location_t loc = EXPR_LOCATION (exp); 3650 3651 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 3652 return NULL_RTX; 3653 3654 dest = CALL_EXPR_ARG (exp, 0); 3655 size = CALL_EXPR_ARG (exp, 1); 3656 3657 /* New argument list transforming bzero(ptr x, int y) to 3658 memset(ptr x, int 0, size_t y). 
This is done this way 3659 so that if it isn't expanded inline, we fallback to 3660 calling bzero instead of memset. */ 3661 3662 return expand_builtin_memset_args (dest, integer_zero_node, 3663 fold_convert_loc (loc, 3664 size_type_node, size), 3665 const0_rtx, VOIDmode, exp); 3666 } 3667 3668 /* Expand expression EXP, which is a call to the memcmp built-in function. 3669 Return NULL_RTX if we failed and the caller should emit a normal call, 3670 otherwise try to get the result in TARGET, if convenient (and in mode 3671 MODE, if that's convenient). */ 3672 3673 static rtx 3674 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target, 3675 ATTRIBUTE_UNUSED enum machine_mode mode) 3676 { 3677 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp); 3678 3679 if (!validate_arglist (exp, 3680 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 3681 return NULL_RTX; 3682 3683 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for 3684 implementing memcmp because it will stop if it encounters two 3685 zero bytes. */ 3686 #if defined HAVE_cmpmemsi 3687 { 3688 rtx arg1_rtx, arg2_rtx, arg3_rtx; 3689 rtx result; 3690 rtx insn; 3691 tree arg1 = CALL_EXPR_ARG (exp, 0); 3692 tree arg2 = CALL_EXPR_ARG (exp, 1); 3693 tree len = CALL_EXPR_ARG (exp, 2); 3694 3695 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT; 3696 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT; 3697 enum machine_mode insn_mode; 3698 3699 if (HAVE_cmpmemsi) 3700 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode; 3701 else 3702 return NULL_RTX; 3703 3704 /* If we don't have POINTER_TYPE, call the function. */ 3705 if (arg1_align == 0 || arg2_align == 0) 3706 return NULL_RTX; 3707 3708 /* Make a place to write the result of the instruction. */ 3709 result = target; 3710 if (! (result != 0 3711 && REG_P (result) && GET_MODE (result) == insn_mode 3712 && REGNO (result) >= FIRST_PSEUDO_REGISTER)) 3713 result = gen_reg_rtx (insn_mode); 3714 3715 arg1_rtx = get_memory_rtx (arg1, len); 3716 arg2_rtx = get_memory_rtx (arg2, len); 3717 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len)); 3718 3719 /* Set MEM_SIZE as appropriate. */ 3720 if (CONST_INT_P (arg3_rtx)) 3721 { 3722 set_mem_size (arg1_rtx, INTVAL (arg3_rtx)); 3723 set_mem_size (arg2_rtx, INTVAL (arg3_rtx)); 3724 } 3725 3726 if (HAVE_cmpmemsi) 3727 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx, 3728 GEN_INT (MIN (arg1_align, arg2_align))); 3729 else 3730 gcc_unreachable (); 3731 3732 if (insn) 3733 emit_insn (insn); 3734 else 3735 emit_library_call_value (memcmp_libfunc, result, LCT_PURE, 3736 TYPE_MODE (integer_type_node), 3, 3737 XEXP (arg1_rtx, 0), Pmode, 3738 XEXP (arg2_rtx, 0), Pmode, 3739 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx, 3740 TYPE_UNSIGNED (sizetype)), 3741 TYPE_MODE (sizetype)); 3742 3743 /* Return the value in the proper mode for this function. */ 3744 mode = TYPE_MODE (TREE_TYPE (exp)); 3745 if (GET_MODE (result) == mode) 3746 return result; 3747 else if (target != 0) 3748 { 3749 convert_move (target, result, 0); 3750 return target; 3751 } 3752 else 3753 return convert_to_mode (mode, result, 0); 3754 } 3755 #endif /* HAVE_cmpmemsi. */ 3756 3757 return NULL_RTX; 3758 } 3759 3760 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX 3761 if we failed the caller should emit a normal call, otherwise try to get 3762 the result in TARGET, if convenient. 
*/ 3763 3764 static rtx 3765 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target) 3766 { 3767 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) 3768 return NULL_RTX; 3769 3770 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi 3771 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing 3772 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing) 3773 { 3774 rtx arg1_rtx, arg2_rtx; 3775 rtx result, insn = NULL_RTX; 3776 tree fndecl, fn; 3777 tree arg1 = CALL_EXPR_ARG (exp, 0); 3778 tree arg2 = CALL_EXPR_ARG (exp, 1); 3779 3780 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT; 3781 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT; 3782 3783 /* If we don't have POINTER_TYPE, call the function. */ 3784 if (arg1_align == 0 || arg2_align == 0) 3785 return NULL_RTX; 3786 3787 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */ 3788 arg1 = builtin_save_expr (arg1); 3789 arg2 = builtin_save_expr (arg2); 3790 3791 arg1_rtx = get_memory_rtx (arg1, NULL); 3792 arg2_rtx = get_memory_rtx (arg2, NULL); 3793 3794 #ifdef HAVE_cmpstrsi 3795 /* Try to call cmpstrsi. */ 3796 if (HAVE_cmpstrsi) 3797 { 3798 enum machine_mode insn_mode 3799 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode; 3800 3801 /* Make a place to write the result of the instruction. */ 3802 result = target; 3803 if (! (result != 0 3804 && REG_P (result) && GET_MODE (result) == insn_mode 3805 && REGNO (result) >= FIRST_PSEUDO_REGISTER)) 3806 result = gen_reg_rtx (insn_mode); 3807 3808 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, 3809 GEN_INT (MIN (arg1_align, arg2_align))); 3810 } 3811 #endif 3812 #ifdef HAVE_cmpstrnsi 3813 /* Try to determine at least one length and call cmpstrnsi. */ 3814 if (!insn && HAVE_cmpstrnsi) 3815 { 3816 tree len; 3817 rtx arg3_rtx; 3818 3819 enum machine_mode insn_mode 3820 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode; 3821 tree len1 = c_strlen (arg1, 1); 3822 tree len2 = c_strlen (arg2, 1); 3823 3824 if (len1) 3825 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1); 3826 if (len2) 3827 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2); 3828 3829 /* If we don't have a constant length for the first, use the length 3830 of the second, if we know it. We don't require a constant for 3831 this case; some cost analysis could be done if both are available 3832 but neither is constant. For now, assume they're equally cheap, 3833 unless one has side effects. If both strings have constant lengths, 3834 use the smaller. */ 3835 3836 if (!len1) 3837 len = len2; 3838 else if (!len2) 3839 len = len1; 3840 else if (TREE_SIDE_EFFECTS (len1)) 3841 len = len2; 3842 else if (TREE_SIDE_EFFECTS (len2)) 3843 len = len1; 3844 else if (TREE_CODE (len1) != INTEGER_CST) 3845 len = len2; 3846 else if (TREE_CODE (len2) != INTEGER_CST) 3847 len = len1; 3848 else if (tree_int_cst_lt (len1, len2)) 3849 len = len1; 3850 else 3851 len = len2; 3852 3853 /* If both arguments have side effects, we cannot optimize. */ 3854 if (!len || TREE_SIDE_EFFECTS (len)) 3855 goto do_libcall; 3856 3857 arg3_rtx = expand_normal (len); 3858 3859 /* Make a place to write the result of the instruction. */ 3860 result = target; 3861 if (! 
(result != 0 3862 && REG_P (result) && GET_MODE (result) == insn_mode 3863 && REGNO (result) >= FIRST_PSEUDO_REGISTER)) 3864 result = gen_reg_rtx (insn_mode); 3865 3866 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx, 3867 GEN_INT (MIN (arg1_align, arg2_align))); 3868 } 3869 #endif 3870 3871 if (insn) 3872 { 3873 enum machine_mode mode; 3874 emit_insn (insn); 3875 3876 /* Return the value in the proper mode for this function. */ 3877 mode = TYPE_MODE (TREE_TYPE (exp)); 3878 if (GET_MODE (result) == mode) 3879 return result; 3880 if (target == 0) 3881 return convert_to_mode (mode, result, 0); 3882 convert_move (target, result, 0); 3883 return target; 3884 } 3885 3886 /* Expand the library call ourselves using a stabilized argument 3887 list to avoid re-evaluating the function's arguments twice. */ 3888 #ifdef HAVE_cmpstrnsi 3889 do_libcall: 3890 #endif 3891 fndecl = get_callee_fndecl (exp); 3892 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2); 3893 gcc_assert (TREE_CODE (fn) == CALL_EXPR); 3894 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp); 3895 return expand_call (fn, target, target == const0_rtx); 3896 } 3897 #endif 3898 return NULL_RTX; 3899 } 3900 3901 /* Expand expression EXP, which is a call to the strncmp builtin. Return 3902 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get 3903 the result in TARGET, if convenient. */ 3904 3905 static rtx 3906 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target, 3907 ATTRIBUTE_UNUSED enum machine_mode mode) 3908 { 3909 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp); 3910 3911 if (!validate_arglist (exp, 3912 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 3913 return NULL_RTX; 3914 3915 /* If c_strlen can determine an expression for one of the string 3916 lengths, and it doesn't have side effects, then emit cmpstrnsi 3917 using length MIN(strlen(string)+1, arg3). */ 3918 #ifdef HAVE_cmpstrnsi 3919 if (HAVE_cmpstrnsi) 3920 { 3921 tree len, len1, len2; 3922 rtx arg1_rtx, arg2_rtx, arg3_rtx; 3923 rtx result, insn; 3924 tree fndecl, fn; 3925 tree arg1 = CALL_EXPR_ARG (exp, 0); 3926 tree arg2 = CALL_EXPR_ARG (exp, 1); 3927 tree arg3 = CALL_EXPR_ARG (exp, 2); 3928 3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT; 3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT; 3931 enum machine_mode insn_mode 3932 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode; 3933 3934 len1 = c_strlen (arg1, 1); 3935 len2 = c_strlen (arg2, 1); 3936 3937 if (len1) 3938 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1); 3939 if (len2) 3940 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2); 3941 3942 /* If we don't have a constant length for the first, use the length 3943 of the second, if we know it. We don't require a constant for 3944 this case; some cost analysis could be done if both are available 3945 but neither is constant. For now, assume they're equally cheap, 3946 unless one has side effects. If both strings have constant lengths, 3947 use the smaller. 
*/ 3948 3949 if (!len1) 3950 len = len2; 3951 else if (!len2) 3952 len = len1; 3953 else if (TREE_SIDE_EFFECTS (len1)) 3954 len = len2; 3955 else if (TREE_SIDE_EFFECTS (len2)) 3956 len = len1; 3957 else if (TREE_CODE (len1) != INTEGER_CST) 3958 len = len2; 3959 else if (TREE_CODE (len2) != INTEGER_CST) 3960 len = len1; 3961 else if (tree_int_cst_lt (len1, len2)) 3962 len = len1; 3963 else 3964 len = len2; 3965 3966 /* If both arguments have side effects, we cannot optimize. */ 3967 if (!len || TREE_SIDE_EFFECTS (len)) 3968 return NULL_RTX; 3969 3970 /* The actual new length parameter is MIN(len,arg3). */ 3971 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, 3972 fold_convert_loc (loc, TREE_TYPE (len), arg3)); 3973 3974 /* If we don't have POINTER_TYPE, call the function. */ 3975 if (arg1_align == 0 || arg2_align == 0) 3976 return NULL_RTX; 3977 3978 /* Make a place to write the result of the instruction. */ 3979 result = target; 3980 if (! (result != 0 3981 && REG_P (result) && GET_MODE (result) == insn_mode 3982 && REGNO (result) >= FIRST_PSEUDO_REGISTER)) 3983 result = gen_reg_rtx (insn_mode); 3984 3985 /* Stabilize the arguments in case gen_cmpstrnsi fails. */ 3986 arg1 = builtin_save_expr (arg1); 3987 arg2 = builtin_save_expr (arg2); 3988 len = builtin_save_expr (len); 3989 3990 arg1_rtx = get_memory_rtx (arg1, len); 3991 arg2_rtx = get_memory_rtx (arg2, len); 3992 arg3_rtx = expand_normal (len); 3993 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx, 3994 GEN_INT (MIN (arg1_align, arg2_align))); 3995 if (insn) 3996 { 3997 emit_insn (insn); 3998 3999 /* Return the value in the proper mode for this function. */ 4000 mode = TYPE_MODE (TREE_TYPE (exp)); 4001 if (GET_MODE (result) == mode) 4002 return result; 4003 if (target == 0) 4004 return convert_to_mode (mode, result, 0); 4005 convert_move (target, result, 0); 4006 return target; 4007 } 4008 4009 /* Expand the library call ourselves using a stabilized argument 4010 list to avoid re-evaluating the function's arguments twice. */ 4011 fndecl = get_callee_fndecl (exp); 4012 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3, 4013 arg1, arg2, len); 4014 gcc_assert (TREE_CODE (fn) == CALL_EXPR); 4015 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp); 4016 return expand_call (fn, target, target == const0_rtx); 4017 } 4018 #endif 4019 return NULL_RTX; 4020 } 4021 4022 /* Expand a call to __builtin_saveregs, generating the result in TARGET, 4023 if that's convenient. */ 4024 4025 rtx 4026 expand_builtin_saveregs (void) 4027 { 4028 rtx val, seq; 4029 4030 /* Don't do __builtin_saveregs more than once in a function. 4031 Save the result of the first call and reuse it. */ 4032 if (saveregs_value != 0) 4033 return saveregs_value; 4034 4035 /* When this function is called, it means that registers must be 4036 saved on entry to this function. So we migrate the call to the 4037 first insn of this function. */ 4038 4039 start_sequence (); 4040 4041 /* Do whatever the machine needs done in this case. */ 4042 val = targetm.calls.expand_builtin_saveregs (); 4043 4044 seq = get_insns (); 4045 end_sequence (); 4046 4047 saveregs_value = val; 4048 4049 /* Put the insns after the NOTE that starts the function. If this 4050 is inside a start_sequence, make the outer-level insn chain current, so 4051 the code is placed at the start of the function. 
*/ 4052 push_topmost_sequence (); 4053 emit_insn_after (seq, entry_of_function ()); 4054 pop_topmost_sequence (); 4055 4056 return val; 4057 } 4058 4059 /* Expand a call to __builtin_next_arg. */ 4060 4061 static rtx 4062 expand_builtin_next_arg (void) 4063 { 4064 /* Checking arguments is already done in fold_builtin_next_arg 4065 that must be called before this function. */ 4066 return expand_binop (ptr_mode, add_optab, 4067 crtl->args.internal_arg_pointer, 4068 crtl->args.arg_offset_rtx, 4069 NULL_RTX, 0, OPTAB_LIB_WIDEN); 4070 } 4071 4072 /* Make it easier for the backends by protecting the valist argument 4073 from multiple evaluations. */ 4074 4075 static tree 4076 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue) 4077 { 4078 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist)); 4079 4080 /* The current way of determining the type of valist is completely 4081 bogus. We should have the information on the va builtin instead. */ 4082 if (!vatype) 4083 vatype = targetm.fn_abi_va_list (cfun->decl); 4084 4085 if (TREE_CODE (vatype) == ARRAY_TYPE) 4086 { 4087 if (TREE_SIDE_EFFECTS (valist)) 4088 valist = save_expr (valist); 4089 4090 /* For this case, the backends will be expecting a pointer to 4091 vatype, but it's possible we've actually been given an array 4092 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)). 4093 So fix it. */ 4094 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE) 4095 { 4096 tree p1 = build_pointer_type (TREE_TYPE (vatype)); 4097 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1); 4098 } 4099 } 4100 else 4101 { 4102 tree pt = build_pointer_type (vatype); 4103 4104 if (! needs_lvalue) 4105 { 4106 if (! TREE_SIDE_EFFECTS (valist)) 4107 return valist; 4108 4109 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist); 4110 TREE_SIDE_EFFECTS (valist) = 1; 4111 } 4112 4113 if (TREE_SIDE_EFFECTS (valist)) 4114 valist = save_expr (valist); 4115 valist = fold_build2_loc (loc, MEM_REF, 4116 vatype, valist, build_int_cst (pt, 0)); 4117 } 4118 4119 return valist; 4120 } 4121 4122 /* The "standard" definition of va_list is void*. */ 4123 4124 tree 4125 std_build_builtin_va_list (void) 4126 { 4127 return ptr_type_node; 4128 } 4129 4130 /* The "standard" abi va_list is va_list_type_node. */ 4131 4132 tree 4133 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED) 4134 { 4135 return va_list_type_node; 4136 } 4137 4138 /* The "standard" type of va_list is va_list_type_node. */ 4139 4140 tree 4141 std_canonical_va_list_type (tree type) 4142 { 4143 tree wtype, htype; 4144 4145 if (INDIRECT_REF_P (type)) 4146 type = TREE_TYPE (type); 4147 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type))) 4148 type = TREE_TYPE (type); 4149 wtype = va_list_type_node; 4150 htype = type; 4151 /* Treat structure va_list types. */ 4152 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype)) 4153 htype = TREE_TYPE (htype); 4154 else if (TREE_CODE (wtype) == ARRAY_TYPE) 4155 { 4156 /* If va_list is an array type, the argument may have decayed 4157 to a pointer type, e.g. by being passed to another function. 4158 In that case, unwrap both types so that we can compare the 4159 underlying records. 
*/ 4160 if (TREE_CODE (htype) == ARRAY_TYPE 4161 || POINTER_TYPE_P (htype)) 4162 { 4163 wtype = TREE_TYPE (wtype); 4164 htype = TREE_TYPE (htype); 4165 } 4166 } 4167 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype)) 4168 return va_list_type_node; 4169 4170 return NULL_TREE; 4171 } 4172 4173 /* The "standard" implementation of va_start: just assign `nextarg' to 4174 the variable. */ 4175 4176 void 4177 std_expand_builtin_va_start (tree valist, rtx nextarg) 4178 { 4179 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE); 4180 convert_move (va_r, nextarg, 0); 4181 } 4182 4183 /* Expand EXP, a call to __builtin_va_start. */ 4184 4185 static rtx 4186 expand_builtin_va_start (tree exp) 4187 { 4188 rtx nextarg; 4189 tree valist; 4190 location_t loc = EXPR_LOCATION (exp); 4191 4192 if (call_expr_nargs (exp) < 2) 4193 { 4194 error_at (loc, "too few arguments to function %<va_start%>"); 4195 return const0_rtx; 4196 } 4197 4198 if (fold_builtin_next_arg (exp, true)) 4199 return const0_rtx; 4200 4201 nextarg = expand_builtin_next_arg (); 4202 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1); 4203 4204 if (targetm.expand_builtin_va_start) 4205 targetm.expand_builtin_va_start (valist, nextarg); 4206 else 4207 std_expand_builtin_va_start (valist, nextarg); 4208 4209 return const0_rtx; 4210 } 4211 4212 /* The "standard" implementation of va_arg: read the value from the 4213 current (padded) address and increment by the (padded) size. */ 4214 4215 tree 4216 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p, 4217 gimple_seq *post_p) 4218 { 4219 tree addr, t, type_size, rounded_size, valist_tmp; 4220 unsigned HOST_WIDE_INT align, boundary; 4221 bool indirect; 4222 4223 #ifdef ARGS_GROW_DOWNWARD 4224 /* All of the alignment and movement below is for args-grow-up machines. 4225 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all 4226 implement their own specialized gimplify_va_arg_expr routines. */ 4227 gcc_unreachable (); 4228 #endif 4229 4230 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false); 4231 if (indirect) 4232 type = build_pointer_type (type); 4233 4234 align = PARM_BOUNDARY / BITS_PER_UNIT; 4235 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type); 4236 4237 /* When we align parameter on stack for caller, if the parameter 4238 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be 4239 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee 4240 here with caller. */ 4241 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT) 4242 boundary = MAX_SUPPORTED_STACK_ALIGNMENT; 4243 4244 boundary /= BITS_PER_UNIT; 4245 4246 /* Hoist the valist value into a temporary for the moment. */ 4247 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL); 4248 4249 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually 4250 requires greater alignment, we must perform dynamic alignment. 
*/ 4251 if (boundary > align 4252 && !integer_zerop (TYPE_SIZE (type))) 4253 { 4254 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp, 4255 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1)); 4256 gimplify_and_add (t, pre_p); 4257 4258 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp, 4259 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist), 4260 valist_tmp, 4261 build_int_cst (TREE_TYPE (valist), -boundary))); 4262 gimplify_and_add (t, pre_p); 4263 } 4264 else 4265 boundary = align; 4266 4267 /* If the actual alignment is less than the alignment of the type, 4268 adjust the type accordingly so that we don't assume strict alignment 4269 when dereferencing the pointer. */ 4270 boundary *= BITS_PER_UNIT; 4271 if (boundary < TYPE_ALIGN (type)) 4272 { 4273 type = build_variant_type_copy (type); 4274 TYPE_ALIGN (type) = boundary; 4275 } 4276 4277 /* Compute the rounded size of the type. */ 4278 type_size = size_in_bytes (type); 4279 rounded_size = round_up (type_size, align); 4280 4281 /* Reduce rounded_size so it's sharable with the postqueue. */ 4282 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue); 4283 4284 /* Get AP. */ 4285 addr = valist_tmp; 4286 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size)) 4287 { 4288 /* Small args are padded downward. */ 4289 t = fold_build2_loc (input_location, GT_EXPR, sizetype, 4290 rounded_size, size_int (align)); 4291 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node, 4292 size_binop (MINUS_EXPR, rounded_size, type_size)); 4293 addr = fold_build_pointer_plus (addr, t); 4294 } 4295 4296 /* Compute new value for AP. */ 4297 t = fold_build_pointer_plus (valist_tmp, rounded_size); 4298 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t); 4299 gimplify_and_add (t, pre_p); 4300 4301 addr = fold_convert (build_pointer_type (type), addr); 4302 4303 if (indirect) 4304 addr = build_va_arg_indirect_ref (addr); 4305 4306 return build_va_arg_indirect_ref (addr); 4307 } 4308 4309 /* Build an indirect-ref expression over the given TREE, which represents a 4310 piece of a va_arg() expansion. */ 4311 tree 4312 build_va_arg_indirect_ref (tree addr) 4313 { 4314 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr); 4315 4316 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */ 4317 mf_mark (addr); 4318 4319 return addr; 4320 } 4321 4322 /* Return a dummy expression of type TYPE in order to keep going after an 4323 error. */ 4324 4325 static tree 4326 dummy_object (tree type) 4327 { 4328 tree t = build_int_cst (build_pointer_type (type), 0); 4329 return build2 (MEM_REF, type, t, t); 4330 } 4331 4332 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a 4333 builtin function, but a very special sort of operator. */ 4334 4335 enum gimplify_status 4336 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 4337 { 4338 tree promoted_type, have_va_type; 4339 tree valist = TREE_OPERAND (*expr_p, 0); 4340 tree type = TREE_TYPE (*expr_p); 4341 tree t; 4342 location_t loc = EXPR_LOCATION (*expr_p); 4343 4344 /* Verify that valist is of the proper type. */ 4345 have_va_type = TREE_TYPE (valist); 4346 if (have_va_type == error_mark_node) 4347 return GS_ERROR; 4348 have_va_type = targetm.canonical_va_list_type (have_va_type); 4349 4350 if (have_va_type == NULL_TREE) 4351 { 4352 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>"); 4353 return GS_ERROR; 4354 } 4355 4356 /* Generate a diagnostic for requesting data of a type that cannot 4357 be passed through `...' 
due to type promotion at the call site. */ 4358 if ((promoted_type = lang_hooks.types.type_promotes_to (type)) 4359 != type) 4360 { 4361 static bool gave_help; 4362 bool warned; 4363 4364 /* Unfortunately, this is merely undefined, rather than a constraint 4365 violation, so we cannot make this an error. If this call is never 4366 executed, the program is still strictly conforming. */ 4367 warned = warning_at (loc, 0, 4368 "%qT is promoted to %qT when passed through %<...%>", 4369 type, promoted_type); 4370 if (!gave_help && warned) 4371 { 4372 gave_help = true; 4373 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)", 4374 promoted_type, type); 4375 } 4376 4377 /* We can, however, treat "undefined" any way we please. 4378 Call abort to encourage the user to fix the program. */ 4379 if (warned) 4380 inform (loc, "if this code is reached, the program will abort"); 4381 /* Before the abort, allow the evaluation of the va_list 4382 expression to exit or longjmp. */ 4383 gimplify_and_add (valist, pre_p); 4384 t = build_call_expr_loc (loc, 4385 builtin_decl_implicit (BUILT_IN_TRAP), 0); 4386 gimplify_and_add (t, pre_p); 4387 4388 /* This is dead code, but go ahead and finish so that the 4389 mode of the result comes out right. */ 4390 *expr_p = dummy_object (type); 4391 return GS_ALL_DONE; 4392 } 4393 else 4394 { 4395 /* Make it easier for the backends by protecting the valist argument 4396 from multiple evaluations. */ 4397 if (TREE_CODE (have_va_type) == ARRAY_TYPE) 4398 { 4399 /* For this case, the backends will be expecting a pointer to 4400 TREE_TYPE (abi), but it's possible we've 4401 actually been given an array (an actual TARGET_FN_ABI_VA_LIST). 4402 So fix it. */ 4403 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE) 4404 { 4405 tree p1 = build_pointer_type (TREE_TYPE (have_va_type)); 4406 valist = fold_convert_loc (loc, p1, 4407 build_fold_addr_expr_loc (loc, valist)); 4408 } 4409 4410 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue); 4411 } 4412 else 4413 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue); 4414 4415 if (!targetm.gimplify_va_arg_expr) 4416 /* FIXME: Once most targets are converted we should merely 4417 assert this is non-null. */ 4418 return GS_ALL_DONE; 4419 4420 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p); 4421 return GS_OK; 4422 } 4423 } 4424 4425 /* Expand EXP, a call to __builtin_va_end. */ 4426 4427 static rtx 4428 expand_builtin_va_end (tree exp) 4429 { 4430 tree valist = CALL_EXPR_ARG (exp, 0); 4431 4432 /* Evaluate for side effects, if needed. I hate macros that don't 4433 do that. */ 4434 if (TREE_SIDE_EFFECTS (valist)) 4435 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL); 4436 4437 return const0_rtx; 4438 } 4439 4440 /* Expand EXP, a call to __builtin_va_copy. We do this as a 4441 builtin rather than just as an assignment in stdarg.h because of the 4442 nastiness of array-type va_list types. 
*/ 4443 4444 static rtx 4445 expand_builtin_va_copy (tree exp) 4446 { 4447 tree dst, src, t; 4448 location_t loc = EXPR_LOCATION (exp); 4449 4450 dst = CALL_EXPR_ARG (exp, 0); 4451 src = CALL_EXPR_ARG (exp, 1); 4452 4453 dst = stabilize_va_list_loc (loc, dst, 1); 4454 src = stabilize_va_list_loc (loc, src, 0); 4455 4456 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE); 4457 4458 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE) 4459 { 4460 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src); 4461 TREE_SIDE_EFFECTS (t) = 1; 4462 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 4463 } 4464 else 4465 { 4466 rtx dstb, srcb, size; 4467 4468 /* Evaluate to pointers. */ 4469 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL); 4470 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL); 4471 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)), 4472 NULL_RTX, VOIDmode, EXPAND_NORMAL); 4473 4474 dstb = convert_memory_address (Pmode, dstb); 4475 srcb = convert_memory_address (Pmode, srcb); 4476 4477 /* "Dereference" to BLKmode memories. */ 4478 dstb = gen_rtx_MEM (BLKmode, dstb); 4479 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst)))); 4480 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl))); 4481 srcb = gen_rtx_MEM (BLKmode, srcb); 4482 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src)))); 4483 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl))); 4484 4485 /* Copy. */ 4486 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL); 4487 } 4488 4489 return const0_rtx; 4490 } 4491 4492 /* Expand a call to one of the builtin functions __builtin_frame_address or 4493 __builtin_return_address. */ 4494 4495 static rtx 4496 expand_builtin_frame_address (tree fndecl, tree exp) 4497 { 4498 /* The argument must be a nonnegative integer constant. 4499 It counts the number of frames to scan up the stack. 4500 The value is the return address saved in that frame. */ 4501 if (call_expr_nargs (exp) == 0) 4502 /* Warning about missing arg was already issued. */ 4503 return const0_rtx; 4504 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1)) 4505 { 4506 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) 4507 error ("invalid argument to %<__builtin_frame_address%>"); 4508 else 4509 error ("invalid argument to %<__builtin_return_address%>"); 4510 return const0_rtx; 4511 } 4512 else 4513 { 4514 rtx tem 4515 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), 4516 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1)); 4517 4518 /* Some ports cannot access arbitrary stack frames. */ 4519 if (tem == NULL) 4520 { 4521 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) 4522 warning (0, "unsupported argument to %<__builtin_frame_address%>"); 4523 else 4524 warning (0, "unsupported argument to %<__builtin_return_address%>"); 4525 return const0_rtx; 4526 } 4527 4528 /* For __builtin_frame_address, return what we've got. */ 4529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) 4530 return tem; 4531 4532 if (!REG_P (tem) 4533 && ! CONSTANT_P (tem)) 4534 tem = copy_addr_to_reg (tem); 4535 return tem; 4536 } 4537 } 4538 4539 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we 4540 failed and the caller should emit a normal call. CANNOT_ACCUMULATE 4541 is the same as for allocate_dynamic_stack_space. 
*/ 4542 4543 static rtx 4544 expand_builtin_alloca (tree exp, bool cannot_accumulate) 4545 { 4546 rtx op0; 4547 rtx result; 4548 bool valid_arglist; 4549 unsigned int align; 4550 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp)) 4551 == BUILT_IN_ALLOCA_WITH_ALIGN); 4552 4553 /* Emit normal call if we use mudflap. */ 4554 if (flag_mudflap) 4555 return NULL_RTX; 4556 4557 valid_arglist 4558 = (alloca_with_align 4559 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE) 4560 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE)); 4561 4562 if (!valid_arglist) 4563 return NULL_RTX; 4564 4565 /* Compute the argument. */ 4566 op0 = expand_normal (CALL_EXPR_ARG (exp, 0)); 4567 4568 /* Compute the alignment. */ 4569 align = (alloca_with_align 4570 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)) 4571 : BIGGEST_ALIGNMENT); 4572 4573 /* Allocate the desired space. */ 4574 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate); 4575 result = convert_memory_address (ptr_mode, result); 4576 4577 return result; 4578 } 4579 4580 /* Expand a call to bswap builtin in EXP. 4581 Return NULL_RTX if a normal call should be emitted rather than expanding the 4582 function in-line. If convenient, the result should be placed in TARGET. 4583 SUBTARGET may be used as the target for computing one of EXP's operands. */ 4584 4585 static rtx 4586 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target, 4587 rtx subtarget) 4588 { 4589 tree arg; 4590 rtx op0; 4591 4592 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE)) 4593 return NULL_RTX; 4594 4595 arg = CALL_EXPR_ARG (exp, 0); 4596 op0 = expand_expr (arg, 4597 subtarget && GET_MODE (subtarget) == target_mode 4598 ? subtarget : NULL_RTX, 4599 target_mode, EXPAND_NORMAL); 4600 if (GET_MODE (op0) != target_mode) 4601 op0 = convert_to_mode (target_mode, op0, 1); 4602 4603 target = expand_unop (target_mode, bswap_optab, op0, target, 1); 4604 4605 gcc_assert (target); 4606 4607 return convert_to_mode (target_mode, target, 1); 4608 } 4609 4610 /* Expand a call to a unary builtin in EXP. 4611 Return NULL_RTX if a normal call should be emitted rather than expanding the 4612 function in-line. If convenient, the result should be placed in TARGET. 4613 SUBTARGET may be used as the target for computing one of EXP's operands. */ 4614 4615 static rtx 4616 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target, 4617 rtx subtarget, optab op_optab) 4618 { 4619 rtx op0; 4620 4621 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE)) 4622 return NULL_RTX; 4623 4624 /* Compute the argument. */ 4625 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), 4626 (subtarget 4627 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))) 4628 == GET_MODE (subtarget))) ? subtarget : NULL_RTX, 4629 VOIDmode, EXPAND_NORMAL); 4630 /* Compute op, into TARGET if possible. 4631 Set TARGET to wherever the result comes back. */ 4632 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))), 4633 op_optab, op0, target, op_optab != clrsb_optab); 4634 gcc_assert (target); 4635 4636 return convert_to_mode (target_mode, target, 0); 4637 } 4638 4639 /* Expand a call to __builtin_expect. We just return our argument 4640 as the builtin_expect semantic should've been already executed by 4641 tree branch prediction pass. 
*/ 4642 4643 static rtx 4644 expand_builtin_expect (tree exp, rtx target) 4645 { 4646 tree arg; 4647 4648 if (call_expr_nargs (exp) < 2) 4649 return const0_rtx; 4650 arg = CALL_EXPR_ARG (exp, 0); 4651 4652 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL); 4653 /* When guessing was done, the hints should be already stripped away. */ 4654 gcc_assert (!flag_guess_branch_prob 4655 || optimize == 0 || seen_error ()); 4656 return target; 4657 } 4658 4659 /* Expand a call to __builtin_assume_aligned. We just return our first 4660 argument as the builtin_assume_aligned semantic should've been already 4661 executed by CCP. */ 4662 4663 static rtx 4664 expand_builtin_assume_aligned (tree exp, rtx target) 4665 { 4666 if (call_expr_nargs (exp) < 2) 4667 return const0_rtx; 4668 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode, 4669 EXPAND_NORMAL); 4670 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1)) 4671 && (call_expr_nargs (exp) < 3 4672 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2)))); 4673 return target; 4674 } 4675 4676 void 4677 expand_builtin_trap (void) 4678 { 4679 #ifdef HAVE_trap 4680 if (HAVE_trap) 4681 { 4682 rtx insn = emit_insn (gen_trap ()); 4683 /* For trap insns when not accumulating outgoing args force 4684 REG_ARGS_SIZE note to prevent crossjumping of calls with 4685 different args sizes. */ 4686 if (!ACCUMULATE_OUTGOING_ARGS) 4687 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta)); 4688 } 4689 else 4690 #endif 4691 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0); 4692 emit_barrier (); 4693 } 4694 4695 /* Expand a call to __builtin_unreachable. We do nothing except emit 4696 a barrier saying that control flow will not pass here. 4697 4698 It is the responsibility of the program being compiled to ensure 4699 that control flow does never reach __builtin_unreachable. */ 4700 static void 4701 expand_builtin_unreachable (void) 4702 { 4703 emit_barrier (); 4704 } 4705 4706 /* Expand EXP, a call to fabs, fabsf or fabsl. 4707 Return NULL_RTX if a normal call should be emitted rather than expanding 4708 the function inline. If convenient, the result should be placed 4709 in TARGET. SUBTARGET may be used as the target for computing 4710 the operand. */ 4711 4712 static rtx 4713 expand_builtin_fabs (tree exp, rtx target, rtx subtarget) 4714 { 4715 enum machine_mode mode; 4716 tree arg; 4717 rtx op0; 4718 4719 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) 4720 return NULL_RTX; 4721 4722 arg = CALL_EXPR_ARG (exp, 0); 4723 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg); 4724 mode = TYPE_MODE (TREE_TYPE (arg)); 4725 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL); 4726 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1)); 4727 } 4728 4729 /* Expand EXP, a call to copysign, copysignf, or copysignl. 4730 Return NULL is a normal call should be emitted rather than expanding the 4731 function inline. If convenient, the result should be placed in TARGET. 4732 SUBTARGET may be used as the target for computing the operand. 
*/ 4733 4734 static rtx 4735 expand_builtin_copysign (tree exp, rtx target, rtx subtarget) 4736 { 4737 rtx op0, op1; 4738 tree arg; 4739 4740 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE)) 4741 return NULL_RTX; 4742 4743 arg = CALL_EXPR_ARG (exp, 0); 4744 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL); 4745 4746 arg = CALL_EXPR_ARG (exp, 1); 4747 op1 = expand_normal (arg); 4748 4749 return expand_copysign (op0, op1, target); 4750 } 4751 4752 /* Create a new constant string literal and return a char* pointer to it. 4753 The STRING_CST value is the LEN characters at STR. */ 4754 tree 4755 build_string_literal (int len, const char *str) 4756 { 4757 tree t, elem, index, type; 4758 4759 t = build_string (len, str); 4760 elem = build_type_variant (char_type_node, 1, 0); 4761 index = build_index_type (size_int (len - 1)); 4762 type = build_array_type (elem, index); 4763 TREE_TYPE (t) = type; 4764 TREE_CONSTANT (t) = 1; 4765 TREE_READONLY (t) = 1; 4766 TREE_STATIC (t) = 1; 4767 4768 type = build_pointer_type (elem); 4769 t = build1 (ADDR_EXPR, type, 4770 build4 (ARRAY_REF, elem, 4771 t, integer_zero_node, NULL_TREE, NULL_TREE)); 4772 return t; 4773 } 4774 4775 /* Expand a call to __builtin___clear_cache. */ 4776 4777 static rtx 4778 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED) 4779 { 4780 #ifndef HAVE_clear_cache 4781 #ifdef CLEAR_INSN_CACHE 4782 /* There is no "clear_cache" insn, and __clear_cache() in libgcc 4783 does something. Just do the default expansion to a call to 4784 __clear_cache(). */ 4785 return NULL_RTX; 4786 #else 4787 /* There is no "clear_cache" insn, and __clear_cache() in libgcc 4788 does nothing. There is no need to call it. Do nothing. */ 4789 return const0_rtx; 4790 #endif /* CLEAR_INSN_CACHE */ 4791 #else 4792 /* We have a "clear_cache" insn, and it will handle everything. */ 4793 tree begin, end; 4794 rtx begin_rtx, end_rtx; 4795 4796 /* We must not expand to a library call. If we did, any 4797 fallback library function in libgcc that might contain a call to 4798 __builtin___clear_cache() would recurse infinitely. */ 4799 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) 4800 { 4801 error ("both arguments to %<__builtin___clear_cache%> must be pointers"); 4802 return const0_rtx; 4803 } 4804 4805 if (HAVE_clear_cache) 4806 { 4807 struct expand_operand ops[2]; 4808 4809 begin = CALL_EXPR_ARG (exp, 0); 4810 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL); 4811 4812 end = CALL_EXPR_ARG (exp, 1); 4813 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL); 4814 4815 create_address_operand (&ops[0], begin_rtx); 4816 create_address_operand (&ops[1], end_rtx); 4817 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops)) 4818 return const0_rtx; 4819 } 4820 return const0_rtx; 4821 #endif /* HAVE_clear_cache */ 4822 } 4823 4824 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */ 4825 4826 static rtx 4827 round_trampoline_addr (rtx tramp) 4828 { 4829 rtx temp, addend, mask; 4830 4831 /* If we don't need too much alignment, we'll have been guaranteed 4832 proper alignment by get_trampoline_type. */ 4833 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY) 4834 return tramp; 4835 4836 /* Round address up to desired boundary. 
*/ 4837 temp = gen_reg_rtx (Pmode); 4838 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1); 4839 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT); 4840 4841 temp = expand_simple_binop (Pmode, PLUS, tramp, addend, 4842 temp, 0, OPTAB_LIB_WIDEN); 4843 tramp = expand_simple_binop (Pmode, AND, temp, mask, 4844 temp, 0, OPTAB_LIB_WIDEN); 4845 4846 return tramp; 4847 } 4848 4849 static rtx 4850 expand_builtin_init_trampoline (tree exp, bool onstack) 4851 { 4852 tree t_tramp, t_func, t_chain; 4853 rtx m_tramp, r_tramp, r_chain, tmp; 4854 4855 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, 4856 POINTER_TYPE, VOID_TYPE)) 4857 return NULL_RTX; 4858 4859 t_tramp = CALL_EXPR_ARG (exp, 0); 4860 t_func = CALL_EXPR_ARG (exp, 1); 4861 t_chain = CALL_EXPR_ARG (exp, 2); 4862 4863 r_tramp = expand_normal (t_tramp); 4864 m_tramp = gen_rtx_MEM (BLKmode, r_tramp); 4865 MEM_NOTRAP_P (m_tramp) = 1; 4866 4867 /* If ONSTACK, the TRAMP argument should be the address of a field 4868 within the local function's FRAME decl. Either way, let's see if 4869 we can fill in the MEM_ATTRs for this memory. */ 4870 if (TREE_CODE (t_tramp) == ADDR_EXPR) 4871 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true); 4872 4873 /* Creator of a heap trampoline is responsible for making sure the 4874 address is aligned to at least STACK_BOUNDARY. Normally malloc 4875 will ensure this anyhow. */ 4876 tmp = round_trampoline_addr (r_tramp); 4877 if (tmp != r_tramp) 4878 { 4879 m_tramp = change_address (m_tramp, BLKmode, tmp); 4880 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT); 4881 set_mem_size (m_tramp, TRAMPOLINE_SIZE); 4882 } 4883 4884 /* The FUNC argument should be the address of the nested function. 4885 Extract the actual function decl to pass to the hook. */ 4886 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR); 4887 t_func = TREE_OPERAND (t_func, 0); 4888 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL); 4889 4890 r_chain = expand_normal (t_chain); 4891 4892 /* Generate insns to initialize the trampoline. */ 4893 targetm.calls.trampoline_init (m_tramp, t_func, r_chain); 4894 4895 if (onstack) 4896 { 4897 trampolines_created = 1; 4898 4899 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines, 4900 "trampoline generated for nested function %qD", t_func); 4901 } 4902 4903 return const0_rtx; 4904 } 4905 4906 static rtx 4907 expand_builtin_adjust_trampoline (tree exp) 4908 { 4909 rtx tramp; 4910 4911 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 4912 return NULL_RTX; 4913 4914 tramp = expand_normal (CALL_EXPR_ARG (exp, 0)); 4915 tramp = round_trampoline_addr (tramp); 4916 if (targetm.calls.trampoline_adjust_address) 4917 tramp = targetm.calls.trampoline_adjust_address (tramp); 4918 4919 return tramp; 4920 } 4921 4922 /* Expand the call EXP to the built-in signbit, signbitf or signbitl 4923 function. The function first checks whether the back end provides 4924 an insn to implement signbit for the respective mode. If not, it 4925 checks whether the floating point format of the value is such that 4926 the sign bit can be extracted. If that is not the case, the 4927 function returns NULL_RTX to indicate that a normal call should be 4928 emitted rather than expanding the function in-line. EXP is the 4929 expression that is a call to the builtin function; if convenient, 4930 the result should be placed in TARGET. 
*/ 4931 static rtx 4932 expand_builtin_signbit (tree exp, rtx target) 4933 { 4934 const struct real_format *fmt; 4935 enum machine_mode fmode, imode, rmode; 4936 tree arg; 4937 int word, bitpos; 4938 enum insn_code icode; 4939 rtx temp; 4940 location_t loc = EXPR_LOCATION (exp); 4941 4942 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) 4943 return NULL_RTX; 4944 4945 arg = CALL_EXPR_ARG (exp, 0); 4946 fmode = TYPE_MODE (TREE_TYPE (arg)); 4947 rmode = TYPE_MODE (TREE_TYPE (exp)); 4948 fmt = REAL_MODE_FORMAT (fmode); 4949 4950 arg = builtin_save_expr (arg); 4951 4952 /* Expand the argument yielding a RTX expression. */ 4953 temp = expand_normal (arg); 4954 4955 /* Check if the back end provides an insn that handles signbit for the 4956 argument's mode. */ 4957 icode = optab_handler (signbit_optab, fmode); 4958 if (icode != CODE_FOR_nothing) 4959 { 4960 rtx last = get_last_insn (); 4961 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); 4962 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN)) 4963 return target; 4964 delete_insns_since (last); 4965 } 4966 4967 /* For floating point formats without a sign bit, implement signbit 4968 as "ARG < 0.0". */ 4969 bitpos = fmt->signbit_ro; 4970 if (bitpos < 0) 4971 { 4972 /* But we can't do this if the format supports signed zero. */ 4973 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode)) 4974 return NULL_RTX; 4975 4976 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg, 4977 build_real (TREE_TYPE (arg), dconst0)); 4978 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL); 4979 } 4980 4981 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD) 4982 { 4983 imode = int_mode_for_mode (fmode); 4984 if (imode == BLKmode) 4985 return NULL_RTX; 4986 temp = gen_lowpart (imode, temp); 4987 } 4988 else 4989 { 4990 imode = word_mode; 4991 /* Handle targets with different FP word orders. */ 4992 if (FLOAT_WORDS_BIG_ENDIAN) 4993 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD; 4994 else 4995 word = bitpos / BITS_PER_WORD; 4996 temp = operand_subword_force (temp, word, fmode); 4997 bitpos = bitpos % BITS_PER_WORD; 4998 } 4999 5000 /* Force the intermediate word_mode (or narrower) result into a 5001 register. This avoids attempting to create paradoxical SUBREGs 5002 of floating point modes below. */ 5003 temp = force_reg (imode, temp); 5004 5005 /* If the bitpos is within the "result mode" lowpart, the operation 5006 can be implement with a single bitwise AND. Otherwise, we need 5007 a right shift and an AND. */ 5008 5009 if (bitpos < GET_MODE_BITSIZE (rmode)) 5010 { 5011 double_int mask = double_int_zero.set_bit (bitpos); 5012 5013 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode)) 5014 temp = gen_lowpart (rmode, temp); 5015 temp = expand_binop (rmode, and_optab, temp, 5016 immed_double_int_const (mask, rmode), 5017 NULL_RTX, 1, OPTAB_LIB_WIDEN); 5018 } 5019 else 5020 { 5021 /* Perform a logical right shift to place the signbit in the least 5022 significant bit, then truncate the result to the desired mode 5023 and mask just this bit. */ 5024 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1); 5025 temp = gen_lowpart (rmode, temp); 5026 temp = expand_binop (rmode, and_optab, temp, const1_rtx, 5027 NULL_RTX, 1, OPTAB_LIB_WIDEN); 5028 } 5029 5030 return temp; 5031 } 5032 5033 /* Expand fork or exec calls. TARGET is the desired target of the 5034 call. EXP is the call. FN is the 5035 identificator of the actual function. IGNORE is nonzero if the 5036 value is to be ignored. 
*/ 5037 5038 static rtx 5039 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore) 5040 { 5041 tree id, decl; 5042 tree call; 5043 5044 /* If we are not profiling, just call the function. */ 5045 if (!profile_arc_flag) 5046 return NULL_RTX; 5047 5048 /* Otherwise call the wrapper. This should be equivalent for the rest of 5049 compiler, so the code does not diverge, and the wrapper may run the 5050 code necessary for keeping the profiling sane. */ 5051 5052 switch (DECL_FUNCTION_CODE (fn)) 5053 { 5054 case BUILT_IN_FORK: 5055 id = get_identifier ("__gcov_fork"); 5056 break; 5057 5058 case BUILT_IN_EXECL: 5059 id = get_identifier ("__gcov_execl"); 5060 break; 5061 5062 case BUILT_IN_EXECV: 5063 id = get_identifier ("__gcov_execv"); 5064 break; 5065 5066 case BUILT_IN_EXECLP: 5067 id = get_identifier ("__gcov_execlp"); 5068 break; 5069 5070 case BUILT_IN_EXECLE: 5071 id = get_identifier ("__gcov_execle"); 5072 break; 5073 5074 case BUILT_IN_EXECVP: 5075 id = get_identifier ("__gcov_execvp"); 5076 break; 5077 5078 case BUILT_IN_EXECVE: 5079 id = get_identifier ("__gcov_execve"); 5080 break; 5081 5082 default: 5083 gcc_unreachable (); 5084 } 5085 5086 decl = build_decl (DECL_SOURCE_LOCATION (fn), 5087 FUNCTION_DECL, id, TREE_TYPE (fn)); 5088 DECL_EXTERNAL (decl) = 1; 5089 TREE_PUBLIC (decl) = 1; 5090 DECL_ARTIFICIAL (decl) = 1; 5091 TREE_NOTHROW (decl) = 1; 5092 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT; 5093 DECL_VISIBILITY_SPECIFIED (decl) = 1; 5094 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0); 5095 return expand_call (call, target, ignore); 5096 } 5097 5098 5099 5100 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of 5101 the pointer in these functions is void*, the tree optimizers may remove 5102 casts. The mode computed in expand_builtin isn't reliable either, due 5103 to __sync_bool_compare_and_swap. 5104 5105 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the 5106 group of builtins. This gives us log2 of the mode size. */ 5107 5108 static inline enum machine_mode 5109 get_builtin_sync_mode (int fcode_diff) 5110 { 5111 /* The size is not negotiable, so ask not to get BLKmode in return 5112 if the target indicates that a smaller size would be better. */ 5113 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0); 5114 } 5115 5116 /* Expand the memory expression LOC and return the appropriate memory operand 5117 for the builtin_sync operations. */ 5118 5119 static rtx 5120 get_builtin_sync_mem (tree loc, enum machine_mode mode) 5121 { 5122 rtx addr, mem; 5123 5124 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM); 5125 addr = convert_memory_address (Pmode, addr); 5126 5127 /* Note that we explicitly do not want any alias information for this 5128 memory, so that we kill all other live memories. Otherwise we don't 5129 satisfy the full barrier semantics of the intrinsic. */ 5130 mem = validize_mem (gen_rtx_MEM (mode, addr)); 5131 5132 /* The alignment needs to be at least according to that of the mode. */ 5133 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode), 5134 get_pointer_alignment (loc))); 5135 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER); 5136 MEM_VOLATILE_P (mem) = 1; 5137 5138 return mem; 5139 } 5140 5141 /* Make sure an argument is in the right mode. 5142 EXP is the tree argument. 5143 MODE is the mode it should be in. 
*/ 5144 5145 static rtx 5146 expand_expr_force_mode (tree exp, enum machine_mode mode) 5147 { 5148 rtx val; 5149 enum machine_mode old_mode; 5150 5151 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL); 5152 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care 5153 of CONST_INTs, where we know the old_mode only from the call argument. */ 5154 5155 old_mode = GET_MODE (val); 5156 if (old_mode == VOIDmode) 5157 old_mode = TYPE_MODE (TREE_TYPE (exp)); 5158 val = convert_modes (mode, old_mode, val, 1); 5159 return val; 5160 } 5161 5162 5163 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics. 5164 EXP is the CALL_EXPR. CODE is the rtx code 5165 that corresponds to the arithmetic or logical operation from the name; 5166 an exception here is that NOT actually means NAND. TARGET is an optional 5167 place for us to store the results; AFTER is true if this is the 5168 fetch_and_xxx form. */ 5169 5170 static rtx 5171 expand_builtin_sync_operation (enum machine_mode mode, tree exp, 5172 enum rtx_code code, bool after, 5173 rtx target) 5174 { 5175 rtx val, mem; 5176 location_t loc = EXPR_LOCATION (exp); 5177 5178 if (code == NOT && warn_sync_nand) 5179 { 5180 tree fndecl = get_callee_fndecl (exp); 5181 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); 5182 5183 static bool warned_f_a_n, warned_n_a_f; 5184 5185 switch (fcode) 5186 { 5187 case BUILT_IN_SYNC_FETCH_AND_NAND_1: 5188 case BUILT_IN_SYNC_FETCH_AND_NAND_2: 5189 case BUILT_IN_SYNC_FETCH_AND_NAND_4: 5190 case BUILT_IN_SYNC_FETCH_AND_NAND_8: 5191 case BUILT_IN_SYNC_FETCH_AND_NAND_16: 5192 if (warned_f_a_n) 5193 break; 5194 5195 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N); 5196 inform (loc, "%qD changed semantics in GCC 4.4", fndecl); 5197 warned_f_a_n = true; 5198 break; 5199 5200 case BUILT_IN_SYNC_NAND_AND_FETCH_1: 5201 case BUILT_IN_SYNC_NAND_AND_FETCH_2: 5202 case BUILT_IN_SYNC_NAND_AND_FETCH_4: 5203 case BUILT_IN_SYNC_NAND_AND_FETCH_8: 5204 case BUILT_IN_SYNC_NAND_AND_FETCH_16: 5205 if (warned_n_a_f) 5206 break; 5207 5208 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N); 5209 inform (loc, "%qD changed semantics in GCC 4.4", fndecl); 5210 warned_n_a_f = true; 5211 break; 5212 5213 default: 5214 gcc_unreachable (); 5215 } 5216 } 5217 5218 /* Expand the operands. */ 5219 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5220 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5221 5222 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST, 5223 after); 5224 } 5225 5226 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap 5227 intrinsics. EXP is the CALL_EXPR. IS_BOOL is 5228 true if this is the boolean form. TARGET is a place for us to store the 5229 results; this is NOT optional if IS_BOOL is true. */ 5230 5231 static rtx 5232 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp, 5233 bool is_bool, rtx target) 5234 { 5235 rtx old_val, new_val, mem; 5236 rtx *pbool, *poval; 5237 5238 /* Expand the operands. 
*/ 5239 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5240 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5241 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode); 5242 5243 pbool = poval = NULL; 5244 if (target != const0_rtx) 5245 { 5246 if (is_bool) 5247 pbool = &target; 5248 else 5249 poval = &target; 5250 } 5251 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val, 5252 false, MEMMODEL_SEQ_CST, 5253 MEMMODEL_SEQ_CST)) 5254 return NULL_RTX; 5255 5256 return target; 5257 } 5258 5259 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most 5260 general form is actually an atomic exchange, and some targets only 5261 support a reduced form with the second argument being a constant 1. 5262 EXP is the CALL_EXPR; TARGET is an optional place for us to store 5263 the results. */ 5264 5265 static rtx 5266 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp, 5267 rtx target) 5268 { 5269 rtx val, mem; 5270 5271 /* Expand the operands. */ 5272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5274 5275 return expand_sync_lock_test_and_set (target, mem, val); 5276 } 5277 5278 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */ 5279 5280 static void 5281 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp) 5282 { 5283 rtx mem; 5284 5285 /* Expand the operands. */ 5286 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5287 5288 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true); 5289 } 5290 5291 /* Given an integer representing an ``enum memmodel'', verify its 5292 correctness and return the memory model enum. */ 5293 5294 static enum memmodel 5295 get_memmodel (tree exp) 5296 { 5297 rtx op; 5298 unsigned HOST_WIDE_INT val; 5299 5300 /* If the parameter is not a constant, it's a run time value so we'll just 5301 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */ 5302 if (TREE_CODE (exp) != INTEGER_CST) 5303 return MEMMODEL_SEQ_CST; 5304 5305 op = expand_normal (exp); 5306 5307 val = INTVAL (op); 5308 if (targetm.memmodel_check) 5309 val = targetm.memmodel_check (val); 5310 else if (val & ~MEMMODEL_MASK) 5311 { 5312 warning (OPT_Winvalid_memory_model, 5313 "unknown architecture specifier in memory model to builtin"); 5314 return MEMMODEL_SEQ_CST; 5315 } 5316 5317 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST) 5318 { 5319 warning (OPT_Winvalid_memory_model, 5320 "invalid memory model argument to builtin"); 5321 return MEMMODEL_SEQ_CST; 5322 } 5323 5324 return (enum memmodel) val; 5325 } 5326 5327 /* Expand the __atomic_exchange intrinsic: 5328 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel) 5329 EXP is the CALL_EXPR. 5330 TARGET is an optional place for us to store the results. */ 5331 5332 static rtx 5333 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target) 5334 { 5335 rtx val, mem; 5336 enum memmodel model; 5337 5338 model = get_memmodel (CALL_EXPR_ARG (exp, 2)); 5339 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME) 5340 { 5341 error ("invalid memory model for %<__atomic_exchange%>"); 5342 return NULL_RTX; 5343 } 5344 5345 if (!flag_inline_atomics) 5346 return NULL_RTX; 5347 5348 /* Expand the operands.
*/ 5349 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5350 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5351 5352 return expand_atomic_exchange (target, mem, val, model); 5353 } 5354 5355 /* Expand the __atomic_compare_exchange intrinsic: 5356 bool __atomic_compare_exchange (TYPE *object, TYPE *expect, 5357 TYPE desired, BOOL weak, 5358 enum memmodel success, 5359 enum memmodel failure) 5360 EXP is the CALL_EXPR. 5361 TARGET is an optional place for us to store the results. */ 5362 5363 static rtx 5364 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp, 5365 rtx target) 5366 { 5367 rtx expect, desired, mem, oldval, label; 5368 enum memmodel success, failure; 5369 tree weak; 5370 bool is_weak; 5371 5372 success = get_memmodel (CALL_EXPR_ARG (exp, 4)); 5373 failure = get_memmodel (CALL_EXPR_ARG (exp, 5)); 5374 5375 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE 5376 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL) 5377 { 5378 error ("invalid failure memory model for %<__atomic_compare_exchange%>"); 5379 return NULL_RTX; 5380 } 5381 5382 if (failure > success) 5383 { 5384 error ("failure memory model cannot be stronger than success " 5385 "memory model for %<__atomic_compare_exchange%>"); 5386 return NULL_RTX; 5387 } 5388 5389 if (!flag_inline_atomics) 5390 return NULL_RTX; 5391 5392 /* Expand the operands. */ 5393 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5394 5395 expect = expand_normal (CALL_EXPR_ARG (exp, 1)); 5396 expect = convert_memory_address (Pmode, expect); 5397 expect = gen_rtx_MEM (mode, expect); 5398 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode); 5399 5400 weak = CALL_EXPR_ARG (exp, 3); 5401 is_weak = false; 5402 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0) 5403 is_weak = true; 5404 5405 if (target == const0_rtx) 5406 target = NULL; 5407 5408 /* Lest the rtl backend create a race condition with an improper store 5409 to memory, always create a new pseudo for OLDVAL. */ 5410 oldval = NULL; 5411 5412 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired, 5413 is_weak, success, failure)) 5414 return NULL_RTX; 5415 5416 /* Conditionally store back to EXPECT, lest we create a race condition 5417 with an improper store to memory. */ 5418 /* ??? With a rearrangement of atomics at the gimple level, we can handle 5419 the normal case where EXPECT is totally private, i.e. a register. At 5420 which point the store can be unconditional. */ 5421 label = gen_label_rtx (); 5422 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label); 5423 emit_move_insn (expect, oldval); 5424 emit_label (label); 5425 5426 return target; 5427 } 5428 5429 /* Expand the __atomic_load intrinsic: 5430 TYPE __atomic_load (TYPE *object, enum memmodel) 5431 EXP is the CALL_EXPR. 5432 TARGET is an optional place for us to store the results. */ 5433 5434 static rtx 5435 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target) 5436 { 5437 rtx mem; 5438 enum memmodel model; 5439 5440 model = get_memmodel (CALL_EXPR_ARG (exp, 1)); 5441 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE 5442 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL) 5443 { 5444 error ("invalid memory model for %<__atomic_load%>"); 5445 return NULL_RTX; 5446 } 5447 5448 if (!flag_inline_atomics) 5449 return NULL_RTX; 5450 5451 /* Expand the operand.
*/ 5452 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5453 5454 return expand_atomic_load (target, mem, model); 5455 } 5456 5457 5458 /* Expand the __atomic_store intrinsic: 5459 void __atomic_store (TYPE *object, TYPE desired, enum memmodel) 5460 EXP is the CALL_EXPR. */ 5461 5462 5463 static rtx 5464 expand_builtin_atomic_store (enum machine_mode mode, tree exp) 5465 { 5466 rtx mem, val; 5467 enum memmodel model; 5468 5469 model = get_memmodel (CALL_EXPR_ARG (exp, 2)); 5470 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED 5471 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST 5472 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE) 5473 { 5474 error ("invalid memory model for %<__atomic_store%>"); 5475 return NULL_RTX; 5476 } 5477 5478 if (!flag_inline_atomics) 5479 return NULL_RTX; 5480 5481 /* Expand the operands. */ 5482 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5483 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5484 5485 return expand_atomic_store (mem, val, model, false); 5486 } 5487 5488 /* Expand the __atomic_fetch_XXX intrinsic: 5489 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel) 5490 EXP is the CALL_EXPR. 5491 TARGET is an optional place for us to store the results. 5492 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND). 5493 FETCH_AFTER is true if returning the result of the operation. 5494 FETCH_AFTER is false if returning the value before the operation. 5495 IGNORE is true if the result is not used. 5496 EXT_CALL is the correct builtin for an external call if this cannot be 5497 resolved to an instruction sequence. */ 5498 5499 static rtx 5500 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target, 5501 enum rtx_code code, bool fetch_after, 5502 bool ignore, enum built_in_function ext_call) 5503 { 5504 rtx val, mem, ret; 5505 enum memmodel model; 5506 tree fndecl; 5507 tree addr; 5508 5509 model = get_memmodel (CALL_EXPR_ARG (exp, 2)); 5510 5511 /* Expand the operands. */ 5512 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5513 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode); 5514 5515 /* Only try generating instructions if inlining is turned on. */ 5516 if (flag_inline_atomics) 5517 { 5518 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after); 5519 if (ret) 5520 return ret; 5521 } 5522 5523 /* Return if a different routine isn't needed for the library call. */ 5524 if (ext_call == BUILT_IN_NONE) 5525 return NULL_RTX; 5526 5527 /* Change the call to the specified function. */ 5528 fndecl = get_callee_fndecl (exp); 5529 addr = CALL_EXPR_FN (exp); 5530 STRIP_NOPS (addr); 5531 5532 gcc_assert (TREE_OPERAND (addr, 0) == fndecl); 5533 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call); 5534 5535 /* Expand the call here so we can emit trailing code. */ 5536 ret = expand_call (exp, target, ignore); 5537 5538 /* Replace the original function just in case it matters. */ 5539 TREE_OPERAND (addr, 0) = fndecl; 5540 5541 /* Then issue the arithmetic correction to return the right result.
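The library fallback substituted above is the fetch-before (__atomic_fetch_OP) form, so when the caller asked for an OP_fetch result the operation must be redone on the returned value: e.g. a NAND fallback recomputes ~(ret & val) below, while the other CODEs need just one binop.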
*/ 5542 if (!ignore) 5543 { 5544 if (code == NOT) 5545 { 5546 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true, 5547 OPTAB_LIB_WIDEN); 5548 ret = expand_simple_unop (mode, NOT, ret, target, true); 5549 } 5550 else 5551 ret = expand_simple_binop (mode, code, ret, val, target, true, 5552 OPTAB_LIB_WIDEN); 5553 } 5554 return ret; 5555 } 5556 5557 5558 #ifndef HAVE_atomic_clear 5559 # define HAVE_atomic_clear 0 5560 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX) 5561 #endif 5562 5563 /* Expand an atomic clear operation. 5564 void __atomic_clear (BOOL *obj, enum memmodel) 5565 EXP is the call expression. */ 5566 5567 static rtx 5568 expand_builtin_atomic_clear (tree exp) 5569 { 5570 enum machine_mode mode; 5571 rtx mem, ret; 5572 enum memmodel model; 5573 5574 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0); 5575 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5576 model = get_memmodel (CALL_EXPR_ARG (exp, 1)); 5577 5578 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE 5579 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL) 5580 { 5581 error ("invalid memory model for %<__atomic_clear%>"); 5582 return const0_rtx; 5583 } 5584 5585 if (HAVE_atomic_clear) 5586 { 5587 emit_insn (gen_atomic_clear (mem, model)); 5588 return const0_rtx; 5589 } 5590 5591 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release. 5592 Failing that, issue a plain store. The only way this can 5593 fail is if the bool type is larger than a word size. Unlikely, but 5594 handle it anyway for completeness. Assume a single threaded model since 5595 there is no atomic support in this case, and no barriers are required. */ 5596 ret = expand_atomic_store (mem, const0_rtx, model, true); 5597 if (!ret) 5598 emit_move_insn (mem, const0_rtx); 5599 return const0_rtx; 5600 } 5601 5602 /* Expand an atomic test_and_set operation. 5603 bool __atomic_test_and_set (BOOL *obj, enum memmodel) 5604 EXP is the call expression. */ 5605 5606 static rtx 5607 expand_builtin_atomic_test_and_set (tree exp, rtx target) 5608 { 5609 rtx mem; 5610 enum memmodel model; 5611 enum machine_mode mode; 5612 5613 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0); 5614 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); 5615 model = get_memmodel (CALL_EXPR_ARG (exp, 1)); 5616 5617 return expand_atomic_test_and_set (target, mem, model); 5618 } 5619 5620 5621 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on 5622 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */ 5623 5624 static tree 5625 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1) 5626 { 5627 int size; 5628 enum machine_mode mode; 5629 unsigned int mode_align, type_align; 5630 5631 if (TREE_CODE (arg0) != INTEGER_CST) 5632 return NULL_TREE; 5633 5634 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT; 5635 mode = mode_for_size (size, MODE_INT, 0); 5636 mode_align = GET_MODE_ALIGNMENT (mode); 5637 5638 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0) 5639 type_align = mode_align; 5640 else 5641 { 5642 tree ttype = TREE_TYPE (arg1); 5643 5644 /* This function is usually invoked and folded immediately by the front 5645 end before anything else has a chance to look at it. The pointer 5646 parameter at this point is usually cast to a void *, so check for that 5647 and look past the cast.
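For example, a call written as __atomic_always_lock_free (sizeof (v), &v) typically arrives here with the pointer argument as (void *) &v, so looking past that NOP_EXPR below lets us test the alignment of V's real type rather than that of void *.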
*/ 5648 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype) 5649 && VOID_TYPE_P (TREE_TYPE (ttype))) 5650 arg1 = TREE_OPERAND (arg1, 0); 5651 5652 ttype = TREE_TYPE (arg1); 5653 gcc_assert (POINTER_TYPE_P (ttype)); 5654 5655 /* Get the underlying type of the object. */ 5656 ttype = TREE_TYPE (ttype); 5657 type_align = TYPE_ALIGN (ttype); 5658 } 5659 5660 /* If the object has smaller alignment, the lock free routines cannot 5661 be used. */ 5662 if (type_align < mode_align) 5663 return boolean_false_node; 5664 5665 /* Check if a compare_and_swap pattern exists for the mode which represents 5666 the required size. The pattern is not allowed to fail, so the existence 5667 of the pattern indicates support is present. */ 5668 if (can_compare_and_swap_p (mode, true)) 5669 return boolean_true_node; 5670 else 5671 return boolean_false_node; 5672 } 5673 5674 /* Return true if the parameters to call EXP represent an object which will 5675 always generate lock free instructions. The first argument represents the 5676 size of the object, and the second parameter is a pointer to the object 5677 itself. If NULL is passed for the object, then the result is based on 5678 typical alignment for an object of the specified size. Otherwise return 5679 false. */ 5680 5681 static rtx 5682 expand_builtin_atomic_always_lock_free (tree exp) 5683 { 5684 tree size; 5685 tree arg0 = CALL_EXPR_ARG (exp, 0); 5686 tree arg1 = CALL_EXPR_ARG (exp, 1); 5687 5688 if (TREE_CODE (arg0) != INTEGER_CST) 5689 { 5690 error ("non-constant argument 1 to __atomic_always_lock_free"); 5691 return const0_rtx; 5692 } 5693 5694 size = fold_builtin_atomic_always_lock_free (arg0, arg1); 5695 if (size == boolean_true_node) 5696 return const1_rtx; 5697 return const0_rtx; 5698 } 5699 5700 /* Return boolean_true_node if it can be determined that the object ARG1 of 5701 size ARG0 is lock free on this architecture, NULL_TREE otherwise. */ 5702 5703 static tree 5704 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1) 5705 { 5706 if (!flag_inline_atomics) 5707 return NULL_TREE; 5708 5709 /* If it isn't always lock free, don't generate a result. */ 5710 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node) 5711 return boolean_true_node; 5712 5713 return NULL_TREE; 5714 } 5715 5716 /* Return true if the parameters to call EXP represent an object which will 5717 always generate lock free instructions. The first argument represents the 5718 size of the object, and the second parameter is a pointer to the object 5719 itself. If NULL is passed for the object, then the result is based on 5720 typical alignment for an object of the specified size. Otherwise return 5721 NULL. */ 5722 5723 static rtx 5724 expand_builtin_atomic_is_lock_free (tree exp) 5725 { 5726 tree size; 5727 tree arg0 = CALL_EXPR_ARG (exp, 0); 5728 tree arg1 = CALL_EXPR_ARG (exp, 1); 5729 5730 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0))) 5731 { 5732 error ("non-integer argument 1 to __atomic_is_lock_free"); 5733 return NULL_RTX; 5734 } 5735 5736 if (!flag_inline_atomics) 5737 return NULL_RTX; 5738 5739 /* If the value is known at compile time, return the RTX for it. */ 5740 size = fold_builtin_atomic_is_lock_free (arg0, arg1); 5741 if (size == boolean_true_node) 5742 return const1_rtx; 5743 5744 return NULL_RTX; 5745 } 5746 5747 /* Expand the __atomic_thread_fence intrinsic: 5748 void __atomic_thread_fence (enum memmodel) 5749 EXP is the CALL_EXPR.
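Note the contrast with __atomic_signal_fence further below: that fence only has to order a thread against signal handlers running in the same thread, so it can usually be honored with a compiler-level barrier rather than a hardware fence.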
*/ 5750 5751 static void 5752 expand_builtin_atomic_thread_fence (tree exp) 5753 { 5754 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0)); 5755 expand_mem_thread_fence (model); 5756 } 5757 5758 /* Expand the __atomic_signal_fence intrinsic: 5759 void __atomic_signal_fence (enum memmodel) 5760 EXP is the CALL_EXPR. */ 5761 5762 static void 5763 expand_builtin_atomic_signal_fence (tree exp) 5764 { 5765 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0)); 5766 expand_mem_signal_fence (model); 5767 } 5768 5769 /* Expand the __sync_synchronize intrinsic. */ 5770 5771 static void 5772 expand_builtin_sync_synchronize (void) 5773 { 5774 expand_mem_thread_fence (MEMMODEL_SEQ_CST); 5775 } 5776 5777 static rtx 5778 expand_builtin_thread_pointer (tree exp, rtx target) 5779 { 5780 enum insn_code icode; 5781 if (!validate_arglist (exp, VOID_TYPE)) 5782 return const0_rtx; 5783 icode = direct_optab_handler (get_thread_pointer_optab, Pmode); 5784 if (icode != CODE_FOR_nothing) 5785 { 5786 struct expand_operand op; 5787 if (!REG_P (target) || GET_MODE (target) != Pmode) 5788 target = gen_reg_rtx (Pmode); 5789 create_output_operand (&op, target, Pmode); 5790 expand_insn (icode, 1, &op); 5791 return target; 5792 } 5793 error ("__builtin_thread_pointer is not supported on this target"); 5794 return const0_rtx; 5795 } 5796 5797 static void 5798 expand_builtin_set_thread_pointer (tree exp) 5799 { 5800 enum insn_code icode; 5801 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 5802 return; 5803 icode = direct_optab_handler (set_thread_pointer_optab, Pmode); 5804 if (icode != CODE_FOR_nothing) 5805 { 5806 struct expand_operand op; 5807 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, 5808 Pmode, EXPAND_NORMAL); 5809 create_input_operand (&op, val, Pmode); 5810 expand_insn (icode, 1, &op); 5811 return; 5812 } 5813 error ("__builtin_set_thread_pointer is not supported on this target"); 5814 } 5815 5816 5817 /* Expand an expression EXP that calls a built-in function, 5818 with result going to TARGET if that's convenient 5819 (and in mode MODE if that's convenient). 5820 SUBTARGET may be used as the target for computing one of EXP's operands. 5821 IGNORE is nonzero if the value is to be ignored. */ 5822 5823 rtx 5824 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode, 5825 int ignore) 5826 { 5827 tree fndecl = get_callee_fndecl (exp); 5828 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); 5829 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp)); 5830 int flags; 5831 5832 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) 5833 return targetm.expand_builtin (exp, target, subtarget, mode, ignore); 5834 5835 /* When not optimizing, generate calls to library functions for a certain 5836 set of builtins. */ 5837 if (!optimize 5838 && !called_as_built_in (fndecl) 5839 && fcode != BUILT_IN_ALLOCA 5840 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN 5841 && fcode != BUILT_IN_FREE) 5842 return expand_call (exp, target, ignore); 5843 5844 /* The built-in function expanders test for target == const0_rtx 5845 to determine whether the function's result will be ignored. */ 5846 if (ignore) 5847 target = const0_rtx; 5848 5849 /* If the result of a pure or const built-in function is ignored, and 5850 none of its arguments are volatile, we can avoid expanding the 5851 built-in call and just evaluate the arguments for side-effects. 
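For example, in a statement such as (void) __builtin_labs (i++), the labs computation itself can be skipped, but I++ must still be evaluated; the loop below expands each argument with const0_rtx as the target for exactly that purpose.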
*/ 5852 if (target == const0_rtx 5853 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE)) 5854 && !(flags & ECF_LOOPING_CONST_OR_PURE)) 5855 { 5856 bool volatilep = false; 5857 tree arg; 5858 call_expr_arg_iterator iter; 5859 5860 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 5861 if (TREE_THIS_VOLATILE (arg)) 5862 { 5863 volatilep = true; 5864 break; 5865 } 5866 5867 if (! volatilep) 5868 { 5869 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) 5870 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); 5871 return const0_rtx; 5872 } 5873 } 5874 5875 switch (fcode) 5876 { 5877 CASE_FLT_FN (BUILT_IN_FABS): 5878 case BUILT_IN_FABSD32: 5879 case BUILT_IN_FABSD64: 5880 case BUILT_IN_FABSD128: 5881 target = expand_builtin_fabs (exp, target, subtarget); 5882 if (target) 5883 return target; 5884 break; 5885 5886 CASE_FLT_FN (BUILT_IN_COPYSIGN): 5887 target = expand_builtin_copysign (exp, target, subtarget); 5888 if (target) 5889 return target; 5890 break; 5891 5892 /* Just do a normal library call if we were unable to fold 5893 the values. */ 5894 CASE_FLT_FN (BUILT_IN_CABS): 5895 break; 5896 5897 CASE_FLT_FN (BUILT_IN_EXP): 5898 CASE_FLT_FN (BUILT_IN_EXP10): 5899 CASE_FLT_FN (BUILT_IN_POW10): 5900 CASE_FLT_FN (BUILT_IN_EXP2): 5901 CASE_FLT_FN (BUILT_IN_EXPM1): 5902 CASE_FLT_FN (BUILT_IN_LOGB): 5903 CASE_FLT_FN (BUILT_IN_LOG): 5904 CASE_FLT_FN (BUILT_IN_LOG10): 5905 CASE_FLT_FN (BUILT_IN_LOG2): 5906 CASE_FLT_FN (BUILT_IN_LOG1P): 5907 CASE_FLT_FN (BUILT_IN_TAN): 5908 CASE_FLT_FN (BUILT_IN_ASIN): 5909 CASE_FLT_FN (BUILT_IN_ACOS): 5910 CASE_FLT_FN (BUILT_IN_ATAN): 5911 CASE_FLT_FN (BUILT_IN_SIGNIFICAND): 5912 /* Treat these like sqrt only if unsafe math optimizations are allowed, 5913 because of possible accuracy problems. */ 5914 if (! flag_unsafe_math_optimizations) 5915 break; 5916 CASE_FLT_FN (BUILT_IN_SQRT): 5917 CASE_FLT_FN (BUILT_IN_FLOOR): 5918 CASE_FLT_FN (BUILT_IN_CEIL): 5919 CASE_FLT_FN (BUILT_IN_TRUNC): 5920 CASE_FLT_FN (BUILT_IN_ROUND): 5921 CASE_FLT_FN (BUILT_IN_NEARBYINT): 5922 CASE_FLT_FN (BUILT_IN_RINT): 5923 target = expand_builtin_mathfn (exp, target, subtarget); 5924 if (target) 5925 return target; 5926 break; 5927 5928 CASE_FLT_FN (BUILT_IN_FMA): 5929 target = expand_builtin_mathfn_ternary (exp, target, subtarget); 5930 if (target) 5931 return target; 5932 break; 5933 5934 CASE_FLT_FN (BUILT_IN_ILOGB): 5935 if (! 
flag_unsafe_math_optimizations) 5936 break; 5937 CASE_FLT_FN (BUILT_IN_ISINF): 5938 CASE_FLT_FN (BUILT_IN_FINITE): 5939 case BUILT_IN_ISFINITE: 5940 case BUILT_IN_ISNORMAL: 5941 target = expand_builtin_interclass_mathfn (exp, target); 5942 if (target) 5943 return target; 5944 break; 5945 5946 CASE_FLT_FN (BUILT_IN_ICEIL): 5947 CASE_FLT_FN (BUILT_IN_LCEIL): 5948 CASE_FLT_FN (BUILT_IN_LLCEIL): 5949 CASE_FLT_FN (BUILT_IN_LFLOOR): 5950 CASE_FLT_FN (BUILT_IN_IFLOOR): 5951 CASE_FLT_FN (BUILT_IN_LLFLOOR): 5952 target = expand_builtin_int_roundingfn (exp, target); 5953 if (target) 5954 return target; 5955 break; 5956 5957 CASE_FLT_FN (BUILT_IN_IRINT): 5958 CASE_FLT_FN (BUILT_IN_LRINT): 5959 CASE_FLT_FN (BUILT_IN_LLRINT): 5960 CASE_FLT_FN (BUILT_IN_IROUND): 5961 CASE_FLT_FN (BUILT_IN_LROUND): 5962 CASE_FLT_FN (BUILT_IN_LLROUND): 5963 target = expand_builtin_int_roundingfn_2 (exp, target); 5964 if (target) 5965 return target; 5966 break; 5967 5968 CASE_FLT_FN (BUILT_IN_POWI): 5969 target = expand_builtin_powi (exp, target); 5970 if (target) 5971 return target; 5972 break; 5973 5974 CASE_FLT_FN (BUILT_IN_ATAN2): 5975 CASE_FLT_FN (BUILT_IN_LDEXP): 5976 CASE_FLT_FN (BUILT_IN_SCALB): 5977 CASE_FLT_FN (BUILT_IN_SCALBN): 5978 CASE_FLT_FN (BUILT_IN_SCALBLN): 5979 if (! flag_unsafe_math_optimizations) 5980 break; 5981 5982 CASE_FLT_FN (BUILT_IN_FMOD): 5983 CASE_FLT_FN (BUILT_IN_REMAINDER): 5984 CASE_FLT_FN (BUILT_IN_DREM): 5985 CASE_FLT_FN (BUILT_IN_POW): 5986 target = expand_builtin_mathfn_2 (exp, target, subtarget); 5987 if (target) 5988 return target; 5989 break; 5990 5991 CASE_FLT_FN (BUILT_IN_CEXPI): 5992 target = expand_builtin_cexpi (exp, target); 5993 gcc_assert (target); 5994 return target; 5995 5996 CASE_FLT_FN (BUILT_IN_SIN): 5997 CASE_FLT_FN (BUILT_IN_COS): 5998 if (! flag_unsafe_math_optimizations) 5999 break; 6000 target = expand_builtin_mathfn_3 (exp, target, subtarget); 6001 if (target) 6002 return target; 6003 break; 6004 6005 CASE_FLT_FN (BUILT_IN_SINCOS): 6006 if (! flag_unsafe_math_optimizations) 6007 break; 6008 target = expand_builtin_sincos (exp); 6009 if (target) 6010 return target; 6011 break; 6012 6013 case BUILT_IN_APPLY_ARGS: 6014 return expand_builtin_apply_args (); 6015 6016 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes 6017 FUNCTION with a copy of the parameters described by 6018 ARGUMENTS, and ARGSIZE. It returns a block of memory 6019 allocated on the stack into which is stored all the registers 6020 that might possibly be used for returning the result of a 6021 function. ARGUMENTS is the value returned by 6022 __builtin_apply_args. ARGSIZE is the number of bytes of 6023 arguments that must be copied. ??? How should this value be 6024 computed? We'll also need a safe worst case value for varargs 6025 functions. */ 6026 case BUILT_IN_APPLY: 6027 if (!validate_arglist (exp, POINTER_TYPE, 6028 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE) 6029 && !validate_arglist (exp, REFERENCE_TYPE, 6030 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 6031 return const0_rtx; 6032 else 6033 { 6034 rtx ops[3]; 6035 6036 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0)); 6037 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1)); 6038 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2)); 6039 6040 return expand_builtin_apply (ops[0], ops[1], ops[2]); 6041 } 6042 6043 /* __builtin_return (RESULT) causes the function to return the 6044 value described by RESULT. RESULT is address of the block of 6045 memory returned by __builtin_apply. 
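A typical (illustrative) use of the pair is a transparent forwarding wrapper, roughly: { void *args = __builtin_apply_args (); void *res = __builtin_apply ((void (*)()) real_fn, args, 64); __builtin_return (res); } where REAL_FN and the worst-case size 64 are placeholders for the caller's choices, per the ??? note above.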
*/ 6046 case BUILT_IN_RETURN: 6047 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 6048 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0))); 6049 return const0_rtx; 6050 6051 case BUILT_IN_SAVEREGS: 6052 return expand_builtin_saveregs (); 6053 6054 case BUILT_IN_VA_ARG_PACK: 6055 /* All valid uses of __builtin_va_arg_pack () are removed during 6056 inlining. */ 6057 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp); 6058 return const0_rtx; 6059 6060 case BUILT_IN_VA_ARG_PACK_LEN: 6061 /* All valid uses of __builtin_va_arg_pack_len () are removed during 6062 inlining. */ 6063 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp); 6064 return const0_rtx; 6065 6066 /* Return the address of the first anonymous stack arg. */ 6067 case BUILT_IN_NEXT_ARG: 6068 if (fold_builtin_next_arg (exp, false)) 6069 return const0_rtx; 6070 return expand_builtin_next_arg (); 6071 6072 case BUILT_IN_CLEAR_CACHE: 6073 target = expand_builtin___clear_cache (exp); 6074 if (target) 6075 return target; 6076 break; 6077 6078 case BUILT_IN_CLASSIFY_TYPE: 6079 return expand_builtin_classify_type (exp); 6080 6081 case BUILT_IN_CONSTANT_P: 6082 return const0_rtx; 6083 6084 case BUILT_IN_FRAME_ADDRESS: 6085 case BUILT_IN_RETURN_ADDRESS: 6086 return expand_builtin_frame_address (fndecl, exp); 6087 6088 /* Returns the address of the area where the structure is returned. 6089 0 otherwise. */ 6090 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS: 6091 if (call_expr_nargs (exp) != 0 6092 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))) 6093 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl)))) 6094 return const0_rtx; 6095 else 6096 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); 6097 6098 case BUILT_IN_ALLOCA: 6099 case BUILT_IN_ALLOCA_WITH_ALIGN: 6100 /* If the allocation stems from the declaration of a variable-sized 6101 object, it cannot accumulate. 
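That is, the alloca emitted for a variable-length array such as char buf[n] is implicitly freed when BUF goes out of scope, so a loop around the declaration cannot grow the frame; CALL_ALLOCA_FOR_VAR_P records that distinction.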
*/ 6102 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp)); 6103 if (target) 6104 return target; 6105 break; 6106 6107 case BUILT_IN_STACK_SAVE: 6108 return expand_stack_save (); 6109 6110 case BUILT_IN_STACK_RESTORE: 6111 expand_stack_restore (CALL_EXPR_ARG (exp, 0)); 6112 return const0_rtx; 6113 6114 case BUILT_IN_BSWAP16: 6115 case BUILT_IN_BSWAP32: 6116 case BUILT_IN_BSWAP64: 6117 target = expand_builtin_bswap (target_mode, exp, target, subtarget); 6118 if (target) 6119 return target; 6120 break; 6121 6122 CASE_INT_FN (BUILT_IN_FFS): 6123 case BUILT_IN_FFSIMAX: 6124 target = expand_builtin_unop (target_mode, exp, target, 6125 subtarget, ffs_optab); 6126 if (target) 6127 return target; 6128 break; 6129 6130 CASE_INT_FN (BUILT_IN_CLZ): 6131 case BUILT_IN_CLZIMAX: 6132 target = expand_builtin_unop (target_mode, exp, target, 6133 subtarget, clz_optab); 6134 if (target) 6135 return target; 6136 break; 6137 6138 CASE_INT_FN (BUILT_IN_CTZ): 6139 case BUILT_IN_CTZIMAX: 6140 target = expand_builtin_unop (target_mode, exp, target, 6141 subtarget, ctz_optab); 6142 if (target) 6143 return target; 6144 break; 6145 6146 CASE_INT_FN (BUILT_IN_CLRSB): 6147 case BUILT_IN_CLRSBIMAX: 6148 target = expand_builtin_unop (target_mode, exp, target, 6149 subtarget, clrsb_optab); 6150 if (target) 6151 return target; 6152 break; 6153 6154 CASE_INT_FN (BUILT_IN_POPCOUNT): 6155 case BUILT_IN_POPCOUNTIMAX: 6156 target = expand_builtin_unop (target_mode, exp, target, 6157 subtarget, popcount_optab); 6158 if (target) 6159 return target; 6160 break; 6161 6162 CASE_INT_FN (BUILT_IN_PARITY): 6163 case BUILT_IN_PARITYIMAX: 6164 target = expand_builtin_unop (target_mode, exp, target, 6165 subtarget, parity_optab); 6166 if (target) 6167 return target; 6168 break; 6169 6170 case BUILT_IN_STRLEN: 6171 target = expand_builtin_strlen (exp, target, target_mode); 6172 if (target) 6173 return target; 6174 break; 6175 6176 case BUILT_IN_STRCPY: 6177 target = expand_builtin_strcpy (exp, target); 6178 if (target) 6179 return target; 6180 break; 6181 6182 case BUILT_IN_STRNCPY: 6183 target = expand_builtin_strncpy (exp, target); 6184 if (target) 6185 return target; 6186 break; 6187 6188 case BUILT_IN_STPCPY: 6189 target = expand_builtin_stpcpy (exp, target, mode); 6190 if (target) 6191 return target; 6192 break; 6193 6194 case BUILT_IN_MEMCPY: 6195 target = expand_builtin_memcpy (exp, target); 6196 if (target) 6197 return target; 6198 break; 6199 6200 case BUILT_IN_MEMPCPY: 6201 target = expand_builtin_mempcpy (exp, target, mode); 6202 if (target) 6203 return target; 6204 break; 6205 6206 case BUILT_IN_MEMSET: 6207 target = expand_builtin_memset (exp, target, mode); 6208 if (target) 6209 return target; 6210 break; 6211 6212 case BUILT_IN_BZERO: 6213 target = expand_builtin_bzero (exp); 6214 if (target) 6215 return target; 6216 break; 6217 6218 case BUILT_IN_STRCMP: 6219 target = expand_builtin_strcmp (exp, target); 6220 if (target) 6221 return target; 6222 break; 6223 6224 case BUILT_IN_STRNCMP: 6225 target = expand_builtin_strncmp (exp, target, mode); 6226 if (target) 6227 return target; 6228 break; 6229 6230 case BUILT_IN_BCMP: 6231 case BUILT_IN_MEMCMP: 6232 target = expand_builtin_memcmp (exp, target, mode); 6233 if (target) 6234 return target; 6235 break; 6236 6237 case BUILT_IN_SETJMP: 6238 /* This should have been lowered to the builtins below. 
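(Lowering splits __builtin_setjmp into the __builtin_setjmp_setup, __builtin_setjmp_dispatcher and __builtin_setjmp_receiver calls handled in the cases that follow, so a bare BUILT_IN_SETJMP surviving to expansion indicates a lowering bug.)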
*/ 6239 gcc_unreachable (); 6240 6241 case BUILT_IN_SETJMP_SETUP: 6242 /* __builtin_setjmp_setup is passed a pointer to an array of five words 6243 and the receiver label. */ 6244 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) 6245 { 6246 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget, 6247 VOIDmode, EXPAND_NORMAL); 6248 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0); 6249 rtx label_r = label_rtx (label); 6250 6251 /* This is copied from the handling of non-local gotos. */ 6252 expand_builtin_setjmp_setup (buf_addr, label_r); 6253 nonlocal_goto_handler_labels 6254 = gen_rtx_EXPR_LIST (VOIDmode, label_r, 6255 nonlocal_goto_handler_labels); 6256 /* ??? Do not let expand_label treat us as such since we would 6257 not want to be both on the list of non-local labels and on 6258 the list of forced labels. */ 6259 FORCED_LABEL (label) = 0; 6260 return const0_rtx; 6261 } 6262 break; 6263 6264 case BUILT_IN_SETJMP_DISPATCHER: 6265 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */ 6266 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 6267 { 6268 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0); 6269 rtx label_r = label_rtx (label); 6270 6271 /* Remove the dispatcher label from the list of non-local labels 6272 since the receiver labels have been added to it above. */ 6273 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels); 6274 return const0_rtx; 6275 } 6276 break; 6277 6278 case BUILT_IN_SETJMP_RECEIVER: 6279 /* __builtin_setjmp_receiver is passed the receiver label. */ 6280 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 6281 { 6282 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0); 6283 rtx label_r = label_rtx (label); 6284 6285 expand_builtin_setjmp_receiver (label_r); 6286 return const0_rtx; 6287 } 6288 break; 6289 6290 /* __builtin_longjmp is passed a pointer to an array of five words. 6291 It's similar to the C library longjmp function but works with 6292 __builtin_setjmp above. */ 6293 case BUILT_IN_LONGJMP: 6294 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 6295 { 6296 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget, 6297 VOIDmode, EXPAND_NORMAL); 6298 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1)); 6299 6300 if (value != const1_rtx) 6301 { 6302 error ("%<__builtin_longjmp%> second argument must be 1"); 6303 return const0_rtx; 6304 } 6305 6306 expand_builtin_longjmp (buf_addr, value); 6307 return const0_rtx; 6308 } 6309 break; 6310 6311 case BUILT_IN_NONLOCAL_GOTO: 6312 target = expand_builtin_nonlocal_goto (exp); 6313 if (target) 6314 return target; 6315 break; 6316 6317 /* This updates the setjmp buffer that is its argument with the value 6318 of the current stack pointer. */ 6319 case BUILT_IN_UPDATE_SETJMP_BUF: 6320 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) 6321 { 6322 rtx buf_addr 6323 = expand_normal (CALL_EXPR_ARG (exp, 0)); 6324 6325 expand_builtin_update_setjmp_buf (buf_addr); 6326 return const0_rtx; 6327 } 6328 break; 6329 6330 case BUILT_IN_TRAP: 6331 expand_builtin_trap (); 6332 return const0_rtx; 6333 6334 case BUILT_IN_UNREACHABLE: 6335 expand_builtin_unreachable (); 6336 return const0_rtx; 6337 6338 CASE_FLT_FN (BUILT_IN_SIGNBIT): 6339 case BUILT_IN_SIGNBITD32: 6340 case BUILT_IN_SIGNBITD64: 6341 case BUILT_IN_SIGNBITD128: 6342 target = expand_builtin_signbit (exp, target); 6343 if (target) 6344 return target; 6345 break; 6346 6347 /* Various hooks for the DWARF 2 __throw routine. 
*/ 6348 case BUILT_IN_UNWIND_INIT: 6349 expand_builtin_unwind_init (); 6350 return const0_rtx; 6351 case BUILT_IN_DWARF_CFA: 6352 return virtual_cfa_rtx; 6353 #ifdef DWARF2_UNWIND_INFO 6354 case BUILT_IN_DWARF_SP_COLUMN: 6355 return expand_builtin_dwarf_sp_column (); 6356 case BUILT_IN_INIT_DWARF_REG_SIZES: 6357 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0)); 6358 return const0_rtx; 6359 #endif 6360 case BUILT_IN_FROB_RETURN_ADDR: 6361 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0)); 6362 case BUILT_IN_EXTRACT_RETURN_ADDR: 6363 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0)); 6364 case BUILT_IN_EH_RETURN: 6365 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0), 6366 CALL_EXPR_ARG (exp, 1)); 6367 return const0_rtx; 6368 #ifdef EH_RETURN_DATA_REGNO 6369 case BUILT_IN_EH_RETURN_DATA_REGNO: 6370 return expand_builtin_eh_return_data_regno (exp); 6371 #endif 6372 case BUILT_IN_EXTEND_POINTER: 6373 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0)); 6374 case BUILT_IN_EH_POINTER: 6375 return expand_builtin_eh_pointer (exp); 6376 case BUILT_IN_EH_FILTER: 6377 return expand_builtin_eh_filter (exp); 6378 case BUILT_IN_EH_COPY_VALUES: 6379 return expand_builtin_eh_copy_values (exp); 6380 6381 case BUILT_IN_VA_START: 6382 return expand_builtin_va_start (exp); 6383 case BUILT_IN_VA_END: 6384 return expand_builtin_va_end (exp); 6385 case BUILT_IN_VA_COPY: 6386 return expand_builtin_va_copy (exp); 6387 case BUILT_IN_EXPECT: 6388 return expand_builtin_expect (exp, target); 6389 case BUILT_IN_ASSUME_ALIGNED: 6390 return expand_builtin_assume_aligned (exp, target); 6391 case BUILT_IN_PREFETCH: 6392 expand_builtin_prefetch (exp); 6393 return const0_rtx; 6394 6395 case BUILT_IN_INIT_TRAMPOLINE: 6396 return expand_builtin_init_trampoline (exp, true); 6397 case BUILT_IN_INIT_HEAP_TRAMPOLINE: 6398 return expand_builtin_init_trampoline (exp, false); 6399 case BUILT_IN_ADJUST_TRAMPOLINE: 6400 return expand_builtin_adjust_trampoline (exp); 6401 6402 case BUILT_IN_FORK: 6403 case BUILT_IN_EXECL: 6404 case BUILT_IN_EXECV: 6405 case BUILT_IN_EXECLP: 6406 case BUILT_IN_EXECLE: 6407 case BUILT_IN_EXECVP: 6408 case BUILT_IN_EXECVE: 6409 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore); 6410 if (target) 6411 return target; 6412 break; 6413 6414 case BUILT_IN_SYNC_FETCH_AND_ADD_1: 6415 case BUILT_IN_SYNC_FETCH_AND_ADD_2: 6416 case BUILT_IN_SYNC_FETCH_AND_ADD_4: 6417 case BUILT_IN_SYNC_FETCH_AND_ADD_8: 6418 case BUILT_IN_SYNC_FETCH_AND_ADD_16: 6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1); 6420 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target); 6421 if (target) 6422 return target; 6423 break; 6424 6425 case BUILT_IN_SYNC_FETCH_AND_SUB_1: 6426 case BUILT_IN_SYNC_FETCH_AND_SUB_2: 6427 case BUILT_IN_SYNC_FETCH_AND_SUB_4: 6428 case BUILT_IN_SYNC_FETCH_AND_SUB_8: 6429 case BUILT_IN_SYNC_FETCH_AND_SUB_16: 6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1); 6431 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target); 6432 if (target) 6433 return target; 6434 break; 6435 6436 case BUILT_IN_SYNC_FETCH_AND_OR_1: 6437 case BUILT_IN_SYNC_FETCH_AND_OR_2: 6438 case BUILT_IN_SYNC_FETCH_AND_OR_4: 6439 case BUILT_IN_SYNC_FETCH_AND_OR_8: 6440 case BUILT_IN_SYNC_FETCH_AND_OR_16: 6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1); 6442 target = expand_builtin_sync_operation (mode, exp, IOR, false, target); 6443 if (target) 6444 return target; 6445 break; 
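/* As with the groups above and below, the _1 through _16 variants of each sync/atomic builtin are consecutive, so FCODE minus the group's _1 code is exactly the log2-of-size value that get_builtin_sync_mode expects. */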
6446 6447 case BUILT_IN_SYNC_FETCH_AND_AND_1: 6448 case BUILT_IN_SYNC_FETCH_AND_AND_2: 6449 case BUILT_IN_SYNC_FETCH_AND_AND_4: 6450 case BUILT_IN_SYNC_FETCH_AND_AND_8: 6451 case BUILT_IN_SYNC_FETCH_AND_AND_16: 6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1); 6453 target = expand_builtin_sync_operation (mode, exp, AND, false, target); 6454 if (target) 6455 return target; 6456 break; 6457 6458 case BUILT_IN_SYNC_FETCH_AND_XOR_1: 6459 case BUILT_IN_SYNC_FETCH_AND_XOR_2: 6460 case BUILT_IN_SYNC_FETCH_AND_XOR_4: 6461 case BUILT_IN_SYNC_FETCH_AND_XOR_8: 6462 case BUILT_IN_SYNC_FETCH_AND_XOR_16: 6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1); 6464 target = expand_builtin_sync_operation (mode, exp, XOR, false, target); 6465 if (target) 6466 return target; 6467 break; 6468 6469 case BUILT_IN_SYNC_FETCH_AND_NAND_1: 6470 case BUILT_IN_SYNC_FETCH_AND_NAND_2: 6471 case BUILT_IN_SYNC_FETCH_AND_NAND_4: 6472 case BUILT_IN_SYNC_FETCH_AND_NAND_8: 6473 case BUILT_IN_SYNC_FETCH_AND_NAND_16: 6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1); 6475 target = expand_builtin_sync_operation (mode, exp, NOT, false, target); 6476 if (target) 6477 return target; 6478 break; 6479 6480 case BUILT_IN_SYNC_ADD_AND_FETCH_1: 6481 case BUILT_IN_SYNC_ADD_AND_FETCH_2: 6482 case BUILT_IN_SYNC_ADD_AND_FETCH_4: 6483 case BUILT_IN_SYNC_ADD_AND_FETCH_8: 6484 case BUILT_IN_SYNC_ADD_AND_FETCH_16: 6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1); 6486 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target); 6487 if (target) 6488 return target; 6489 break; 6490 6491 case BUILT_IN_SYNC_SUB_AND_FETCH_1: 6492 case BUILT_IN_SYNC_SUB_AND_FETCH_2: 6493 case BUILT_IN_SYNC_SUB_AND_FETCH_4: 6494 case BUILT_IN_SYNC_SUB_AND_FETCH_8: 6495 case BUILT_IN_SYNC_SUB_AND_FETCH_16: 6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1); 6497 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target); 6498 if (target) 6499 return target; 6500 break; 6501 6502 case BUILT_IN_SYNC_OR_AND_FETCH_1: 6503 case BUILT_IN_SYNC_OR_AND_FETCH_2: 6504 case BUILT_IN_SYNC_OR_AND_FETCH_4: 6505 case BUILT_IN_SYNC_OR_AND_FETCH_8: 6506 case BUILT_IN_SYNC_OR_AND_FETCH_16: 6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1); 6508 target = expand_builtin_sync_operation (mode, exp, IOR, true, target); 6509 if (target) 6510 return target; 6511 break; 6512 6513 case BUILT_IN_SYNC_AND_AND_FETCH_1: 6514 case BUILT_IN_SYNC_AND_AND_FETCH_2: 6515 case BUILT_IN_SYNC_AND_AND_FETCH_4: 6516 case BUILT_IN_SYNC_AND_AND_FETCH_8: 6517 case BUILT_IN_SYNC_AND_AND_FETCH_16: 6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1); 6519 target = expand_builtin_sync_operation (mode, exp, AND, true, target); 6520 if (target) 6521 return target; 6522 break; 6523 6524 case BUILT_IN_SYNC_XOR_AND_FETCH_1: 6525 case BUILT_IN_SYNC_XOR_AND_FETCH_2: 6526 case BUILT_IN_SYNC_XOR_AND_FETCH_4: 6527 case BUILT_IN_SYNC_XOR_AND_FETCH_8: 6528 case BUILT_IN_SYNC_XOR_AND_FETCH_16: 6529 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1); 6530 target = expand_builtin_sync_operation (mode, exp, XOR, true, target); 6531 if (target) 6532 return target; 6533 break; 6534 6535 case BUILT_IN_SYNC_NAND_AND_FETCH_1: 6536 case BUILT_IN_SYNC_NAND_AND_FETCH_2: 6537 case BUILT_IN_SYNC_NAND_AND_FETCH_4: 6538 case BUILT_IN_SYNC_NAND_AND_FETCH_8: 6539 case BUILT_IN_SYNC_NAND_AND_FETCH_16: 6540 mode = get_builtin_sync_mode (fcode - 
BUILT_IN_SYNC_NAND_AND_FETCH_1); 6541 target = expand_builtin_sync_operation (mode, exp, NOT, true, target); 6542 if (target) 6543 return target; 6544 break; 6545 6546 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: 6547 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: 6548 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: 6549 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: 6550 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: 6551 if (mode == VOIDmode) 6552 mode = TYPE_MODE (boolean_type_node); 6553 if (!target || !register_operand (target, mode)) 6554 target = gen_reg_rtx (mode); 6555 6556 mode = get_builtin_sync_mode 6557 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1); 6558 target = expand_builtin_compare_and_swap (mode, exp, true, target); 6559 if (target) 6560 return target; 6561 break; 6562 6563 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1: 6564 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2: 6565 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4: 6566 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8: 6567 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16: 6568 mode = get_builtin_sync_mode 6569 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1); 6570 target = expand_builtin_compare_and_swap (mode, exp, false, target); 6571 if (target) 6572 return target; 6573 break; 6574 6575 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1: 6576 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2: 6577 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4: 6578 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8: 6579 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16: 6580 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1); 6581 target = expand_builtin_sync_lock_test_and_set (mode, exp, target); 6582 if (target) 6583 return target; 6584 break; 6585 6586 case BUILT_IN_SYNC_LOCK_RELEASE_1: 6587 case BUILT_IN_SYNC_LOCK_RELEASE_2: 6588 case BUILT_IN_SYNC_LOCK_RELEASE_4: 6589 case BUILT_IN_SYNC_LOCK_RELEASE_8: 6590 case BUILT_IN_SYNC_LOCK_RELEASE_16: 6591 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1); 6592 expand_builtin_sync_lock_release (mode, exp); 6593 return const0_rtx; 6594 6595 case BUILT_IN_SYNC_SYNCHRONIZE: 6596 expand_builtin_sync_synchronize (); 6597 return const0_rtx; 6598 6599 case BUILT_IN_ATOMIC_EXCHANGE_1: 6600 case BUILT_IN_ATOMIC_EXCHANGE_2: 6601 case BUILT_IN_ATOMIC_EXCHANGE_4: 6602 case BUILT_IN_ATOMIC_EXCHANGE_8: 6603 case BUILT_IN_ATOMIC_EXCHANGE_16: 6604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1); 6605 target = expand_builtin_atomic_exchange (mode, exp, target); 6606 if (target) 6607 return target; 6608 break; 6609 6610 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: 6611 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: 6612 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: 6613 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: 6614 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: 6615 { 6616 unsigned int nargs, z; 6617 vec<tree, va_gc> *vec; 6618 6619 mode = 6620 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1); 6621 target = expand_builtin_atomic_compare_exchange (mode, exp, target); 6622 if (target) 6623 return target; 6624 6625 /* If this is turned into an external library call, the weak parameter 6626 must be dropped to match the expected parameter list. */ 6627 nargs = call_expr_nargs (exp); 6628 vec_alloc (vec, nargs - 1); 6629 for (z = 0; z < 3; z++) 6630 vec->quick_push (CALL_EXPR_ARG (exp, z)); 6631 /* Skip the boolean weak parameter. 
*/ 6632 for (z = 4; z < 6; z++) 6633 vec->quick_push (CALL_EXPR_ARG (exp, z)); 6634 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec); 6635 break; 6636 } 6637 6638 case BUILT_IN_ATOMIC_LOAD_1: 6639 case BUILT_IN_ATOMIC_LOAD_2: 6640 case BUILT_IN_ATOMIC_LOAD_4: 6641 case BUILT_IN_ATOMIC_LOAD_8: 6642 case BUILT_IN_ATOMIC_LOAD_16: 6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1); 6644 target = expand_builtin_atomic_load (mode, exp, target); 6645 if (target) 6646 return target; 6647 break; 6648 6649 case BUILT_IN_ATOMIC_STORE_1: 6650 case BUILT_IN_ATOMIC_STORE_2: 6651 case BUILT_IN_ATOMIC_STORE_4: 6652 case BUILT_IN_ATOMIC_STORE_8: 6653 case BUILT_IN_ATOMIC_STORE_16: 6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1); 6655 target = expand_builtin_atomic_store (mode, exp); 6656 if (target) 6657 return const0_rtx; 6658 break; 6659 6660 case BUILT_IN_ATOMIC_ADD_FETCH_1: 6661 case BUILT_IN_ATOMIC_ADD_FETCH_2: 6662 case BUILT_IN_ATOMIC_ADD_FETCH_4: 6663 case BUILT_IN_ATOMIC_ADD_FETCH_8: 6664 case BUILT_IN_ATOMIC_ADD_FETCH_16: 6665 { 6666 enum built_in_function lib; 6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1); 6668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 + 6669 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1)); 6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true, 6671 ignore, lib); 6672 if (target) 6673 return target; 6674 break; 6675 } 6676 case BUILT_IN_ATOMIC_SUB_FETCH_1: 6677 case BUILT_IN_ATOMIC_SUB_FETCH_2: 6678 case BUILT_IN_ATOMIC_SUB_FETCH_4: 6679 case BUILT_IN_ATOMIC_SUB_FETCH_8: 6680 case BUILT_IN_ATOMIC_SUB_FETCH_16: 6681 { 6682 enum built_in_function lib; 6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1); 6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 + 6685 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1)); 6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true, 6687 ignore, lib); 6688 if (target) 6689 return target; 6690 break; 6691 } 6692 case BUILT_IN_ATOMIC_AND_FETCH_1: 6693 case BUILT_IN_ATOMIC_AND_FETCH_2: 6694 case BUILT_IN_ATOMIC_AND_FETCH_4: 6695 case BUILT_IN_ATOMIC_AND_FETCH_8: 6696 case BUILT_IN_ATOMIC_AND_FETCH_16: 6697 { 6698 enum built_in_function lib; 6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1); 6700 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 + 6701 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1)); 6702 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true, 6703 ignore, lib); 6704 if (target) 6705 return target; 6706 break; 6707 } 6708 case BUILT_IN_ATOMIC_NAND_FETCH_1: 6709 case BUILT_IN_ATOMIC_NAND_FETCH_2: 6710 case BUILT_IN_ATOMIC_NAND_FETCH_4: 6711 case BUILT_IN_ATOMIC_NAND_FETCH_8: 6712 case BUILT_IN_ATOMIC_NAND_FETCH_16: 6713 { 6714 enum built_in_function lib; 6715 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1); 6716 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 + 6717 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1)); 6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true, 6719 ignore, lib); 6720 if (target) 6721 return target; 6722 break; 6723 } 6724 case BUILT_IN_ATOMIC_XOR_FETCH_1: 6725 case BUILT_IN_ATOMIC_XOR_FETCH_2: 6726 case BUILT_IN_ATOMIC_XOR_FETCH_4: 6727 case BUILT_IN_ATOMIC_XOR_FETCH_8: 6728 case BUILT_IN_ATOMIC_XOR_FETCH_16: 6729 { 6730 enum built_in_function lib; 6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1); 6732 lib = (enum 
built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 + 6733 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1)); 6734 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true, 6735 ignore, lib); 6736 if (target) 6737 return target; 6738 break; 6739 } 6740 case BUILT_IN_ATOMIC_OR_FETCH_1: 6741 case BUILT_IN_ATOMIC_OR_FETCH_2: 6742 case BUILT_IN_ATOMIC_OR_FETCH_4: 6743 case BUILT_IN_ATOMIC_OR_FETCH_8: 6744 case BUILT_IN_ATOMIC_OR_FETCH_16: 6745 { 6746 enum built_in_function lib; 6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1); 6748 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 + 6749 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1)); 6750 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true, 6751 ignore, lib); 6752 if (target) 6753 return target; 6754 break; 6755 } 6756 case BUILT_IN_ATOMIC_FETCH_ADD_1: 6757 case BUILT_IN_ATOMIC_FETCH_ADD_2: 6758 case BUILT_IN_ATOMIC_FETCH_ADD_4: 6759 case BUILT_IN_ATOMIC_FETCH_ADD_8: 6760 case BUILT_IN_ATOMIC_FETCH_ADD_16: 6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1); 6762 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false, 6763 ignore, BUILT_IN_NONE); 6764 if (target) 6765 return target; 6766 break; 6767 6768 case BUILT_IN_ATOMIC_FETCH_SUB_1: 6769 case BUILT_IN_ATOMIC_FETCH_SUB_2: 6770 case BUILT_IN_ATOMIC_FETCH_SUB_4: 6771 case BUILT_IN_ATOMIC_FETCH_SUB_8: 6772 case BUILT_IN_ATOMIC_FETCH_SUB_16: 6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1); 6774 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false, 6775 ignore, BUILT_IN_NONE); 6776 if (target) 6777 return target; 6778 break; 6779 6780 case BUILT_IN_ATOMIC_FETCH_AND_1: 6781 case BUILT_IN_ATOMIC_FETCH_AND_2: 6782 case BUILT_IN_ATOMIC_FETCH_AND_4: 6783 case BUILT_IN_ATOMIC_FETCH_AND_8: 6784 case BUILT_IN_ATOMIC_FETCH_AND_16: 6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1); 6786 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false, 6787 ignore, BUILT_IN_NONE); 6788 if (target) 6789 return target; 6790 break; 6791 6792 case BUILT_IN_ATOMIC_FETCH_NAND_1: 6793 case BUILT_IN_ATOMIC_FETCH_NAND_2: 6794 case BUILT_IN_ATOMIC_FETCH_NAND_4: 6795 case BUILT_IN_ATOMIC_FETCH_NAND_8: 6796 case BUILT_IN_ATOMIC_FETCH_NAND_16: 6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1); 6798 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false, 6799 ignore, BUILT_IN_NONE); 6800 if (target) 6801 return target; 6802 break; 6803 6804 case BUILT_IN_ATOMIC_FETCH_XOR_1: 6805 case BUILT_IN_ATOMIC_FETCH_XOR_2: 6806 case BUILT_IN_ATOMIC_FETCH_XOR_4: 6807 case BUILT_IN_ATOMIC_FETCH_XOR_8: 6808 case BUILT_IN_ATOMIC_FETCH_XOR_16: 6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1); 6810 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false, 6811 ignore, BUILT_IN_NONE); 6812 if (target) 6813 return target; 6814 break; 6815 6816 case BUILT_IN_ATOMIC_FETCH_OR_1: 6817 case BUILT_IN_ATOMIC_FETCH_OR_2: 6818 case BUILT_IN_ATOMIC_FETCH_OR_4: 6819 case BUILT_IN_ATOMIC_FETCH_OR_8: 6820 case BUILT_IN_ATOMIC_FETCH_OR_16: 6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1); 6822 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false, 6823 ignore, BUILT_IN_NONE); 6824 if (target) 6825 return target; 6826 break; 6827 6828 case BUILT_IN_ATOMIC_TEST_AND_SET: 6829 return expand_builtin_atomic_test_and_set (exp, target); 6830 6831 case BUILT_IN_ATOMIC_CLEAR: 6832 return 
expand_builtin_atomic_clear (exp); 6833 6834 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE: 6835 return expand_builtin_atomic_always_lock_free (exp); 6836 6837 case BUILT_IN_ATOMIC_IS_LOCK_FREE: 6838 target = expand_builtin_atomic_is_lock_free (exp); 6839 if (target) 6840 return target; 6841 break; 6842 6843 case BUILT_IN_ATOMIC_THREAD_FENCE: 6844 expand_builtin_atomic_thread_fence (exp); 6845 return const0_rtx; 6846 6847 case BUILT_IN_ATOMIC_SIGNAL_FENCE: 6848 expand_builtin_atomic_signal_fence (exp); 6849 return const0_rtx; 6850 6851 case BUILT_IN_OBJECT_SIZE: 6852 return expand_builtin_object_size (exp); 6853 6854 case BUILT_IN_MEMCPY_CHK: 6855 case BUILT_IN_MEMPCPY_CHK: 6856 case BUILT_IN_MEMMOVE_CHK: 6857 case BUILT_IN_MEMSET_CHK: 6858 target = expand_builtin_memory_chk (exp, target, mode, fcode); 6859 if (target) 6860 return target; 6861 break; 6862 6863 case BUILT_IN_STRCPY_CHK: 6864 case BUILT_IN_STPCPY_CHK: 6865 case BUILT_IN_STRNCPY_CHK: 6866 case BUILT_IN_STPNCPY_CHK: 6867 case BUILT_IN_STRCAT_CHK: 6868 case BUILT_IN_STRNCAT_CHK: 6869 case BUILT_IN_SNPRINTF_CHK: 6870 case BUILT_IN_VSNPRINTF_CHK: 6871 maybe_emit_chk_warning (exp, fcode); 6872 break; 6873 6874 case BUILT_IN_SPRINTF_CHK: 6875 case BUILT_IN_VSPRINTF_CHK: 6876 maybe_emit_sprintf_chk_warning (exp, fcode); 6877 break; 6878 6879 case BUILT_IN_FREE: 6880 if (warn_free_nonheap_object) 6881 maybe_emit_free_warning (exp); 6882 break; 6883 6884 case BUILT_IN_THREAD_POINTER: 6885 return expand_builtin_thread_pointer (exp, target); 6886 6887 case BUILT_IN_SET_THREAD_POINTER: 6888 expand_builtin_set_thread_pointer (exp); 6889 return const0_rtx; 6890 6891 default: /* just do library call, if unknown builtin */ 6892 break; 6893 } 6894 6895 /* The switch statement above can drop through to cause the function 6896 to be called normally. */ 6897 return expand_call (exp, target, ignore); 6898 } 6899 6900 /* Determine whether a tree node represents a call to a built-in 6901 function. If the tree T is a call to a built-in function with 6902 the right number of arguments of the appropriate types, return 6903 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. 6904 Otherwise the return value is END_BUILTINS. */ 6905 6906 enum built_in_function 6907 builtin_mathfn_code (const_tree t) 6908 { 6909 const_tree fndecl, arg, parmlist; 6910 const_tree argtype, parmtype; 6911 const_call_expr_arg_iterator iter; 6912 6913 if (TREE_CODE (t) != CALL_EXPR 6914 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR) 6915 return END_BUILTINS; 6916 6917 fndecl = get_callee_fndecl (t); 6918 if (fndecl == NULL_TREE 6919 || TREE_CODE (fndecl) != FUNCTION_DECL 6920 || ! DECL_BUILT_IN (fndecl) 6921 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) 6922 return END_BUILTINS; 6923 6924 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 6925 init_const_call_expr_arg_iterator (t, &iter); 6926 for (; parmlist; parmlist = TREE_CHAIN (parmlist)) 6927 { 6928 /* If a function doesn't take a variable number of arguments, 6929 the last element in the list will have type `void'. */ 6930 parmtype = TREE_VALUE (parmlist); 6931 if (VOID_TYPE_P (parmtype)) 6932 { 6933 if (more_const_call_expr_args_p (&iter)) 6934 return END_BUILTINS; 6935 return DECL_FUNCTION_CODE (fndecl); 6936 } 6937 6938 if (! more_const_call_expr_args_p (&iter)) 6939 return END_BUILTINS; 6940 6941 arg = next_const_call_expr_arg (&iter); 6942 argtype = TREE_TYPE (arg); 6943 6944 if (SCALAR_FLOAT_TYPE_P (parmtype)) 6945 { 6946 if (! 
SCALAR_FLOAT_TYPE_P (argtype)) 6947 return END_BUILTINS; 6948 } 6949 else if (COMPLEX_FLOAT_TYPE_P (parmtype)) 6950 { 6951 if (! COMPLEX_FLOAT_TYPE_P (argtype)) 6952 return END_BUILTINS; 6953 } 6954 else if (POINTER_TYPE_P (parmtype)) 6955 { 6956 if (! POINTER_TYPE_P (argtype)) 6957 return END_BUILTINS; 6958 } 6959 else if (INTEGRAL_TYPE_P (parmtype)) 6960 { 6961 if (! INTEGRAL_TYPE_P (argtype)) 6962 return END_BUILTINS; 6963 } 6964 else 6965 return END_BUILTINS; 6966 } 6967 6968 /* Variable-length argument list. */ 6969 return DECL_FUNCTION_CODE (fndecl); 6970 } 6971 6972 /* Fold a call to __builtin_constant_p, if we know its argument ARG will 6973 evaluate to a constant. */ 6974 6975 static tree 6976 fold_builtin_constant_p (tree arg) 6977 { 6978 /* We return 1 for a numeric type that's known to be a constant 6979 value at compile-time or for an aggregate type that's a 6980 literal constant. */ 6981 STRIP_NOPS (arg); 6982 6983 /* If we know this is a constant, return the constant one. */ 6984 if (CONSTANT_CLASS_P (arg) 6985 || (TREE_CODE (arg) == CONSTRUCTOR 6986 && TREE_CONSTANT (arg))) 6987 return integer_one_node; 6988 if (TREE_CODE (arg) == ADDR_EXPR) 6989 { 6990 tree op = TREE_OPERAND (arg, 0); 6991 if (TREE_CODE (op) == STRING_CST 6992 || (TREE_CODE (op) == ARRAY_REF 6993 && integer_zerop (TREE_OPERAND (op, 1)) 6994 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST)) 6995 return integer_one_node; 6996 } 6997 6998 /* If this expression has side effects, show we don't know it to be a 6999 constant. Likewise if it's a pointer or aggregate type since in 7000 those cases we only want literals, since those are only optimized 7001 when generating RTL, not later. 7002 And finally, if we are compiling an initializer, not code, we 7003 need to return a definite result now; there's not going to be any 7004 more optimization done. */ 7005 if (TREE_SIDE_EFFECTS (arg) 7006 || AGGREGATE_TYPE_P (TREE_TYPE (arg)) 7007 || POINTER_TYPE_P (TREE_TYPE (arg)) 7008 || cfun == 0 7009 || folding_initializer) 7010 return integer_zero_node; 7011 7012 return NULL_TREE; 7013 } 7014 7015 /* Create builtin_expect with PRED and EXPECTED as its arguments and 7016 return it as a truthvalue. */ 7017 7018 static tree 7019 build_builtin_expect_predicate (location_t loc, tree pred, tree expected) 7020 { 7021 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type; 7022 7023 fn = builtin_decl_explicit (BUILT_IN_EXPECT); 7024 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn)); 7025 ret_type = TREE_TYPE (TREE_TYPE (fn)); 7026 pred_type = TREE_VALUE (arg_types); 7027 expected_type = TREE_VALUE (TREE_CHAIN (arg_types)); 7028 7029 pred = fold_convert_loc (loc, pred_type, pred); 7030 expected = fold_convert_loc (loc, expected_type, expected); 7031 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected); 7032 7033 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr, 7034 build_int_cst (ret_type, 0)); 7035 } 7036 7037 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return 7038 NULL_TREE if no simplification is possible. */ 7039 7040 static tree 7041 fold_builtin_expect (location_t loc, tree arg0, tree arg1) 7042 { 7043 tree inner, fndecl, inner_arg0; 7044 enum tree_code code; 7045 7046 /* Distribute the expected value over short-circuiting operators. 7047 See through the cast from truthvalue_type_node to long.
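For example, __builtin_expect (a && b, 1) is rewritten below, via build_builtin_expect_predicate, into the equivalent of __builtin_expect (a, 1) && __builtin_expect (b, 1), so the hint reaches both branch conditions.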
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}

/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
          && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}

/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}

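/* E.g. (illustration, not from the original sources):

     __builtin_strlen ("hello")	-> 5

   via c_strlen, so the call disappears entirely when the argument is a
   string literal; and __builtin_expect (a && b, 1) is distributed above
   so the expectation reaches both operands of the short-circuit.  */
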
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
             && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
             && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return integer_valued_real_p (TREE_OPERAND (t, 0));
        break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_TRUNC):
          return true;

        CASE_FLT_FN (BUILT_IN_FMIN):
        CASE_FLT_FN (BUILT_IN_FMAX):
          return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
            && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

        default:
          break;
        }
      break;

    default:
      break;
    }
  return false;
}

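/* E.g. (illustration, not from the original sources): for a hypothetical
   double D and int I,

     integer_valued_real_p (floor (d) + (double) i)	-> true
     integer_valued_real_p (d * 0.5)			-> false

   the sum case holds because both operands are themselves integer
   valued.  */
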
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return fold_convert_loc (loc, ftype,
                                 build_call_expr_loc (loc, decl, 1,
                                                      fold_convert_loc (loc,
                                                                        newtype,
                                                                        arg0)));
    }
  return NULL_TREE;
}

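/* E.g. (illustration, not from the original sources): for a float F,

     trunc (trunc (f))	-> trunc (f)		(idempotence)
     floor ((double) f)	-> (double) floorf (f)	(narrowing, when optimizing)

   the narrowed call is exact because every float value is representable
   as a double.  */
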
/* FNDECL is assumed to be a builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                            TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return build_call_expr_loc (loc, decl, 1,
                                    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_ICEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_IFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_IROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_IRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LLCEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_LLFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_LLROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_LLRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  return NULL_TREE;
}

/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
                              type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
        return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
        return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
          && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
          const REAL_VALUE_TYPE sqrt2_trunc
            = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
          STRIP_NOPS (real);
          return fold_build2_loc (loc, MULT_EXPR, type,
                                  fold_build1_loc (loc, ABS_EXPR, type, real),
                                  build_real (type, sqrt2_trunc));
        }
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result;

          arg = builtin_save_expr (arg);

          rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

          rpart = builtin_save_expr (rpart);
          ipart = builtin_save_expr (ipart);

          result = fold_build2_loc (loc, PLUS_EXPR, type,
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     rpart, rpart),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     ipart, ipart));

          return build_call_expr_loc (loc, sqrtfn, 1, result);
        }
    }

  return NULL_TREE;
}

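/* E.g. (illustration, not from the original sources): for a hypothetical
   complex Z, with -funsafe-math-optimizations and optimizing for speed,

     cabs (z)  ->  sqrt (__real__ z * __real__ z + __imag__ z * __imag__ z)

   while cabs (conj (z)) -> cabs (z) is done unconditionally.  */
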
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
                        build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
        return build_complex_cproj (type, imag->sign);
      else
        return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
         nonnegative, return (inf + 0i).  Remember side-effects are
         possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (real))
          && tree_expr_nonnegative_p (imag))
        return omit_one_operand_loc (loc, type,
                                     build_complex_cproj (type, false),
                                     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
         Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (imag)))
        return
          omit_one_operand_loc (loc, type,
                                build_complex_cproj (type, TREE_REAL_CST_PTR
                                                     (imag)->sign), arg);
    }

  return NULL_TREE;
}

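/* E.g. (illustration, not from the original sources):

     cproj (1.0 + 2.0i)		-> 1.0 + 2.0i	(finite, unchanged)
     cproj (x - INFINITY * I)	-> INFINITY - 0.0i

   with the sign of the imaginary zero copied from the imaginary part and
   any side effects in X preserved by omit_one_operand.  */
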
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
                             CALL_EXPR_ARG (arg, 0),
                             build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          tree tree_root;
          /* The inner root was either sqrt or cbrt.  */
          /* This was a conditional expression but it triggered a bug
             in Sun C 5.5.  */
          REAL_VALUE_TYPE dconstroot;
          if (BUILTIN_SQRT_P (fcode))
            dconstroot = dconsthalf;
          else
            dconstroot = dconst_third ();

          /* Adjust for the outer root.  */
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
          tree_root = build_real (type, dconstroot);
          return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
        }
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
        arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                               build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}

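/* E.g. (illustration, not from the original sources), all requiring
   -funsafe-math-optimizations:

     sqrt (exp (x))	-> exp (x * 0.5)
     sqrt (cbrt (x))	-> pow (x, 1.0/6.0)
     sqrt (pow (x, y))	-> pow (fabs (x), y * 0.5)

   The fabs keeps the base nonnegative, matching sqrt's nonnegative
   result.  */
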
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconst_third ());
          arg = fold_build2_loc (loc, MULT_EXPR, type,
                                 CALL_EXPR_ARG (arg, 0),
                                 build_real (type, third_trunc));
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = CALL_EXPR_ARG (arg, 0);
              tree tree_root;
              REAL_VALUE_TYPE dconstroot = dconst_third ();

              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   dconst_third_ptr (), dconst_third_ptr ());
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
                  tree_root = build_real (type, dconstroot);
                  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg, 0);
          tree arg01 = CALL_EXPR_ARG (arg, 1);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
                                             build_real (type, dconstroot));
              return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
            }
        }
    }
  return NULL_TREE;
}

/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
                  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
        return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
        return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
                   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
        return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
        return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

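/* E.g. (illustration, not from the original sources): fold_strip_sign_ops
   lets the even functions above drop sign operations,

     cos (-x)	-> cos (x)
     cosh (-x)	-> cosh (x)
     ccos (-z)	-> ccos (z)

   which also exposes more common subexpressions to later passes.  */
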
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
          || fcode == BUILT_IN_ATANF
          || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         build1 (REALPART_EXPR, type, call)));
}

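/* E.g. (illustration, not from the original sources): on a C99 target

     sincos (x, &s, &c);

   becomes, roughly,

     tmp = __builtin_cexpi (x);
     s = __imag__ tmp;
     c = __real__ tmp;

   so a single call computes both results and can be CSEd.  */
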
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
        return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
        return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, REALPART_EXPR,
                                                                rtype, icall)),
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, IMAGPART_EXPR,
                                                                rtype, icall)));
    }

  return NULL_TREE;
}

/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_floor (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
        return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_ceil (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

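/* E.g. (illustration, not from the original sources):

     floor (2.75)	-> 2.0		(constant folded)
     ceil (2.25)	-> 3.0
     floor (fabs (x))	-> trunc (fabs (x))

   the last because floor and trunc agree for nonnegative arguments.  */
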
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_round (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg);
          double_int val;
          REAL_VALUE_TYPE r;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_IFLOOR):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_ICEIL):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_IROUND):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_LLROUND):
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
          if (double_int_fits_to_tree_p (itype, val))
            return double_int_to_tree (itype, val);
        }
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                                TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}

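/* E.g. (illustration, not from the original sources):

     lround (2.5)	-> 3	(halfway cases round away from zero)
     lround (-2.5)	-> -3
     lfloor (x)		-> (long) x   when x is known nonnegative

   and results that do not fit the integer type are left alone.  */
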
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
        {
          hi = TREE_INT_CST_HIGH (arg);
          if (width < HOST_BITS_PER_DOUBLE_INT)
            hi &= ~((unsigned HOST_WIDE_INT) (-1)
                    << (width - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          hi = 0;
          if (width < HOST_BITS_PER_WIDE_INT)
            lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
        }

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          if (lo != 0)
            result = ffs_hwi (lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
          else
            result = 0;
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          if (hi != 0)
            result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 1;
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          if (lo != 0)
            result = ctz_hwi (lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_CLRSB):
          if (width > HOST_BITS_PER_WIDE_INT
              && (hi & ((unsigned HOST_WIDE_INT) 1
                        << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
            {
              hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
                           << (width - HOST_BITS_PER_WIDE_INT - 1));
              lo = ~lo;
            }
          else if (width <= HOST_BITS_PER_WIDE_INT
                   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
            lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
          if (hi != 0)
            result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 2;
          else
            result = width - 1;
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
          result &= 1;
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}

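/* E.g. (illustration, not from the original sources):

     __builtin_popcount (0xff)	-> 8
     __builtin_ctz (0x10)	-> 4
     __builtin_parity (7)	-> 1
     __builtin_ffs (0)		-> 0

   all computed from the LO/HI halves exactly as above.  */
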
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_BSWAP16:
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          {
            int s;

            for (s = 0; s < width; s += 8)
              {
                int d = width - s - 8;
                unsigned HOST_WIDE_INT byte;

                if (s < HOST_BITS_PER_WIDE_INT)
                  byte = (lo >> s) & 0xff;
                else
                  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

                if (d < HOST_BITS_PER_WIDE_INT)
                  r_lo |= byte << d;
                else
                  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
              }
          }

          break;

        default:
          gcc_unreachable ();
        }

      if (width < HOST_BITS_PER_WIDE_INT)
        return build_int_cst (type, r_lo);
      else
        return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}

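/* E.g. (illustration, not from the original sources):

     __builtin_bswap16 (0x1234)		-> 0x3412
     __builtin_bswap32 (0x12345678)	-> 0x78563412

   each byte moved from bit position S to its mirror position D by the
   loop above.  */
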
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
                        int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
        return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
          && ((func == mpfr_log
               && (fcode == BUILT_IN_EXP
                   || fcode == BUILT_IN_EXPF
                   || fcode == BUILT_IN_EXPL))
              || (func == mpfr_log2
                  && (fcode == BUILT_IN_EXP2
                      || fcode == BUILT_IN_EXP2F
                      || fcode == BUILT_IN_EXP2L))
              || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
        return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
         want to determine the value "x" and the power "exponent" in
         order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
        {
          tree exponent = 0, x = 0;

          switch (fcode)
            {
            CASE_FLT_FN (BUILT_IN_EXP):
              /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
              x = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                         dconst_e ()));
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP2):
              /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
              x = build_real (type, dconst2);
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_POW10):
              /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
              {
                REAL_VALUE_TYPE dconst10;
                real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
                x = build_real (type, dconst10);
              }
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_SQRT):
              /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, dconsthalf);
              break;
            CASE_FLT_FN (BUILT_IN_CBRT):
              /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                                dconst_third ()));
              break;
            CASE_FLT_FN (BUILT_IN_POW):
              /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = CALL_EXPR_ARG (arg, 1);
              break;
            default:
              break;
            }

          /* Now perform the optimization.  */
          if (x && exponent)
            {
              tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
              return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
            }
        }
    }

  return NULL_TREE;
}

/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
                    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
                                  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
        = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
                              fold_build1_loc (loc, ABS_EXPR, type, arg0),
                              build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}

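/* E.g. (illustration, not from the original sources):

     hypot (-x, 0.0)	-> fabs (x)
     hypot (x, x)	-> fabs (x) * sqrt (2)	(unsafe-math only)
     log (sqrt (x))	-> 0.5 * log (x)	(unsafe-math only)

   the last produced by the logarithm folder above.  */
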
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
                                     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
        return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
          && REAL_VALUES_EQUAL (c, dconsthalf))
        {
          tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

          if (sqrtfn != NULL_TREE)
            return build_call_expr_loc (loc, sqrtfn, 1, arg0);
        }

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
        {
          const REAL_VALUE_TYPE dconstroot
            = real_value_truncate (TYPE_MODE (type), dconst_third ());

          if (REAL_VALUES_EQUAL (c, dconstroot))
            {
              tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
              if (cbrtfn != NULL_TREE)
                return build_call_expr_loc (loc, cbrtfn, 1, arg0);
            }
        }

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
        {
          /* Attempt to evaluate pow at compile-time, unless this should
             raise an exception.  */
          if (TREE_CODE (arg0) == REAL_CST
              && !TREE_OVERFLOW (arg0)
              && (n > 0
                  || (!flag_trapping_math && !flag_errno_math)
                  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
            {
              REAL_VALUE_TYPE x;
              bool inexact;

              x = TREE_REAL_CST (arg0);
              inexact = real_powi (&x, TYPE_MODE (type), &x, n);
              if (flag_unsafe_math_optimizations || !inexact)
                return build_real (type, x);
            }

          /* Strip sign ops from even integer powers.  */
          if ((n & 1) == 0 && flag_unsafe_math_optimizations)
            {
              tree narg0 = fold_strip_sign_ops (arg0);
              if (narg0)
                return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
            }
        }
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
          tree arg = CALL_EXPR_ARG (arg0, 0);
          arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree narg0 = CALL_EXPR_ARG (arg0, 0);
          tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                        build_real (type, dconsthalf));
          return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
        }

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg))
            {
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                            build_real (type, dconstroot));
              return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
            }
        }

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
              return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
            }
        }
    }

  return NULL_TREE;
}

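/* E.g. (illustration, not from the original sources):

     pow (x, 1.0)	-> x
     pow (x, -1.0)	-> 1.0 / x
     pow (x, 0.5)	-> sqrt (x)		(unsafe-math only)
     pow (sqrt (x), y)	-> pow (x, y * 0.5)	(unsafe-math only)

   and constant bases with integral exponents fold via real_powi.  */
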
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
                   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (host_integerp (arg1, 0))
    {
      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
          && !TREE_OVERFLOW (arg0))
        {
          REAL_VALUE_TYPE x;
          x = TREE_REAL_CST (arg0);
          real_powi (&x, TYPE_MODE (type), &x, c);
          return build_real (type, x);
        }

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
                                     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
        return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
                       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
        return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
        {
          const enum built_in_function fcode = builtin_mathfn_code (arg);

          if ((func == mpfr_exp
               && (fcode == BUILT_IN_LOG
                   || fcode == BUILT_IN_LOGF
                   || fcode == BUILT_IN_LOGL))
              || (func == mpfr_exp2
                  && (fcode == BUILT_IN_LOG2
                      || fcode == BUILT_IN_LOG2F
                      || fcode == BUILT_IN_LOG2L))
              || (func == mpfr_exp10
                  && (fcode == BUILT_IN_LOG10
                      || fcode == BUILT_IN_LOG10F
                      || fcode == BUILT_IN_LOG10L)))
            return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
        }
    }

  return NULL_TREE;
}

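/* E.g. (illustration, not from the original sources):

     __builtin_powi (x, -1)	-> 1.0 / x
     __builtin_powi (2.0, 10)	-> 1024.0	(folded by real_powi)
     exp (log (x))		-> x		(unsafe-math only)

   the last from fold_builtin_exponent above.  */
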
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
                     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return NULL_TREE;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
                                     fold_convert_loc (loc,
                                                       build_pointer_type (etype),
                                                       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}

/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
                              fold_convert_loc (loc, size_type_node, size),
                              void_type_node, ignore);
}

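/* E.g. (illustration, not from the original sources): for a hypothetical
   32-bit int I with the usual alignment,

     memset (&i, 0xab, 4);

   folds to the plain store i = 0xabababab, because the length matches
   the mode size and the fill byte is replicated by the shifts above.  */
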
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
                        tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      if (endp == 3)
        {
          src_align = get_pointer_alignment (src);
          dest_align = get_pointer_alignment (dest);

          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return NULL_TREE;
          if (readonly_data_expr (src)
              || (host_integerp (len, 1)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return NULL_TREE;
              return build_call_expr_loc (loc, fn, 3, dest, src, len);
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (host_integerp (len, 1))
                maxsize = tree_low_cst (len, 1);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return NULL_TREE;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  double_int off;
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return NULL_TREE;
                  off = mem_ref_offset (src_base) +
                        double_int::from_shwi (src_offset);
                  if (!off.fits_shwi ())
                    return NULL_TREE;
                  src_offset = off.low;
                  off = mem_ref_offset (dest_base) +
                        double_int::from_shwi (dest_offset);
                  if (!off.fits_shwi ())
                    return NULL_TREE;
                  dest_offset = off.low;
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return NULL_TREE;
                }
              else
                return NULL_TREE;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return NULL_TREE;
              return build_call_expr_loc (loc, fn, 3, dest, src, len);
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return NULL_TREE;
                  return build_call_expr_loc (loc, fn, 3, dest, src, len);
                }
            }

          return NULL_TREE;
        }

      if (!host_integerp (len, 0))
        return NULL_TREE;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return NULL_TREE;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return NULL_TREE;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        {
          /* A more suitable int_mode_for_mode would return a vector
             integer mode for a vector float mode or an integer complex
             mode for a float complex mode if there isn't a regular
             integer mode covering the mode of desttype.  */
*/ 8965 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype)); 8966 if (mode == BLKmode) 8967 desttype = NULL_TREE; 8968 else 8969 desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 8970 1); 8971 } 8972 if (FLOAT_MODE_P (TYPE_MODE (srctype)) 8973 || TREE_CODE (srctype) == BOOLEAN_TYPE 8974 || TREE_CODE (srctype) == ENUMERAL_TYPE) 8975 { 8976 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype)); 8977 if (mode == BLKmode) 8978 srctype = NULL_TREE; 8979 else 8980 srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 8981 1); 8982 } 8983 if (!srctype) 8984 srctype = desttype; 8985 if (!desttype) 8986 desttype = srctype; 8987 if (!srctype) 8988 return NULL_TREE; 8989 8990 src_align = get_pointer_alignment (src); 8991 dest_align = get_pointer_alignment (dest); 8992 if (dest_align < TYPE_ALIGN (desttype) 8993 || src_align < TYPE_ALIGN (srctype)) 8994 return NULL_TREE; 8995 8996 if (!ignore) 8997 dest = builtin_save_expr (dest); 8998 8999 /* Build accesses at offset zero with a ref-all character type. */ 9000 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node, 9001 ptr_mode, true), 0); 9002 9003 destvar = dest; 9004 STRIP_NOPS (destvar); 9005 if (TREE_CODE (destvar) == ADDR_EXPR 9006 && var_decl_component_p (TREE_OPERAND (destvar, 0)) 9007 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)) 9008 destvar = fold_build2 (MEM_REF, desttype, destvar, off0); 9009 else 9010 destvar = NULL_TREE; 9011 9012 srcvar = src; 9013 STRIP_NOPS (srcvar); 9014 if (TREE_CODE (srcvar) == ADDR_EXPR 9015 && var_decl_component_p (TREE_OPERAND (srcvar, 0)) 9016 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)) 9017 { 9018 if (!destvar 9019 || src_align >= TYPE_ALIGN (desttype)) 9020 srcvar = fold_build2 (MEM_REF, destvar ? 
desttype : srctype, 9021 srcvar, off0); 9022 else if (!STRICT_ALIGNMENT) 9023 { 9024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype), 9025 src_align); 9026 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0); 9027 } 9028 else 9029 srcvar = NULL_TREE; 9030 } 9031 else 9032 srcvar = NULL_TREE; 9033 9034 if (srcvar == NULL_TREE && destvar == NULL_TREE) 9035 return NULL_TREE; 9036 9037 if (srcvar == NULL_TREE) 9038 { 9039 STRIP_NOPS (src); 9040 if (src_align >= TYPE_ALIGN (desttype)) 9041 srcvar = fold_build2 (MEM_REF, desttype, src, off0); 9042 else 9043 { 9044 if (STRICT_ALIGNMENT) 9045 return NULL_TREE; 9046 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype), 9047 src_align); 9048 srcvar = fold_build2 (MEM_REF, srctype, src, off0); 9049 } 9050 } 9051 else if (destvar == NULL_TREE) 9052 { 9053 STRIP_NOPS (dest); 9054 if (dest_align >= TYPE_ALIGN (srctype)) 9055 destvar = fold_build2 (MEM_REF, srctype, dest, off0); 9056 else 9057 { 9058 if (STRICT_ALIGNMENT) 9059 return NULL_TREE; 9060 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype), 9061 dest_align); 9062 destvar = fold_build2 (MEM_REF, desttype, dest, off0); 9063 } 9064 } 9065 9066 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar); 9067 } 9068 9069 if (ignore) 9070 return expr; 9071 9072 if (endp == 0 || endp == 3) 9073 return omit_one_operand_loc (loc, type, dest, expr); 9074 9075 if (expr == len) 9076 expr = NULL_TREE; 9077 9078 if (endp == 2) 9079 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len, 9080 ssize_int (1)); 9081 9082 dest = fold_build_pointer_plus_loc (loc, dest, len); 9083 dest = fold_convert_loc (loc, type, dest); 9084 if (expr) 9085 dest = omit_one_operand_loc (loc, type, dest, expr); 9086 return dest; 9087 } 9088 9089 /* Fold function call to builtin strcpy with arguments DEST and SRC. 9090 If LEN is not NULL, it represents the length of the string to be 9091 copied. Return NULL_TREE if no simplification can be made. */ 9092 9093 tree 9094 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len) 9095 { 9096 tree fn; 9097 9098 if (!validate_arg (dest, POINTER_TYPE) 9099 || !validate_arg (src, POINTER_TYPE)) 9100 return NULL_TREE; 9101 9102 /* If SRC and DEST are the same (and not volatile), return DEST. */ 9103 if (operand_equal_p (src, dest, 0)) 9104 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest); 9105 9106 if (optimize_function_for_size_p (cfun)) 9107 return NULL_TREE; 9108 9109 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 9110 if (!fn) 9111 return NULL_TREE; 9112 9113 if (!len) 9114 { 9115 len = c_strlen (src, 1); 9116 if (! len || TREE_SIDE_EFFECTS (len)) 9117 return NULL_TREE; 9118 } 9119 9120 len = fold_convert_loc (loc, size_type_node, len); 9121 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1)); 9122 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), 9123 build_call_expr_loc (loc, fn, 3, dest, src, len)); 9124 } 9125 9126 /* Fold function call to builtin stpcpy with arguments DEST and SRC. 9127 Return NULL_TREE if no simplification can be made. 
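   As an illustrative example (not part of the original comment): with a
   constant source, stpcpy (d, "hi") has len == 2 and is folded to roughly
       (memcpy (d, "hi", 3), (char *) d + 2)
   i.e. a memcpy of len + 1 bytes whose value is a pointer to the new NUL.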
*/ 9128 9129 static tree 9130 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src) 9131 { 9132 tree fn, len, lenp1, call, type; 9133 9134 if (!validate_arg (dest, POINTER_TYPE) 9135 || !validate_arg (src, POINTER_TYPE)) 9136 return NULL_TREE; 9137 9138 len = c_strlen (src, 1); 9139 if (!len 9140 || TREE_CODE (len) != INTEGER_CST) 9141 return NULL_TREE; 9142 9143 if (optimize_function_for_size_p (cfun) 9144 /* If length is zero it's small enough. */ 9145 && !integer_zerop (len)) 9146 return NULL_TREE; 9147 9148 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 9149 if (!fn) 9150 return NULL_TREE; 9151 9152 lenp1 = size_binop_loc (loc, PLUS_EXPR, 9153 fold_convert_loc (loc, size_type_node, len), 9154 build_int_cst (size_type_node, 1)); 9155 /* We use dest twice in building our expression. Save it from 9156 multiple expansions. */ 9157 dest = builtin_save_expr (dest); 9158 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1); 9159 9160 type = TREE_TYPE (TREE_TYPE (fndecl)); 9161 dest = fold_build_pointer_plus_loc (loc, dest, len); 9162 dest = fold_convert_loc (loc, type, dest); 9163 dest = omit_one_operand_loc (loc, type, dest, call); 9164 return dest; 9165 } 9166 9167 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN. 9168 If SLEN is not NULL, it represents the length of the source string. 9169 Return NULL_TREE if no simplification can be made. */ 9170 9171 tree 9172 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest, 9173 tree src, tree len, tree slen) 9174 { 9175 tree fn; 9176 9177 if (!validate_arg (dest, POINTER_TYPE) 9178 || !validate_arg (src, POINTER_TYPE) 9179 || !validate_arg (len, INTEGER_TYPE)) 9180 return NULL_TREE; 9181 9182 /* If the LEN parameter is zero, return DEST. */ 9183 if (integer_zerop (len)) 9184 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src); 9185 9186 /* We can't compare slen with len as constants below if len is not a 9187 constant. */ 9188 if (len == 0 || TREE_CODE (len) != INTEGER_CST) 9189 return NULL_TREE; 9190 9191 if (!slen) 9192 slen = c_strlen (src, 1); 9193 9194 /* Now, we must be passed a constant src ptr parameter. */ 9195 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST) 9196 return NULL_TREE; 9197 9198 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1)); 9199 9200 /* We do not support simplification of this case, though we do 9201 support it when expanding trees into RTL. */ 9202 /* FIXME: generate a call to __builtin_memset. */ 9203 if (tree_int_cst_lt (slen, len)) 9204 return NULL_TREE; 9205 9206 /* OK transform into builtin memcpy. */ 9207 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 9208 if (!fn) 9209 return NULL_TREE; 9210 9211 len = fold_convert_loc (loc, size_type_node, len); 9212 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), 9213 build_call_expr_loc (loc, fn, 3, dest, src, len)); 9214 } 9215 9216 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the 9217 arguments to the call, and TYPE is its return type. 9218 Return NULL_TREE if no simplification can be made. 
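   Illustrative examples (not part of the original comment):
       memchr ("hello", 'l', 4)  folds to a pointer to "hello" + 2, and
       memchr ("hello", 'z', 4)  folds to a null pointer of ARG1's type,
   both evaluated at compile time via the host memchr below.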
*/ 9219 9220 static tree 9221 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type) 9222 { 9223 if (!validate_arg (arg1, POINTER_TYPE) 9224 || !validate_arg (arg2, INTEGER_TYPE) 9225 || !validate_arg (len, INTEGER_TYPE)) 9226 return NULL_TREE; 9227 else 9228 { 9229 const char *p1; 9230 9231 if (TREE_CODE (arg2) != INTEGER_CST 9232 || !host_integerp (len, 1)) 9233 return NULL_TREE; 9234 9235 p1 = c_getstr (arg1); 9236 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0) 9237 { 9238 char c; 9239 const char *r; 9240 tree tem; 9241 9242 if (target_char_cast (arg2, &c)) 9243 return NULL_TREE; 9244 9245 r = (const char *) memchr (p1, c, tree_low_cst (len, 1)); 9246 9247 if (r == NULL) 9248 return build_int_cst (TREE_TYPE (arg1), 0); 9249 9250 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1); 9251 return fold_convert_loc (loc, type, tem); 9252 } 9253 return NULL_TREE; 9254 } 9255 } 9256 9257 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2. 9258 Return NULL_TREE if no simplification can be made. */ 9259 9260 static tree 9261 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len) 9262 { 9263 const char *p1, *p2; 9264 9265 if (!validate_arg (arg1, POINTER_TYPE) 9266 || !validate_arg (arg2, POINTER_TYPE) 9267 || !validate_arg (len, INTEGER_TYPE)) 9268 return NULL_TREE; 9269 9270 /* If the LEN parameter is zero, return zero. */ 9271 if (integer_zerop (len)) 9272 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node, 9273 arg1, arg2); 9274 9275 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ 9276 if (operand_equal_p (arg1, arg2, 0)) 9277 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len); 9278 9279 p1 = c_getstr (arg1); 9280 p2 = c_getstr (arg2); 9281 9282 /* If all arguments are constant, and the value of len is not greater 9283 than the lengths of arg1 and arg2, evaluate at compile-time. */ 9284 if (host_integerp (len, 1) && p1 && p2 9285 && compare_tree_int (len, strlen (p1) + 1) <= 0 9286 && compare_tree_int (len, strlen (p2) + 1) <= 0) 9287 { 9288 const int r = memcmp (p1, p2, tree_low_cst (len, 1)); 9289 9290 if (r > 0) 9291 return integer_one_node; 9292 else if (r < 0) 9293 return integer_minus_one_node; 9294 else 9295 return integer_zero_node; 9296 } 9297 9298 /* If the len parameter is one, return an expression corresponding to 9299 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */ 9300 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1) 9301 { 9302 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9303 tree cst_uchar_ptr_node 9304 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9305 9306 tree ind1 9307 = fold_convert_loc (loc, integer_type_node, 9308 build1 (INDIRECT_REF, cst_uchar_node, 9309 fold_convert_loc (loc, 9310 cst_uchar_ptr_node, 9311 arg1))); 9312 tree ind2 9313 = fold_convert_loc (loc, integer_type_node, 9314 build1 (INDIRECT_REF, cst_uchar_node, 9315 fold_convert_loc (loc, 9316 cst_uchar_ptr_node, 9317 arg2))); 9318 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2); 9319 } 9320 9321 return NULL_TREE; 9322 } 9323 9324 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2. 9325 Return NULL_TREE if no simplification can be made.
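   Illustrative examples (not part of the original comment):
       strcmp ("abc", "abd")  folds to -1,
       strcmp (s, "")         folds to (int) *(const unsigned char *) s,
       strcmp (s, s)          folds to 0 even for a non-constant S.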
*/ 9326 9327 static tree 9328 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2) 9329 { 9330 const char *p1, *p2; 9331 9332 if (!validate_arg (arg1, POINTER_TYPE) 9333 || !validate_arg (arg2, POINTER_TYPE)) 9334 return NULL_TREE; 9335 9336 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ 9337 if (operand_equal_p (arg1, arg2, 0)) 9338 return integer_zero_node; 9339 9340 p1 = c_getstr (arg1); 9341 p2 = c_getstr (arg2); 9342 9343 if (p1 && p2) 9344 { 9345 const int i = strcmp (p1, p2); 9346 if (i < 0) 9347 return integer_minus_one_node; 9348 else if (i > 0) 9349 return integer_one_node; 9350 else 9351 return integer_zero_node; 9352 } 9353 9354 /* If the second arg is "", return *(const unsigned char*)arg1. */ 9355 if (p2 && *p2 == '\0') 9356 { 9357 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9358 tree cst_uchar_ptr_node 9359 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9360 9361 return fold_convert_loc (loc, integer_type_node, 9362 build1 (INDIRECT_REF, cst_uchar_node, 9363 fold_convert_loc (loc, 9364 cst_uchar_ptr_node, 9365 arg1))); 9366 } 9367 9368 /* If the first arg is "", return -*(const unsigned char*)arg2. */ 9369 if (p1 && *p1 == '\0') 9370 { 9371 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9372 tree cst_uchar_ptr_node 9373 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9374 9375 tree temp 9376 = fold_convert_loc (loc, integer_type_node, 9377 build1 (INDIRECT_REF, cst_uchar_node, 9378 fold_convert_loc (loc, 9379 cst_uchar_ptr_node, 9380 arg2))); 9381 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp); 9382 } 9383 9384 return NULL_TREE; 9385 } 9386 9387 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN. 9388 Return NULL_TREE if no simplification can be made. */ 9389 9390 static tree 9391 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len) 9392 { 9393 const char *p1, *p2; 9394 9395 if (!validate_arg (arg1, POINTER_TYPE) 9396 || !validate_arg (arg2, POINTER_TYPE) 9397 || !validate_arg (len, INTEGER_TYPE)) 9398 return NULL_TREE; 9399 9400 /* If the LEN parameter is zero, return zero. */ 9401 if (integer_zerop (len)) 9402 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node, 9403 arg1, arg2); 9404 9405 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ 9406 if (operand_equal_p (arg1, arg2, 0)) 9407 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len); 9408 9409 p1 = c_getstr (arg1); 9410 p2 = c_getstr (arg2); 9411 9412 if (host_integerp (len, 1) && p1 && p2) 9413 { 9414 const int i = strncmp (p1, p2, tree_low_cst (len, 1)); 9415 if (i > 0) 9416 return integer_one_node; 9417 else if (i < 0) 9418 return integer_minus_one_node; 9419 else 9420 return integer_zero_node; 9421 } 9422 9423 /* If the second arg is "", and the length is greater than zero, 9424 return *(const unsigned char*)arg1. 
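   For instance (illustrative, not part of the original comment),
   strncmp (s, "", 3) becomes (int) *(const unsigned char *) s, which is
   zero exactly when *s is the terminating NUL, matching the library
   result.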
*/ 9425 if (p2 && *p2 == '\0' 9426 && TREE_CODE (len) == INTEGER_CST 9427 && tree_int_cst_sgn (len) == 1) 9428 { 9429 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9430 tree cst_uchar_ptr_node 9431 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9432 9433 return fold_convert_loc (loc, integer_type_node, 9434 build1 (INDIRECT_REF, cst_uchar_node, 9435 fold_convert_loc (loc, 9436 cst_uchar_ptr_node, 9437 arg1))); 9438 } 9439 9440 /* If the first arg is "", and the length is greater than zero, 9441 return -*(const unsigned char*)arg2. */ 9442 if (p1 && *p1 == '\0' 9443 && TREE_CODE (len) == INTEGER_CST 9444 && tree_int_cst_sgn (len) == 1) 9445 { 9446 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9447 tree cst_uchar_ptr_node 9448 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9449 9450 tree temp = fold_convert_loc (loc, integer_type_node, 9451 build1 (INDIRECT_REF, cst_uchar_node, 9452 fold_convert_loc (loc, 9453 cst_uchar_ptr_node, 9454 arg2))); 9455 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp); 9456 } 9457 9458 /* If the len parameter is one, return an expression corresponding to 9459 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */ 9460 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1) 9461 { 9462 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 9463 tree cst_uchar_ptr_node 9464 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 9465 9466 tree ind1 = fold_convert_loc (loc, integer_type_node, 9467 build1 (INDIRECT_REF, cst_uchar_node, 9468 fold_convert_loc (loc, 9469 cst_uchar_ptr_node, 9470 arg1))); 9471 tree ind2 = fold_convert_loc (loc, integer_type_node, 9472 build1 (INDIRECT_REF, cst_uchar_node, 9473 fold_convert_loc (loc, 9474 cst_uchar_ptr_node, 9475 arg2))); 9476 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2); 9477 } 9478 9479 return NULL_TREE; 9480 } 9481 9482 /* Fold function call to builtin signbit, signbitf or signbitl with argument 9483 ARG. Return NULL_TREE if no simplification can be made. */ 9484 9485 static tree 9486 fold_builtin_signbit (location_t loc, tree arg, tree type) 9487 { 9488 if (!validate_arg (arg, REAL_TYPE)) 9489 return NULL_TREE; 9490 9491 /* If ARG is a compile-time constant, determine the result. */ 9492 if (TREE_CODE (arg) == REAL_CST 9493 && !TREE_OVERFLOW (arg)) 9494 { 9495 REAL_VALUE_TYPE c; 9496 9497 c = TREE_REAL_CST (arg); 9498 return (REAL_VALUE_NEGATIVE (c) 9499 ? build_one_cst (type) 9500 : build_zero_cst (type)); 9501 } 9502 9503 /* If ARG is non-negative, the result is always zero. */ 9504 if (tree_expr_nonnegative_p (arg)) 9505 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 9506 9507 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */ 9508 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg)))) 9509 return fold_convert (type, 9510 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg, 9511 build_real (TREE_TYPE (arg), dconst0))); 9512 9513 return NULL_TREE; 9514 } 9515 9516 /* Fold function call to builtin copysign, copysignf or copysignl with 9517 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can 9518 be made. */ 9519 9520 static tree 9521 fold_builtin_copysign (location_t loc, tree fndecl, 9522 tree arg1, tree arg2, tree type) 9523 { 9524 tree tem; 9525 9526 if (!validate_arg (arg1, REAL_TYPE) 9527 || !validate_arg (arg2, REAL_TYPE)) 9528 return NULL_TREE; 9529 9530 /* copysign(X,X) is X.
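   Further illustrative cases (not part of the original comment):
   copysign (x, 2.0) folds to fabs (x) since the sign source is known
   non-negative, and constant operands such as copysign (3.0, -0.5) fold
   directly to the constant -3.0 via real_copysign below.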
*/ 9531 if (operand_equal_p (arg1, arg2, 0)) 9532 return fold_convert_loc (loc, type, arg1); 9533 9534 /* If ARG1 and ARG2 are compile-time constants, determine the result. */ 9535 if (TREE_CODE (arg1) == REAL_CST 9536 && TREE_CODE (arg2) == REAL_CST 9537 && !TREE_OVERFLOW (arg1) 9538 && !TREE_OVERFLOW (arg2)) 9539 { 9540 REAL_VALUE_TYPE c1, c2; 9541 9542 c1 = TREE_REAL_CST (arg1); 9543 c2 = TREE_REAL_CST (arg2); 9544 /* c1.sign := c2.sign. */ 9545 real_copysign (&c1, &c2); 9546 return build_real (type, c1); 9547 } 9548 9549 /* copysign(X, Y) is fabs(X) when Y is always non-negative. 9550 Remember to evaluate Y for side-effects. */ 9551 if (tree_expr_nonnegative_p (arg2)) 9552 return omit_one_operand_loc (loc, type, 9553 fold_build1_loc (loc, ABS_EXPR, type, arg1), 9554 arg2); 9555 9556 /* Strip sign changing operations for the first argument. */ 9557 tem = fold_strip_sign_ops (arg1); 9558 if (tem) 9559 return build_call_expr_loc (loc, fndecl, 2, tem, arg2); 9560 9561 return NULL_TREE; 9562 } 9563 9564 /* Fold a call to builtin isascii with argument ARG. */ 9565 9566 static tree 9567 fold_builtin_isascii (location_t loc, tree arg) 9568 { 9569 if (!validate_arg (arg, INTEGER_TYPE)) 9570 return NULL_TREE; 9571 else 9572 { 9573 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */ 9574 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg, 9575 build_int_cst (integer_type_node, 9576 ~ (unsigned HOST_WIDE_INT) 0x7f)); 9577 return fold_build2_loc (loc, EQ_EXPR, integer_type_node, 9578 arg, integer_zero_node); 9579 } 9580 } 9581 9582 /* Fold a call to builtin toascii with argument ARG. */ 9583 9584 static tree 9585 fold_builtin_toascii (location_t loc, tree arg) 9586 { 9587 if (!validate_arg (arg, INTEGER_TYPE)) 9588 return NULL_TREE; 9589 9590 /* Transform toascii(c) -> (c & 0x7f). */ 9591 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg, 9592 build_int_cst (integer_type_node, 0x7f)); 9593 } 9594 9595 /* Fold a call to builtin isdigit with argument ARG. */ 9596 9597 static tree 9598 fold_builtin_isdigit (location_t loc, tree arg) 9599 { 9600 if (!validate_arg (arg, INTEGER_TYPE)) 9601 return NULL_TREE; 9602 else 9603 { 9604 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */ 9605 /* According to the C standard, isdigit is unaffected by locale. 9606 However, it definitely is affected by the target character set. */ 9607 unsigned HOST_WIDE_INT target_digit0 9608 = lang_hooks.to_target_charset ('0'); 9609 9610 if (target_digit0 == 0) 9611 return NULL_TREE; 9612 9613 arg = fold_convert_loc (loc, unsigned_type_node, arg); 9614 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg, 9615 build_int_cst (unsigned_type_node, target_digit0)); 9616 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg, 9617 build_int_cst (unsigned_type_node, 9)); 9618 } 9619 } 9620 9621 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */ 9622 9623 static tree 9624 fold_builtin_fabs (location_t loc, tree arg, tree type) 9625 { 9626 if (!validate_arg (arg, REAL_TYPE)) 9627 return NULL_TREE; 9628 9629 arg = fold_convert_loc (loc, type, arg); 9630 if (TREE_CODE (arg) == REAL_CST) 9631 return fold_abs_const (arg, type); 9632 return fold_build1_loc (loc, ABS_EXPR, type, arg); 9633 } 9634 9635 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. 
*/ 9636 9637 static tree 9638 fold_builtin_abs (location_t loc, tree arg, tree type) 9639 { 9640 if (!validate_arg (arg, INTEGER_TYPE)) 9641 return NULL_TREE; 9642 9643 arg = fold_convert_loc (loc, type, arg); 9644 if (TREE_CODE (arg) == INTEGER_CST) 9645 return fold_abs_const (arg, type); 9646 return fold_build1_loc (loc, ABS_EXPR, type, arg); 9647 } 9648 9649 /* Fold a fma operation with arguments ARG[012]. */ 9650 9651 tree 9652 fold_fma (location_t loc ATTRIBUTE_UNUSED, 9653 tree type, tree arg0, tree arg1, tree arg2) 9654 { 9655 if (TREE_CODE (arg0) == REAL_CST 9656 && TREE_CODE (arg1) == REAL_CST 9657 && TREE_CODE (arg2) == REAL_CST) 9658 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma); 9659 9660 return NULL_TREE; 9661 } 9662 9663 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */ 9664 9665 static tree 9666 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type) 9667 { 9668 if (validate_arg (arg0, REAL_TYPE) 9669 && validate_arg(arg1, REAL_TYPE) 9670 && validate_arg(arg2, REAL_TYPE)) 9671 { 9672 tree tem = fold_fma (loc, type, arg0, arg1, arg2); 9673 if (tem) 9674 return tem; 9675 9676 /* ??? Only expand to FMA_EXPR if it's directly supported. */ 9677 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing) 9678 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2); 9679 } 9680 return NULL_TREE; 9681 } 9682 9683 /* Fold a call to builtin fmin or fmax. */ 9684 9685 static tree 9686 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1, 9687 tree type, bool max) 9688 { 9689 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE)) 9690 { 9691 /* Calculate the result when the argument is a constant. */ 9692 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min)); 9693 9694 if (res) 9695 return res; 9696 9697 /* If either argument is NaN, return the other one. Avoid the 9698 transformation if we get (and honor) a signalling NaN. Using 9699 omit_one_operand() ensures we create a non-lvalue. */ 9700 if (TREE_CODE (arg0) == REAL_CST 9701 && real_isnan (&TREE_REAL_CST (arg0)) 9702 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 9703 || ! TREE_REAL_CST (arg0).signalling)) 9704 return omit_one_operand_loc (loc, type, arg1, arg0); 9705 if (TREE_CODE (arg1) == REAL_CST 9706 && real_isnan (&TREE_REAL_CST (arg1)) 9707 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))) 9708 || ! TREE_REAL_CST (arg1).signalling)) 9709 return omit_one_operand_loc (loc, type, arg0, arg1); 9710 9711 /* Transform fmin/fmax(x,x) -> x. */ 9712 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME)) 9713 return omit_one_operand_loc (loc, type, arg0, arg1); 9714 9715 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these 9716 functions to return the numeric arg if the other one is NaN. 9717 These tree codes don't honor that, so only transform if 9718 -ffinite-math-only is set. C99 doesn't require -0.0 to be 9719 handled, so we don't have to worry about it either. */ 9720 if (flag_finite_math_only) 9721 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type, 9722 fold_convert_loc (loc, type, arg0), 9723 fold_convert_loc (loc, type, arg1)); 9724 } 9725 return NULL_TREE; 9726 } 9727 9728 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). 
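   Sketch of the folded form (illustrative, not part of the original
   comment): with Z a SAVE_EXPR of ARG,
       carg (z)  ==>  atan2 (__imag z, __real z)
   where the SAVE_EXPR ensures the operand is evaluated only once.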
*/ 9729 9730 static tree 9731 fold_builtin_carg (location_t loc, tree arg, tree type) 9732 { 9733 if (validate_arg (arg, COMPLEX_TYPE) 9734 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE) 9735 { 9736 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2); 9737 9738 if (atan2_fn) 9739 { 9740 tree new_arg = builtin_save_expr (arg); 9741 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg); 9742 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg); 9743 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg); 9744 } 9745 } 9746 9747 return NULL_TREE; 9748 } 9749 9750 /* Fold a call to builtin logb/ilogb. */ 9751 9752 static tree 9753 fold_builtin_logb (location_t loc, tree arg, tree rettype) 9754 { 9755 if (! validate_arg (arg, REAL_TYPE)) 9756 return NULL_TREE; 9757 9758 STRIP_NOPS (arg); 9759 9760 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg)) 9761 { 9762 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg); 9763 9764 switch (value->cl) 9765 { 9766 case rvc_nan: 9767 case rvc_inf: 9768 /* If arg is Inf or NaN and we're logb, return it. */ 9769 if (TREE_CODE (rettype) == REAL_TYPE) 9770 { 9771 /* For logb(-Inf) we have to return +Inf. */ 9772 if (real_isinf (value) && real_isneg (value)) 9773 { 9774 REAL_VALUE_TYPE tem; 9775 real_inf (&tem); 9776 return build_real (rettype, tem); 9777 } 9778 return fold_convert_loc (loc, rettype, arg); 9779 } 9780 /* Fall through... */ 9781 case rvc_zero: 9782 /* Zero may set errno and/or raise an exception for logb; also, 9783 for ilogb we don't know FP_ILOGB0. */ 9784 return NULL_TREE; 9785 case rvc_normal: 9786 /* For normal numbers, proceed iff radix == 2. In GCC, 9787 normalized significands are in the range [0.5, 1.0). We 9788 want the exponent as if they were [1.0, 2.0), so get the 9789 exponent and subtract 1. */ 9790 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2) 9791 return fold_convert_loc (loc, rettype, 9792 build_int_cst (integer_type_node, 9793 REAL_EXP (value)-1)); 9794 break; 9795 } 9796 } 9797 9798 return NULL_TREE; 9799 } 9800 9801 /* Fold a call to builtin significand, if radix == 2. */ 9802 9803 static tree 9804 fold_builtin_significand (location_t loc, tree arg, tree rettype) 9805 { 9806 if (! validate_arg (arg, REAL_TYPE)) 9807 return NULL_TREE; 9808 9809 STRIP_NOPS (arg); 9810 9811 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg)) 9812 { 9813 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg); 9814 9815 switch (value->cl) 9816 { 9817 case rvc_zero: 9818 case rvc_nan: 9819 case rvc_inf: 9820 /* If arg is +-0, +-Inf or +-NaN, then return it. */ 9821 return fold_convert_loc (loc, rettype, arg); 9822 case rvc_normal: 9823 /* For normal numbers, proceed iff radix == 2. */ 9824 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2) 9825 { 9826 REAL_VALUE_TYPE result = *value; 9827 /* In GCC, normalized significands are in the range [0.5, 9828 1.0). We want them to be [1.0, 2.0) so set the 9829 exponent to 1. */ 9830 SET_REAL_EXP (&result, 1); 9831 return build_real (rettype, result); 9832 } 9833 break; 9834 } 9835 } 9836 9837 return NULL_TREE; 9838 } 9839 9840 /* Fold a call to builtin frexp; we can assume the base is 2. */ 9841 9842 static tree 9843 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype) 9844 { 9845 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE)) 9846 return NULL_TREE; 9847 9848 STRIP_NOPS (arg0); 9849 9850 if (!(TREE_CODE (arg0) == REAL_CST && !
TREE_OVERFLOW (arg0))) 9851 return NULL_TREE; 9852 9853 arg1 = build_fold_indirect_ref_loc (loc, arg1); 9854 9855 /* Proceed if a valid pointer type was passed in. */ 9856 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node) 9857 { 9858 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0); 9859 tree frac, exp; 9860 9861 switch (value->cl) 9862 { 9863 case rvc_zero: 9864 /* For +-0, return (*exp = 0, +-0). */ 9865 exp = integer_zero_node; 9866 frac = arg0; 9867 break; 9868 case rvc_nan: 9869 case rvc_inf: 9870 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */ 9871 return omit_one_operand_loc (loc, rettype, arg0, arg1); 9872 case rvc_normal: 9873 { 9874 /* Since the frexp function always expects base 2, and in 9875 GCC normalized significands are already in the range 9876 [0.5, 1.0), we have exactly what frexp wants. */ 9877 REAL_VALUE_TYPE frac_rvt = *value; 9878 SET_REAL_EXP (&frac_rvt, 0); 9879 frac = build_real (rettype, frac_rvt); 9880 exp = build_int_cst (integer_type_node, REAL_EXP (value)); 9881 } 9882 break; 9883 default: 9884 gcc_unreachable (); 9885 } 9886 9887 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */ 9888 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp); 9889 TREE_SIDE_EFFECTS (arg1) = 1; 9890 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac); 9891 } 9892 9893 return NULL_TREE; 9894 } 9895 9896 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true 9897 then we can assume the base is two. If it's false, then we have to 9898 check the mode of the TYPE parameter in certain cases. */ 9899 9900 static tree 9901 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1, 9902 tree type, bool ldexp) 9903 { 9904 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE)) 9905 { 9906 STRIP_NOPS (arg0); 9907 STRIP_NOPS (arg1); 9908 9909 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */ 9910 if (real_zerop (arg0) || integer_zerop (arg1) 9911 || (TREE_CODE (arg0) == REAL_CST 9912 && !real_isfinite (&TREE_REAL_CST (arg0)))) 9913 return omit_one_operand_loc (loc, type, arg0, arg1); 9914 9915 /* If both arguments are constant, then try to evaluate it. */ 9916 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2) 9917 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0) 9918 && host_integerp (arg1, 0)) 9919 { 9920 /* Bound the maximum adjustment to twice the range of the 9921 mode's valid exponents. Use labs to ensure the range is 9922 positive as a sanity check. */ 9923 const long max_exp_adj = 2 * 9924 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax 9925 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin); 9926 9927 /* Get the user-requested adjustment. */ 9928 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0); 9929 9930 /* The requested adjustment must be inside this range. This 9931 is a preliminary cap to avoid things like overflow; we 9932 may still fail to compute the result for other reasons. */ 9933 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj) 9934 { 9935 REAL_VALUE_TYPE initial_result; 9936 9937 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj); 9938 9939 /* Ensure we didn't overflow. */ 9940 if (! real_isinf (&initial_result)) 9941 { 9942 const REAL_VALUE_TYPE trunc_result 9943 = real_value_truncate (TYPE_MODE (type), initial_result); 9944 9945 /* Only proceed if the target mode can hold the 9946 resulting value.
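   E.g. (illustrative, not part of the original comment):
   ldexp (1.5, 3) passes all of the checks and folds to the constant 12.0,
   while ldexp (1.0, 3000) in double cannot be represented, fails the
   checks here, and is left as a call for the library to handle.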
*/ 9947 if (REAL_VALUES_EQUAL (initial_result, trunc_result)) 9948 return build_real (type, trunc_result); 9949 } 9950 } 9951 } 9952 } 9953 9954 return NULL_TREE; 9955 } 9956 9957 /* Fold a call to builtin modf. */ 9958 9959 static tree 9960 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype) 9961 { 9962 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE)) 9963 return NULL_TREE; 9964 9965 STRIP_NOPS (arg0); 9966 9967 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0))) 9968 return NULL_TREE; 9969 9970 arg1 = build_fold_indirect_ref_loc (loc, arg1); 9971 9972 /* Proceed if a valid pointer type was passed in. */ 9973 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype)) 9974 { 9975 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0); 9976 REAL_VALUE_TYPE trunc, frac; 9977 9978 switch (value->cl) 9979 { 9980 case rvc_nan: 9981 case rvc_zero: 9982 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */ 9983 trunc = frac = *value; 9984 break; 9985 case rvc_inf: 9986 /* For +-Inf, return (*arg1 = arg0, +-0). */ 9987 frac = dconst0; 9988 frac.sign = value->sign; 9989 trunc = *value; 9990 break; 9991 case rvc_normal: 9992 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */ 9993 real_trunc (&trunc, VOIDmode, value); 9994 real_arithmetic (&frac, MINUS_EXPR, value, &trunc); 9995 /* If the original number was negative and already 9996 integral, then the fractional part is -0.0. */ 9997 if (value->sign && frac.cl == rvc_zero) 9998 frac.sign = value->sign; 9999 break; 10000 } 10001 10002 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */ 10003 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, 10004 build_real (rettype, trunc)); 10005 TREE_SIDE_EFFECTS (arg1) = 1; 10006 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, 10007 build_real (rettype, frac)); 10008 } 10009 10010 return NULL_TREE; 10011 } 10012 10013 /* Given a location LOC, an interclass builtin function decl FNDECL 10014 and its single argument ARG, return a folded expression computing 10015 the same, or NULL_TREE if we either couldn't or didn't want to fold 10016 (the latter happens if there's an RTL instruction available). */ 10017 10018 static tree 10019 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg) 10020 { 10021 enum machine_mode mode; 10022 10023 if (!validate_arg (arg, REAL_TYPE)) 10024 return NULL_TREE; 10025 10026 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing) 10027 return NULL_TREE; 10028 10029 mode = TYPE_MODE (TREE_TYPE (arg)); 10030 10031 /* If there is no optab, try generic code. */ 10032 switch (DECL_FUNCTION_CODE (fndecl)) 10033 { 10034 tree result; 10035 10036 CASE_FLT_FN (BUILT_IN_ISINF): 10037 { 10038 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */ 10039 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER); 10040 tree const type = TREE_TYPE (arg); 10041 REAL_VALUE_TYPE r; 10042 char buf[128]; 10043 10044 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf)); 10045 real_from_string (&r, buf); 10046 result = build_call_expr (isgr_fn, 2, 10047 fold_build1_loc (loc, ABS_EXPR, type, arg), 10048 build_real (type, r)); 10049 return result; 10050 } 10051 CASE_FLT_FN (BUILT_IN_FINITE): 10052 case BUILT_IN_ISFINITE: 10053 { 10054 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).
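   Concretely (illustrative, not part of the original comment): for
   double, get_max_float produces a string for the mode's largest finite
   value, so the folded form is roughly
       islessequal (fabs (x), DBL_MAX)
   which is false for +-Inf and, being an unordered-safe comparison,
   also false (without trapping) for quiet NaN.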
*/ 10055 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL); 10056 tree const type = TREE_TYPE (arg); 10057 REAL_VALUE_TYPE r; 10058 char buf[128]; 10059 10060 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf)); 10061 real_from_string (&r, buf); 10062 result = build_call_expr (isle_fn, 2, 10063 fold_build1_loc (loc, ABS_EXPR, type, arg), 10064 build_real (type, r)); 10065 /*result = fold_build2_loc (loc, UNGT_EXPR, 10066 TREE_TYPE (TREE_TYPE (fndecl)), 10067 fold_build1_loc (loc, ABS_EXPR, type, arg), 10068 build_real (type, r)); 10069 result = fold_build1_loc (loc, TRUTH_NOT_EXPR, 10070 TREE_TYPE (TREE_TYPE (fndecl)), 10071 result);*/ 10072 return result; 10073 } 10074 case BUILT_IN_ISNORMAL: 10075 { 10076 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) & 10077 islessequal(fabs(x),DBL_MAX). */ 10078 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL); 10079 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL); 10080 tree const type = TREE_TYPE (arg); 10081 REAL_VALUE_TYPE rmax, rmin; 10082 char buf[128]; 10083 10084 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf)); 10085 real_from_string (&rmax, buf); 10086 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1); 10087 real_from_string (&rmin, buf); 10088 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg)); 10089 result = build_call_expr (isle_fn, 2, arg, 10090 build_real (type, rmax)); 10091 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result, 10092 build_call_expr (isge_fn, 2, arg, 10093 build_real (type, rmin))); 10094 return result; 10095 } 10096 default: 10097 break; 10098 } 10099 10100 return NULL_TREE; 10101 } 10102 10103 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite. 10104 ARG is the argument for the call. */ 10105 10106 static tree 10107 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index) 10108 { 10109 tree type = TREE_TYPE (TREE_TYPE (fndecl)); 10110 REAL_VALUE_TYPE r; 10111 10112 if (!validate_arg (arg, REAL_TYPE)) 10113 return NULL_TREE; 10114 10115 switch (builtin_index) 10116 { 10117 case BUILT_IN_ISINF: 10118 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg)))) 10119 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 10120 10121 if (TREE_CODE (arg) == REAL_CST) 10122 { 10123 r = TREE_REAL_CST (arg); 10124 if (real_isinf (&r)) 10125 return real_compare (GT_EXPR, &r, &dconst0) 10126 ? integer_one_node : integer_minus_one_node; 10127 else 10128 return integer_zero_node; 10129 } 10130 10131 return NULL_TREE; 10132 10133 case BUILT_IN_ISINF_SIGN: 10134 { 10135 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */ 10136 /* In a boolean context, GCC will fold the inner COND_EXPR to 10137 1. So e.g. "if (isinf_sign(x))" would be folded to just 10138 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". 
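   A worked example (illustrative, not part of the original comment):
   for x = -Inf the inner COND_EXPR built below selects -1 because
   signbit(x) is nonzero, and the outer one selects that value because
   isinf(x) is nonzero; for any finite x the outer COND_EXPR yields 0.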
*/ 10139 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0); 10140 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF); 10141 tree tmp = NULL_TREE; 10142 10143 arg = builtin_save_expr (arg); 10144 10145 if (signbit_fn && isinf_fn) 10146 { 10147 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg); 10148 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg); 10149 10150 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node, 10151 signbit_call, integer_zero_node); 10152 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node, 10153 isinf_call, integer_zero_node); 10154 10155 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call, 10156 integer_minus_one_node, integer_one_node); 10157 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, 10158 isinf_call, tmp, 10159 integer_zero_node); 10160 } 10161 10162 return tmp; 10163 } 10164 10165 case BUILT_IN_ISFINITE: 10166 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))) 10167 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg)))) 10168 return omit_one_operand_loc (loc, type, integer_one_node, arg); 10169 10170 if (TREE_CODE (arg) == REAL_CST) 10171 { 10172 r = TREE_REAL_CST (arg); 10173 return real_isfinite (&r) ? integer_one_node : integer_zero_node; 10174 } 10175 10176 return NULL_TREE; 10177 10178 case BUILT_IN_ISNAN: 10179 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))) 10180 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 10181 10182 if (TREE_CODE (arg) == REAL_CST) 10183 { 10184 r = TREE_REAL_CST (arg); 10185 return real_isnan (&r) ? integer_one_node : integer_zero_node; 10186 } 10187 10188 arg = builtin_save_expr (arg); 10189 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg); 10190 10191 default: 10192 gcc_unreachable (); 10193 } 10194 } 10195 10196 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...). 10197 This builtin will generate code to return the appropriate floating 10198 point classification depending on the value of the floating point 10199 number passed in. The possible return values must be supplied as 10200 int arguments to the call in the following order: FP_NAN, FP_INFINITE, 10201 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly 10202 one floating point argument which is "type generic". */ 10203 10204 static tree 10205 fold_builtin_fpclassify (location_t loc, tree exp) 10206 { 10207 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero, 10208 arg, type, res, tmp; 10209 enum machine_mode mode; 10210 REAL_VALUE_TYPE r; 10211 char buf[128]; 10212 10213 /* Verify the required arguments in the original call. */ 10214 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, 10215 INTEGER_TYPE, INTEGER_TYPE, 10216 INTEGER_TYPE, REAL_TYPE, VOID_TYPE)) 10217 return NULL_TREE; 10218 10219 fp_nan = CALL_EXPR_ARG (exp, 0); 10220 fp_infinite = CALL_EXPR_ARG (exp, 1); 10221 fp_normal = CALL_EXPR_ARG (exp, 2); 10222 fp_subnormal = CALL_EXPR_ARG (exp, 3); 10223 fp_zero = CALL_EXPR_ARG (exp, 4); 10224 arg = CALL_EXPR_ARG (exp, 5); 10225 type = TREE_TYPE (arg); 10226 mode = TYPE_MODE (type); 10227 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg)); 10228 10229 /* fpclassify(x) -> 10230 isnan(x) ? FP_NAN : 10231 (fabs(x) == Inf ? FP_INFINITE : 10232 (fabs(x) >= DBL_MIN ? FP_NORMAL : 10233 (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
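   Illustratively (not part of the original comment), the nest built
   below maps 0.0 to FP_ZERO, DBL_MIN / 2 to FP_SUBNORMAL, 1.0 to
   FP_NORMAL, +-Inf to FP_INFINITE and NaN to FP_NAN, the last via the
   trailing ORDERED_EXPR guard.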
*/ 10234 10235 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, 10236 build_real (type, dconst0)); 10237 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, 10238 tmp, fp_zero, fp_subnormal); 10239 10240 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1); 10241 real_from_string (&r, buf); 10242 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node, 10243 arg, build_real (type, r)); 10244 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res); 10245 10246 if (HONOR_INFINITIES (mode)) 10247 { 10248 real_inf (&r); 10249 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, 10250 build_real (type, r)); 10251 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, 10252 fp_infinite, res); 10253 } 10254 10255 if (HONOR_NANS (mode)) 10256 { 10257 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg); 10258 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan); 10259 } 10260 10261 return res; 10262 } 10263 10264 /* Fold a call to an unordered comparison function such as 10265 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function 10266 being called and ARG0 and ARG1 are the arguments for the call. 10267 UNORDERED_CODE and ORDERED_CODE are comparison codes that give 10268 the opposite of the desired result. UNORDERED_CODE is used 10269 for modes that can hold NaNs and ORDERED_CODE is used for 10270 the rest. */ 10271 10272 static tree 10273 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1, 10274 enum tree_code unordered_code, 10275 enum tree_code ordered_code) 10276 { 10277 tree type = TREE_TYPE (TREE_TYPE (fndecl)); 10278 enum tree_code code; 10279 tree type0, type1; 10280 enum tree_code code0, code1; 10281 tree cmp_type = NULL_TREE; 10282 10283 type0 = TREE_TYPE (arg0); 10284 type1 = TREE_TYPE (arg1); 10285 10286 code0 = TREE_CODE (type0); 10287 code1 = TREE_CODE (type1); 10288 10289 if (code0 == REAL_TYPE && code1 == REAL_TYPE) 10290 /* Choose the wider of two real types. */ 10291 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1) 10292 ? type0 : type1; 10293 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE) 10294 cmp_type = type0; 10295 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE) 10296 cmp_type = type1; 10297 10298 arg0 = fold_convert_loc (loc, cmp_type, arg0); 10299 arg1 = fold_convert_loc (loc, cmp_type, arg1); 10300 10301 if (unordered_code == UNORDERED_EXPR) 10302 { 10303 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 10304 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1); 10305 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1); 10306 } 10307 10308 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code 10309 : ordered_code; 10310 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, 10311 fold_build2_loc (loc, code, type, arg0, arg1)); 10312 } 10313 10314 /* Fold a call to built-in function FNDECL with 0 arguments. 10315 IGNORE is true if the result of the function call is ignored. This 10316 function returns NULL_TREE if no simplification was possible. 
*/ 10317 10318 static tree 10319 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED) 10320 { 10321 tree type = TREE_TYPE (TREE_TYPE (fndecl)); 10322 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); 10323 switch (fcode) 10324 { 10325 CASE_FLT_FN (BUILT_IN_INF): 10326 case BUILT_IN_INFD32: 10327 case BUILT_IN_INFD64: 10328 case BUILT_IN_INFD128: 10329 return fold_builtin_inf (loc, type, true); 10330 10331 CASE_FLT_FN (BUILT_IN_HUGE_VAL): 10332 return fold_builtin_inf (loc, type, false); 10333 10334 case BUILT_IN_CLASSIFY_TYPE: 10335 return fold_builtin_classify_type (NULL_TREE); 10336 10337 default: 10338 break; 10339 } 10340 return NULL_TREE; 10341 } 10342 10343 /* Fold a call to built-in function FNDECL with 1 argument, ARG0. 10344 IGNORE is true if the result of the function call is ignored. This 10345 function returns NULL_TREE if no simplification was possible. */ 10346 10347 static tree 10348 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore) 10349 { 10350 tree type = TREE_TYPE (TREE_TYPE (fndecl)); 10351 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); 10352 switch (fcode) 10353 { 10354 case BUILT_IN_CONSTANT_P: 10355 { 10356 tree val = fold_builtin_constant_p (arg0); 10357 10358 /* Gimplification will pull the CALL_EXPR for the builtin out of 10359 an if condition. When not optimizing, we'll not CSE it back. 10360 To avoid link error types of regressions, return false now. */ 10361 if (!val && !optimize) 10362 val = integer_zero_node; 10363 10364 return val; 10365 } 10366 10367 case BUILT_IN_CLASSIFY_TYPE: 10368 return fold_builtin_classify_type (arg0); 10369 10370 case BUILT_IN_STRLEN: 10371 return fold_builtin_strlen (loc, type, arg0); 10372 10373 CASE_FLT_FN (BUILT_IN_FABS): 10374 case BUILT_IN_FABSD32: 10375 case BUILT_IN_FABSD64: 10376 case BUILT_IN_FABSD128: 10377 return fold_builtin_fabs (loc, arg0, type); 10378 10379 case BUILT_IN_ABS: 10380 case BUILT_IN_LABS: 10381 case BUILT_IN_LLABS: 10382 case BUILT_IN_IMAXABS: 10383 return fold_builtin_abs (loc, arg0, type); 10384 10385 CASE_FLT_FN (BUILT_IN_CONJ): 10386 if (validate_arg (arg0, COMPLEX_TYPE) 10387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10388 return fold_build1_loc (loc, CONJ_EXPR, type, arg0); 10389 break; 10390 10391 CASE_FLT_FN (BUILT_IN_CREAL): 10392 if (validate_arg (arg0, COMPLEX_TYPE) 10393 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10394 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));; 10395 break; 10396 10397 CASE_FLT_FN (BUILT_IN_CIMAG): 10398 if (validate_arg (arg0, COMPLEX_TYPE) 10399 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10400 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0)); 10401 break; 10402 10403 CASE_FLT_FN (BUILT_IN_CCOS): 10404 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false); 10405 10406 CASE_FLT_FN (BUILT_IN_CCOSH): 10407 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true); 10408 10409 CASE_FLT_FN (BUILT_IN_CPROJ): 10410 return fold_builtin_cproj(loc, arg0, type); 10411 10412 CASE_FLT_FN (BUILT_IN_CSIN): 10413 if (validate_arg (arg0, COMPLEX_TYPE) 10414 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10415 return do_mpc_arg1 (arg0, type, mpc_sin); 10416 break; 10417 10418 CASE_FLT_FN (BUILT_IN_CSINH): 10419 if (validate_arg (arg0, COMPLEX_TYPE) 10420 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10421 return do_mpc_arg1 (arg0, type, mpc_sinh); 10422 break; 10423 10424 
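/* Illustrative note (not part of the original sources): each of the
   complex-math cases in this switch folds only fully-constant operands,
   e.g. csqrt (-1.0 + 0.0i) evaluates via MPC to 0.0 + 1.0i; anything
   non-constant falls through and returns NULL_TREE.  */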
CASE_FLT_FN (BUILT_IN_CTAN): 10425 if (validate_arg (arg0, COMPLEX_TYPE) 10426 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10427 return do_mpc_arg1 (arg0, type, mpc_tan); 10428 break; 10429 10430 CASE_FLT_FN (BUILT_IN_CTANH): 10431 if (validate_arg (arg0, COMPLEX_TYPE) 10432 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10433 return do_mpc_arg1 (arg0, type, mpc_tanh); 10434 break; 10435 10436 CASE_FLT_FN (BUILT_IN_CLOG): 10437 if (validate_arg (arg0, COMPLEX_TYPE) 10438 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10439 return do_mpc_arg1 (arg0, type, mpc_log); 10440 break; 10441 10442 CASE_FLT_FN (BUILT_IN_CSQRT): 10443 if (validate_arg (arg0, COMPLEX_TYPE) 10444 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10445 return do_mpc_arg1 (arg0, type, mpc_sqrt); 10446 break; 10447 10448 CASE_FLT_FN (BUILT_IN_CASIN): 10449 if (validate_arg (arg0, COMPLEX_TYPE) 10450 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10451 return do_mpc_arg1 (arg0, type, mpc_asin); 10452 break; 10453 10454 CASE_FLT_FN (BUILT_IN_CACOS): 10455 if (validate_arg (arg0, COMPLEX_TYPE) 10456 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10457 return do_mpc_arg1 (arg0, type, mpc_acos); 10458 break; 10459 10460 CASE_FLT_FN (BUILT_IN_CATAN): 10461 if (validate_arg (arg0, COMPLEX_TYPE) 10462 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10463 return do_mpc_arg1 (arg0, type, mpc_atan); 10464 break; 10465 10466 CASE_FLT_FN (BUILT_IN_CASINH): 10467 if (validate_arg (arg0, COMPLEX_TYPE) 10468 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10469 return do_mpc_arg1 (arg0, type, mpc_asinh); 10470 break; 10471 10472 CASE_FLT_FN (BUILT_IN_CACOSH): 10473 if (validate_arg (arg0, COMPLEX_TYPE) 10474 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10475 return do_mpc_arg1 (arg0, type, mpc_acosh); 10476 break; 10477 10478 CASE_FLT_FN (BUILT_IN_CATANH): 10479 if (validate_arg (arg0, COMPLEX_TYPE) 10480 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE) 10481 return do_mpc_arg1 (arg0, type, mpc_atanh); 10482 break; 10483 10484 CASE_FLT_FN (BUILT_IN_CABS): 10485 return fold_builtin_cabs (loc, arg0, type, fndecl); 10486 10487 CASE_FLT_FN (BUILT_IN_CARG): 10488 return fold_builtin_carg (loc, arg0, type); 10489 10490 CASE_FLT_FN (BUILT_IN_SQRT): 10491 return fold_builtin_sqrt (loc, arg0, type); 10492 10493 CASE_FLT_FN (BUILT_IN_CBRT): 10494 return fold_builtin_cbrt (loc, arg0, type); 10495 10496 CASE_FLT_FN (BUILT_IN_ASIN): 10497 if (validate_arg (arg0, REAL_TYPE)) 10498 return do_mpfr_arg1 (arg0, type, mpfr_asin, 10499 &dconstm1, &dconst1, true); 10500 break; 10501 10502 CASE_FLT_FN (BUILT_IN_ACOS): 10503 if (validate_arg (arg0, REAL_TYPE)) 10504 return do_mpfr_arg1 (arg0, type, mpfr_acos, 10505 &dconstm1, &dconst1, true); 10506 break; 10507 10508 CASE_FLT_FN (BUILT_IN_ATAN): 10509 if (validate_arg (arg0, REAL_TYPE)) 10510 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0); 10511 break; 10512 10513 CASE_FLT_FN (BUILT_IN_ASINH): 10514 if (validate_arg (arg0, REAL_TYPE)) 10515 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0); 10516 break; 10517 10518 CASE_FLT_FN (BUILT_IN_ACOSH): 10519 if (validate_arg (arg0, REAL_TYPE)) 10520 return do_mpfr_arg1 (arg0, type, mpfr_acosh, 10521 &dconst1, NULL, true); 10522 break; 10523 10524 CASE_FLT_FN (BUILT_IN_ATANH): 10525 if (validate_arg (arg0, REAL_TYPE)) 10526 return do_mpfr_arg1 (arg0, type, mpfr_atanh, 10527 &dconstm1, &dconst1, false); 10528 break; 10529 10530 CASE_FLT_FN 
(BUILT_IN_SIN): 10531 if (validate_arg (arg0, REAL_TYPE)) 10532 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0); 10533 break; 10534 10535 CASE_FLT_FN (BUILT_IN_COS): 10536 return fold_builtin_cos (loc, arg0, type, fndecl); 10537 10538 CASE_FLT_FN (BUILT_IN_TAN): 10539 return fold_builtin_tan (arg0, type); 10540 10541 CASE_FLT_FN (BUILT_IN_CEXP): 10542 return fold_builtin_cexp (loc, arg0, type); 10543 10544 CASE_FLT_FN (BUILT_IN_CEXPI): 10545 if (validate_arg (arg0, REAL_TYPE)) 10546 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE); 10547 break; 10548 10549 CASE_FLT_FN (BUILT_IN_SINH): 10550 if (validate_arg (arg0, REAL_TYPE)) 10551 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0); 10552 break; 10553 10554 CASE_FLT_FN (BUILT_IN_COSH): 10555 return fold_builtin_cosh (loc, arg0, type, fndecl); 10556 10557 CASE_FLT_FN (BUILT_IN_TANH): 10558 if (validate_arg (arg0, REAL_TYPE)) 10559 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0); 10560 break; 10561 10562 CASE_FLT_FN (BUILT_IN_ERF): 10563 if (validate_arg (arg0, REAL_TYPE)) 10564 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0); 10565 break; 10566 10567 CASE_FLT_FN (BUILT_IN_ERFC): 10568 if (validate_arg (arg0, REAL_TYPE)) 10569 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0); 10570 break; 10571 10572 CASE_FLT_FN (BUILT_IN_TGAMMA): 10573 if (validate_arg (arg0, REAL_TYPE)) 10574 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0); 10575 break; 10576 10577 CASE_FLT_FN (BUILT_IN_EXP): 10578 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp); 10579 10580 CASE_FLT_FN (BUILT_IN_EXP2): 10581 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2); 10582 10583 CASE_FLT_FN (BUILT_IN_EXP10): 10584 CASE_FLT_FN (BUILT_IN_POW10): 10585 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10); 10586 10587 CASE_FLT_FN (BUILT_IN_EXPM1): 10588 if (validate_arg (arg0, REAL_TYPE)) 10589 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0); 10590 break; 10591 10592 CASE_FLT_FN (BUILT_IN_LOG): 10593 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log); 10594 10595 CASE_FLT_FN (BUILT_IN_LOG2): 10596 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2); 10597 10598 CASE_FLT_FN (BUILT_IN_LOG10): 10599 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10); 10600 10601 CASE_FLT_FN (BUILT_IN_LOG1P): 10602 if (validate_arg (arg0, REAL_TYPE)) 10603 return do_mpfr_arg1 (arg0, type, mpfr_log1p, 10604 &dconstm1, NULL, false); 10605 break; 10606 10607 CASE_FLT_FN (BUILT_IN_J0): 10608 if (validate_arg (arg0, REAL_TYPE)) 10609 return do_mpfr_arg1 (arg0, type, mpfr_j0, 10610 NULL, NULL, 0); 10611 break; 10612 10613 CASE_FLT_FN (BUILT_IN_J1): 10614 if (validate_arg (arg0, REAL_TYPE)) 10615 return do_mpfr_arg1 (arg0, type, mpfr_j1, 10616 NULL, NULL, 0); 10617 break; 10618 10619 CASE_FLT_FN (BUILT_IN_Y0): 10620 if (validate_arg (arg0, REAL_TYPE)) 10621 return do_mpfr_arg1 (arg0, type, mpfr_y0, 10622 &dconst0, NULL, false); 10623 break; 10624 10625 CASE_FLT_FN (BUILT_IN_Y1): 10626 if (validate_arg (arg0, REAL_TYPE)) 10627 return do_mpfr_arg1 (arg0, type, mpfr_y1, 10628 &dconst0, NULL, false); 10629 break; 10630 10631 CASE_FLT_FN (BUILT_IN_NAN): 10632 case BUILT_IN_NAND32: 10633 case BUILT_IN_NAND64: 10634 case BUILT_IN_NAND128: 10635 return fold_builtin_nan (arg0, type, true); 10636 10637 CASE_FLT_FN (BUILT_IN_NANS): 10638 return fold_builtin_nan (arg0, type, false); 10639 10640 CASE_FLT_FN (BUILT_IN_FLOOR): 10641 return fold_builtin_floor (loc, fndecl, arg0); 10642 10643 
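/* Illustrative examples (not part of the original sources): with
   constant operands, floor (2.5) just above folds to 2.0 and ceil (2.5)
   just below folds to 3.0; lround (2.5) further down folds to the
   integer constant 3 under round-half-away-from-zero.  */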
CASE_FLT_FN (BUILT_IN_CEIL): 10644 return fold_builtin_ceil (loc, fndecl, arg0); 10645 10646 CASE_FLT_FN (BUILT_IN_TRUNC): 10647 return fold_builtin_trunc (loc, fndecl, arg0); 10648 10649 CASE_FLT_FN (BUILT_IN_ROUND): 10650 return fold_builtin_round (loc, fndecl, arg0); 10651 10652 CASE_FLT_FN (BUILT_IN_NEARBYINT): 10653 CASE_FLT_FN (BUILT_IN_RINT): 10654 return fold_trunc_transparent_mathfn (loc, fndecl, arg0); 10655 10656 CASE_FLT_FN (BUILT_IN_ICEIL): 10657 CASE_FLT_FN (BUILT_IN_LCEIL): 10658 CASE_FLT_FN (BUILT_IN_LLCEIL): 10659 CASE_FLT_FN (BUILT_IN_LFLOOR): 10660 CASE_FLT_FN (BUILT_IN_IFLOOR): 10661 CASE_FLT_FN (BUILT_IN_LLFLOOR): 10662 CASE_FLT_FN (BUILT_IN_IROUND): 10663 CASE_FLT_FN (BUILT_IN_LROUND): 10664 CASE_FLT_FN (BUILT_IN_LLROUND): 10665 return fold_builtin_int_roundingfn (loc, fndecl, arg0); 10666 10667 CASE_FLT_FN (BUILT_IN_IRINT): 10668 CASE_FLT_FN (BUILT_IN_LRINT): 10669 CASE_FLT_FN (BUILT_IN_LLRINT): 10670 return fold_fixed_mathfn (loc, fndecl, arg0); 10671 10672 case BUILT_IN_BSWAP16: 10673 case BUILT_IN_BSWAP32: 10674 case BUILT_IN_BSWAP64: 10675 return fold_builtin_bswap (fndecl, arg0); 10676 10677 CASE_INT_FN (BUILT_IN_FFS): 10678 CASE_INT_FN (BUILT_IN_CLZ): 10679 CASE_INT_FN (BUILT_IN_CTZ): 10680 CASE_INT_FN (BUILT_IN_CLRSB): 10681 CASE_INT_FN (BUILT_IN_POPCOUNT): 10682 CASE_INT_FN (BUILT_IN_PARITY): 10683 return fold_builtin_bitop (fndecl, arg0); 10684 10685 CASE_FLT_FN (BUILT_IN_SIGNBIT): 10686 return fold_builtin_signbit (loc, arg0, type); 10687 10688 CASE_FLT_FN (BUILT_IN_SIGNIFICAND): 10689 return fold_builtin_significand (loc, arg0, type); 10690 10691 CASE_FLT_FN (BUILT_IN_ILOGB): 10692 CASE_FLT_FN (BUILT_IN_LOGB): 10693 return fold_builtin_logb (loc, arg0, type); 10694 10695 case BUILT_IN_ISASCII: 10696 return fold_builtin_isascii (loc, arg0); 10697 10698 case BUILT_IN_TOASCII: 10699 return fold_builtin_toascii (loc, arg0); 10700 10701 case BUILT_IN_ISDIGIT: 10702 return fold_builtin_isdigit (loc, arg0); 10703 10704 CASE_FLT_FN (BUILT_IN_FINITE): 10705 case BUILT_IN_FINITED32: 10706 case BUILT_IN_FINITED64: 10707 case BUILT_IN_FINITED128: 10708 case BUILT_IN_ISFINITE: 10709 { 10710 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE); 10711 if (ret) 10712 return ret; 10713 return fold_builtin_interclass_mathfn (loc, fndecl, arg0); 10714 } 10715 10716 CASE_FLT_FN (BUILT_IN_ISINF): 10717 case BUILT_IN_ISINFD32: 10718 case BUILT_IN_ISINFD64: 10719 case BUILT_IN_ISINFD128: 10720 { 10721 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF); 10722 if (ret) 10723 return ret; 10724 return fold_builtin_interclass_mathfn (loc, fndecl, arg0); 10725 } 10726 10727 case BUILT_IN_ISNORMAL: 10728 return fold_builtin_interclass_mathfn (loc, fndecl, arg0); 10729 10730 case BUILT_IN_ISINF_SIGN: 10731 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN); 10732 10733 CASE_FLT_FN (BUILT_IN_ISNAN): 10734 case BUILT_IN_ISNAND32: 10735 case BUILT_IN_ISNAND64: 10736 case BUILT_IN_ISNAND128: 10737 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN); 10738 10739 case BUILT_IN_PRINTF: 10740 case BUILT_IN_PRINTF_UNLOCKED: 10741 case BUILT_IN_VPRINTF: 10742 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode); 10743 10744 case BUILT_IN_FREE: 10745 if (integer_zerop (arg0)) 10746 return build_empty_stmt (loc); 10747 break; 10748 10749 default: 10750 break; 10751 } 10752 10753 return NULL_TREE; 10754 10755 } 10756 10757 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1. 
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr
	(loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
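/* Two examples of the two-argument foldings above: a __builtin_stpcpy
   whose result is ignored becomes a plain strcpy call, and the C99
   comparison macros such as __builtin_isgreater are rewritten in terms
   of the unordered compare codes (UNLE_EXPR and friends), which is how
   their NaN-safe semantics are preserved.  */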
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMSET:
      return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);

    case BUILT_IN_BCOPY:
      return fold_builtin_memory_op (loc, arg1, arg0, arg2,
				     void_type_node, true, /*endp=*/3);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/0);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/3);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
				      ignore, fcode);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
				   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
				     ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */
static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
				       ignore, fcode);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
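/* For example, a call sin (x) reaches fold_builtin_n with nargs == 1
   and is dispatched to fold_builtin_1 above; any non-NULL result is
   wrapped in a NOP_EXPR with TREE_NO_WARNING set, so that deleting the
   original call does not provoke "statement with no effect" style
   warnings later on.  */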
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an always_inline wrapper, defer folding
   it until after always_inline functions have been inlined, otherwise
   e.g. -D_FORTIFY_SOURCE checking might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead the last argument is __builtin_va_arg_pack ().  Defer
	 folding even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
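/* As an illustration of avoid_folding_inline_builtin: with
   -D_FORTIFY_SOURCE a C library may define string functions as
   always_inline wrappers that call the corresponding __*_chk builtins.
   Folding such a builtin before the wrapper has been inlined would
   bypass the object-size checking, so folding is deferred until
   always_inline functions have been inlined.  */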
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and N arguments are passed in the array
   ARGARRAY.  */

tree
build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and the arguments are passed in the vector
   VEC.  */

tree
build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
{
  return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
				    vec_safe_address (vec));
}


/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (loc, fndecl, n, argarray);
}

/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.  */

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}

/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments specified as the "..."
   parameters.  SKIP is the number of arguments in ARGS to be omitted.
   OLDNARGS is the number of elements in ARGS.  */

static tree
rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
			 int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
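/* Typical use of validate_arglist: a trailing VOID_TYPE specifier
   demands an exact argument count, while a trailing 0 permits extra
   arguments.  For instance

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, as used by
   expand_builtin_object_size below.  */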
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
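/* Sketches of the strstr foldings above, assuming the noted string
   arguments are compile-time constants:

     strstr (s1, "")        -> (char *) s1
     strstr ("hello", "l")  -> "hello" + 2
     strstr (s1, "l")       -> strchr (s1, 'l')  */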
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
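/* Sketches of the strchr/strrchr foldings above, assuming a constant
   string argument where one is used:

     strchr ("hello", 'e')  -> "hello" + 1
     strrchr (s1, '\0')     -> strchr (s1, '\0')  */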
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
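/* Sketches of the strpbrk foldings above:

     strpbrk (s1, "")   -> NULL, with s1 still evaluated for its
			   side effects
     strpbrk (s1, "c")  -> strchr (s1, 'c')  */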
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
		     tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

	  if (!strlen_fn || !memcpy_fn)
	    return NULL_TREE;

	  /* If the length of the source string isn't computable don't
	     split strcat into strlen and memcpy.  */
	  if (! len)
	    len = c_strlen (src, 1);
	  if (! len || TREE_SIDE_EFFECTS (len))
	    return NULL_TREE;

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  len = fold_convert_loc (loc, size_type_node, len);
	  len = size_binop_loc (loc, PLUS_EXPR, len,
				build_int_cst (size_type_node, 1));

	  call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}

/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
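/* Sketches of the strcat/strncat foldings above, assuming constant
   operands where noted:

     strcat (dst, "")        -> dst
     strcat (dst, src)       -> memcpy (dst + strlen (dst), src,
					strlen (src) + 1), dst
				(when optimizing for speed and the
				source length is computable)
     strncat (dst, "ab", 5)  -> strcat (dst, "ab")  */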
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
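/* Sketches of the strspn/strcspn foldings above:

     strspn ("ab", "a")  -> 1, computed at compile time
     strspn (s1, "")     -> 0, evaluating s1 and s2 for side effects
     strcspn (s1, "")    -> strlen (s1)  */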
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */

tree
fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
		    bool ignore, bool unlocked, tree len)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (!ignore)
    return NULL_TREE;

  /* Verify the arguments in the original call.  */
  if (!validate_arg (arg0, POINTER_TYPE)
      || !validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (! len)
    len = c_strlen (arg0, 0);

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      return omit_one_operand_loc (loc, integer_type_node,
				   integer_zero_node, arg1);

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);

	if (p != NULL)
	  {
	    if (fn_fputc)
	      return build_call_expr_loc (loc, fn_fputc, 2,
					  build_int_cst
					  (integer_type_node, p[0]), arg1);
	    else
	      return NULL_TREE;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return NULL_TREE;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (fn_fwrite)
	  return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
				      size_one_node, len, arg1);
	else
	  return NULL_TREE;
      }
    default:
      gcc_unreachable ();
    }
  return NULL_TREE;
}
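/* Sketches of the fputs foldings above, for a constant string whose
   length is known at compile time:

     fputs ("", f)   -> 0, evaluating f for side effects
     fputs ("h", f)  -> fputc ('h', f)
     fputs (s, f)    -> fwrite (s, 1, strlen (s), f)
			(unless optimizing for size)  */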
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something other
	     than the last argument even though the user wrote the last
	     argument.  We just warn and keep going, so wrong code will
	     still be generated because of it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}


/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
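/* Sketches of the sprintf foldings above; the strlen-based return
   value is only materialized when the caller uses it:

     sprintf (d, "abc")      -> strcpy (d, "abc"), 3
     sprintf (d, "%s", str)  -> strcpy (d, str)  */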
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
	return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
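/* Sketches of the snprintf foldings above; both require the copied
   length to be known and smaller than DESTSIZE:

     snprintf (d, 4, "abc")       -> strcpy (d, "abc"), 3
     snprintf (d, 8, "%s", "ab")  -> strcpy (d, "ab"), 2  */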
*/ 12496 12497 rtx 12498 expand_builtin_object_size (tree exp) 12499 { 12500 tree ost; 12501 int object_size_type; 12502 tree fndecl = get_callee_fndecl (exp); 12503 12504 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) 12505 { 12506 error ("%Kfirst argument of %D must be a pointer, second integer constant", 12507 exp, fndecl); 12508 expand_builtin_trap (); 12509 return const0_rtx; 12510 } 12511 12512 ost = CALL_EXPR_ARG (exp, 1); 12513 STRIP_NOPS (ost); 12514 12515 if (TREE_CODE (ost) != INTEGER_CST 12516 || tree_int_cst_sgn (ost) < 0 12517 || compare_tree_int (ost, 3) > 0) 12518 { 12519 error ("%Klast argument of %D is not integer constant between 0 and 3", 12520 exp, fndecl); 12521 expand_builtin_trap (); 12522 return const0_rtx; 12523 } 12524 12525 object_size_type = tree_low_cst (ost, 0); 12526 12527 return object_size_type < 2 ? constm1_rtx : const0_rtx; 12528 } 12529 12530 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin. 12531 FCODE is the BUILT_IN_* to use. 12532 Return NULL_RTX if we failed; the caller should emit a normal call, 12533 otherwise try to get the result in TARGET, if convenient (and in 12534 mode MODE if that's convenient). */ 12535 12536 static rtx 12537 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode, 12538 enum built_in_function fcode) 12539 { 12540 tree dest, src, len, size; 12541 12542 if (!validate_arglist (exp, 12543 POINTER_TYPE, 12544 fcode == BUILT_IN_MEMSET_CHK 12545 ? INTEGER_TYPE : POINTER_TYPE, 12546 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)) 12547 return NULL_RTX; 12548 12549 dest = CALL_EXPR_ARG (exp, 0); 12550 src = CALL_EXPR_ARG (exp, 1); 12551 len = CALL_EXPR_ARG (exp, 2); 12552 size = CALL_EXPR_ARG (exp, 3); 12553 12554 if (! host_integerp (size, 1)) 12555 return NULL_RTX; 12556 12557 if (host_integerp (len, 1) || integer_all_onesp (size)) 12558 { 12559 tree fn; 12560 12561 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len)) 12562 { 12563 warning_at (tree_nonartificial_location (exp), 12564 0, "%Kcall to %D will always overflow destination buffer", 12565 exp, get_callee_fndecl (exp)); 12566 return NULL_RTX; 12567 } 12568 12569 fn = NULL_TREE; 12570 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume 12571 mem{cpy,pcpy,move,set} is available. */ 12572 switch (fcode) 12573 { 12574 case BUILT_IN_MEMCPY_CHK: 12575 fn = builtin_decl_explicit (BUILT_IN_MEMCPY); 12576 break; 12577 case BUILT_IN_MEMPCPY_CHK: 12578 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY); 12579 break; 12580 case BUILT_IN_MEMMOVE_CHK: 12581 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE); 12582 break; 12583 case BUILT_IN_MEMSET_CHK: 12584 fn = builtin_decl_explicit (BUILT_IN_MEMSET); 12585 break; 12586 default: 12587 break; 12588 } 12589 12590 if (! fn) 12591 return NULL_RTX; 12592 12593 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len); 12594 gcc_assert (TREE_CODE (fn) == CALL_EXPR); 12595 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp); 12596 return expand_expr (fn, target, mode, EXPAND_NORMAL); 12597 } 12598 else if (fcode == BUILT_IN_MEMSET_CHK) 12599 return NULL_RTX; 12600 else 12601 { 12602 unsigned int dest_align = get_pointer_alignment (dest); 12603 12604 /* If DEST is not a pointer type, call the normal function. */ 12605 if (dest_align == 0) 12606 return NULL_RTX; 12607 12608 /* If SRC and DEST are the same (and not volatile), do nothing. 
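For instance (illustrative), __memcpy_chk (p, p, n, sz) reduces to p
   after evaluating n for its side effects, while __mempcpy_chk (p, p, n, sz)
   reduces to p + n.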
*/ 12609 if (operand_equal_p (src, dest, 0)) 12610 { 12611 tree expr; 12612 12613 if (fcode != BUILT_IN_MEMPCPY_CHK) 12614 { 12615 /* Evaluate and ignore LEN in case it has side-effects. */ 12616 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL); 12617 return expand_expr (dest, target, mode, EXPAND_NORMAL); 12618 } 12619 12620 expr = fold_build_pointer_plus (dest, len); 12621 return expand_expr (expr, target, mode, EXPAND_NORMAL); 12622 } 12623 12624 /* __memmove_chk special case. */ 12625 if (fcode == BUILT_IN_MEMMOVE_CHK) 12626 { 12627 unsigned int src_align = get_pointer_alignment (src); 12628 12629 if (src_align == 0) 12630 return NULL_RTX; 12631 12632 /* If src is categorized for a readonly section we can use 12633 normal __memcpy_chk. */ 12634 if (readonly_data_expr (src)) 12635 { 12636 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 12637 if (!fn) 12638 return NULL_RTX; 12639 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4, 12640 dest, src, len, size); 12641 gcc_assert (TREE_CODE (fn) == CALL_EXPR); 12642 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp); 12643 return expand_expr (fn, target, mode, EXPAND_NORMAL); 12644 } 12645 } 12646 return NULL_RTX; 12647 } 12648 } 12649 12650 /* Emit warning if a buffer overflow is detected at compile time. */ 12651 12652 static void 12653 maybe_emit_chk_warning (tree exp, enum built_in_function fcode) 12654 { 12655 int is_strlen = 0; 12656 tree len, size; 12657 location_t loc = tree_nonartificial_location (exp); 12658 12659 switch (fcode) 12660 { 12661 case BUILT_IN_STRCPY_CHK: 12662 case BUILT_IN_STPCPY_CHK: 12663 /* For __strcat_chk the warning will be emitted only if overflowing 12664 by at least strlen (dest) + 1 bytes. */ 12665 case BUILT_IN_STRCAT_CHK: 12666 len = CALL_EXPR_ARG (exp, 1); 12667 size = CALL_EXPR_ARG (exp, 2); 12668 is_strlen = 1; 12669 break; 12670 case BUILT_IN_STRNCAT_CHK: 12671 case BUILT_IN_STRNCPY_CHK: 12672 case BUILT_IN_STPNCPY_CHK: 12673 len = CALL_EXPR_ARG (exp, 2); 12674 size = CALL_EXPR_ARG (exp, 3); 12675 break; 12676 case BUILT_IN_SNPRINTF_CHK: 12677 case BUILT_IN_VSNPRINTF_CHK: 12678 len = CALL_EXPR_ARG (exp, 1); 12679 size = CALL_EXPR_ARG (exp, 3); 12680 break; 12681 default: 12682 gcc_unreachable (); 12683 } 12684 12685 if (!len || !size) 12686 return; 12687 12688 if (! host_integerp (size, 1) || integer_all_onesp (size)) 12689 return; 12690 12691 if (is_strlen) 12692 { 12693 len = c_strlen (len, 1); 12694 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size)) 12695 return; 12696 } 12697 else if (fcode == BUILT_IN_STRNCAT_CHK) 12698 { 12699 tree src = CALL_EXPR_ARG (exp, 1); 12700 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size)) 12701 return; 12702 src = c_strlen (src, 1); 12703 if (! src || ! host_integerp (src, 1)) 12704 { 12705 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer", 12706 exp, get_callee_fndecl (exp)); 12707 return; 12708 } 12709 else if (tree_int_cst_lt (src, size)) 12710 return; 12711 } 12712 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len)) 12713 return; 12714 12715 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer", 12716 exp, get_callee_fndecl (exp)); 12717 } 12718 12719 /* Emit warning if a buffer overflow is detected at compile time 12720 in __sprintf_chk/__vsprintf_chk calls. 
*/ 12721 12722 static void 12723 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode) 12724 { 12725 tree size, len, fmt; 12726 const char *fmt_str; 12727 int nargs = call_expr_nargs (exp); 12728 12729 /* Verify the required arguments in the original call. */ 12730 12731 if (nargs < 4) 12732 return; 12733 size = CALL_EXPR_ARG (exp, 2); 12734 fmt = CALL_EXPR_ARG (exp, 3); 12735 12736 if (! host_integerp (size, 1) || integer_all_onesp (size)) 12737 return; 12738 12739 /* Check whether the format is a literal string constant. */ 12740 fmt_str = c_getstr (fmt); 12741 if (fmt_str == NULL) 12742 return; 12743 12744 if (!init_target_chars ()) 12745 return; 12746 12747 /* If the format doesn't contain % args or %%, we know its size. */ 12748 if (strchr (fmt_str, target_percent) == 0) 12749 len = build_int_cstu (size_type_node, strlen (fmt_str)); 12750 /* If the format is "%s" and first ... argument is a string literal, 12751 we know it too. */ 12752 else if (fcode == BUILT_IN_SPRINTF_CHK 12753 && strcmp (fmt_str, target_percent_s) == 0) 12754 { 12755 tree arg; 12756 12757 if (nargs < 5) 12758 return; 12759 arg = CALL_EXPR_ARG (exp, 4); 12760 if (! POINTER_TYPE_P (TREE_TYPE (arg))) 12761 return; 12762 12763 len = c_strlen (arg, 1); 12764 if (!len || ! host_integerp (len, 1)) 12765 return; 12766 } 12767 else 12768 return; 12769 12770 if (! tree_int_cst_lt (len, size)) 12771 warning_at (tree_nonartificial_location (exp), 12772 0, "%Kcall to %D will always overflow destination buffer", 12773 exp, get_callee_fndecl (exp)); 12774 } 12775 12776 /* Emit warning if a free is called with address of a variable. */ 12777 12778 static void 12779 maybe_emit_free_warning (tree exp) 12780 { 12781 tree arg = CALL_EXPR_ARG (exp, 0); 12782 12783 STRIP_NOPS (arg); 12784 if (TREE_CODE (arg) != ADDR_EXPR) 12785 return; 12786 12787 arg = get_base_address (TREE_OPERAND (arg, 0)); 12788 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF) 12789 return; 12790 12791 if (SSA_VAR_P (arg)) 12792 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object, 12793 "%Kattempt to free a non-heap object %qD", exp, arg); 12794 else 12795 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object, 12796 "%Kattempt to free a non-heap object", exp); 12797 } 12798 12799 /* Fold a call to __builtin_object_size with arguments PTR and OST, 12800 if possible. */ 12801 12802 tree 12803 fold_builtin_object_size (tree ptr, tree ost) 12804 { 12805 unsigned HOST_WIDE_INT bytes; 12806 int object_size_type; 12807 12808 if (!validate_arg (ptr, POINTER_TYPE) 12809 || !validate_arg (ost, INTEGER_TYPE)) 12810 return NULL_TREE; 12811 12812 STRIP_NOPS (ost); 12813 12814 if (TREE_CODE (ost) != INTEGER_CST 12815 || tree_int_cst_sgn (ost) < 0 12816 || compare_tree_int (ost, 3) > 0) 12817 return NULL_TREE; 12818 12819 object_size_type = tree_low_cst (ost, 0); 12820 12821 /* __builtin_object_size doesn't evaluate side-effects in its arguments; 12822 if there are any side-effects, it returns (size_t) -1 for types 0 and 1 12823 and (size_t) 0 for types 2 and 3. */ 12824 if (TREE_SIDE_EFFECTS (ptr)) 12825 return build_int_cst_type (size_type_node, object_size_type < 2 ? 
-1 : 0);
12826
12827 if (TREE_CODE (ptr) == ADDR_EXPR)
12828 {
12829 bytes = compute_builtin_object_size (ptr, object_size_type);
12830 if (double_int_fits_to_tree_p (size_type_node,
12831 double_int::from_uhwi (bytes)))
12832 return build_int_cstu (size_type_node, bytes);
12833 }
12834 else if (TREE_CODE (ptr) == SSA_NAME)
12835 {
12836 /* If object size is not known yet, delay folding until
12837 later. Maybe subsequent passes will help determine
12838 it. */
12839 bytes = compute_builtin_object_size (ptr, object_size_type);
12840 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12841 && double_int_fits_to_tree_p (size_type_node,
12842 double_int::from_uhwi (bytes)))
12843 return build_int_cstu (size_type_node, bytes);
12844 }
12845
12846 return NULL_TREE;
12847 }
12848
12849 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12850 DEST, SRC, LEN, and SIZE are the arguments to the call.
12851 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12852 code of the builtin. If MAXLEN is not NULL, it is maximum length
12853 passed as third argument. */
12854
12855 tree
12856 fold_builtin_memory_chk (location_t loc, tree fndecl,
12857 tree dest, tree src, tree len, tree size,
12858 tree maxlen, bool ignore,
12859 enum built_in_function fcode)
12860 {
12861 tree fn;
12862
12863 if (!validate_arg (dest, POINTER_TYPE)
12864 || !validate_arg (src,
12865 (fcode == BUILT_IN_MEMSET_CHK
12866 ? INTEGER_TYPE : POINTER_TYPE))
12867 || !validate_arg (len, INTEGER_TYPE)
12868 || !validate_arg (size, INTEGER_TYPE))
12869 return NULL_TREE;
12870
12871 /* If SRC and DEST are the same (and not volatile), return DEST
12872 (resp. DEST+LEN for __mempcpy_chk). */
12873 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12874 {
12875 if (fcode != BUILT_IN_MEMPCPY_CHK)
12876 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12877 dest, len);
12878 else
12879 {
12880 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12881 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12882 }
12883 }
12884
12885 if (! host_integerp (size, 1))
12886 return NULL_TREE;
12887
12888 if (! integer_all_onesp (size))
12889 {
12890 if (! host_integerp (len, 1))
12891 {
12892 /* If LEN is not constant, try MAXLEN too.
12893 For MAXLEN only allow optimizing into non-_ocs function
12894 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12895 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12896 {
12897 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12898 {
12899 /* (void) __mempcpy_chk () can be optimized into
12900 (void) __memcpy_chk (). */
12901 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12902 if (!fn)
12903 return NULL_TREE;
12904
12905 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12906 }
12907 return NULL_TREE;
12908 }
12909 }
12910 else
12911 maxlen = len;
12912
12913 if (tree_int_cst_lt (size, maxlen))
12914 return NULL_TREE;
12915 }
12916
12917 fn = NULL_TREE;
12918 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12919 mem{cpy,pcpy,move,set} is available.
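For example (illustrative), __memcpy_chk (d, s, 16, 32) becomes
   memcpy (d, s, 16) here, the check above having established that the
   known length 16 does not exceed the object size 32.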
*/ 12920 switch (fcode) 12921 { 12922 case BUILT_IN_MEMCPY_CHK: 12923 fn = builtin_decl_explicit (BUILT_IN_MEMCPY); 12924 break; 12925 case BUILT_IN_MEMPCPY_CHK: 12926 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY); 12927 break; 12928 case BUILT_IN_MEMMOVE_CHK: 12929 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE); 12930 break; 12931 case BUILT_IN_MEMSET_CHK: 12932 fn = builtin_decl_explicit (BUILT_IN_MEMSET); 12933 break; 12934 default: 12935 break; 12936 } 12937 12938 if (!fn) 12939 return NULL_TREE; 12940 12941 return build_call_expr_loc (loc, fn, 3, dest, src, len); 12942 } 12943 12944 /* Fold a call to the __st[rp]cpy_chk builtin. 12945 DEST, SRC, and SIZE are the arguments to the call. 12946 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_* 12947 code of the builtin. If MAXLEN is not NULL, it is maximum length of 12948 strings passed as second argument. */ 12949 12950 tree 12951 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest, 12952 tree src, tree size, 12953 tree maxlen, bool ignore, 12954 enum built_in_function fcode) 12955 { 12956 tree len, fn; 12957 12958 if (!validate_arg (dest, POINTER_TYPE) 12959 || !validate_arg (src, POINTER_TYPE) 12960 || !validate_arg (size, INTEGER_TYPE)) 12961 return NULL_TREE; 12962 12963 /* If SRC and DEST are the same (and not volatile), return DEST. */ 12964 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0)) 12965 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest); 12966 12967 if (! host_integerp (size, 1)) 12968 return NULL_TREE; 12969 12970 if (! integer_all_onesp (size)) 12971 { 12972 len = c_strlen (src, 1); 12973 if (! len || ! host_integerp (len, 1)) 12974 { 12975 /* If LEN is not constant, try MAXLEN too. 12976 For MAXLEN only allow optimizing into non-_ocs function 12977 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 12978 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1)) 12979 { 12980 if (fcode == BUILT_IN_STPCPY_CHK) 12981 { 12982 if (! ignore) 12983 return NULL_TREE; 12984 12985 /* If return value of __stpcpy_chk is ignored, 12986 optimize into __strcpy_chk. */ 12987 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK); 12988 if (!fn) 12989 return NULL_TREE; 12990 12991 return build_call_expr_loc (loc, fn, 3, dest, src, size); 12992 } 12993 12994 if (! len || TREE_SIDE_EFFECTS (len)) 12995 return NULL_TREE; 12996 12997 /* If c_strlen returned something, but not a constant, 12998 transform __strcpy_chk into __memcpy_chk. */ 12999 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 13000 if (!fn) 13001 return NULL_TREE; 13002 13003 len = fold_convert_loc (loc, size_type_node, len); 13004 len = size_binop_loc (loc, PLUS_EXPR, len, 13005 build_int_cst (size_type_node, 1)); 13006 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), 13007 build_call_expr_loc (loc, fn, 4, 13008 dest, src, len, size)); 13009 } 13010 } 13011 else 13012 maxlen = len; 13013 13014 if (! tree_int_cst_lt (maxlen, size)) 13015 return NULL_TREE; 13016 } 13017 13018 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */ 13019 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK 13020 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY); 13021 if (!fn) 13022 return NULL_TREE; 13023 13024 return build_call_expr_loc (loc, fn, 2, dest, src); 13025 } 13026 13027 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE 13028 are the arguments to the call. If MAXLEN is not NULL, it is maximum 13029 length passed as third argument. 
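For example (illustrative), __strncpy_chk (d, s, 8, 32) folds to
   strncpy (d, s, 8), since the constant length 8 cannot exceed the
   object size 32.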
IGNORE is true if return value can be
13030 ignored. FCODE is the BUILT_IN_* code of the builtin. */
13031
13032 tree
13033 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
13034 tree len, tree size, tree maxlen, bool ignore,
13035 enum built_in_function fcode)
13036 {
13037 tree fn;
13038
13039 if (!validate_arg (dest, POINTER_TYPE)
13040 || !validate_arg (src, POINTER_TYPE)
13041 || !validate_arg (len, INTEGER_TYPE)
13042 || !validate_arg (size, INTEGER_TYPE))
13043 return NULL_TREE;
13044
13045 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13046 {
13047 /* If return value of __stpncpy_chk is ignored,
13048 optimize into __strncpy_chk. */
13049 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13050 if (fn)
13051 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13052 }
13053
13054 if (! host_integerp (size, 1))
13055 return NULL_TREE;
13056
13057 if (! integer_all_onesp (size))
13058 {
13059 if (! host_integerp (len, 1))
13060 {
13061 /* If LEN is not constant, try MAXLEN too.
13062 For MAXLEN only allow optimizing into non-_ocs function
13063 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13064 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13065 return NULL_TREE;
13066 }
13067 else
13068 maxlen = len;
13069
13070 if (tree_int_cst_lt (size, maxlen))
13071 return NULL_TREE;
13072 }
13073
13074 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13075 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13076 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13077 if (!fn)
13078 return NULL_TREE;
13079
13080 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13081 }
13082
13083 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13084 are the arguments to the call. */
13085
13086 static tree
13087 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13088 tree src, tree size)
13089 {
13090 tree fn;
13091 const char *p;
13092
13093 if (!validate_arg (dest, POINTER_TYPE)
13094 || !validate_arg (src, POINTER_TYPE)
13095 || !validate_arg (size, INTEGER_TYPE))
13096 return NULL_TREE;
13097
13098 p = c_getstr (src);
13099 /* If the SRC parameter is "", return DEST. */
13100 if (p && *p == '\0')
13101 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13102
13103 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13104 return NULL_TREE;
13105
13106 /* If __builtin_strcat_chk is used, assume strcat is available. */
13107 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13108 if (!fn)
13109 return NULL_TREE;
13110
13111 return build_call_expr_loc (loc, fn, 2, dest, src);
13112 }
13113
13114 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13115 LEN, and SIZE. */
13116
13117 static tree
13118 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13119 tree dest, tree src, tree len, tree size)
13120 {
13121 tree fn;
13122 const char *p;
13123
13124 if (!validate_arg (dest, POINTER_TYPE)
13125 || !validate_arg (src, POINTER_TYPE)
13126 || !validate_arg (len, INTEGER_TYPE)
13127 || !validate_arg (size, INTEGER_TYPE))
13128 return NULL_TREE;
13129
13130 p = c_getstr (src);
13131 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13132 if (p && *p == '\0')
13133 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13134 else if (integer_zerop (len))
13135 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13136
13137 if (!
host_integerp (size, 1)) 13138 return NULL_TREE; 13139 13140 if (! integer_all_onesp (size)) 13141 { 13142 tree src_len = c_strlen (src, 1); 13143 if (src_len 13144 && host_integerp (src_len, 1) 13145 && host_integerp (len, 1) 13146 && ! tree_int_cst_lt (len, src_len)) 13147 { 13148 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */ 13149 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK); 13150 if (!fn) 13151 return NULL_TREE; 13152 13153 return build_call_expr_loc (loc, fn, 3, dest, src, size); 13154 } 13155 return NULL_TREE; 13156 } 13157 13158 /* If __builtin_strncat_chk is used, assume strncat is available. */ 13159 fn = builtin_decl_explicit (BUILT_IN_STRNCAT); 13160 if (!fn) 13161 return NULL_TREE; 13162 13163 return build_call_expr_loc (loc, fn, 3, dest, src, len); 13164 } 13165 13166 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS. 13167 Return NULL_TREE if a normal call should be emitted rather than 13168 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK 13169 or BUILT_IN_VSPRINTF_CHK. */ 13170 13171 static tree 13172 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args, 13173 enum built_in_function fcode) 13174 { 13175 tree dest, size, len, fn, fmt, flag; 13176 const char *fmt_str; 13177 13178 /* Verify the required arguments in the original call. */ 13179 if (nargs < 4) 13180 return NULL_TREE; 13181 dest = args[0]; 13182 if (!validate_arg (dest, POINTER_TYPE)) 13183 return NULL_TREE; 13184 flag = args[1]; 13185 if (!validate_arg (flag, INTEGER_TYPE)) 13186 return NULL_TREE; 13187 size = args[2]; 13188 if (!validate_arg (size, INTEGER_TYPE)) 13189 return NULL_TREE; 13190 fmt = args[3]; 13191 if (!validate_arg (fmt, POINTER_TYPE)) 13192 return NULL_TREE; 13193 13194 if (! host_integerp (size, 1)) 13195 return NULL_TREE; 13196 13197 len = NULL_TREE; 13198 13199 if (!init_target_chars ()) 13200 return NULL_TREE; 13201 13202 /* Check whether the format is a literal string constant. */ 13203 fmt_str = c_getstr (fmt); 13204 if (fmt_str != NULL) 13205 { 13206 /* If the format doesn't contain % args or %%, we know the size. */ 13207 if (strchr (fmt_str, target_percent) == 0) 13208 { 13209 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4) 13210 len = build_int_cstu (size_type_node, strlen (fmt_str)); 13211 } 13212 /* If the format is "%s" and first ... argument is a string literal, 13213 we know the size too. */ 13214 else if (fcode == BUILT_IN_SPRINTF_CHK 13215 && strcmp (fmt_str, target_percent_s) == 0) 13216 { 13217 tree arg; 13218 13219 if (nargs == 5) 13220 { 13221 arg = args[4]; 13222 if (validate_arg (arg, POINTER_TYPE)) 13223 { 13224 len = c_strlen (arg, 1); 13225 if (! len || ! host_integerp (len, 1)) 13226 len = NULL_TREE; 13227 } 13228 } 13229 } 13230 } 13231 13232 if (! integer_all_onesp (size)) 13233 { 13234 if (! len || ! tree_int_cst_lt (len, size)) 13235 return NULL_TREE; 13236 } 13237 13238 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0 13239 or if format doesn't contain % chars or is "%s". */ 13240 if (! integer_zerop (flag)) 13241 { 13242 if (fmt_str == NULL) 13243 return NULL_TREE; 13244 if (strchr (fmt_str, target_percent) != NULL 13245 && strcmp (fmt_str, target_percent_s)) 13246 return NULL_TREE; 13247 } 13248 13249 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */ 13250 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK 13251 ? 
BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13252 if (!fn)
13253 return NULL_TREE;
13254
13255 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13256 }
13257
13258 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13259 a normal call should be emitted rather than expanding the function
13260 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13261
13262 static tree
13263 fold_builtin_sprintf_chk (location_t loc, tree exp,
13264 enum built_in_function fcode)
13265 {
13266 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13267 CALL_EXPR_ARGP (exp), fcode);
13268 }
13269
13270 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13271 NULL_TREE if a normal call should be emitted rather than expanding
13272 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13273 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13274 passed as second argument. */
13275
13276 static tree
13277 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13278 tree maxlen, enum built_in_function fcode)
13279 {
13280 tree dest, size, len, fn, fmt, flag;
13281 const char *fmt_str;
13282
13283 /* Verify the required arguments in the original call. */
13284 if (nargs < 5)
13285 return NULL_TREE;
13286 dest = args[0];
13287 if (!validate_arg (dest, POINTER_TYPE))
13288 return NULL_TREE;
13289 len = args[1];
13290 if (!validate_arg (len, INTEGER_TYPE))
13291 return NULL_TREE;
13292 flag = args[2];
13293 if (!validate_arg (flag, INTEGER_TYPE))
13294 return NULL_TREE;
13295 size = args[3];
13296 if (!validate_arg (size, INTEGER_TYPE))
13297 return NULL_TREE;
13298 fmt = args[4];
13299 if (!validate_arg (fmt, POINTER_TYPE))
13300 return NULL_TREE;
13301
13302 if (! host_integerp (size, 1))
13303 return NULL_TREE;
13304
13305 if (! integer_all_onesp (size))
13306 {
13307 if (! host_integerp (len, 1))
13308 {
13309 /* If LEN is not constant, try MAXLEN too.
13310 For MAXLEN only allow optimizing into non-_ocs function
13311 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13312 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13313 return NULL_TREE;
13314 }
13315 else
13316 maxlen = len;
13317
13318 if (tree_int_cst_lt (size, maxlen))
13319 return NULL_TREE;
13320 }
13321
13322 if (!init_target_chars ())
13323 return NULL_TREE;
13324
13325 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13326 or if format doesn't contain % chars or is "%s". */
13327 if (! integer_zerop (flag))
13328 {
13329 fmt_str = c_getstr (fmt);
13330 if (fmt_str == NULL)
13331 return NULL_TREE;
13332 if (strchr (fmt_str, target_percent) != NULL
13333 && strcmp (fmt_str, target_percent_s))
13334 return NULL_TREE;
13335 }
13336
13337 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13338 available. */
13339 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13340 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13341 if (!fn)
13342 return NULL_TREE;
13343
13344 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13345 }
13346
13347 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13348 a normal call should be emitted rather than expanding the function
13349 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13350 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13351 passed as second argument.
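For example (illustrative), __snprintf_chk (buf, 8, 0, 32, "%d", i)
   folds to snprintf (buf, 8, "%d", i), the constant length 8 being no
   larger than the object size 32.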
*/ 13352 13353 tree 13354 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen, 13355 enum built_in_function fcode) 13356 { 13357 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp), 13358 CALL_EXPR_ARGP (exp), maxlen, fcode); 13359 } 13360 13361 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins. 13362 FMT and ARG are the arguments to the call; we don't fold cases with 13363 more than 2 arguments, and ARG may be null if this is a 1-argument case. 13364 13365 Return NULL_TREE if no simplification was possible, otherwise return the 13366 simplified form of the call as a tree. FCODE is the BUILT_IN_* 13367 code of the function to be simplified. */ 13368 13369 static tree 13370 fold_builtin_printf (location_t loc, tree fndecl, tree fmt, 13371 tree arg, bool ignore, 13372 enum built_in_function fcode) 13373 { 13374 tree fn_putchar, fn_puts, newarg, call = NULL_TREE; 13375 const char *fmt_str = NULL; 13376 13377 /* If the return value is used, don't do the transformation. */ 13378 if (! ignore) 13379 return NULL_TREE; 13380 13381 /* Verify the required arguments in the original call. */ 13382 if (!validate_arg (fmt, POINTER_TYPE)) 13383 return NULL_TREE; 13384 13385 /* Check whether the format is a literal string constant. */ 13386 fmt_str = c_getstr (fmt); 13387 if (fmt_str == NULL) 13388 return NULL_TREE; 13389 13390 if (fcode == BUILT_IN_PRINTF_UNLOCKED) 13391 { 13392 /* If we're using an unlocked function, assume the other 13393 unlocked functions exist explicitly. */ 13394 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED); 13395 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED); 13396 } 13397 else 13398 { 13399 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR); 13400 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS); 13401 } 13402 13403 if (!init_target_chars ()) 13404 return NULL_TREE; 13405 13406 if (strcmp (fmt_str, target_percent_s) == 0 13407 || strchr (fmt_str, target_percent) == NULL) 13408 { 13409 const char *str; 13410 13411 if (strcmp (fmt_str, target_percent_s) == 0) 13412 { 13413 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) 13414 return NULL_TREE; 13415 13416 if (!arg || !validate_arg (arg, POINTER_TYPE)) 13417 return NULL_TREE; 13418 13419 str = c_getstr (arg); 13420 if (str == NULL) 13421 return NULL_TREE; 13422 } 13423 else 13424 { 13425 /* The format specifier doesn't contain any '%' characters. */ 13426 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK 13427 && arg) 13428 return NULL_TREE; 13429 str = fmt_str; 13430 } 13431 13432 /* If the string was "", printf does nothing. */ 13433 if (str[0] == '\0') 13434 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0); 13435 13436 /* If the string has length of 1, call putchar. */ 13437 if (str[1] == '\0') 13438 { 13439 /* Given printf("c"), (where c is any one character,) 13440 convert "c"[0] to an int and pass that to the replacement 13441 function. */ 13442 newarg = build_int_cst (integer_type_node, str[0]); 13443 if (fn_putchar) 13444 call = build_call_expr_loc (loc, fn_putchar, 1, newarg); 13445 } 13446 else 13447 { 13448 /* If the string was "string\n", call puts("string"). */ 13449 size_t len = strlen (str); 13450 if ((unsigned char)str[len - 1] == target_newline 13451 && (size_t) (int) len == len 13452 && (int) len > 0) 13453 { 13454 char *newstr; 13455 tree offset_node, string_cst; 13456 13457 /* Create a NUL-terminated string that's one char shorter 13458 than the original, stripping off the trailing '\n'. 
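Thus (illustrative) printf ("hello\n") is ultimately rewritten as
   puts ("hello").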
*/
13459 newarg = build_string_literal (len, str);
13460 string_cst = string_constant (newarg, &offset_node);
13461 gcc_checking_assert (string_cst
13462 && (TREE_STRING_LENGTH (string_cst)
13463 == (int) len)
13464 && integer_zerop (offset_node)
13465 && (unsigned char)
13466 TREE_STRING_POINTER (string_cst)[len - 1]
13467 == target_newline);
13468 /* build_string_literal creates a new STRING_CST,
13469 modify it in place to avoid double copying. */
13470 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13471 newstr[len - 1] = '\0';
13472 if (fn_puts)
13473 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13474 }
13475 else
13476 /* We'd like to arrange to call fputs(string,stdout) here,
13477 but we need stdout and don't have a way to get it yet. */
13478 return NULL_TREE;
13479 }
13480 }
13481
13482 /* The other optimizations can be done only on the non-va_list variants. */
13483 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13484 return NULL_TREE;
13485
13486 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13487 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13488 {
13489 if (!arg || !validate_arg (arg, POINTER_TYPE))
13490 return NULL_TREE;
13491 if (fn_puts)
13492 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13493 }
13494
13495 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13496 else if (strcmp (fmt_str, target_percent_c) == 0)
13497 {
13498 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13499 return NULL_TREE;
13500 if (fn_putchar)
13501 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13502 }
13503
13504 if (!call)
13505 return NULL_TREE;
13506
13507 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13508 }
13509
13510 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13511 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13512 more than 3 arguments, and ARG may be null in the 2-argument case.
13513
13514 Return NULL_TREE if no simplification was possible, otherwise return the
13515 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13516 code of the function to be simplified. */
13517
13518 static tree
13519 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13520 tree fmt, tree arg, bool ignore,
13521 enum built_in_function fcode)
13522 {
13523 tree fn_fputc, fn_fputs, call = NULL_TREE;
13524 const char *fmt_str = NULL;
13525
13526 /* If the return value is used, don't do the transformation. */
13527 if (! ignore)
13528 return NULL_TREE;
13529
13530 /* Verify the required arguments in the original call. */
13531 if (!validate_arg (fp, POINTER_TYPE))
13532 return NULL_TREE;
13533 if (!validate_arg (fmt, POINTER_TYPE))
13534 return NULL_TREE;
13535
13536 /* Check whether the format is a literal string constant. */
13537 fmt_str = c_getstr (fmt);
13538 if (fmt_str == NULL)
13539 return NULL_TREE;
13540
13541 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13542 {
13543 /* If we're using an unlocked function, assume the other
13544 unlocked functions exist explicitly. */
13545 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13546 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13547 }
13548 else
13549 {
13550 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13551 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13552 }
13553
13554 if (!init_target_chars ())
13555 return NULL_TREE;
13556
13557 /* If the format doesn't contain % args or %%, use fputs.
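For instance (illustrative), fprintf (fp, "abc") becomes
   fputs ("abc", fp), while an empty format folds to the constant 0.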
*/ 13558 if (strchr (fmt_str, target_percent) == NULL) 13559 { 13560 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK 13561 && arg) 13562 return NULL_TREE; 13563 13564 /* If the format specifier was "", fprintf does nothing. */ 13565 if (fmt_str[0] == '\0') 13566 { 13567 /* If FP has side-effects, just wait until gimplification is 13568 done. */ 13569 if (TREE_SIDE_EFFECTS (fp)) 13570 return NULL_TREE; 13571 13572 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0); 13573 } 13574 13575 /* When "string" doesn't contain %, replace all cases of 13576 fprintf (fp, string) with fputs (string, fp). The fputs 13577 builtin will take care of special cases like length == 1. */ 13578 if (fn_fputs) 13579 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp); 13580 } 13581 13582 /* The other optimizations can be done only on the non-va_list variants. */ 13583 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK) 13584 return NULL_TREE; 13585 13586 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */ 13587 else if (strcmp (fmt_str, target_percent_s) == 0) 13588 { 13589 if (!arg || !validate_arg (arg, POINTER_TYPE)) 13590 return NULL_TREE; 13591 if (fn_fputs) 13592 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp); 13593 } 13594 13595 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */ 13596 else if (strcmp (fmt_str, target_percent_c) == 0) 13597 { 13598 if (!arg || !validate_arg (arg, INTEGER_TYPE)) 13599 return NULL_TREE; 13600 if (fn_fputc) 13601 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp); 13602 } 13603 13604 if (!call) 13605 return NULL_TREE; 13606 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call); 13607 } 13608 13609 /* Initialize format string characters in the target charset. */ 13610 13611 static bool 13612 init_target_chars (void) 13613 { 13614 static bool init; 13615 if (!init) 13616 { 13617 target_newline = lang_hooks.to_target_charset ('\n'); 13618 target_percent = lang_hooks.to_target_charset ('%'); 13619 target_c = lang_hooks.to_target_charset ('c'); 13620 target_s = lang_hooks.to_target_charset ('s'); 13621 if (target_newline == 0 || target_percent == 0 || target_c == 0 13622 || target_s == 0) 13623 return false; 13624 13625 target_percent_c[0] = target_percent; 13626 target_percent_c[1] = target_c; 13627 target_percent_c[2] = '\0'; 13628 13629 target_percent_s[0] = target_percent; 13630 target_percent_s[1] = target_s; 13631 target_percent_s[2] = '\0'; 13632 13633 target_percent_s_newline[0] = target_percent; 13634 target_percent_s_newline[1] = target_s; 13635 target_percent_s_newline[2] = target_newline; 13636 target_percent_s_newline[3] = '\0'; 13637 13638 init = true; 13639 } 13640 return true; 13641 } 13642 13643 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number 13644 and no overflow/underflow occurred. INEXACT is true if M was not 13645 exactly calculated. TYPE is the tree type for the result. This 13646 function assumes that you cleared the MPFR flags and then 13647 calculated M to see if anything subsequently set a flag prior to 13648 entering this function. Return NULL_TREE if any checks fail. */ 13649 13650 static tree 13651 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact) 13652 { 13653 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no 13654 overflow/underflow occurred. If -frounding-math, proceed iff the 13655 result of calling FUNC was exact. 
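For example (illustrative), under -frounding-math an inexact mpfr_sin
   result blocks the fold, because the runtime rounding mode may differ
   from the compile-time one used here.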
*/
13656 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13657 && (!flag_rounding_math || !inexact))
13658 {
13659 REAL_VALUE_TYPE rr;
13660
13661 real_from_mpfr (&rr, m, type, GMP_RNDN);
13662 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13663 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13664 but the mpfr_t is not, then we underflowed in the
13665 conversion. */
13666 if (real_isfinite (&rr)
13667 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13668 {
13669 REAL_VALUE_TYPE rmode;
13670
13671 real_convert (&rmode, TYPE_MODE (type), &rr);
13672 /* Proceed iff the specified mode can hold the value. */
13673 if (real_identical (&rmode, &rr))
13674 return build_real (type, rmode);
13675 }
13676 }
13677 return NULL_TREE;
13678 }
13679
13680 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13681 number and no overflow/underflow occurred. INEXACT is true if M
13682 was not exactly calculated. TYPE is the tree type for the result.
13683 This function assumes that you cleared the MPFR flags and then
13684 calculated M to see if anything subsequently set a flag prior to
13685 entering this function. Return NULL_TREE if any checks fail; if
13686 FORCE_CONVERT is true, then bypass the checks. */
13687
13688 static tree
13689 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13690 {
13691 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13692 overflow/underflow occurred. If -frounding-math, proceed iff the
13693 result of calling FUNC was exact. */
13694 if (force_convert
13695 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13696 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13697 && (!flag_rounding_math || !inexact)))
13698 {
13699 REAL_VALUE_TYPE re, im;
13700
13701 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13702 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13703 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13704 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13705 but the mpfr_t is not, then we underflowed in the
13706 conversion. */
13707 if (force_convert
13708 || (real_isfinite (&re) && real_isfinite (&im)
13709 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13710 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13711 {
13712 REAL_VALUE_TYPE re_mode, im_mode;
13713
13714 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13715 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13716 /* Proceed iff the specified mode can hold the value. */
13717 if (force_convert
13718 || (real_identical (&re_mode, &re)
13719 && real_identical (&im_mode, &im)))
13720 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13721 build_real (TREE_TYPE (type), im_mode));
13722 }
13723 }
13724 return NULL_TREE;
13725 }
13726
13727 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13728 FUNC on it and return the resulting value as a tree with type TYPE.
13729 If MIN and/or MAX are not NULL, then the supplied ARG must be
13730 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13731 acceptable values, otherwise they are not. The mpfr precision is
13732 set to the precision of TYPE. We assume that function FUNC returns
13733 zero if the result could be calculated exactly within the requested
13734 precision.
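For example (illustrative), the sin builtin is folded through
   do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false), producing a
   REAL_CST with the correctly rounded value.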
*/ 13735 13736 static tree 13737 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t), 13738 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max, 13739 bool inclusive) 13740 { 13741 tree result = NULL_TREE; 13742 13743 STRIP_NOPS (arg); 13744 13745 /* To proceed, MPFR must exactly represent the target floating point 13746 format, which only happens when the target base equals two. */ 13747 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2 13748 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)) 13749 { 13750 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg); 13751 13752 if (real_isfinite (ra) 13753 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)) 13754 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max))) 13755 { 13756 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type)); 13757 const int prec = fmt->p; 13758 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN; 13759 int inexact; 13760 mpfr_t m; 13761 13762 mpfr_init2 (m, prec); 13763 mpfr_from_real (m, ra, GMP_RNDN); 13764 mpfr_clear_flags (); 13765 inexact = func (m, m, rnd); 13766 result = do_mpfr_ckconv (m, type, inexact); 13767 mpfr_clear (m); 13768 } 13769 } 13770 13771 return result; 13772 } 13773 13774 /* If argument ARG is a REAL_CST, call the two-argument mpfr function 13775 FUNC on it and return the resulting value as a tree with type TYPE. 13776 The mpfr precision is set to the precision of TYPE. We assume that 13777 function FUNC returns zero if the result could be calculated 13778 exactly within the requested precision. */ 13779 13780 static tree 13781 do_mpfr_arg2 (tree arg1, tree arg2, tree type, 13782 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t)) 13783 { 13784 tree result = NULL_TREE; 13785 13786 STRIP_NOPS (arg1); 13787 STRIP_NOPS (arg2); 13788 13789 /* To proceed, MPFR must exactly represent the target floating point 13790 format, which only happens when the target base equals two. */ 13791 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2 13792 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1) 13793 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)) 13794 { 13795 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1); 13796 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2); 13797 13798 if (real_isfinite (ra1) && real_isfinite (ra2)) 13799 { 13800 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type)); 13801 const int prec = fmt->p; 13802 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN; 13803 int inexact; 13804 mpfr_t m1, m2; 13805 13806 mpfr_inits2 (prec, m1, m2, NULL); 13807 mpfr_from_real (m1, ra1, GMP_RNDN); 13808 mpfr_from_real (m2, ra2, GMP_RNDN); 13809 mpfr_clear_flags (); 13810 inexact = func (m1, m1, m2, rnd); 13811 result = do_mpfr_ckconv (m1, type, inexact); 13812 mpfr_clears (m1, m2, NULL); 13813 } 13814 } 13815 13816 return result; 13817 } 13818 13819 /* If argument ARG is a REAL_CST, call the three-argument mpfr function 13820 FUNC on it and return the resulting value as a tree with type TYPE. 13821 The mpfr precision is set to the precision of TYPE. We assume that 13822 function FUNC returns zero if the result could be calculated 13823 exactly within the requested precision. 
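For example (illustrative), the fma builtin is folded through
   do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma).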
*/
13824
13825 static tree
13826 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13827 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13828 {
13829 tree result = NULL_TREE;
13830
13831 STRIP_NOPS (arg1);
13832 STRIP_NOPS (arg2);
13833 STRIP_NOPS (arg3);
13834
13835 /* To proceed, MPFR must exactly represent the target floating point
13836 format, which only happens when the target base equals two. */
13837 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13838 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13839 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13840 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13841 {
13842 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13843 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13844 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13845
13846 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13847 {
13848 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13849 const int prec = fmt->p;
13850 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13851 int inexact;
13852 mpfr_t m1, m2, m3;
13853
13854 mpfr_inits2 (prec, m1, m2, m3, NULL);
13855 mpfr_from_real (m1, ra1, GMP_RNDN);
13856 mpfr_from_real (m2, ra2, GMP_RNDN);
13857 mpfr_from_real (m3, ra3, GMP_RNDN);
13858 mpfr_clear_flags ();
13859 inexact = func (m1, m1, m2, m3, rnd);
13860 result = do_mpfr_ckconv (m1, type, inexact);
13861 mpfr_clears (m1, m2, m3, NULL);
13862 }
13863 }
13864
13865 return result;
13866 }
13867
13868 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13869 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13870 If ARG_SINP and ARG_COSP are NULL then the result is returned
13871 as a complex value.
13872 The type is taken from the type of ARG and is used for setting the
13873 precision of the calculation and results. */
13874
13875 static tree
13876 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13877 {
13878 tree const type = TREE_TYPE (arg);
13879 tree result = NULL_TREE;
13880
13881 STRIP_NOPS (arg);
13882
13883 /* To proceed, MPFR must exactly represent the target floating point
13884 format, which only happens when the target base equals two. */
13885 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13886 && TREE_CODE (arg) == REAL_CST
13887 && !TREE_OVERFLOW (arg))
13888 {
13889 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13890
13891 if (real_isfinite (ra))
13892 {
13893 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13894 const int prec = fmt->p;
13895 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13896 tree result_s, result_c;
13897 int inexact;
13898 mpfr_t m, ms, mc;
13899
13900 mpfr_inits2 (prec, m, ms, mc, NULL);
13901 mpfr_from_real (m, ra, GMP_RNDN);
13902 mpfr_clear_flags ();
13903 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13904 result_s = do_mpfr_ckconv (ms, type, inexact);
13905 result_c = do_mpfr_ckconv (mc, type, inexact);
13906 mpfr_clears (m, ms, mc, NULL);
13907 if (result_s && result_c)
13908 {
13909 /* If we are to return the result as a complex value, do so. */
13910 if (!arg_sinp && !arg_cosp)
13911 return build_complex (build_complex_type (type),
13912 result_c, result_s);
13913
13914 /* Dereference the sin/cos pointer arguments. */
13915 arg_sinp = build_fold_indirect_ref (arg_sinp);
13916 arg_cosp = build_fold_indirect_ref (arg_cosp);
13917 /* Proceed if valid pointer types were passed in.
*/ 13918 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type) 13919 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type)) 13920 { 13921 /* Set the values. */ 13922 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp, 13923 result_s); 13924 TREE_SIDE_EFFECTS (result_s) = 1; 13925 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp, 13926 result_c); 13927 TREE_SIDE_EFFECTS (result_c) = 1; 13928 /* Combine the assignments into a compound expr. */ 13929 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type, 13930 result_s, result_c)); 13931 } 13932 } 13933 } 13934 } 13935 return result; 13936 } 13937 13938 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the 13939 two-argument mpfr order N Bessel function FUNC on them and return 13940 the resulting value as a tree with type TYPE. The mpfr precision 13941 is set to the precision of TYPE. We assume that function FUNC 13942 returns zero if the result could be calculated exactly within the 13943 requested precision. */ 13944 static tree 13945 do_mpfr_bessel_n (tree arg1, tree arg2, tree type, 13946 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t), 13947 const REAL_VALUE_TYPE *min, bool inclusive) 13948 { 13949 tree result = NULL_TREE; 13950 13951 STRIP_NOPS (arg1); 13952 STRIP_NOPS (arg2); 13953 13954 /* To proceed, MPFR must exactly represent the target floating point 13955 format, which only happens when the target base equals two. */ 13956 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2 13957 && host_integerp (arg1, 0) 13958 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)) 13959 { 13960 const HOST_WIDE_INT n = tree_low_cst(arg1, 0); 13961 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2); 13962 13963 if (n == (long)n 13964 && real_isfinite (ra) 13965 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))) 13966 { 13967 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type)); 13968 const int prec = fmt->p; 13969 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN; 13970 int inexact; 13971 mpfr_t m; 13972 13973 mpfr_init2 (m, prec); 13974 mpfr_from_real (m, ra, GMP_RNDN); 13975 mpfr_clear_flags (); 13976 inexact = func (m, n, m, rnd); 13977 result = do_mpfr_ckconv (m, type, inexact); 13978 mpfr_clear (m); 13979 } 13980 } 13981 13982 return result; 13983 } 13984 13985 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set 13986 the pointer *(ARG_QUO) and return the result. The type is taken 13987 from the type of ARG0 and is used for setting the precision of the 13988 calculation and results. */ 13989 13990 static tree 13991 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo) 13992 { 13993 tree const type = TREE_TYPE (arg0); 13994 tree result = NULL_TREE; 13995 13996 STRIP_NOPS (arg0); 13997 STRIP_NOPS (arg1); 13998 13999 /* To proceed, MPFR must exactly represent the target floating point 14000 format, which only happens when the target base equals two. */ 14001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2 14002 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0) 14003 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)) 14004 { 14005 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0); 14006 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1); 14007 14008 if (real_isfinite (ra0) && real_isfinite (ra1)) 14009 { 14010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type)); 14011 const int prec = fmt->p; 14012 const mp_rnd_t rnd = fmt->round_towards_zero? 
GMP_RNDZ : GMP_RNDN; 14013 tree result_rem; 14014 long integer_quo; 14015 mpfr_t m0, m1; 14016 14017 mpfr_inits2 (prec, m0, m1, NULL); 14018 mpfr_from_real (m0, ra0, GMP_RNDN); 14019 mpfr_from_real (m1, ra1, GMP_RNDN); 14020 mpfr_clear_flags (); 14021 mpfr_remquo (m0, &integer_quo, m0, m1, rnd); 14022 /* Remquo is independent of the rounding mode, so pass 14023 inexact=0 to do_mpfr_ckconv(). */ 14024 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0); 14025 mpfr_clears (m0, m1, NULL); 14026 if (result_rem) 14027 { 14028 /* MPFR calculates quo in the host's long so it may 14029 return more bits in quo than the target int can hold 14030 if sizeof(host long) > sizeof(target int). This can 14031 happen even for native compilers in LP64 mode. In 14032 these cases, modulo the quo value with the largest 14033 number that the target int can hold while leaving one 14034 bit for the sign. */ 14035 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE) 14036 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1)); 14037 14038 /* Dereference the quo pointer argument. */ 14039 arg_quo = build_fold_indirect_ref (arg_quo); 14040 /* Proceed iff a valid pointer type was passed in. */ 14041 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node) 14042 { 14043 /* Set the value. */ 14044 tree result_quo 14045 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo, 14046 build_int_cst (TREE_TYPE (arg_quo), 14047 integer_quo)); 14048 TREE_SIDE_EFFECTS (result_quo) = 1; 14049 /* Combine the quo assignment with the rem. */ 14050 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type, 14051 result_quo, result_rem)); 14052 } 14053 } 14054 } 14055 } 14056 return result; 14057 } 14058 14059 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the 14060 resulting value as a tree with type TYPE. The mpfr precision is 14061 set to the precision of TYPE. We assume that this mpfr function 14062 returns zero if the result could be calculated exactly within the 14063 requested precision. In addition, the integer pointer represented 14064 by ARG_SG will be dereferenced and set to the appropriate signgam 14065 (-1,1) value. */ 14066 14067 static tree 14068 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type) 14069 { 14070 tree result = NULL_TREE; 14071 14072 STRIP_NOPS (arg); 14073 14074 /* To proceed, MPFR must exactly represent the target floating point 14075 format, which only happens when the target base equals two. Also 14076 verify ARG is a constant and that ARG_SG is an int pointer. */ 14077 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2 14078 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg) 14079 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE 14080 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node) 14081 { 14082 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg); 14083 14084 /* In addition to NaN and Inf, the argument cannot be zero or a 14085 negative integer. */ 14086 if (real_isfinite (ra) 14087 && ra->cl != rvc_zero 14088 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type)))) 14089 { 14090 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type)); 14091 const int prec = fmt->p; 14092 const mp_rnd_t rnd = fmt->round_towards_zero? 
GMP_RNDZ : GMP_RNDN; 14093 int inexact, sg; 14094 mpfr_t m; 14095 tree result_lg; 14096 14097 mpfr_init2 (m, prec); 14098 mpfr_from_real (m, ra, GMP_RNDN); 14099 mpfr_clear_flags (); 14100 inexact = mpfr_lgamma (m, &sg, m, rnd); 14101 result_lg = do_mpfr_ckconv (m, type, inexact); 14102 mpfr_clear (m); 14103 if (result_lg) 14104 { 14105 tree result_sg; 14106 14107 /* Dereference the arg_sg pointer argument. */ 14108 arg_sg = build_fold_indirect_ref (arg_sg); 14109 /* Assign the signgam value into *arg_sg. */ 14110 result_sg = fold_build2 (MODIFY_EXPR, 14111 TREE_TYPE (arg_sg), arg_sg, 14112 build_int_cst (TREE_TYPE (arg_sg), sg)); 14113 TREE_SIDE_EFFECTS (result_sg) = 1; 14114 /* Combine the signgam assignment with the lgamma result. */ 14115 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type, 14116 result_sg, result_lg)); 14117 } 14118 } 14119 } 14120 14121 return result; 14122 } 14123 14124 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc 14125 function FUNC on it and return the resulting value as a tree with 14126 type TYPE. The mpfr precision is set to the precision of TYPE. We 14127 assume that function FUNC returns zero if the result could be 14128 calculated exactly within the requested precision. */ 14129 14130 static tree 14131 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t)) 14132 { 14133 tree result = NULL_TREE; 14134 14135 STRIP_NOPS (arg); 14136 14137 /* To proceed, MPFR must exactly represent the target floating point 14138 format, which only happens when the target base equals two. */ 14139 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg) 14140 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE 14141 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2) 14142 { 14143 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg)); 14144 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg)); 14145 14146 if (real_isfinite (re) && real_isfinite (im)) 14147 { 14148 const struct real_format *const fmt = 14149 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type))); 14150 const int prec = fmt->p; 14151 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN; 14152 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN; 14153 int inexact; 14154 mpc_t m; 14155 14156 mpc_init2 (m, prec); 14157 mpfr_from_real (mpc_realref(m), re, rnd); 14158 mpfr_from_real (mpc_imagref(m), im, rnd); 14159 mpfr_clear_flags (); 14160 inexact = func (m, m, crnd); 14161 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0); 14162 mpc_clear (m); 14163 } 14164 } 14165 14166 return result; 14167 } 14168 14169 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument 14170 mpc function FUNC on it and return the resulting value as a tree 14171 with type TYPE. The mpfr precision is set to the precision of 14172 TYPE. We assume that function FUNC returns zero if the result 14173 could be calculated exactly within the requested precision. If 14174 DO_NONFINITE is true, then fold expressions containing Inf or NaN 14175 in the arguments and/or results. */ 14176 14177 tree 14178 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite, 14179 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t)) 14180 { 14181 tree result = NULL_TREE; 14182 14183 STRIP_NOPS (arg0); 14184 STRIP_NOPS (arg1); 14185 14186 /* To proceed, MPFR must exactly represent the target floating point 14187 format, which only happens when the target base equals two. 
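For example (illustrative), the cpow builtin is folded through
   do_mpc_arg2 (arg0, arg1, type, flag_unsafe_math_optimizations, mpc_pow).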
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
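/* For illustration: a minimal sketch of a do_mpc_arg2 caller, modelled
   on the folding of cpow elsewhere in this file, where non-finite
   operands are folded only under -funsafe-math-optimizations.  The
   wrapper name is hypothetical and the block is kept out of the build.  */
#if 0
static tree
fold_const_cpow_sketch (tree arg0, tree arg1, tree type)
{
  return do_mpc_arg2 (arg0, arg1, type,
		      /*do_nonfinite=*/ flag_unsafe_math_optimizations,
		      mpc_pow);
}
#endif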
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
				     (nargs > 0
				      ? gimple_call_arg_ptr (stmt, 0)
				      : &error_mark_node), fcode);
}

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
				  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
				      (nargs > 0
				       ? gimple_call_arg_ptr (stmt, 0)
				       : &error_mark_node), maxlen, fcode);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
   is true if the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
			     bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call to
		 the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, which operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
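/* For illustration: a schematic sketch of the kind of caller that
   drives fold_call_stmt over a basic block.  Real callers also replace
   the statement with the folded result; that part is omitted here, and
   the function name is hypothetical.  */
#if 0
static void
fold_calls_in_bb_sketch (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt))
	{
	  tree folded = fold_call_stmt (stmt, /*ignore=*/ false);
	  if (folded)
	    {
	      /* A real caller would substitute FOLDED for the call's
		 result here.  */
	    }
	}
    }
}
#endif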
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}

/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
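/* For illustration: the source-level construct that reaches
   set_builtin_user_assembler_name is a builtin redeclared with a user
   assembler name; "my_memcpy" below is a made-up name.  After such a
   declaration, calls to memcpy -- including block moves GCC emits
   itself -- assemble as calls to my_memcpy, which is why the libfuncs
   above must be redirected as well.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif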
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */

bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
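/* For illustration: a minimal sketch of how a pass might consult this
   predicate when sizing up a call; the helper name is hypothetical.  */
#if 0
static bool
call_is_cheap_sketch (gimple stmt)
{
  tree fndecl = gimple_call_fndecl (stmt);

  return fndecl != NULL_TREE && is_inexpensive_builtin (fndecl);
}
#endif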