1 /* Expand builtin functions.
2 Copyright (C) 1988-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
84
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
89
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
93
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
96 {
97 #include "builtins.def"
98 };
99
100 /* Set up an array of builtin_info_type, making sure each element's decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
103
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
106
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_int_roundingfn (tree, rtx);
127 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 static rtx expand_builtin_next_arg (void);
129 static rtx expand_builtin_va_start (tree);
130 static rtx expand_builtin_va_end (tree);
131 static rtx expand_builtin_va_copy (tree);
132 static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 static rtx expand_builtin_strcmp (tree, rtx);
134 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 static rtx expand_builtin_memcpy (tree, rtx);
136 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 rtx target, tree exp,
138 memop_ret retmode,
139 bool might_overlap);
140 static rtx expand_builtin_memmove (tree, rtx);
141 static rtx expand_builtin_mempcpy (tree, rtx);
142 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 static rtx expand_builtin_strcpy (tree, rtx);
144 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 static rtx expand_builtin_strncpy (tree, rtx);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
174
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
178
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
185
186 unsigned HOST_WIDE_INT target_newline;
187 unsigned HOST_WIDE_INT target_percent;
188 static unsigned HOST_WIDE_INT target_c;
189 static unsigned HOST_WIDE_INT target_s;
190 char target_percent_c[3];
191 char target_percent_s[3];
192 char target_percent_s_newline[4];
193 static tree do_mpfr_remquo (tree, tree, tree);
194 static tree do_mpfr_lgamma_r (tree, tree, tree);
195 static void expand_builtin_sync_synchronize (void);
196
197 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
198
199 static bool
200 is_builtin_name (const char *name)
201 {
202 return (startswith (name, "__builtin_")
203 || startswith (name, "__sync_")
204 || startswith (name, "__atomic_"));
205 }
206
207 /* Return true if NODE should be considered for inline expansion regardless
208 of the optimization level. This means whenever a function is invoked with
209 its "internal" name, which normally contains the prefix "__builtin". */
210
211 bool
212 called_as_built_in (tree node)
213 {
214 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
215 we want the name used to call the function, not the name it
216 will have. */
217 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
218 return is_builtin_name (name);
219 }
220
221 /* Compute values M and N such that M divides (address of EXP - N) and such
222 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
223 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
224 *ALIGNP and any bit-offset to *BITPOSP.
225
226 Note that the address (and thus the alignment) computed here is based
227 on the address to which a symbol resolves, whereas DECL_ALIGN is based
228 on the address at which an object is actually located. These two
229 addresses are not always the same. For example, on ARM targets,
230 the address &foo of a Thumb function foo() has the lowest bit set,
231 whereas foo() itself starts on an even address.
232
233 If ADDR_P is true we are taking the address of the memory reference EXP
234 and thus cannot rely on the access taking place. */
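
/* For example (illustrative numbers): if &EXP is known to be 4 bytes past
   a 16-byte boundary, this stores an alignment of 128 bits in *ALIGNP and
   a bit position of 32 in *BITPOSP.  */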
235
236 bool
237 get_object_alignment_2 (tree exp, unsigned int *alignp,
238 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
239 {
240 poly_int64 bitsize, bitpos;
241 tree offset;
242 machine_mode mode;
243 int unsignedp, reversep, volatilep;
244 unsigned int align = BITS_PER_UNIT;
245 bool known_alignment = false;
246
247 /* Get the innermost object and the constant (bitpos) and possibly
248 variable (offset) offset of the access. */
249 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
250 &unsignedp, &reversep, &volatilep);
251
252 /* Extract alignment information from the innermost object and
253 possibly adjust bitpos and offset. */
254 if (TREE_CODE (exp) == FUNCTION_DECL)
255 {
256 /* Function addresses can encode extra information besides their
257 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
258 allows the low bit to be used as a virtual bit, we know
259 that the address itself must be at least 2-byte aligned. */
260 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
261 align = 2 * BITS_PER_UNIT;
262 }
263 else if (TREE_CODE (exp) == LABEL_DECL)
264 ;
265 else if (TREE_CODE (exp) == CONST_DECL)
266 {
267 /* The alignment of a CONST_DECL is determined by its initializer. */
268 exp = DECL_INITIAL (exp);
269 align = TYPE_ALIGN (TREE_TYPE (exp));
270 if (CONSTANT_CLASS_P (exp))
271 align = targetm.constant_alignment (exp, align);
272
273 known_alignment = true;
274 }
275 else if (DECL_P (exp))
276 {
277 align = DECL_ALIGN (exp);
278 known_alignment = true;
279 }
280 else if (TREE_CODE (exp) == INDIRECT_REF
281 || TREE_CODE (exp) == MEM_REF
282 || TREE_CODE (exp) == TARGET_MEM_REF)
283 {
284 tree addr = TREE_OPERAND (exp, 0);
285 unsigned ptr_align;
286 unsigned HOST_WIDE_INT ptr_bitpos;
287 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
288
289 /* If the address is explicitly aligned, handle that. */
290 if (TREE_CODE (addr) == BIT_AND_EXPR
291 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
292 {
293 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
294 ptr_bitmask *= BITS_PER_UNIT;
295 align = least_bit_hwi (ptr_bitmask);
296 addr = TREE_OPERAND (addr, 0);
297 }
298
299 known_alignment
300 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
301 align = MAX (ptr_align, align);
302
303 /* Re-apply explicit alignment to the bitpos. */
304 ptr_bitpos &= ptr_bitmask;
305
306 /* The alignment of the pointer operand in a TARGET_MEM_REF
307 has to take the variable offset parts into account. */
308 if (TREE_CODE (exp) == TARGET_MEM_REF)
309 {
310 if (TMR_INDEX (exp))
311 {
312 unsigned HOST_WIDE_INT step = 1;
313 if (TMR_STEP (exp))
314 step = TREE_INT_CST_LOW (TMR_STEP (exp));
315 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
316 }
317 if (TMR_INDEX2 (exp))
318 align = BITS_PER_UNIT;
319 known_alignment = false;
320 }
321
322 /* When EXP is an actual memory reference then we can use
323 TYPE_ALIGN of a pointer indirection to derive alignment.
324 Do so only if get_pointer_alignment_1 did not reveal absolute
325 alignment knowledge and if using that alignment would
326 improve the situation. */
327 unsigned int talign;
328 if (!addr_p && !known_alignment
329 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
330 && talign > align)
331 align = talign;
332 else
333 {
334 /* Else adjust bitpos accordingly. */
335 bitpos += ptr_bitpos;
336 if (TREE_CODE (exp) == MEM_REF
337 || TREE_CODE (exp) == TARGET_MEM_REF)
338 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
339 }
340 }
341 else if (TREE_CODE (exp) == STRING_CST)
342 {
343 /* STRING_CSTs are the only constant objects we allow not to be
344 wrapped inside a CONST_DECL. */
345 align = TYPE_ALIGN (TREE_TYPE (exp));
346 if (CONSTANT_CLASS_P (exp))
347 align = targetm.constant_alignment (exp, align);
348
349 known_alignment = true;
350 }
351
352 /* If there is a non-constant offset part extract the maximum
353 alignment that can prevail. */
354 if (offset)
355 {
356 unsigned int trailing_zeros = tree_ctz (offset);
357 if (trailing_zeros < HOST_BITS_PER_INT)
358 {
359 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
360 if (inner)
361 align = MIN (align, inner);
362 }
363 }
364
365 /* Account for the alignment of runtime coefficients, so that the constant
366 bitpos is guaranteed to be accurate. */
367 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
368 if (alt_align != 0 && alt_align < align)
369 {
370 align = alt_align;
371 known_alignment = false;
372 }
373
374 *alignp = align;
375 *bitposp = bitpos.coeffs[0] & (align - 1);
376 return known_alignment;
377 }
378
379 /* For a memory reference expression EXP compute values M and N such that M
380 divides (&EXP - N) and such that N < M. If these numbers can be determined,
381 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
382 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
383
384 bool
385 get_object_alignment_1 (tree exp, unsigned int *alignp,
386 unsigned HOST_WIDE_INT *bitposp)
387 {
388 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
389 with it. */
390 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
391 exp = TREE_OPERAND (exp, 0);
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 }
394
395 /* Return the alignment in bits of EXP, an object. */
396
397 unsigned int
398 get_object_alignment (tree exp)
399 {
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
403 get_object_alignment_1 (exp, &align, &bitpos);
404
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
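
   /* For instance (illustrative numbers), align == 64 and bitpos == 16
      means the address is 2 bytes past an 8-byte boundary, so the best
      alignment we can claim below is least_bit_hwi (16) == 16 bits.  */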
407
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
411 }
412
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
417
418 If EXP is not a pointer, false is returned too. */
419
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
423 {
424 STRIP_NOPS (exp);
425
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430 {
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
438 {
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
441 {
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
445 }
446 }
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
450 }
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 {
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
456
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 {
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
467 }
468 else
469 {
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
473 }
474 }
475 else if (TREE_CODE (exp) == INTEGER_CST)
476 {
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
481 }
482
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
486 }
487
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495 unsigned int
496 get_pointer_alignment (tree exp)
497 {
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
508
509 return align;
510 }
511
512 /* Return the number of leading non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
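
/* For example (illustrative), string_length ("ab\0cd", 1, 5) returns 2,
   while for ELTSIZE == 4 an element counts as NUL only if all four of
   its bytes are zero.  */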
515
516 unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518 {
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543 }
544
545 /* Compute the length of a null-terminated character string or wide
546 character string handling character sizes of 1, 2, and 4 bytes.
547 TREE_STRING_LENGTH is not the right way because it evaluates to
548 the size of the character array in bytes (as opposed to characters)
549 and because it can contain a zero byte in the middle.
550
551 ONLY_VALUE should be nonzero if the result is not going to be emitted
552 into the instruction stream and zero if it is going to be expanded.
553 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
554 is returned, otherwise NULL, since
555 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
556 evaluate the side-effects.
557
558 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
559 accesses. Note that this implies the result is not going to be emitted
560 into the instruction stream.
561
562 Additional information about the string accessed may be recorded
563 in DATA. For example, if ARG references an unterminated string,
564 then the declaration will be stored in the DECL field. If the
565 length of the unterminated string can be determined, it'll be
566 stored in the LEN field. Note this length could well be different
567 than what a C strlen call would return.
568
569 ELTSIZE is 1 for normal single byte character strings, and 2 or
570 4 for wide character strings. ELTSIZE is by default 1.
571
572 The value returned is of type `ssizetype'. */
573
574 tree
575 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
576 {
577 /* If we were not passed a DATA pointer, then get one to a local
578 structure. That avoids having to check DATA for NULL before
579 each time we want to use it. */
580 c_strlen_data local_strlen_data = { };
581 if (!data)
582 data = &local_strlen_data;
583
584 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
585
586 tree src = STRIP_NOPS (arg);
587 if (TREE_CODE (src) == COND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 {
590 tree len1, len2;
591
592 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
593 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
594 if (tree_int_cst_equal (len1, len2))
595 return len1;
596 }
597
598 if (TREE_CODE (src) == COMPOUND_EXPR
599 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
600 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
601
602 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
603
604 /* Offset from the beginning of the string in bytes. */
605 tree byteoff;
606 tree memsize;
607 tree decl;
608 src = string_constant (src, &byteoff, &memsize, &decl);
609 if (src == 0)
610 return NULL_TREE;
611
612 /* Determine the size of the string element. */
613 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
614 return NULL_TREE;
615
616 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
617 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
618 in case the latter is less than the size of the array, such as when
619 SRC refers to a short string literal used to initialize a large array.
620 In that case, the elements of the array after the terminating NUL are
621 all NUL. */
622 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
623 strelts = strelts / eltsize;
624
625 if (!tree_fits_uhwi_p (memsize))
626 return NULL_TREE;
627
628 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
629
630 /* PTR can point to the byte representation of any string type, including
631 char* and wchar_t*. */
632 const char *ptr = TREE_STRING_POINTER (src);
633
634 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
635 {
636 /* The code below works only for single byte character types. */
637 if (eltsize != 1)
638 return NULL_TREE;
639
640 /* If the string has an internal NUL character followed by any
641 non-NUL characters (e.g., "foo\0bar"), we can't compute
642 the offset to the following NUL if we don't know where to
643 start searching for it. */
644 unsigned len = string_length (ptr, eltsize, strelts);
645
646 /* Return when an embedded null character is found or none at all.
647 In the latter case, set the DECL/LEN field in the DATA structure
648 so that callers may examine them. */
649 if (len + 1 < strelts)
650 return NULL_TREE;
651 else if (len >= maxelts)
652 {
653 data->decl = decl;
654 data->off = byteoff;
655 data->minlen = ssize_int (len);
656 return NULL_TREE;
657 }
658
659 /* For empty strings the result should be zero. */
660 if (len == 0)
661 return ssize_int (0);
662
663 /* We don't know the starting offset, but we do know that the string
664 has no internal zero bytes. If the offset falls within the bounds
665 of the string subtract the offset from the length of the string,
666 and return that. Otherwise the length is zero. Take care to
667 use SAVE_EXPR in case the OFFSET has side-effects. */
668 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
669 : byteoff;
670 offsave = fold_convert_loc (loc, sizetype, offsave);
671 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
672 size_int (len));
673 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
674 offsave);
675 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
676 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
677 build_zero_cst (ssizetype));
678 }
679
680 /* Offset from the beginning of the string in elements. */
681 HOST_WIDE_INT eltoff;
682
683 /* We have a known offset into the string. Start searching there for
684 a null character if we can represent it as a single HOST_WIDE_INT. */
685 if (byteoff == 0)
686 eltoff = 0;
687 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
688 eltoff = -1;
689 else
690 eltoff = tree_to_uhwi (byteoff) / eltsize;
691
692 /* If the offset is known to be out of bounds, warn, and call strlen at
693 runtime. */
694 if (eltoff < 0 || eltoff >= maxelts)
695 {
696 /* Suppress multiple warnings for propagated constant strings. */
697 if (only_value != 2
698 && !warning_suppressed_p (arg, OPT_Warray_bounds)
699 && warning_at (loc, OPT_Warray_bounds,
700 "offset %qwi outside bounds of constant string",
701 eltoff))
702 {
703 if (decl)
704 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
705 suppress_warning (arg, OPT_Warray_bounds);
706 }
707 return NULL_TREE;
708 }
709
710 /* If eltoff is larger than strelts but less than maxelts the
711 string length is zero, since the excess memory will be zero. */
712 if (eltoff > strelts)
713 return ssize_int (0);
714
715 /* Use strlen to search for the first zero byte. Since any strings
716 constructed with build_string will have nulls appended, we win even
717 if we get handed something like (char[4])"abcd".
718
719 Since ELTOFF is our starting index into the string, no further
720 calculation is needed. */
721 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
722 strelts - eltoff);
723
724 /* Don't know what to return if there was no zero termination.
725 Ideally this would turn into a gcc_checking_assert over time.
726 Set DECL/LEN so callers can examine them. */
727 if (len >= maxelts - eltoff)
728 {
729 data->decl = decl;
730 data->off = byteoff;
731 data->minlen = ssize_int (len);
732 return NULL_TREE;
733 }
734
735 return ssize_int (len);
736 }
737
738 /* Return a constant integer corresponding to target reading
739 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
740 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
741 are assumed to be zero, otherwise it reads as many characters
742 as needed. */
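
/* For example (illustrative, little-endian target): reading "abc" in a
   32-bit integer mode with NULL_TERMINATED_P yields the constant
   0x00636261, i.e. bytes 'a' 'b' 'c' followed by an implicit zero byte.  */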
743
744 rtx
745 c_readstr (const char *str, scalar_int_mode mode,
746 bool null_terminated_p/*=true*/)
747 {
748 HOST_WIDE_INT ch;
749 unsigned int i, j;
750 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
751
752 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
753 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
754 / HOST_BITS_PER_WIDE_INT;
755
756 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
757 for (i = 0; i < len; i++)
758 tmp[i] = 0;
759
760 ch = 1;
761 for (i = 0; i < GET_MODE_SIZE (mode); i++)
762 {
763 j = i;
764 if (WORDS_BIG_ENDIAN)
765 j = GET_MODE_SIZE (mode) - i - 1;
766 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
767 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
768 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
769 j *= BITS_PER_UNIT;
770
771 if (ch || !null_terminated_p)
772 ch = (unsigned char) str[i];
773 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
774 }
775
776 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
777 return immed_wide_int_const (c, mode);
778 }
779
780 /* Cast a target constant CST to target CHAR and if that value fits into
781 host char type, return zero and put that value into variable pointed to by
782 P. */
783
784 static int
785 target_char_cast (tree cst, char *p)
786 {
787 unsigned HOST_WIDE_INT val, hostval;
788
789 if (TREE_CODE (cst) != INTEGER_CST
790 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
791 return 1;
792
793 /* Do not care if it fits or not right here. */
794 val = TREE_INT_CST_LOW (cst);
795
796 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
797 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
798
799 hostval = val;
800 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
801 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
802
803 if (val != hostval)
804 return 1;
805
806 *p = hostval;
807 return 0;
808 }
809
810 /* Similar to save_expr, but assumes that arbitrary code is not executed
811 in between the multiple evaluations. In particular, we assume that a
812 non-addressable local variable will not be modified. */
813
814 static tree
815 builtin_save_expr (tree exp)
816 {
817 if (TREE_CODE (exp) == SSA_NAME
818 || (TREE_ADDRESSABLE (exp) == 0
819 && (TREE_CODE (exp) == PARM_DECL
820 || (VAR_P (exp) && !TREE_STATIC (exp)))))
821 return exp;
822
823 return save_expr (exp);
824 }
825
826 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
827 times to get the address of either a higher stack frame, or a return
828 address located within it (depending on FNDECL_CODE). */
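
/* At the source level this implements, e.g., __builtin_return_address (0),
   which yields the return address of the current function, and
   __builtin_frame_address (1), which yields the caller's frame address
   (illustrative uses of the documented builtins).  */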
829
830 static rtx
831 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
832 {
833 int i;
834 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
835 if (tem == NULL_RTX)
836 {
837 /* For a zero count with __builtin_return_address, we don't care what
838 frame address we return, because target-specific definitions will
839 override us. Therefore frame pointer elimination is OK, and using
840 the soft frame pointer is OK.
841
842 For a nonzero count, or a zero count with __builtin_frame_address,
843 we require a stable offset from the current frame pointer to the
844 previous one, so we must use the hard frame pointer, and
845 we must disable frame pointer elimination. */
846 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
847 tem = frame_pointer_rtx;
848 else
849 {
850 tem = hard_frame_pointer_rtx;
851
852 /* Tell reload not to eliminate the frame pointer. */
853 crtl->accesses_prior_frames = 1;
854 }
855 }
856
857 if (count > 0)
858 SETUP_FRAME_ADDRESSES ();
859
860 /* On the SPARC, the return address is not in the frame, it is in a
861 register. There is no way to access it off of the current frame
862 pointer, but it can be accessed off the previous frame pointer by
863 reading the value from the register window save area. */
864 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
865 count--;
866
867 /* Scan back COUNT frames to the specified frame. */
868 for (i = 0; i < count; i++)
869 {
870 /* Assume the dynamic chain pointer is in the word that the
871 frame address points to, unless otherwise specified. */
872 tem = DYNAMIC_CHAIN_ADDRESS (tem);
873 tem = memory_address (Pmode, tem);
874 tem = gen_frame_mem (Pmode, tem);
875 tem = copy_to_reg (tem);
876 }
877
878 /* For __builtin_frame_address, return what we've got. But, on
879 the SPARC for example, we may have to add a bias. */
880 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
881 return FRAME_ADDR_RTX (tem);
882
883 /* For __builtin_return_address, get the return address from that frame. */
884 #ifdef RETURN_ADDR_RTX
885 tem = RETURN_ADDR_RTX (count, tem);
886 #else
887 tem = memory_address (Pmode,
888 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
889 tem = gen_frame_mem (Pmode, tem);
890 #endif
891 return tem;
892 }
893
894 /* Alias set used for setjmp buffer. */
895 static alias_set_type setjmp_alias_set = -1;
896
897 /* Construct the leading half of a __builtin_setjmp call. Control will
898 return to RECEIVER_LABEL. This is also called directly by the SJLJ
899 exception handling code. */
900
901 void
902 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
903 {
904 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
905 rtx stack_save;
906 rtx mem;
907
908 if (setjmp_alias_set == -1)
909 setjmp_alias_set = new_alias_set ();
910
911 buf_addr = convert_memory_address (Pmode, buf_addr);
912
913 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
914
915 /* We store the frame pointer and the address of receiver_label in
916 the buffer and use the rest of it for the stack save area, which
917 is machine-dependent. */
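
   /* Schematically, with word-sized slots (see the stores below):
	buf[0]   : frame pointer
	buf[1]   : address of RECEIVER_LABEL
	buf[2]...: machine-dependent stack save area.  */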
918
919 mem = gen_rtx_MEM (Pmode, buf_addr);
920 set_mem_alias_set (mem, setjmp_alias_set);
921 emit_move_insn (mem, hard_frame_pointer_rtx);
922
923 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
924 GET_MODE_SIZE (Pmode))),
925 set_mem_alias_set (mem, setjmp_alias_set);
926
927 emit_move_insn (validize_mem (mem),
928 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
929
930 stack_save = gen_rtx_MEM (sa_mode,
931 plus_constant (Pmode, buf_addr,
932 2 * GET_MODE_SIZE (Pmode)));
933 set_mem_alias_set (stack_save, setjmp_alias_set);
934 emit_stack_save (SAVE_NONLOCAL, &stack_save);
935
936 /* If there is further processing to do, do it. */
937 if (targetm.have_builtin_setjmp_setup ())
938 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
939
940 /* We have a nonlocal label. */
941 cfun->has_nonlocal_label = 1;
942 }
943
944 /* Construct the trailing part of a __builtin_setjmp call. This is
945 also called directly by the SJLJ exception handling code.
946 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
947
948 void
949 expand_builtin_setjmp_receiver (rtx receiver_label)
950 {
951 rtx chain;
952
953 /* Mark the FP as used when we get here, so we have to make sure it's
954 marked as used by this function. */
955 emit_use (hard_frame_pointer_rtx);
956
957 /* Mark the static chain as clobbered here so life information
958 doesn't get messed up for it. */
959 chain = rtx_for_static_chain (current_function_decl, true);
960 if (chain && REG_P (chain))
961 emit_clobber (chain);
962
963 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
964 {
965 /* If the argument pointer can be eliminated in favor of the
966 frame pointer, we don't need to restore it. We assume here
967 that if such an elimination is present, it can always be used.
968 This is the case on all known machines; if we don't make this
969 assumption, we do unnecessary saving on many machines. */
970 size_t i;
971 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
972
973 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
974 if (elim_regs[i].from == ARG_POINTER_REGNUM
975 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
976 break;
977
978 if (i == ARRAY_SIZE (elim_regs))
979 {
980 /* Now restore our arg pointer from the address at which it
981 was saved in our stack frame. */
982 emit_move_insn (crtl->args.internal_arg_pointer,
983 copy_to_reg (get_arg_pointer_save_area ()));
984 }
985 }
986
987 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
988 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
989 else if (targetm.have_nonlocal_goto_receiver ())
990 emit_insn (targetm.gen_nonlocal_goto_receiver ());
991 else
992 { /* Nothing */ }
993
994 /* We must not allow the code we just generated to be reordered by
995 scheduling. Specifically, the update of the frame pointer must
996 happen immediately, not later. */
997 emit_insn (gen_blockage ());
998 }
999
1000 /* __builtin_longjmp is passed a pointer to an array of five words (not
1001 all will be used on all machines). It operates similarly to the C
1002 library function of the same name, but is more efficient. Much of
1003 the code below is copied from the handling of non-local gotos. */
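
/* An illustrative source-level pairing (these builtins are intended for
   internal exception-handling use only):

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       handle_jump ();   // reached via __builtin_longjmp (buf, 1)

   where do_work and handle_jump are hypothetical, and the second argument
   to __builtin_longjmp must be 1, as asserted below.  */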
1004
1005 static void
1006 expand_builtin_longjmp (rtx buf_addr, rtx value)
1007 {
1008 rtx fp, lab, stack;
1009 rtx_insn *insn, *last;
1010 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1011
1012 /* DRAP is needed for stack realignment if longjmp is expanded to the current
1013 function. */
1014 if (SUPPORTS_STACK_ALIGNMENT)
1015 crtl->need_drap = true;
1016
1017 if (setjmp_alias_set == -1)
1018 setjmp_alias_set = new_alias_set ();
1019
1020 buf_addr = convert_memory_address (Pmode, buf_addr);
1021
1022 buf_addr = force_reg (Pmode, buf_addr);
1023
1024 /* We require the user to pass a second argument of 1, because
1025 that is what builtin_setjmp will return. */
1026 gcc_assert (value == const1_rtx);
1027
1028 last = get_last_insn ();
1029 if (targetm.have_builtin_longjmp ())
1030 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1031 else
1032 {
1033 fp = gen_rtx_MEM (Pmode, buf_addr);
1034 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode)));
1036
1037 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1038 2 * GET_MODE_SIZE (Pmode)));
1039 set_mem_alias_set (fp, setjmp_alias_set);
1040 set_mem_alias_set (lab, setjmp_alias_set);
1041 set_mem_alias_set (stack, setjmp_alias_set);
1042
1043 /* Pick up FP, label, and SP from the block and jump. This code is
1044 from expand_goto in stmt.cc; see there for detailed comments. */
1045 if (targetm.have_nonlocal_goto ())
1046 /* We have to pass a value to the nonlocal_goto pattern that will
1047 get copied into the static_chain pointer, but it does not matter
1048 what that value is, because builtin_setjmp does not use it. */
1049 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1050 else
1051 {
1052 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1053 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1054
1055 lab = copy_to_reg (lab);
1056
1057 /* Restore the frame pointer and stack pointer. We must use a
1058 temporary since the setjmp buffer may be a local. */
1059 fp = copy_to_reg (fp);
1060 emit_stack_restore (SAVE_NONLOCAL, stack);
1061
1062 /* Ensure the frame pointer move is not optimized. */
1063 emit_insn (gen_blockage ());
1064 emit_clobber (hard_frame_pointer_rtx);
1065 emit_clobber (frame_pointer_rtx);
1066 emit_move_insn (hard_frame_pointer_rtx, fp);
1067
1068 emit_use (hard_frame_pointer_rtx);
1069 emit_use (stack_pointer_rtx);
1070 emit_indirect_jump (lab);
1071 }
1072 }
1073
1074 /* Search backwards and mark the jump insn as a non-local goto.
1075 Note that this precludes the use of __builtin_longjmp to a
1076 __builtin_setjmp target in the same function. However, we've
1077 already cautioned the user that these functions are for
1078 internal exception handling use only. */
1079 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1080 {
1081 gcc_assert (insn != last);
1082
1083 if (JUMP_P (insn))
1084 {
1085 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1086 break;
1087 }
1088 else if (CALL_P (insn))
1089 break;
1090 }
1091 }
1092
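/* Return true if ITER has not yet run over all arguments of the
   CALL_EXPR it was initialized with.  */
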
1093 static inline bool
1094 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1095 {
1096 return (iter->i < iter->n);
1097 }
1098
1099 /* This function validates the types of a function call argument list
1100 against a specified list of tree_codes. If the last specifier is a 0,
1101 that represents an ellipsis, otherwise the last specifier must be a
1102 VOID_TYPE. */
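
/* For example, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
   validate_arglist (exp, POINTER_TYPE, 0) accepts a leading pointer followed
   by any further arguments; both forms are used later in this file.  */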
1103
1104 static bool
1105 validate_arglist (const_tree callexpr, ...)
1106 {
1107 enum tree_code code;
1108 bool res = 0;
1109 va_list ap;
1110 const_call_expr_arg_iterator iter;
1111 const_tree arg;
1112
1113 va_start (ap, callexpr);
1114 init_const_call_expr_arg_iterator (callexpr, &iter);
1115
1116 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1117 tree fn = CALL_EXPR_FN (callexpr);
1118 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1119
1120 for (unsigned argno = 1; ; ++argno)
1121 {
1122 code = (enum tree_code) va_arg (ap, int);
1123
1124 switch (code)
1125 {
1126 case 0:
1127 /* This signifies an ellipsis; any further arguments are all ok. */
1128 res = true;
1129 goto end;
1130 case VOID_TYPE:
1131 /* This signifies an endlink; if no arguments remain, return
1132 true, otherwise return false. */
1133 res = !more_const_call_expr_args_p (&iter);
1134 goto end;
1135 case POINTER_TYPE:
1136 /* The actual argument must be nonnull when either the whole
1137 called function has been declared nonnull, or when the formal
1138 argument corresponding to the actual argument has been. */
1139 if (argmap
1140 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1141 {
1142 arg = next_const_call_expr_arg (&iter);
1143 if (!validate_arg (arg, code) || integer_zerop (arg))
1144 goto end;
1145 break;
1146 }
1147 /* FALLTHRU */
1148 default:
1149 /* If no parameters remain or the parameter's code does not
1150 match the specified code, return false. Otherwise continue
1151 checking any remaining arguments. */
1152 arg = next_const_call_expr_arg (&iter);
1153 if (!validate_arg (arg, code))
1154 goto end;
1155 break;
1156 }
1157 }
1158
1159 /* We need gotos here since we can only have one VA_CLOSE in a
1160 function. */
1161 end: ;
1162 va_end (ap);
1163
1164 BITMAP_FREE (argmap);
1165
1166 return res;
1167 }
1168
1169 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1170 and the address of the save area. */
1171
1172 static rtx
1173 expand_builtin_nonlocal_goto (tree exp)
1174 {
1175 tree t_label, t_save_area;
1176 rtx r_label, r_save_area, r_fp, r_sp;
1177 rtx_insn *insn;
1178
1179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1180 return NULL_RTX;
1181
1182 t_label = CALL_EXPR_ARG (exp, 0);
1183 t_save_area = CALL_EXPR_ARG (exp, 1);
1184
1185 r_label = expand_normal (t_label);
1186 r_label = convert_memory_address (Pmode, r_label);
1187 r_save_area = expand_normal (t_save_area);
1188 r_save_area = convert_memory_address (Pmode, r_save_area);
1189 /* Copy the address of the save location to a register just in case it was
1190 based on the frame pointer. */
1191 r_save_area = copy_to_reg (r_save_area);
1192 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1193 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1194 plus_constant (Pmode, r_save_area,
1195 GET_MODE_SIZE (Pmode)));
1196
1197 crtl->has_nonlocal_goto = 1;
1198
1199 /* ??? We no longer need to pass the static chain value, afaik. */
1200 if (targetm.have_nonlocal_goto ())
1201 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1202 else
1203 {
1204 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1205 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1206
1207 r_label = copy_to_reg (r_label);
1208
1209 /* Restore the frame pointer and stack pointer. We must use a
1210 temporary since the setjmp buffer may be a local. */
1211 r_fp = copy_to_reg (r_fp);
1212 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1213
1214 /* Ensure the frame pointer move is not optimized. */
1215 emit_insn (gen_blockage ());
1216 emit_clobber (hard_frame_pointer_rtx);
1217 emit_clobber (frame_pointer_rtx);
1218 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1219
1220 /* USE of hard_frame_pointer_rtx added for consistency;
1221 not clear if really needed. */
1222 emit_use (hard_frame_pointer_rtx);
1223 emit_use (stack_pointer_rtx);
1224
1225 /* If the architecture is using a GP register, we must
1226 conservatively assume that the target function makes use of it.
1227 The prologue of functions with nonlocal gotos must therefore
1228 initialize the GP register to the appropriate value, and we
1229 must then make sure that this value is live at the point
1230 of the jump. (Note that this doesn't necessarily apply
1231 to targets with a nonlocal_goto pattern; they are free
1232 to implement it in their own way. Note also that this is
1233 a no-op if the GP register is a global invariant.) */
1234 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1235 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1236 emit_use (pic_offset_table_rtx);
1237
1238 emit_indirect_jump (r_label);
1239 }
1240
1241 /* Search backwards to the jump insn and mark it as a
1242 non-local goto. */
1243 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1244 {
1245 if (JUMP_P (insn))
1246 {
1247 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1248 break;
1249 }
1250 else if (CALL_P (insn))
1251 break;
1252 }
1253
1254 return const0_rtx;
1255 }
1256
1257 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1258 (not all will be used on all machines) that was passed to __builtin_setjmp.
1259 It updates the stack pointer in that block to the current value. This is
1260 also called directly by the SJLJ exception handling code. */
1261
1262 void
1263 expand_builtin_update_setjmp_buf (rtx buf_addr)
1264 {
1265 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1266 buf_addr = convert_memory_address (Pmode, buf_addr);
1267 rtx stack_save
1268 = gen_rtx_MEM (sa_mode,
1269 memory_address
1270 (sa_mode,
1271 plus_constant (Pmode, buf_addr,
1272 2 * GET_MODE_SIZE (Pmode))));
1273
1274 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1275 }
1276
1277 /* Expand a call to __builtin_prefetch. For a target that does not support
1278 data prefetch, evaluate the memory address argument in case it has side
1279 effects. */
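
/* For example, __builtin_prefetch (p, 0, 3) requests a read prefetch of *p
   with maximum temporal locality; the read/write and locality arguments
   must be compile-time constants, as checked below.  */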
1280
1281 static void
1282 expand_builtin_prefetch (tree exp)
1283 {
1284 tree arg0, arg1, arg2;
1285 int nargs;
1286 rtx op0, op1, op2;
1287
1288 if (!validate_arglist (exp, POINTER_TYPE, 0))
1289 return;
1290
1291 arg0 = CALL_EXPR_ARG (exp, 0);
1292
1293 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1294 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1295 locality). */
1296 nargs = call_expr_nargs (exp);
1297 if (nargs > 1)
1298 arg1 = CALL_EXPR_ARG (exp, 1);
1299 else
1300 arg1 = integer_zero_node;
1301 if (nargs > 2)
1302 arg2 = CALL_EXPR_ARG (exp, 2);
1303 else
1304 arg2 = integer_three_node;
1305
1306 /* Argument 0 is an address. */
1307 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1308
1309 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1310 if (TREE_CODE (arg1) != INTEGER_CST)
1311 {
1312 error ("second argument to %<__builtin_prefetch%> must be a constant");
1313 arg1 = integer_zero_node;
1314 }
1315 op1 = expand_normal (arg1);
1316 /* Argument 1 must be either zero or one. */
1317 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1318 {
1319 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1320 " using zero");
1321 op1 = const0_rtx;
1322 }
1323
1324 /* Argument 2 (locality) must be a compile-time constant int. */
1325 if (TREE_CODE (arg2) != INTEGER_CST)
1326 {
1327 error ("third argument to %<__builtin_prefetch%> must be a constant");
1328 arg2 = integer_zero_node;
1329 }
1330 op2 = expand_normal (arg2);
1331 /* Argument 2 must be 0, 1, 2, or 3. */
1332 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1333 {
1334 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1335 op2 = const0_rtx;
1336 }
1337
1338 if (targetm.have_prefetch ())
1339 {
1340 class expand_operand ops[3];
1341
1342 create_address_operand (&ops[0], op0);
1343 create_integer_operand (&ops[1], INTVAL (op1));
1344 create_integer_operand (&ops[2], INTVAL (op2));
1345 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1346 return;
1347 }
1348
1349 /* Don't do anything with direct references to volatile memory, but
1350 generate code to handle other side effects. */
1351 if (!MEM_P (op0) && side_effects_p (op0))
1352 emit_insn (op0);
1353 }
1354
1355 /* Get a MEM rtx for expression EXP which is the address of an operand
1356 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1357 the maximum length of the block of memory that might be accessed or
1358 NULL if unknown. */
1359
1360 rtx
1361 get_memory_rtx (tree exp, tree len)
1362 {
1363 tree orig_exp = exp, base;
1364 rtx addr, mem;
1365
1366 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1367 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1368 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1369 exp = TREE_OPERAND (exp, 0);
1370
1371 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1372 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1373
1374 /* Get an expression we can use to find the attributes to assign to MEM.
1375 First remove any nops. */
1376 while (CONVERT_EXPR_P (exp)
1377 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1378 exp = TREE_OPERAND (exp, 0);
1379
1380 /* Build a MEM_REF representing the whole accessed area as a byte blob
1381 (as builtin stringops may alias with anything). */
1382 exp = fold_build2 (MEM_REF,
1383 build_array_type (char_type_node,
1384 build_range_type (sizetype,
1385 size_one_node, len)),
1386 exp, build_int_cst (ptr_type_node, 0));
1387
1388 /* If the MEM_REF has no acceptable address, try to get the base object
1389 from the original address we got, and build an all-aliasing
1390 unknown-sized access to that one. */
1391 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1392 set_mem_attributes (mem, exp, 0);
1393 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1394 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1395 0))))
1396 {
1397 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1398 exp = build_fold_addr_expr (base);
1399 exp = fold_build2 (MEM_REF,
1400 build_array_type (char_type_node,
1401 build_range_type (sizetype,
1402 size_zero_node,
1403 NULL)),
1404 exp, build_int_cst (ptr_type_node, 0));
1405 set_mem_attributes (mem, exp, 0);
1406 /* Since we stripped parts, make sure the offset is unknown and the
1407 alignment is computed from the original address. */
1408 clear_mem_offset (mem);
1409 set_mem_align (mem, align);
1410 }
1411 set_mem_alias_set (mem, 0);
1412 return mem;
1413 }
1414
1415 /* Built-in functions to perform an untyped call and return. */
1416
1417 #define apply_args_mode \
1418 (this_target_builtins->x_apply_args_mode)
1419 #define apply_result_mode \
1420 (this_target_builtins->x_apply_result_mode)
1421
1422 /* Return the size required for the block returned by __builtin_apply_args,
1423 and initialize apply_args_mode. */
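
/* Schematically, the block whose size is computed here holds, in order:
     the incoming arg pointer,
     the structure value address (unless passed as a hidden first argument),
     one suitably aligned slot per argument-passing hard register.  */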
1424
1425 static int
1426 apply_args_size (void)
1427 {
1428 static int size = -1;
1429 int align;
1430 unsigned int regno;
1431
1432 /* The values computed by this function never change. */
1433 if (size < 0)
1434 {
1435 /* The first value is the incoming arg-pointer. */
1436 size = GET_MODE_SIZE (Pmode);
1437
1438 /* The second value is the structure value address unless this is
1439 passed as an "invisible" first argument. */
1440 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1441 size += GET_MODE_SIZE (Pmode);
1442
1443 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1444 if (FUNCTION_ARG_REGNO_P (regno))
1445 {
1446 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1447
1448 gcc_assert (mode != VOIDmode);
1449
1450 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1451 if (size % align != 0)
1452 size = CEIL (size, align) * align;
1453 size += GET_MODE_SIZE (mode);
1454 apply_args_mode[regno] = mode;
1455 }
1456 else
1457 {
1458 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1459 }
1460 }
1461 return size;
1462 }
1463
1464 /* Return the size required for the block returned by __builtin_apply,
1465 and initialize apply_result_mode. */
1466
1467 static int
1468 apply_result_size (void)
1469 {
1470 static int size = -1;
1471 int align, regno;
1472
1473 /* The values computed by this function never change. */
1474 if (size < 0)
1475 {
1476 size = 0;
1477
1478 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1479 if (targetm.calls.function_value_regno_p (regno))
1480 {
1481 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1482
1483 gcc_assert (mode != VOIDmode);
1484
1485 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1486 if (size % align != 0)
1487 size = CEIL (size, align) * align;
1488 size += GET_MODE_SIZE (mode);
1489 apply_result_mode[regno] = mode;
1490 }
1491 else
1492 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1493
1494 /* Allow targets that use untyped_call and untyped_return to override
1495 the size so that machine-specific information can be stored here. */
1496 #ifdef APPLY_RESULT_SIZE
1497 size = APPLY_RESULT_SIZE;
1498 #endif
1499 }
1500 return size;
1501 }
1502
1503 /* Create a vector describing the result block RESULT. If SAVEP is true,
1504 the result block is used to save the values; otherwise it is used to
1505 restore the values. */
1506
1507 static rtx
1508 result_vector (int savep, rtx result)
1509 {
1510 int regno, size, align, nelts;
1511 fixed_size_mode mode;
1512 rtx reg, mem;
1513 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1514
1515 size = nelts = 0;
1516 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1517 if ((mode = apply_result_mode[regno]) != VOIDmode)
1518 {
1519 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1520 if (size % align != 0)
1521 size = CEIL (size, align) * align;
1522 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1523 mem = adjust_address (result, mode, size);
1524 savevec[nelts++] = (savep
1525 ? gen_rtx_SET (mem, reg)
1526 : gen_rtx_SET (reg, mem));
1527 size += GET_MODE_SIZE (mode);
1528 }
1529 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1530 }
1531
1532 /* Save the state required to perform an untyped call with the same
1533 arguments as were passed to the current function. */
1534
1535 static rtx
1536 expand_builtin_apply_args_1 (void)
1537 {
1538 rtx registers, tem;
1539 int size, align, regno;
1540 fixed_size_mode mode;
1541 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1542
1543 /* Create a block where the arg-pointer, structure value address,
1544 and argument registers can be saved. */
1545 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1546
1547 /* Walk past the arg-pointer and structure value address. */
1548 size = GET_MODE_SIZE (Pmode);
1549 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1550 size += GET_MODE_SIZE (Pmode);
1551
1552 /* Save each register used in calling a function to the block. */
1553 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1554 if ((mode = apply_args_mode[regno]) != VOIDmode)
1555 {
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
1559
1560 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1561
1562 emit_move_insn (adjust_address (registers, mode, size), tem);
1563 size += GET_MODE_SIZE (mode);
1564 }
1565
1566 /* Save the arg pointer to the block. */
1567 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1568 /* We need the pointer as the caller actually passed it to us, not
1569 as we might have pretended it was passed. Make sure it's a valid
1570 operand, as emit_move_insn isn't expected to handle a PLUS. */
1571 if (STACK_GROWS_DOWNWARD)
1572 tem
1573 = force_operand (plus_constant (Pmode, tem,
1574 crtl->args.pretend_args_size),
1575 NULL_RTX);
1576 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1577
1578 size = GET_MODE_SIZE (Pmode);
1579
1580 /* Save the structure value address unless this is passed as an
1581 "invisible" first argument. */
1582 if (struct_incoming_value)
1583 emit_move_insn (adjust_address (registers, Pmode, size),
1584 copy_to_reg (struct_incoming_value));
1585
1586 /* Return the address of the block. */
1587 return copy_addr_to_reg (XEXP (registers, 0));
1588 }
1589
1590 /* __builtin_apply_args returns a block of memory allocated on
1591 the stack into which is stored the arg pointer, structure
1592 value address, static chain, and all the registers that might
1593 possibly be used in performing a function call. The code is
1594 moved to the start of the function so the incoming values are
1595 saved. */
1596
1597 static rtx
1598 expand_builtin_apply_args (void)
1599 {
1600 /* Don't do __builtin_apply_args more than once in a function.
1601 Save the result of the first call and reuse it. */
1602 if (apply_args_value != 0)
1603 return apply_args_value;
1604 {
1605 /* When this function is called, it means that registers must be
1606 saved on entry to this function. So we migrate the
1607 call to the first insn of this function. */
1608 rtx temp;
1609
1610 start_sequence ();
1611 temp = expand_builtin_apply_args_1 ();
1612 rtx_insn *seq = get_insns ();
1613 end_sequence ();
1614
1615 apply_args_value = temp;
1616
1617 /* Put the insns after the NOTE that starts the function.
1618 If this is inside a start_sequence, make the outer-level insn
1619 chain current, so the code is placed at the start of the
1620 function. If internal_arg_pointer is a non-virtual pseudo,
1621 it needs to be placed after the function that initializes
1622 that pseudo. */
1623 push_topmost_sequence ();
1624 if (REG_P (crtl->args.internal_arg_pointer)
1625 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1626 emit_insn_before (seq, parm_birth_insn);
1627 else
1628 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1629 pop_topmost_sequence ();
1630 return temp;
1631 }
1632 }
1633
1634 /* Perform an untyped call and save the state required to perform an
1635 untyped return of whatever value was returned by the given function. */
1636
1637 static rtx
1638 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1639 {
1640 int size, align, regno;
1641 fixed_size_mode mode;
1642 rtx incoming_args, result, reg, dest, src;
1643 rtx_call_insn *call_insn;
1644 rtx old_stack_level = 0;
1645 rtx call_fusage = 0;
1646 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1647
1648 arguments = convert_memory_address (Pmode, arguments);
1649
1650 /* Create a block where the return registers can be saved. */
1651 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1652
1653 /* Fetch the arg pointer from the ARGUMENTS block. */
1654 incoming_args = gen_reg_rtx (Pmode);
1655 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1656 if (!STACK_GROWS_DOWNWARD)
1657 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1658 incoming_args, 0, OPTAB_LIB_WIDEN);
1659
1660 /* Push a new argument block and copy the arguments. Do not allow
1661 the (potential) memcpy call below to interfere with our stack
1662 manipulations. */
1663 do_pending_stack_adjust ();
1664 NO_DEFER_POP;
1665
1666 /* Save the stack with nonlocal if available. */
1667 if (targetm.have_save_stack_nonlocal ())
1668 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1669 else
1670 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1671
1672 /* Allocate a block of memory onto the stack and copy the memory
1673 arguments to the outgoing arguments address. We can pass TRUE
1674 as the 4th argument because we just saved the stack pointer
1675 and will restore it right after the call. */
1676 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1677
1678 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1679 may have already set current_function_calls_alloca to true.
1680 current_function_calls_alloca won't be set if argsize is zero,
1681 so we have to guarantee need_drap is true here. */
1682 if (SUPPORTS_STACK_ALIGNMENT)
1683 crtl->need_drap = true;
1684
1685 dest = virtual_outgoing_args_rtx;
1686 if (!STACK_GROWS_DOWNWARD)
1687 {
1688 if (CONST_INT_P (argsize))
1689 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1690 else
1691 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1692 }
1693 dest = gen_rtx_MEM (BLKmode, dest);
1694 set_mem_align (dest, PARM_BOUNDARY);
1695 src = gen_rtx_MEM (BLKmode, incoming_args);
1696 set_mem_align (src, PARM_BOUNDARY);
1697 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1698
1699 /* Refer to the argument block. */
1700 apply_args_size ();
1701 arguments = gen_rtx_MEM (BLKmode, arguments);
1702 set_mem_align (arguments, PARM_BOUNDARY);
1703
1704 /* Walk past the arg-pointer and structure value address. */
1705 size = GET_MODE_SIZE (Pmode);
1706 if (struct_value)
1707 size += GET_MODE_SIZE (Pmode);
1708
1709 /* Restore each of the registers previously saved. Make USE insns
1710 for each of these registers for use in making the call. */
1711 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1712 if ((mode = apply_args_mode[regno]) != VOIDmode)
1713 {
1714 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1715 if (size % align != 0)
1716 size = CEIL (size, align) * align;
1717 reg = gen_rtx_REG (mode, regno);
1718 emit_move_insn (reg, adjust_address (arguments, mode, size));
1719 use_reg (&call_fusage, reg);
1720 size += GET_MODE_SIZE (mode);
1721 }
1722
1723 /* Restore the structure value address unless this is passed as an
1724 "invisible" first argument. */
1725 size = GET_MODE_SIZE (Pmode);
1726 if (struct_value)
1727 {
1728 rtx value = gen_reg_rtx (Pmode);
1729 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1730 emit_move_insn (struct_value, value);
1731 if (REG_P (struct_value))
1732 use_reg (&call_fusage, struct_value);
1733 }
1734
1735 /* All arguments and registers used for the call are set up by now! */
1736 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1737
1738 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1739 and we don't want to load it into a register as an optimization,
1740 because prepare_call_address already did it if it should be done. */
1741 if (GET_CODE (function) != SYMBOL_REF)
1742 function = memory_address (FUNCTION_MODE, function);
1743
1744 /* Generate the actual call instruction and save the return value. */
1745 if (targetm.have_untyped_call ())
1746 {
1747 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1748 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1749 result_vector (1, result));
1750 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1751 if (CALL_P (insn))
1752 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1753 emit_insn (seq);
1754 }
1755 else if (targetm.have_call_value ())
1756 {
1757 rtx valreg = 0;
1758
1759 /* Locate the unique return register. It is not possible to
1760 express a call that sets more than one return register using
1761 call_value; use untyped_call for that. In fact, untyped_call
1762 only needs to save the return registers in the given block. */
1763 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1764 if ((mode = apply_result_mode[regno]) != VOIDmode)
1765 {
1766 gcc_assert (!valreg); /* have_untyped_call required. */
1767
1768 valreg = gen_rtx_REG (mode, regno);
1769 }
1770
1771 emit_insn (targetm.gen_call_value (valreg,
1772 gen_rtx_MEM (FUNCTION_MODE, function),
1773 const0_rtx, NULL_RTX, const0_rtx));
1774
1775 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1776 }
1777 else
1778 gcc_unreachable ();
1779
1780 /* Find the CALL insn we just emitted, and attach the register usage
1781 information. */
1782 call_insn = last_call_insn ();
1783 add_function_usage_to (call_insn, call_fusage);
1784
1785 /* Restore the stack. */
1786 if (targetm.have_save_stack_nonlocal ())
1787 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1788 else
1789 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1790 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1791
1792 OK_DEFER_POP;
1793
1794 /* Return the address of the result block. */
1795 result = copy_addr_to_reg (XEXP (result, 0));
1796 return convert_memory_address (ptr_mode, result);
1797 }
1798
1799 /* Perform an untyped return. */
1800
1801 static void
1802 expand_builtin_return (rtx result)
1803 {
1804 int size, align, regno;
1805 fixed_size_mode mode;
1806 rtx reg;
1807 rtx_insn *call_fusage = 0;
1808
1809 result = convert_memory_address (Pmode, result);
1810
1811 apply_result_size ();
1812 result = gen_rtx_MEM (BLKmode, result);
1813
1814 if (targetm.have_untyped_return ())
1815 {
1816 rtx vector = result_vector (0, result);
1817 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1818 emit_barrier ();
1819 return;
1820 }
1821
1822 /* Restore the return value and note that each value is used. */
1823 size = 0;
1824 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1825 if ((mode = apply_result_mode[regno]) != VOIDmode)
1826 {
1827 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1828 if (size % align != 0)
1829 size = CEIL (size, align) * align;
1830 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1831 emit_move_insn (reg, adjust_address (result, mode, size));
1832
1833 push_to_sequence (call_fusage);
1834 emit_use (reg);
1835 call_fusage = get_insns ();
1836 end_sequence ();
1837 size += GET_MODE_SIZE (mode);
1838 }
1839
1840 /* Put the USE insns before the return. */
1841 emit_insn (call_fusage);
1842
1843 /* Return whatever values were restored by jumping directly to the end
1844 of the function. */
1845 expand_naked_return ();
1846 }
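/* Taken together, the three expanders above implement GCC's untyped-call
   extension.  A minimal sketch of how user code combines them (FN and the
   64-byte argument-block bound are hypothetical placeholders, not anything
   defined in this file):

     int forward_to_fn (int a, ...)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (result);
     }
*/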
1847
1848 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1849
1850 static enum type_class
1851 type_to_class (tree type)
1852 {
1853 switch (TREE_CODE (type))
1854 {
1855 case VOID_TYPE: return void_type_class;
1856 case INTEGER_TYPE: return integer_type_class;
1857 case ENUMERAL_TYPE: return enumeral_type_class;
1858 case BOOLEAN_TYPE: return boolean_type_class;
1859 case POINTER_TYPE: return pointer_type_class;
1860 case REFERENCE_TYPE: return reference_type_class;
1861 case OFFSET_TYPE: return offset_type_class;
1862 case REAL_TYPE: return real_type_class;
1863 case COMPLEX_TYPE: return complex_type_class;
1864 case FUNCTION_TYPE: return function_type_class;
1865 case METHOD_TYPE: return method_type_class;
1866 case RECORD_TYPE: return record_type_class;
1867 case UNION_TYPE:
1868 case QUAL_UNION_TYPE: return union_type_class;
1869 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1870 ? string_type_class : array_type_class);
1871 case LANG_TYPE: return lang_type_class;
1872 case OPAQUE_TYPE: return opaque_type_class;
1873 default: return no_type_class;
1874 }
1875 }
1876
1877 /* Expand a call EXP to __builtin_classify_type. */
1878
1879 static rtx
1880 expand_builtin_classify_type (tree exp)
1881 {
1882 if (call_expr_nargs (exp))
1883 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1884 return GEN_INT (no_type_class);
1885 }
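/* For illustration (not required by anything in this file):
   __builtin_classify_type (3.14) evaluates to real_type_class and
   __builtin_classify_type (42) to integer_type_class, while a call with
   no argument falls back to no_type_class as above.  */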
1886
1887 /* This helper macro, meant to be used in mathfn_built_in below, determines
1888 which among a set of builtin math functions is appropriate for a given type
1889 mode. The `F' (float) and `L' (long double) are automatically generated
1890 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1891 types, there are additional types that are considered with 'F32', 'F64',
1892 'F128', etc. suffixes. */
1893 #define CASE_MATHFN(MATHFN) \
1894 CASE_CFN_##MATHFN: \
1895 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1896 fcodel = BUILT_IN_##MATHFN##L ; break;
1897 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1898 types. */
1899 #define CASE_MATHFN_FLOATN(MATHFN) \
1900 CASE_CFN_##MATHFN: \
1901 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1902 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1903 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1904 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1905 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1906 break;
1907 /* Similar to above, but appends _R after any F/L suffix. */
1908 #define CASE_MATHFN_REENT(MATHFN) \
1909 case CFN_BUILT_IN_##MATHFN##_R: \
1910 case CFN_BUILT_IN_##MATHFN##F_R: \
1911 case CFN_BUILT_IN_##MATHFN##L_R: \
1912 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1913 fcodel = BUILT_IN_##MATHFN##L_R ; break;
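/* As a rough sketch of what these case macros generate, CASE_MATHFN (SIN)
   expands to something along the lines of

     case CFN_BUILT_IN_SIN: case CFN_BUILT_IN_SINF: case CFN_BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   with the exact set of CFN_ case labels supplied by the generated
   CASE_CFN_SIN macro from case-cfn-macros.h.  */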
1914
1915 /* Return a function equivalent to FN but operating on floating-point
1916 values of type TYPE, or END_BUILTINS if no such function exists.
1917 This is purely an operation on function codes; it does not guarantee
1918 that the target actually has an implementation of the function. */
1919
1920 static built_in_function
1921 mathfn_built_in_2 (tree type, combined_fn fn)
1922 {
1923 tree mtype;
1924 built_in_function fcode, fcodef, fcodel;
1925 built_in_function fcodef16 = END_BUILTINS;
1926 built_in_function fcodef32 = END_BUILTINS;
1927 built_in_function fcodef64 = END_BUILTINS;
1928 built_in_function fcodef128 = END_BUILTINS;
1929 built_in_function fcodef32x = END_BUILTINS;
1930 built_in_function fcodef64x = END_BUILTINS;
1931 built_in_function fcodef128x = END_BUILTINS;
1932
1933 switch (fn)
1934 {
1935 #define SEQ_OF_CASE_MATHFN \
1936 CASE_MATHFN (ACOS) \
1937 CASE_MATHFN (ACOSH) \
1938 CASE_MATHFN (ASIN) \
1939 CASE_MATHFN (ASINH) \
1940 CASE_MATHFN (ATAN) \
1941 CASE_MATHFN (ATAN2) \
1942 CASE_MATHFN (ATANH) \
1943 CASE_MATHFN (CBRT) \
1944 CASE_MATHFN_FLOATN (CEIL) \
1945 CASE_MATHFN (CEXPI) \
1946 CASE_MATHFN_FLOATN (COPYSIGN) \
1947 CASE_MATHFN (COS) \
1948 CASE_MATHFN (COSH) \
1949 CASE_MATHFN (DREM) \
1950 CASE_MATHFN (ERF) \
1951 CASE_MATHFN (ERFC) \
1952 CASE_MATHFN (EXP) \
1953 CASE_MATHFN (EXP10) \
1954 CASE_MATHFN (EXP2) \
1955 CASE_MATHFN (EXPM1) \
1956 CASE_MATHFN (FABS) \
1957 CASE_MATHFN (FDIM) \
1958 CASE_MATHFN_FLOATN (FLOOR) \
1959 CASE_MATHFN_FLOATN (FMA) \
1960 CASE_MATHFN_FLOATN (FMAX) \
1961 CASE_MATHFN_FLOATN (FMIN) \
1962 CASE_MATHFN (FMOD) \
1963 CASE_MATHFN (FREXP) \
1964 CASE_MATHFN (GAMMA) \
1965 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1966 CASE_MATHFN (HUGE_VAL) \
1967 CASE_MATHFN (HYPOT) \
1968 CASE_MATHFN (ILOGB) \
1969 CASE_MATHFN (ICEIL) \
1970 CASE_MATHFN (IFLOOR) \
1971 CASE_MATHFN (INF) \
1972 CASE_MATHFN (IRINT) \
1973 CASE_MATHFN (IROUND) \
1974 CASE_MATHFN (ISINF) \
1975 CASE_MATHFN (J0) \
1976 CASE_MATHFN (J1) \
1977 CASE_MATHFN (JN) \
1978 CASE_MATHFN (LCEIL) \
1979 CASE_MATHFN (LDEXP) \
1980 CASE_MATHFN (LFLOOR) \
1981 CASE_MATHFN (LGAMMA) \
1982 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1983 CASE_MATHFN (LLCEIL) \
1984 CASE_MATHFN (LLFLOOR) \
1985 CASE_MATHFN (LLRINT) \
1986 CASE_MATHFN (LLROUND) \
1987 CASE_MATHFN (LOG) \
1988 CASE_MATHFN (LOG10) \
1989 CASE_MATHFN (LOG1P) \
1990 CASE_MATHFN (LOG2) \
1991 CASE_MATHFN (LOGB) \
1992 CASE_MATHFN (LRINT) \
1993 CASE_MATHFN (LROUND) \
1994 CASE_MATHFN (MODF) \
1995 CASE_MATHFN (NAN) \
1996 CASE_MATHFN (NANS) \
1997 CASE_MATHFN_FLOATN (NEARBYINT) \
1998 CASE_MATHFN (NEXTAFTER) \
1999 CASE_MATHFN (NEXTTOWARD) \
2000 CASE_MATHFN (POW) \
2001 CASE_MATHFN (POWI) \
2002 CASE_MATHFN (POW10) \
2003 CASE_MATHFN (REMAINDER) \
2004 CASE_MATHFN (REMQUO) \
2005 CASE_MATHFN_FLOATN (RINT) \
2006 CASE_MATHFN_FLOATN (ROUND) \
2007 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2008 CASE_MATHFN (SCALB) \
2009 CASE_MATHFN (SCALBLN) \
2010 CASE_MATHFN (SCALBN) \
2011 CASE_MATHFN (SIGNBIT) \
2012 CASE_MATHFN (SIGNIFICAND) \
2013 CASE_MATHFN (SIN) \
2014 CASE_MATHFN (SINCOS) \
2015 CASE_MATHFN (SINH) \
2016 CASE_MATHFN_FLOATN (SQRT) \
2017 CASE_MATHFN (TAN) \
2018 CASE_MATHFN (TANH) \
2019 CASE_MATHFN (TGAMMA) \
2020 CASE_MATHFN_FLOATN (TRUNC) \
2021 CASE_MATHFN (Y0) \
2022 CASE_MATHFN (Y1) \
2023 CASE_MATHFN (YN)
2024
2025 SEQ_OF_CASE_MATHFN
2026
2027 default:
2028 return END_BUILTINS;
2029 }
2030
2031 mtype = TYPE_MAIN_VARIANT (type);
2032 if (mtype == double_type_node)
2033 return fcode;
2034 else if (mtype == float_type_node)
2035 return fcodef;
2036 else if (mtype == long_double_type_node)
2037 return fcodel;
2038 else if (mtype == float16_type_node)
2039 return fcodef16;
2040 else if (mtype == float32_type_node)
2041 return fcodef32;
2042 else if (mtype == float64_type_node)
2043 return fcodef64;
2044 else if (mtype == float128_type_node)
2045 return fcodef128;
2046 else if (mtype == float32x_type_node)
2047 return fcodef32x;
2048 else if (mtype == float64x_type_node)
2049 return fcodef64x;
2050 else if (mtype == float128x_type_node)
2051 return fcodef128x;
2052 else
2053 return END_BUILTINS;
2054 }
2055
2056 #undef CASE_MATHFN
2057 #undef CASE_MATHFN_FLOATN
2058 #undef CASE_MATHFN_REENT
2059
2060 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
2061 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2062 otherwise use the explicit declaration. If we can't do the conversion,
2063 return null. */
2064
2065 static tree
2066 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2067 {
2068 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2069 if (fcode2 == END_BUILTINS)
2070 return NULL_TREE;
2071
2072 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2073 return NULL_TREE;
2074
2075 return builtin_decl_explicit (fcode2);
2076 }
2077
2078 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2079
2080 tree
2081 mathfn_built_in (tree type, combined_fn fn)
2082 {
2083 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2084 }
2085
2086 /* Like mathfn_built_in_1, but take a built_in_function and
2087 always use the implicit builtin declarations. */
2088
2089 tree
2090 mathfn_built_in (tree type, enum built_in_function fn)
2091 {
2092 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2093 }
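/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) is expected
   to yield the declaration of sqrtf when that variant is implicitly
   available, and NULL_TREE for a type, such as a vector type, that matches
   none of the suffixed variants.  */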
2094
2095 /* Return the type associated with a built in function, i.e., the one
2096 to be passed to mathfn_built_in to get the type-specific
2097 function. */
2098
2099 tree
2100 mathfn_built_in_type (combined_fn fn)
2101 {
2102 #define CASE_MATHFN(MATHFN) \
2103 case CFN_BUILT_IN_##MATHFN: \
2104 return double_type_node; \
2105 case CFN_BUILT_IN_##MATHFN##F: \
2106 return float_type_node; \
2107 case CFN_BUILT_IN_##MATHFN##L: \
2108 return long_double_type_node;
2109
2110 #define CASE_MATHFN_FLOATN(MATHFN) \
2111 CASE_MATHFN(MATHFN) \
2112 case CFN_BUILT_IN_##MATHFN##F16: \
2113 return float16_type_node; \
2114 case CFN_BUILT_IN_##MATHFN##F32: \
2115 return float32_type_node; \
2116 case CFN_BUILT_IN_##MATHFN##F64: \
2117 return float64_type_node; \
2118 case CFN_BUILT_IN_##MATHFN##F128: \
2119 return float128_type_node; \
2120 case CFN_BUILT_IN_##MATHFN##F32X: \
2121 return float32x_type_node; \
2122 case CFN_BUILT_IN_##MATHFN##F64X: \
2123 return float64x_type_node; \
2124 case CFN_BUILT_IN_##MATHFN##F128X: \
2125 return float128x_type_node;
2126
2127 /* Similar to above, but appends _R after any F/L suffix. */
2128 #define CASE_MATHFN_REENT(MATHFN) \
2129 case CFN_BUILT_IN_##MATHFN##_R: \
2130 return double_type_node; \
2131 case CFN_BUILT_IN_##MATHFN##F_R: \
2132 return float_type_node; \
2133 case CFN_BUILT_IN_##MATHFN##L_R: \
2134 return long_double_type_node;
2135
2136 switch (fn)
2137 {
2138 SEQ_OF_CASE_MATHFN
2139
2140 default:
2141 return NULL_TREE;
2142 }
2143
2144 #undef CASE_MATHFN
2145 #undef CASE_MATHFN_FLOATN
2146 #undef CASE_MATHFN_REENT
2147 #undef SEQ_OF_CASE_MATHFN
2148 }
2149
2150 /* Check whether there is an internal function associated with function FN
2151 and return type RETURN_TYPE. Return the function if so, otherwise return
2152 IFN_LAST.
2153
2154 Note that this function only tests whether the function is defined in
2155 internal-fn.def, not whether it is actually available on the target. */
2156
2157 static internal_fn
2158 associated_internal_fn (built_in_function fn, tree return_type)
2159 {
2160 switch (fn)
2161 {
2162 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2163 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2164 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2165 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2166 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2167 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2168 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2169 #include "internal-fn.def"
2170
2171 CASE_FLT_FN (BUILT_IN_POW10):
2172 return IFN_EXP10;
2173
2174 CASE_FLT_FN (BUILT_IN_DREM):
2175 return IFN_REMAINDER;
2176
2177 CASE_FLT_FN (BUILT_IN_SCALBN):
2178 CASE_FLT_FN (BUILT_IN_SCALBLN):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2180 return IFN_LDEXP;
2181 return IFN_LAST;
2182
2183 default:
2184 return IFN_LAST;
2185 }
2186 }
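/* For instance, associated_internal_fn (BUILT_IN_SQRTF, float_type_node)
   is expected to yield IFN_SQRT, while BUILT_IN_SCALBN only maps to
   IFN_LDEXP when the return type uses a radix-2 format, as handled
   above.  */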
2187
2188 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2189 return its code, otherwise return IFN_LAST. Note that this function
2190 only tests whether the function is defined in internal-fn.def, not whether
2191 it is actually available on the target. */
2192
2193 internal_fn
2194 associated_internal_fn (tree fndecl)
2195 {
2196 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2197 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2198 TREE_TYPE (TREE_TYPE (fndecl)));
2199 }
2200
2201 /* Check whether there is an internal function associated with function CFN
2202 and return type RETURN_TYPE. Return the function if so, otherwise return
2203 IFN_LAST.
2204
2205 Note that this function only tests whether the function is defined in
2206 internal-fn.def, not whether it is actually available on the target. */
2207
2208 internal_fn
2209 associated_internal_fn (combined_fn cfn, tree return_type)
2210 {
2211 if (internal_fn_p (cfn))
2212 return as_internal_fn (cfn);
2213 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2214 }
2215
2216 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2217 on the current target by a call to an internal function, return the
2218 code of that internal function, otherwise return IFN_LAST. The caller
2219 is responsible for ensuring that any side-effects of the built-in
2220 call are dealt with correctly. E.g. if CALL sets errno, the caller
2221 must decide that the errno result isn't needed or make it available
2222 in some other way. */
2223
2224 internal_fn
2225 replacement_internal_fn (gcall *call)
2226 {
2227 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2228 {
2229 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2230 if (ifn != IFN_LAST)
2231 {
2232 tree_pair types = direct_internal_fn_types (ifn, call);
2233 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2234 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2235 return ifn;
2236 }
2237 }
2238 return IFN_LAST;
2239 }
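/* As a hedged example: for a GIMPLE call to __builtin_sqrtf on a target
   that provides a sqrtsf2 pattern, this is expected to return IFN_SQRT;
   the caller must still decide whether the errno behaviour of sqrtf
   matters before performing the replacement.  */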
2240
2241 /* Expand a call to the builtin ternary math functions (fma).
2242 Return NULL_RTX if a normal call should be emitted rather than expanding the
2243 function in-line. EXP is the expression that is a call to the builtin
2244 function; if convenient, the result should be placed in TARGET.
2245 SUBTARGET may be used as the target for computing one of EXP's
2246 operands. */
2247
2248 static rtx
2249 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2250 {
2251 optab builtin_optab;
2252 rtx op0, op1, op2, result;
2253 rtx_insn *insns;
2254 tree fndecl = get_callee_fndecl (exp);
2255 tree arg0, arg1, arg2;
2256 machine_mode mode;
2257
2258 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2259 return NULL_RTX;
2260
2261 arg0 = CALL_EXPR_ARG (exp, 0);
2262 arg1 = CALL_EXPR_ARG (exp, 1);
2263 arg2 = CALL_EXPR_ARG (exp, 2);
2264
2265 switch (DECL_FUNCTION_CODE (fndecl))
2266 {
2267 CASE_FLT_FN (BUILT_IN_FMA):
2268 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2269 builtin_optab = fma_optab; break;
2270 default:
2271 gcc_unreachable ();
2272 }
2273
2274 /* Make a suitable register to place result in. */
2275 mode = TYPE_MODE (TREE_TYPE (exp));
2276
2277 /* Before working hard, check whether the instruction is available. */
2278 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2279 return NULL_RTX;
2280
2281 result = gen_reg_rtx (mode);
2282
2283 /* Always stabilize the argument list. */
2284 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2285 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2286 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2287
2288 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2289 op1 = expand_normal (arg1);
2290 op2 = expand_normal (arg2);
2291
2292 start_sequence ();
2293
2294 /* Compute into RESULT.
2295 Set RESULT to wherever the result comes back. */
2296 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2297 result, 0);
2298
2299 /* If we were unable to expand via the builtin, stop the sequence
2300 (without outputting the insns) and call the library function
2301 with the stabilized argument list. */
2302 if (result == 0)
2303 {
2304 end_sequence ();
2305 return expand_call (exp, target, target == const0_rtx);
2306 }
2307
2308 /* Output the entire sequence. */
2309 insns = get_insns ();
2310 end_sequence ();
2311 emit_insn (insns);
2312
2313 return result;
2314 }
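/* For instance, a call to __builtin_fmaf is expanded here through fma_optab
   into a single fused multiply-add instruction when the target provides one;
   otherwise the sequence is abandoned and a library call to fmaf is emitted
   with the already-stabilized argument list.  */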
2315
2316 /* Expand a call to the builtin sin and cos math functions.
2317 Return NULL_RTX if a normal call should be emitted rather than expanding the
2318 function in-line. EXP is the expression that is a call to the builtin
2319 function; if convenient, the result should be placed in TARGET.
2320 SUBTARGET may be used as the target for computing one of EXP's
2321 operands. */
2322
2323 static rtx
2324 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2325 {
2326 optab builtin_optab;
2327 rtx op0;
2328 rtx_insn *insns;
2329 tree fndecl = get_callee_fndecl (exp);
2330 machine_mode mode;
2331 tree arg;
2332
2333 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2334 return NULL_RTX;
2335
2336 arg = CALL_EXPR_ARG (exp, 0);
2337
2338 switch (DECL_FUNCTION_CODE (fndecl))
2339 {
2340 CASE_FLT_FN (BUILT_IN_SIN):
2341 CASE_FLT_FN (BUILT_IN_COS):
2342 builtin_optab = sincos_optab; break;
2343 default:
2344 gcc_unreachable ();
2345 }
2346
2347 /* Make a suitable register to place result in. */
2348 mode = TYPE_MODE (TREE_TYPE (exp));
2349
2350 /* Check if the sincos insn is available; otherwise fall back
2351 to the sin or cos insn. */
2352 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 builtin_optab = sin_optab; break;
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 builtin_optab = cos_optab; break;
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 /* Before working hard, check whether the instruction is available. */
2364 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2365 {
2366 rtx result = gen_reg_rtx (mode);
2367
2368 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2369 need to expand the argument again. This way, we will not perform
2370 side-effects more than once. */
2371 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2372
2373 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2374
2375 start_sequence ();
2376
2377 /* Compute into RESULT.
2378 Set RESULT to wherever the result comes back. */
2379 if (builtin_optab == sincos_optab)
2380 {
2381 int ok;
2382
2383 switch (DECL_FUNCTION_CODE (fndecl))
2384 {
2385 CASE_FLT_FN (BUILT_IN_SIN):
2386 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2387 break;
2388 CASE_FLT_FN (BUILT_IN_COS):
2389 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2390 break;
2391 default:
2392 gcc_unreachable ();
2393 }
2394 gcc_assert (ok);
2395 }
2396 else
2397 result = expand_unop (mode, builtin_optab, op0, result, 0);
2398
2399 if (result != 0)
2400 {
2401 /* Output the entire sequence. */
2402 insns = get_insns ();
2403 end_sequence ();
2404 emit_insn (insns);
2405 return result;
2406 }
2407
2408 /* If we were unable to expand via the builtin, stop the sequence
2409 (without outputting the insns) and call the library function
2410 with the stabilized argument list. */
2411 end_sequence ();
2412 }
2413
2414 return expand_call (exp, target, target == const0_rtx);
2415 }
2416
2417 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2418 return an RTL instruction code that implements the functionality.
2419 If that isn't possible or available return CODE_FOR_nothing. */
2420
2421 static enum insn_code
2422 interclass_mathfn_icode (tree arg, tree fndecl)
2423 {
2424 bool errno_set = false;
2425 optab builtin_optab = unknown_optab;
2426 machine_mode mode;
2427
2428 switch (DECL_FUNCTION_CODE (fndecl))
2429 {
2430 CASE_FLT_FN (BUILT_IN_ILOGB):
2431 errno_set = true; builtin_optab = ilogb_optab; break;
2432 CASE_FLT_FN (BUILT_IN_ISINF):
2433 builtin_optab = isinf_optab; break;
2434 case BUILT_IN_ISNORMAL:
2435 case BUILT_IN_ISFINITE:
2436 CASE_FLT_FN (BUILT_IN_FINITE):
2437 case BUILT_IN_FINITED32:
2438 case BUILT_IN_FINITED64:
2439 case BUILT_IN_FINITED128:
2440 case BUILT_IN_ISINFD32:
2441 case BUILT_IN_ISINFD64:
2442 case BUILT_IN_ISINFD128:
2443 /* These builtins have no optabs (yet). */
2444 break;
2445 default:
2446 gcc_unreachable ();
2447 }
2448
2449 /* There's no easy way to detect the case we need to set EDOM. */
2450 if (flag_errno_math && errno_set)
2451 return CODE_FOR_nothing;
2452
2453 /* Optab mode depends on the mode of the input argument. */
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2455
2456 if (builtin_optab)
2457 return optab_handler (builtin_optab, mode);
2458 return CODE_FOR_nothing;
2459 }
2460
2461 /* Expand a call to one of the builtin math functions that operate on
2462 a floating-point argument and output an integer result (ilogb, isinf,
2463 isnan, etc).
2464 Return 0 if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2466 function; if convenient, the result should be placed in TARGET. */
2467
2468 static rtx
2469 expand_builtin_interclass_mathfn (tree exp, rtx target)
2470 {
2471 enum insn_code icode = CODE_FOR_nothing;
2472 rtx op0;
2473 tree fndecl = get_callee_fndecl (exp);
2474 machine_mode mode;
2475 tree arg;
2476
2477 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2478 return NULL_RTX;
2479
2480 arg = CALL_EXPR_ARG (exp, 0);
2481 icode = interclass_mathfn_icode (arg, fndecl);
2482 mode = TYPE_MODE (TREE_TYPE (arg));
2483
2484 if (icode != CODE_FOR_nothing)
2485 {
2486 class expand_operand ops[1];
2487 rtx_insn *last = get_last_insn ();
2488 tree orig_arg = arg;
2489
2490 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2491 need to expand the argument again. This way, we will not perform
2492 side-effects more than once. */
2493 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2494
2495 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2496
2497 if (mode != GET_MODE (op0))
2498 op0 = convert_to_mode (mode, op0, 0);
2499
2500 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2501 if (maybe_legitimize_operands (icode, 0, 1, ops)
2502 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2503 return ops[0].value;
2504
2505 delete_insns_since (last);
2506 CALL_EXPR_ARG (exp, 0) = orig_arg;
2507 }
2508
2509 return NULL_RTX;
2510 }
2511
2512 /* Expand a call to the builtin sincos math function.
2513 Return NULL_RTX if a normal call should be emitted rather than expanding the
2514 function in-line. EXP is the expression that is a call to the builtin
2515 function. */
2516
2517 static rtx
2518 expand_builtin_sincos (tree exp)
2519 {
2520 rtx op0, op1, op2, target1, target2;
2521 machine_mode mode;
2522 tree arg, sinp, cosp;
2523 int result;
2524 location_t loc = EXPR_LOCATION (exp);
2525 tree alias_type, alias_off;
2526
2527 if (!validate_arglist (exp, REAL_TYPE,
2528 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2529 return NULL_RTX;
2530
2531 arg = CALL_EXPR_ARG (exp, 0);
2532 sinp = CALL_EXPR_ARG (exp, 1);
2533 cosp = CALL_EXPR_ARG (exp, 2);
2534
2535 /* Make a suitable register to place result in. */
2536 mode = TYPE_MODE (TREE_TYPE (arg));
2537
2538 /* Check if the sincos insn is available; otherwise emit the call. */
2539 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2540 return NULL_RTX;
2541
2542 target1 = gen_reg_rtx (mode);
2543 target2 = gen_reg_rtx (mode);
2544
2545 op0 = expand_normal (arg);
2546 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2547 alias_off = build_int_cst (alias_type, 0);
2548 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2549 sinp, alias_off));
2550 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 cosp, alias_off));
2552
2553 /* Compute into target1 and target2.
2554 Set TARGET to wherever the result comes back. */
2555 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2556 gcc_assert (result);
2557
2558 /* Move target1 and target2 to the memory locations indicated
2559 by op1 and op2. */
2560 emit_move_insn (op1, target1);
2561 emit_move_insn (op2, target2);
2562
2563 return const0_rtx;
2564 }
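/* For example, on a target whose sincos_optab pattern is available for
   SFmode, a call sincosf (x, &s, &c) is expected to expand to a single
   instruction producing both results, followed by the two stores emitted
   above.  */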
2565
2566 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2567 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2568 static rtx
2569 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2570 {
2571 if (!validate_arglist (exp, VOID_TYPE))
2572 return NULL_RTX;
2573
2574 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2575 if (icode == CODE_FOR_nothing)
2576 return NULL_RTX;
2577
2578 if (target == 0
2579 || GET_MODE (target) != target_mode
2580 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2581 target = gen_reg_rtx (target_mode);
2582
2583 rtx pat = GEN_FCN (icode) (target);
2584 if (!pat)
2585 return NULL_RTX;
2586 emit_insn (pat);
2587
2588 return target;
2589 }
2590
2591 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2592 fenv.h), returning the result and setting it in TARGET. Otherwise return
2593 NULL_RTX on failure. */
2594 static rtx
2595 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2596 machine_mode target_mode, optab op_optab)
2597 {
2598 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2599 return NULL_RTX;
2600 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2601
2602 insn_code icode = direct_optab_handler (op_optab, SImode);
2603 if (icode == CODE_FOR_nothing)
2604 return NULL_RTX;
2605
2606 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2607 return NULL_RTX;
2608
2609 if (target == 0
2610 || GET_MODE (target) != target_mode
2611 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2612 target = gen_reg_rtx (target_mode);
2613
2614 rtx pat = GEN_FCN (icode) (target, op0);
2615 if (!pat)
2616 return NULL_RTX;
2617 emit_insn (pat);
2618
2619 return target;
2620 }
2621
2622 /* Expand a call to the internal cexpi builtin to the sincos math function.
2623 EXP is the expression that is a call to the builtin function; if convenient,
2624 the result should be placed in TARGET. */
2625
2626 static rtx
2627 expand_builtin_cexpi (tree exp, rtx target)
2628 {
2629 tree fndecl = get_callee_fndecl (exp);
2630 tree arg, type;
2631 machine_mode mode;
2632 rtx op0, op1, op2;
2633 location_t loc = EXPR_LOCATION (exp);
2634
2635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2636 return NULL_RTX;
2637
2638 arg = CALL_EXPR_ARG (exp, 0);
2639 type = TREE_TYPE (arg);
2640 mode = TYPE_MODE (TREE_TYPE (arg));
2641
2642 /* Try expanding via a sincos optab, fall back to emitting a libcall
2643 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2644 is only generated from sincos or cexp, or when either of them is available. */
2645 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2646 {
2647 op1 = gen_reg_rtx (mode);
2648 op2 = gen_reg_rtx (mode);
2649
2650 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2651
2652 /* Compute into op1 and op2. */
2653 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2654 }
2655 else if (targetm.libc_has_function (function_sincos, type))
2656 {
2657 tree call, fn = NULL_TREE;
2658 tree top1, top2;
2659 rtx op1a, op2a;
2660
2661 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2662 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2664 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2665 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2666 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2667 else
2668 gcc_unreachable ();
2669
2670 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2671 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2672 op1a = copy_addr_to_reg (XEXP (op1, 0));
2673 op2a = copy_addr_to_reg (XEXP (op2, 0));
2674 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2675 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2676
2677 /* Make sure not to fold the sincos call again. */
2678 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2679 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2680 call, 3, arg, top1, top2));
2681 }
2682 else
2683 {
2684 tree call, fn = NULL_TREE, narg;
2685 tree ctype = build_complex_type (type);
2686
2687 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2688 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2689 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2690 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2691 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2692 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2693 else
2694 gcc_unreachable ();
2695
2696 /* If we don't have a decl for cexp create one. This is the
2697 friendliest fallback if the user calls __builtin_cexpi
2698 without full target C99 function support. */
2699 if (fn == NULL_TREE)
2700 {
2701 tree fntype;
2702 const char *name = NULL;
2703
2704 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2705 name = "cexpf";
2706 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2707 name = "cexp";
2708 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2709 name = "cexpl";
2710
2711 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2712 fn = build_fn_decl (name, fntype);
2713 }
2714
2715 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2716 build_real (type, dconst0), arg);
2717
2718 /* Make sure not to fold the cexp call again. */
2719 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2720 return expand_expr (build_call_nary (ctype, call, 1, narg),
2721 target, VOIDmode, EXPAND_NORMAL);
2722 }
2723
2724 /* Now build the proper return type. */
2725 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2726 make_tree (TREE_TYPE (arg), op2),
2727 make_tree (TREE_TYPE (arg), op1)),
2728 target, VOIDmode, EXPAND_NORMAL);
2729 }
2730
2731 /* Conveniently construct a function call expression. FNDECL names the
2732 function to be called, N is the number of arguments, and the "..."
2733 parameters are the argument expressions. Unlike build_call_expr
2734 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2735
2736 static tree
2737 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2738 {
2739 va_list ap;
2740 tree fntype = TREE_TYPE (fndecl);
2741 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2742
2743 va_start (ap, n);
2744 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2745 va_end (ap);
2746 SET_EXPR_LOCATION (fn, loc);
2747 return fn;
2748 }
2749
2750 /* Expand a call to one of the builtin rounding functions gcc defines
2751 as an extension (lfloor and lceil). As these are gcc extensions we
2752 do not need to worry about setting errno to EDOM.
2753 If expanding via optab fails, lower expression to (int)(floor(x)).
2754 EXP is the expression that is a call to the builtin function;
2755 if convenient, the result should be placed in TARGET. */
2756
2757 static rtx
2758 expand_builtin_int_roundingfn (tree exp, rtx target)
2759 {
2760 convert_optab builtin_optab;
2761 rtx op0, tmp;
2762 rtx_insn *insns;
2763 tree fndecl = get_callee_fndecl (exp);
2764 enum built_in_function fallback_fn;
2765 tree fallback_fndecl;
2766 machine_mode mode;
2767 tree arg;
2768
2769 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2770 return NULL_RTX;
2771
2772 arg = CALL_EXPR_ARG (exp, 0);
2773
2774 switch (DECL_FUNCTION_CODE (fndecl))
2775 {
2776 CASE_FLT_FN (BUILT_IN_ICEIL):
2777 CASE_FLT_FN (BUILT_IN_LCEIL):
2778 CASE_FLT_FN (BUILT_IN_LLCEIL):
2779 builtin_optab = lceil_optab;
2780 fallback_fn = BUILT_IN_CEIL;
2781 break;
2782
2783 CASE_FLT_FN (BUILT_IN_IFLOOR):
2784 CASE_FLT_FN (BUILT_IN_LFLOOR):
2785 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2786 builtin_optab = lfloor_optab;
2787 fallback_fn = BUILT_IN_FLOOR;
2788 break;
2789
2790 default:
2791 gcc_unreachable ();
2792 }
2793
2794 /* Make a suitable register to place result in. */
2795 mode = TYPE_MODE (TREE_TYPE (exp));
2796
2797 target = gen_reg_rtx (mode);
2798
2799 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2800 need to expand the argument again. This way, we will not perform
2801 side-effects more than once. */
2802 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2803
2804 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2805
2806 start_sequence ();
2807
2808 /* Compute into TARGET. */
2809 if (expand_sfix_optab (target, op0, builtin_optab))
2810 {
2811 /* Output the entire sequence. */
2812 insns = get_insns ();
2813 end_sequence ();
2814 emit_insn (insns);
2815 return target;
2816 }
2817
2818 /* If we were unable to expand via the builtin, stop the sequence
2819 (without outputting the insns). */
2820 end_sequence ();
2821
2822 /* Fall back to floating point rounding optab. */
2823 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2824
2825 /* For non-C99 targets we may end up without a fallback fndecl here
2826 if the user called __builtin_lfloor directly. In this case emit
2827 a call to the floor/ceil variants nevertheless. This should result
2828 in the best user experience for targets without full C99 support. */
2829 if (fallback_fndecl == NULL_TREE)
2830 {
2831 tree fntype;
2832 const char *name = NULL;
2833
2834 switch (DECL_FUNCTION_CODE (fndecl))
2835 {
2836 case BUILT_IN_ICEIL:
2837 case BUILT_IN_LCEIL:
2838 case BUILT_IN_LLCEIL:
2839 name = "ceil";
2840 break;
2841 case BUILT_IN_ICEILF:
2842 case BUILT_IN_LCEILF:
2843 case BUILT_IN_LLCEILF:
2844 name = "ceilf";
2845 break;
2846 case BUILT_IN_ICEILL:
2847 case BUILT_IN_LCEILL:
2848 case BUILT_IN_LLCEILL:
2849 name = "ceill";
2850 break;
2851 case BUILT_IN_IFLOOR:
2852 case BUILT_IN_LFLOOR:
2853 case BUILT_IN_LLFLOOR:
2854 name = "floor";
2855 break;
2856 case BUILT_IN_IFLOORF:
2857 case BUILT_IN_LFLOORF:
2858 case BUILT_IN_LLFLOORF:
2859 name = "floorf";
2860 break;
2861 case BUILT_IN_IFLOORL:
2862 case BUILT_IN_LFLOORL:
2863 case BUILT_IN_LLFLOORL:
2864 name = "floorl";
2865 break;
2866 default:
2867 gcc_unreachable ();
2868 }
2869
2870 fntype = build_function_type_list (TREE_TYPE (arg),
2871 TREE_TYPE (arg), NULL_TREE);
2872 fallback_fndecl = build_fn_decl (name, fntype);
2873 }
2874
2875 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2876
2877 tmp = expand_normal (exp);
2878 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2879
2880 /* Truncate the result of floating point optab to integer
2881 via expand_fix (). */
2882 target = gen_reg_rtx (mode);
2883 expand_fix (target, tmp, 0);
2884
2885 return target;
2886 }
2887
2888 /* Expand a call to one of the builtin math functions doing integer
2889 conversion (lrint).
2890 Return 0 if a normal call should be emitted rather than expanding the
2891 function in-line. EXP is the expression that is a call to the builtin
2892 function; if convenient, the result should be placed in TARGET. */
2893
2894 static rtx
2895 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2896 {
2897 convert_optab builtin_optab;
2898 rtx op0;
2899 rtx_insn *insns;
2900 tree fndecl = get_callee_fndecl (exp);
2901 tree arg;
2902 machine_mode mode;
2903 enum built_in_function fallback_fn = BUILT_IN_NONE;
2904
2905 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2906 return NULL_RTX;
2907
2908 arg = CALL_EXPR_ARG (exp, 0);
2909
2910 switch (DECL_FUNCTION_CODE (fndecl))
2911 {
2912 CASE_FLT_FN (BUILT_IN_IRINT):
2913 fallback_fn = BUILT_IN_LRINT;
2914 gcc_fallthrough ();
2915 CASE_FLT_FN (BUILT_IN_LRINT):
2916 CASE_FLT_FN (BUILT_IN_LLRINT):
2917 builtin_optab = lrint_optab;
2918 break;
2919
2920 CASE_FLT_FN (BUILT_IN_IROUND):
2921 fallback_fn = BUILT_IN_LROUND;
2922 gcc_fallthrough ();
2923 CASE_FLT_FN (BUILT_IN_LROUND):
2924 CASE_FLT_FN (BUILT_IN_LLROUND):
2925 builtin_optab = lround_optab;
2926 break;
2927
2928 default:
2929 gcc_unreachable ();
2930 }
2931
2932 /* There's no easy way to detect the case we need to set EDOM. */
2933 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2934 return NULL_RTX;
2935
2936 /* Make a suitable register to place result in. */
2937 mode = TYPE_MODE (TREE_TYPE (exp));
2938
2939 /* There's no easy way to detect the case we need to set EDOM. */
2940 if (!flag_errno_math)
2941 {
2942 rtx result = gen_reg_rtx (mode);
2943
2944 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2945 need to expand the argument again. This way, we will not perform
2946 side-effects more than once. */
2947 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2948
2949 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2950
2951 start_sequence ();
2952
2953 if (expand_sfix_optab (result, op0, builtin_optab))
2954 {
2955 /* Output the entire sequence. */
2956 insns = get_insns ();
2957 end_sequence ();
2958 emit_insn (insns);
2959 return result;
2960 }
2961
2962 /* If we were unable to expand via the builtin, stop the sequence
2963 (without outputting the insns) and call the library function
2964 with the stabilized argument list. */
2965 end_sequence ();
2966 }
2967
2968 if (fallback_fn != BUILT_IN_NONE)
2969 {
2970 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2971 targets, (int) round (x) should never be transformed into
2972 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2973 a call to lround in the hope that the target provides at least some
2974 C99 functions. This should result in the best user experience for
2975 targets without full C99 support.
2976 As scalar float conversions with same mode are useless in GIMPLE,
2977 we can end up e.g. with _Float32 argument passed to float builtin,
2978 try to get the type from the builtin prototype first. */
2979 tree fallback_fndecl = NULL_TREE;
2980 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
2981 fallback_fndecl
2982 = mathfn_built_in_1 (TREE_VALUE (argtypes),
2983 as_combined_fn (fallback_fn), 0);
2984 if (fallback_fndecl == NULL_TREE)
2985 fallback_fndecl
2986 = mathfn_built_in_1 (TREE_TYPE (arg),
2987 as_combined_fn (fallback_fn), 0);
2988 if (fallback_fndecl)
2989 {
2990 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2991 fallback_fndecl, 1, arg);
2992
2993 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2994 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2995 return convert_to_mode (mode, target, 0);
2996 }
2997 }
2998
2999 return expand_call (exp, target, target == const0_rtx);
3000 }
3001
3002 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3003 a normal call should be emitted rather than expanding the function
3004 in-line. EXP is the expression that is a call to the builtin
3005 function; if convenient, the result should be placed in TARGET. */
3006
3007 static rtx
3008 expand_builtin_powi (tree exp, rtx target)
3009 {
3010 tree arg0, arg1;
3011 rtx op0, op1;
3012 machine_mode mode;
3013 machine_mode mode2;
3014
3015 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3016 return NULL_RTX;
3017
3018 arg0 = CALL_EXPR_ARG (exp, 0);
3019 arg1 = CALL_EXPR_ARG (exp, 1);
3020 mode = TYPE_MODE (TREE_TYPE (exp));
3021
3022 /* Emit a libcall to libgcc. */
3023
3024 /* Mode of the 2nd argument must match that of an int. */
3025 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3026
3027 if (target == NULL_RTX)
3028 target = gen_reg_rtx (mode);
3029
3030 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3031 if (GET_MODE (op0) != mode)
3032 op0 = convert_to_mode (mode, op0, 0);
3033 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3034 if (GET_MODE (op1) != mode2)
3035 op1 = convert_to_mode (mode2, op1, 0);
3036
3037 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3038 target, LCT_CONST, mode,
3039 op0, mode, op1, mode2);
3040
3041 return target;
3042 }
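/* For example, for a float argument this is expected to emit a libcall to
   libgcc's __powisf2 helper, with the exponent converted to the mode of
   int as required above.  */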
3043
3044 /* Expand expression EXP which is a call to the strlen builtin. Return
3045 NULL_RTX if we failed and the caller should emit a normal call; otherwise
3046 try to get the result in TARGET, if convenient. */
3047
3048 static rtx
3049 expand_builtin_strlen (tree exp, rtx target,
3050 machine_mode target_mode)
3051 {
3052 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3053 return NULL_RTX;
3054
3055 tree src = CALL_EXPR_ARG (exp, 0);
3056
3057 /* If the length can be computed at compile-time, return it. */
3058 if (tree len = c_strlen (src, 0))
3059 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3060
3061 /* If the length can be computed at compile-time and is a constant
3062 integer, but there are side-effects in src, evaluate
3063 src for side-effects, then return len.
3064 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3065 can be optimized into: i++; x = 3; */
3066 tree len = c_strlen (src, 1);
3067 if (len && TREE_CODE (len) == INTEGER_CST)
3068 {
3069 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3070 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3071 }
3072
3073 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3074
3075 /* If SRC is not a pointer type, don't do this operation inline. */
3076 if (align == 0)
3077 return NULL_RTX;
3078
3079 /* Bail out if we can't compute strlen in the right mode. */
3080 machine_mode insn_mode;
3081 enum insn_code icode = CODE_FOR_nothing;
3082 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3083 {
3084 icode = optab_handler (strlen_optab, insn_mode);
3085 if (icode != CODE_FOR_nothing)
3086 break;
3087 }
3088 if (insn_mode == VOIDmode)
3089 return NULL_RTX;
3090
3091 /* Make a place to hold the source address. We will not expand
3092 the actual source until we are sure that the expansion will
3093 not fail -- there are trees that cannot be expanded twice. */
3094 rtx src_reg = gen_reg_rtx (Pmode);
3095
3096 /* Mark the beginning of the strlen sequence so we can emit the
3097 source operand later. */
3098 rtx_insn *before_strlen = get_last_insn ();
3099
3100 class expand_operand ops[4];
3101 create_output_operand (&ops[0], target, insn_mode);
3102 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3103 create_integer_operand (&ops[2], 0);
3104 create_integer_operand (&ops[3], align);
3105 if (!maybe_expand_insn (icode, 4, ops))
3106 return NULL_RTX;
3107
3108 /* Check to see if the argument was declared attribute nonstring
3109 and if so, issue a warning since at this point it's not known
3110 to be nul-terminated. */
3111 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3112
3113 /* Now that we are assured of success, expand the source. */
3114 start_sequence ();
3115 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3116 if (pat != src_reg)
3117 {
3118 #ifdef POINTERS_EXTEND_UNSIGNED
3119 if (GET_MODE (pat) != Pmode)
3120 pat = convert_to_mode (Pmode, pat,
3121 POINTERS_EXTEND_UNSIGNED);
3122 #endif
3123 emit_move_insn (src_reg, pat);
3124 }
3125 pat = get_insns ();
3126 end_sequence ();
3127
3128 if (before_strlen)
3129 emit_insn_after (pat, before_strlen);
3130 else
3131 emit_insn_before (pat, get_insns ());
3132
3133 /* Return the value in the proper mode for this function. */
3134 if (GET_MODE (ops[0].value) == target_mode)
3135 target = ops[0].value;
3136 else if (target != 0)
3137 convert_move (target, ops[0].value, 0);
3138 else
3139 target = convert_to_mode (target_mode, ops[0].value, 0);
3140
3141 return target;
3142 }
3143
3144 /* Expand call EXP to the strnlen built-in, returning the result
3145 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3146
3147 static rtx
3148 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3149 {
3150 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3151 return NULL_RTX;
3152
3153 tree src = CALL_EXPR_ARG (exp, 0);
3154 tree bound = CALL_EXPR_ARG (exp, 1);
3155
3156 if (!bound)
3157 return NULL_RTX;
3158
3159 location_t loc = UNKNOWN_LOCATION;
3160 if (EXPR_HAS_LOCATION (exp))
3161 loc = EXPR_LOCATION (exp);
3162
3163 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3164 so these conversions aren't necessary. */
3165 c_strlen_data lendata = { };
3166 tree len = c_strlen (src, 0, &lendata, 1);
3167 if (len)
3168 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3169
3170 if (TREE_CODE (bound) == INTEGER_CST)
3171 {
3172 if (!len)
3173 return NULL_RTX;
3174
3175 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3176 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3177 }
3178
3179 if (TREE_CODE (bound) != SSA_NAME)
3180 return NULL_RTX;
3181
3182 wide_int min, max;
3183 value_range r;
3184 get_global_range_query ()->range_of_expr (r, bound);
3185 if (r.kind () != VR_RANGE)
3186 return NULL_RTX;
3187 min = r.lower_bound ();
3188 max = r.upper_bound ();
3189
3190 if (!len || TREE_CODE (len) != INTEGER_CST)
3191 {
3192 bool exact;
3193 lendata.decl = unterminated_array (src, &len, &exact);
3194 if (!lendata.decl)
3195 return NULL_RTX;
3196 }
3197
3198 if (lendata.decl)
3199 return NULL_RTX;
3200
3201 if (wi::gtu_p (min, wi::to_wide (len)))
3202 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3203
3204 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3205 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3206 }
3207
3208 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3209 bytes from the bytes at DATA + OFFSET and return them reinterpreted as
3210 a target constant. */
3211
3212 static rtx
3213 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3214 fixed_size_mode mode)
3215 {
3216 /* The REPresentation pointed to by DATA need not be a nul-terminated
3217 string but the caller guarantees it's large enough for MODE. */
3218 const char *rep = (const char *) data;
3219
3220 /* The by-pieces infrastructure does not try to pick a vector mode
3221 for memcpy expansion. */
3222 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3223 /*nul_terminated=*/false);
3224 }
3225
3226 /* LEN specifies the length of the block for the memcpy/memset operation.
3227 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3228 In some cases we can make a very likely guess about the maximum size,
3229 which we then set in PROBABLE_MAX_SIZE. */
3230
3231 static void
3232 determine_block_size (tree len, rtx len_rtx,
3233 unsigned HOST_WIDE_INT *min_size,
3234 unsigned HOST_WIDE_INT *max_size,
3235 unsigned HOST_WIDE_INT *probable_max_size)
3236 {
3237 if (CONST_INT_P (len_rtx))
3238 {
3239 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3240 return;
3241 }
3242 else
3243 {
3244 wide_int min, max;
3245 enum value_range_kind range_type = VR_UNDEFINED;
3246
3247 /* Determine bounds from the type. */
3248 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3249 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3250 else
3251 *min_size = 0;
3252 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3253 *probable_max_size = *max_size
3254 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3255 else
3256 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3257
3258 if (TREE_CODE (len) == SSA_NAME)
3259 {
3260 value_range r;
3261 get_global_range_query ()->range_of_expr (r, len);
3262 range_type = r.kind ();
3263 if (range_type != VR_UNDEFINED)
3264 {
3265 min = wi::to_wide (r.min ());
3266 max = wi::to_wide (r.max ());
3267 }
3268 }
3269 if (range_type == VR_RANGE)
3270 {
3271 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3272 *min_size = min.to_uhwi ();
3273 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3274 *probable_max_size = *max_size = max.to_uhwi ();
3275 }
3276 else if (range_type == VR_ANTI_RANGE)
3277 {
3278 /* Code like
3279
3280 int n;
3281 if (n < 100)
3282 memcpy (a, b, n)
3283
3284    produces an anti-range allowing negative values of N.  We can still
3285    use that information to guess that N is not negative.
3286    */
3287 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3288 *probable_max_size = min.to_uhwi () - 1;
3289 }
3290 }
3291 gcc_checking_assert (*max_size <=
3292 (unsigned HOST_WIDE_INT)
3293 GET_MODE_MASK (GET_MODE (len_rtx)));
3294 }
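
/* Illustrative sketch: for

     memcpy (dst, src, n);

   with a non-constant N, the bounds start out as the full range of N's
   type; if the global range query knows that N lies in, say, [4, 32],
   MIN_SIZE and MAX_SIZE are tightened to 4 and 32.  */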
3295
3296 /* Expand a call EXP to the memcpy builtin.
3297    Return NULL_RTX if we failed; the caller should emit a normal call.
3298    Otherwise try to get the result in TARGET, if convenient (and in
3299    mode MODE if that's convenient).  */
3300
3301 static rtx
3302 expand_builtin_memcpy (tree exp, rtx target)
3303 {
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307
3308 tree dest = CALL_EXPR_ARG (exp, 0);
3309 tree src = CALL_EXPR_ARG (exp, 1);
3310 tree len = CALL_EXPR_ARG (exp, 2);
3311
3312 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3313 /*retmode=*/ RETURN_BEGIN, false);
3314 }
3315
3316 /* Expand a call EXP to the memmove built-in.  Return NULL_RTX if we failed;
3317    the caller should emit a normal call.  Otherwise try to get the result in TARGET.  */
3318
3319 static rtx
3320 expand_builtin_memmove (tree exp, rtx target)
3321 {
3322 if (!validate_arglist (exp,
3323 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3324 return NULL_RTX;
3325
3326 tree dest = CALL_EXPR_ARG (exp, 0);
3327 tree src = CALL_EXPR_ARG (exp, 1);
3328 tree len = CALL_EXPR_ARG (exp, 2);
3329
3330 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3331 /*retmode=*/ RETURN_BEGIN, true);
3332 }
3333
3334 /* Expand a call EXP to the mempcpy builtin.
3335    Return NULL_RTX if we failed; the caller should emit a normal call.
3336    Otherwise try to get the result in TARGET, if convenient (and in
3337    mode MODE if that's convenient).  */
3338
3339 static rtx
3340 expand_builtin_mempcpy (tree exp, rtx target)
3341 {
3342 if (!validate_arglist (exp,
3343 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3344 return NULL_RTX;
3345
3346 tree dest = CALL_EXPR_ARG (exp, 0);
3347 tree src = CALL_EXPR_ARG (exp, 1);
3348 tree len = CALL_EXPR_ARG (exp, 2);
3349
3350 /* Policy does not generally allow using compute_objsize (which
3351 is used internally by check_memop_size) to change code generation
3352 or drive optimization decisions.
3353
3354 In this instance it is safe because the code we generate has
3355 the same semantics regardless of the return value of
3356 check_memop_sizes. Exactly the same amount of data is copied
3357 and the return value is exactly the same in both cases.
3358
3359 Furthermore, check_memop_size always uses mode 0 for the call to
3360 compute_objsize, so the imprecise nature of compute_objsize is
3361 avoided. */
3362
3363 /* Avoid expanding mempcpy into memcpy when the call is determined
3364 to overflow the buffer. This also prevents the same overflow
3365 from being diagnosed again when expanding memcpy. */
3366
3367 return expand_builtin_mempcpy_args (dest, src, len,
3368 target, exp, /*retmode=*/ RETURN_END);
3369 }
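
/* Roughly, the only difference from memcpy is in the return value:

     mempcpy (d, s, n)   returns d + n
     memcpy (d, s, n)    returns d

   hence RETURN_END above versus RETURN_BEGIN in expand_builtin_memcpy.  */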
3370
3371 /* Helper function to do the actual work for expansion of the memory copy
3372    family of functions (memcpy, mempcpy, stpcpy).  Expansion should assign
3373    LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
3374    The return value is based on the RETMODE argument.  */
3375
3376 static rtx
3377 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3378 rtx target, tree exp, memop_ret retmode,
3379 bool might_overlap)
3380 {
3381 unsigned int src_align = get_pointer_alignment (src);
3382 unsigned int dest_align = get_pointer_alignment (dest);
3383 rtx dest_mem, src_mem, dest_addr, len_rtx;
3384 HOST_WIDE_INT expected_size = -1;
3385 unsigned int expected_align = 0;
3386 unsigned HOST_WIDE_INT min_size;
3387 unsigned HOST_WIDE_INT max_size;
3388 unsigned HOST_WIDE_INT probable_max_size;
3389
3390 bool is_move_done;
3391
3392 /* If DEST is not a pointer type, call the normal function. */
3393 if (dest_align == 0)
3394 return NULL_RTX;
3395
3396 /* If SRC is not a pointer type, don't do this
3397    operation in-line.  */
3398 if (src_align == 0)
3399 return NULL_RTX;
3400
3401 if (currently_expanding_gimple_stmt)
3402 stringop_block_profile (currently_expanding_gimple_stmt,
3403 &expected_align, &expected_size);
3404
3405 if (expected_align < dest_align)
3406 expected_align = dest_align;
3407 dest_mem = get_memory_rtx (dest, len);
3408 set_mem_align (dest_mem, dest_align);
3409 len_rtx = expand_normal (len);
3410 determine_block_size (len, len_rtx, &min_size, &max_size,
3411 &probable_max_size);
3412
3413 /* Try to get the byte representation of the constant SRC points to,
3414 with its byte size in NBYTES. */
3415 unsigned HOST_WIDE_INT nbytes;
3416 const char *rep = getbyterep (src, &nbytes);
3417
3418 /* If the function's constant bound LEN_RTX is less than or equal
3419 to the byte size of the representation of the constant argument,
3420 and if block move would be done by pieces, we can avoid loading
3421 the bytes from memory and only store the computed constant.
3422 This works in the overlap (memmove) case as well because
3423 store_by_pieces just generates a series of stores of constants
3424 from the representation returned by getbyterep(). */
3425 if (rep
3426 && CONST_INT_P (len_rtx)
3427 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3428 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3429 CONST_CAST (char *, rep),
3430 dest_align, false))
3431 {
3432 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3433 builtin_memcpy_read_str,
3434 CONST_CAST (char *, rep),
3435 dest_align, false, retmode);
3436 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3437 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3438 return dest_mem;
3439 }
3440
3441 src_mem = get_memory_rtx (src, len);
3442 set_mem_align (src_mem, src_align);
3443
3444 /* Copy word part most expediently. */
3445 enum block_op_methods method = BLOCK_OP_NORMAL;
3446 if (CALL_EXPR_TAILCALL (exp)
3447 && (retmode == RETURN_BEGIN || target == const0_rtx))
3448 method = BLOCK_OP_TAILCALL;
3449 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3450 && retmode == RETURN_END
3451 && !might_overlap
3452 && target != const0_rtx);
3453 if (use_mempcpy_call)
3454 method = BLOCK_OP_NO_LIBCALL_RET;
3455 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3456 expected_align, expected_size,
3457 min_size, max_size, probable_max_size,
3458 use_mempcpy_call, &is_move_done,
3459 might_overlap);
3460
3461 /* Bail out when a mempcpy call would be expanded as a libcall and
3462    the target provides a fast implementation of the mempcpy
3463    routine.  */
3464 if (!is_move_done)
3465 return NULL_RTX;
3466
3467 if (dest_addr == pc_rtx)
3468 return NULL_RTX;
3469
3470 if (dest_addr == 0)
3471 {
3472 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3473 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3474 }
3475
3476 if (retmode != RETURN_BEGIN && target != const0_rtx)
3477 {
3478 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3479 /* stpcpy returns a pointer to the last byte, i.e. the terminating nul.  */
3480 if (retmode == RETURN_END_MINUS_ONE)
3481 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3482 }
3483
3484 return dest_addr;
3485 }
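
/* As an illustrative example of the by-pieces path above, a call such as

     memcpy (buf, "abcdefgh", 8);

   can be expanded as constant stores (e.g. a single 8-byte store on a
   64-bit target) taken from the string's byte representation, without
   ever loading from the source.  Whether that happens depends on
   can_store_by_pieces and on the destination alignment.  */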
3486
3487 static rtx
3488 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3489 rtx target, tree orig_exp, memop_ret retmode)
3490 {
3491 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3492 retmode, false);
3493 }
3494
3495 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3496    we failed; the caller should emit a normal call.  Otherwise try to
3497    get the result in TARGET, if convenient.
3498    The return value is based on the RETMODE argument.  */
3499
3500 static rtx
3501 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3502 {
3503 class expand_operand ops[3];
3504 rtx dest_mem;
3505 rtx src_mem;
3506
3507 if (!targetm.have_movstr ())
3508 return NULL_RTX;
3509
3510 dest_mem = get_memory_rtx (dest, NULL);
3511 src_mem = get_memory_rtx (src, NULL);
3512 if (retmode == RETURN_BEGIN)
3513 {
3514 target = force_reg (Pmode, XEXP (dest_mem, 0));
3515 dest_mem = replace_equiv_address (dest_mem, target);
3516 }
3517
3518 create_output_operand (&ops[0],
3519 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3520 create_fixed_operand (&ops[1], dest_mem);
3521 create_fixed_operand (&ops[2], src_mem);
3522 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3523 return NULL_RTX;
3524
3525 if (retmode != RETURN_BEGIN && target != const0_rtx)
3526 {
3527 target = ops[0].value;
3528 /* movstr is supposed to set end to the address of the NUL
3529 terminator. If the caller requested a mempcpy-like return value,
3530 adjust it. */
3531 if (retmode == RETURN_END)
3532 {
3533 rtx tem = plus_constant (GET_MODE (target),
3534 gen_lowpart (GET_MODE (target), target), 1);
3535 emit_move_insn (target, force_operand (tem, NULL_RTX));
3536 }
3537 }
3538 return target;
3539 }
3540
3541 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3542    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3543    try to get the result in TARGET, if convenient (and in mode MODE if that's
3544    convenient).  */
3545
3546 static rtx
3547 expand_builtin_strcpy (tree exp, rtx target)
3548 {
3549 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3550 return NULL_RTX;
3551
3552 tree dest = CALL_EXPR_ARG (exp, 0);
3553 tree src = CALL_EXPR_ARG (exp, 1);
3554
3555 return expand_builtin_strcpy_args (exp, dest, src, target);
3556 }
3557
3558 /* Helper function to do the actual work for expand_builtin_strcpy. The
3559 arguments to the builtin_strcpy call DEST and SRC are broken out
3560 so that this can also be called without constructing an actual CALL_EXPR.
3561 The other arguments and return value are the same as for
3562 expand_builtin_strcpy. */
3563
3564 static rtx
3565 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3566 {
3567 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3568 }
3569
3570 /* Expand a call EXP to the stpcpy builtin.
3571    Return NULL_RTX if we failed; the caller should emit a normal call.
3572    Otherwise try to get the result in TARGET, if convenient (and in
3573    mode MODE if that's convenient).  */
3574
3575 static rtx
3576 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3577 {
3578 tree dst, src;
3579 location_t loc = EXPR_LOCATION (exp);
3580
3581 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3582 return NULL_RTX;
3583
3584 dst = CALL_EXPR_ARG (exp, 0);
3585 src = CALL_EXPR_ARG (exp, 1);
3586
3587 /* If return value is ignored, transform stpcpy into strcpy. */
3588 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3589 {
3590 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3591 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3592 return expand_expr (result, target, mode, EXPAND_NORMAL);
3593 }
3594 else
3595 {
3596 tree len, lenp1;
3597 rtx ret;
3598
3599 /* Ensure we get an actual string whose length can be evaluated at
3600 compile-time, not an expression containing a string. This is
3601 because the latter will potentially produce pessimized code
3602 when used to produce the return value. */
3603 c_strlen_data lendata = { };
3604 if (!c_getstr (src)
3605 || !(len = c_strlen (src, 0, &lendata, 1)))
3606 return expand_movstr (dst, src, target,
3607 /*retmode=*/ RETURN_END_MINUS_ONE);
3608
3609 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3610 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3611 target, exp,
3612 /*retmode=*/ RETURN_END_MINUS_ONE);
3613
3614 if (ret)
3615 return ret;
3616
3617 if (TREE_CODE (len) == INTEGER_CST)
3618 {
3619 rtx len_rtx = expand_normal (len);
3620
3621 if (CONST_INT_P (len_rtx))
3622 {
3623 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3624
3625 if (ret)
3626 {
3627 if (! target)
3628 {
3629 if (mode != VOIDmode)
3630 target = gen_reg_rtx (mode);
3631 else
3632 target = gen_reg_rtx (GET_MODE (ret));
3633 }
3634 if (GET_MODE (target) != GET_MODE (ret))
3635 ret = gen_lowpart (GET_MODE (target), ret);
3636
3637 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3638 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3639 gcc_assert (ret);
3640
3641 return target;
3642 }
3643 }
3644 }
3645
3646 return expand_movstr (dst, src, target,
3647 /*retmode=*/ RETURN_END_MINUS_ONE);
3648 }
3649 }
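
/* A rough sketch of the transformations above:

     p = stpcpy (d, s);      result unused: expanded as strcpy (d, s)
     p = stpcpy (d, "abc");  known length: mempcpy (d, "abc", 4) - 1,
                             i.e. P ends up as D + 3

   Anything else falls back to the movstr pattern or to a library call.  */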
3650
3651 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3652    arguments while being careful to avoid duplicate warnings (which could
3653    be issued if the expander were to expand the call, resulting in it
3654    being emitted in expand_call ()).  */
3655
3656 static rtx
3657 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3658 {
3659 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3660 {
3661 /* The call has been successfully expanded. Check for nonstring
3662 arguments and issue warnings as appropriate. */
3663 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3664 return ret;
3665 }
3666
3667 return NULL_RTX;
3668 }
3669
3670 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3671 bytes from constant string DATA + OFFSET and return it as target
3672 constant. */
3673
3674 rtx
3675 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3676 fixed_size_mode mode)
3677 {
3678 const char *str = (const char *) data;
3679
3680 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3681 return const0_rtx;
3682
3683 /* The by-pieces infrastructure does not try to pick a vector mode
3684 for strncpy expansion. */
3685 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3686 }
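
/* E.g. with DATA pointing at "ab", a read at OFFSET 4 is past the
   terminating nul and therefore yields zero, which helps supply
   strncpy's trailing nul padding when the requested length exceeds
   the source length.  */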
3687
3688 /* Helper to check the sizes of sequences and the destination of calls
3689 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3690 success (no overflow or invalid sizes), false otherwise. */
3691
3692 static bool
3693 check_strncat_sizes (tree exp, tree objsize)
3694 {
3695 tree dest = CALL_EXPR_ARG (exp, 0);
3696 tree src = CALL_EXPR_ARG (exp, 1);
3697 tree maxread = CALL_EXPR_ARG (exp, 2);
3698
3699 /* Try to determine the range of lengths that the source expression
3700 refers to. */
3701 c_strlen_data lendata = { };
3702 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3703
3704 /* Try to verify that the destination is big enough for the shortest
3705 string. */
3706
3707 access_data data (nullptr, exp, access_read_write, maxread, true);
3708 if (!objsize && warn_stringop_overflow)
3709 {
3710 /* If it hasn't been provided by __strncat_chk, try to determine
3711 the size of the destination object into which the source is
3712 being copied. */
3713 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3714 }
3715
3716 /* Add one for the terminating nul. */
3717 tree srclen = (lendata.minlen
3718 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3719 size_one_node)
3720 : NULL_TREE);
3721
3722 /* The strncat function copies at most MAXREAD bytes and always appends
3723 the terminating nul so the specified upper bound should never be equal
3724 to (or greater than) the size of the destination. */
3725 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3726 && tree_int_cst_equal (objsize, maxread))
3727 {
3728 location_t loc = EXPR_LOCATION (exp);
3729 warning_at (loc, OPT_Wstringop_overflow_,
3730 "%qD specified bound %E equals destination size",
3731 get_callee_fndecl (exp), maxread);
3732
3733 return false;
3734 }
3735
3736 if (!srclen
3737 || (maxread && tree_fits_uhwi_p (maxread)
3738 && tree_fits_uhwi_p (srclen)
3739 && tree_int_cst_lt (maxread, srclen)))
3740 srclen = maxread;
3741
3742 /* The number of bytes to write is LEN but check_access will also
3743    check SRCLEN if LEN's value isn't known.  */
3744 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3745 objsize, data.mode, &data);
3746 }
3747
3748 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3749    NULL_RTX if we failed; the caller should emit a normal call.  */
3750
3751 static rtx
3752 expand_builtin_strncpy (tree exp, rtx target)
3753 {
3754 location_t loc = EXPR_LOCATION (exp);
3755
3756 if (!validate_arglist (exp,
3757 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3758 return NULL_RTX;
3759 tree dest = CALL_EXPR_ARG (exp, 0);
3760 tree src = CALL_EXPR_ARG (exp, 1);
3761 /* The number of bytes to write (not the maximum). */
3762 tree len = CALL_EXPR_ARG (exp, 2);
3763
3764 /* The length of the source sequence. */
3765 tree slen = c_strlen (src, 1);
3766
3767 /* We must be passed a constant len and src parameter. */
3768 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3769 return NULL_RTX;
3770
3771 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3772
3773 /* We're required to pad with trailing zeros if the requested
3774    len is greater than strlen(s2)+1.  In that case try to
3775    use store_by_pieces; if that fails, punt.  */
3776 if (tree_int_cst_lt (slen, len))
3777 {
3778 unsigned int dest_align = get_pointer_alignment (dest);
3779 const char *p = c_getstr (src);
3780 rtx dest_mem;
3781
3782 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3783 || !can_store_by_pieces (tree_to_uhwi (len),
3784 builtin_strncpy_read_str,
3785 CONST_CAST (char *, p),
3786 dest_align, false))
3787 return NULL_RTX;
3788
3789 dest_mem = get_memory_rtx (dest, len);
3790 store_by_pieces (dest_mem, tree_to_uhwi (len),
3791 builtin_strncpy_read_str,
3792 CONST_CAST (char *, p), dest_align, false,
3793 RETURN_BEGIN);
3794 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3795 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3796 return dest_mem;
3797 }
3798
3799 return NULL_RTX;
3800 }
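
/* Sketch of the case handled inline above:

     strncpy (d, "ab", 6);

   has a constant length greater than strlen ("ab") + 1, so it is
   expanded as constant stores of 'a', 'b' and four nul bytes via
   store_by_pieces; other combinations of arguments fall back to a
   normal call here.  */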
3801
3802 /* Return the RTL of a register in MODE generated from PREV in the
3803 previous iteration. */
3804
3805 static rtx
3806 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3807 {
3808 rtx target = nullptr;
3809 if (prev != nullptr && prev->data != nullptr)
3810 {
3811 /* Use the previous data in the same mode. */
3812 if (prev->mode == mode)
3813 return prev->data;
3814
3815 fixed_size_mode prev_mode = prev->mode;
3816
3817 /* Don't use the previous data to write QImode if it is in a
3818 vector mode. */
3819 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3820 return target;
3821
3822 rtx prev_rtx = prev->data;
3823
3824 if (REG_P (prev_rtx)
3825 && HARD_REGISTER_P (prev_rtx)
3826 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3827 {
3828 /* This case occurs when PREV_MODE is a vector and when
3829 MODE is too small to store using vector operations.
3830 After register allocation, the code will need to move the
3831 lowpart of the vector register into a non-vector register.
3832
3833 Also, the target has chosen to use a hard register
3834 instead of going with the default choice of using a
3835 pseudo register. We should respect that choice and try to
3836 avoid creating a pseudo register with the same mode as the
3837 current hard register.
3838
3839 In principle, we could just use a lowpart MODE subreg of
3840 the vector register. However, the vector register mode might
3841 be too wide for non-vector registers, and we already know
3842 that the non-vector mode is too small for vector registers.
3843 It's therefore likely that we'd need to spill to memory in
3844 the vector mode and reload the non-vector value from there.
3845
3846 Try to avoid that by reducing the vector register to the
3847 smallest size that it can hold. This should increase the
3848 chances that non-vector registers can hold both the inner
3849 and outer modes of the subreg that we generate later. */
3850 machine_mode m;
3851 fixed_size_mode candidate;
3852 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3853 if (is_a<fixed_size_mode> (m, &candidate))
3854 {
3855 if (GET_MODE_SIZE (candidate)
3856 >= GET_MODE_SIZE (prev_mode))
3857 break;
3858 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3859 && lowpart_subreg_regno (REGNO (prev_rtx),
3860 prev_mode, candidate) >= 0)
3861 {
3862 target = lowpart_subreg (candidate, prev_rtx,
3863 prev_mode);
3864 prev_rtx = target;
3865 prev_mode = candidate;
3866 break;
3867 }
3868 }
3869 if (target == nullptr)
3870 prev_rtx = copy_to_reg (prev_rtx);
3871 }
3872
3873 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3874 }
3875 return target;
3876 }
3877
3878 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3879 bytes from constant string DATA + OFFSET and return it as target
3880 constant. If PREV isn't nullptr, it has the RTL info from the
3881 previous iteration. */
3882
3883 rtx
3884 builtin_memset_read_str (void *data, void *prev,
3885 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3886 fixed_size_mode mode)
3887 {
3888 const char *c = (const char *) data;
3889 unsigned int size = GET_MODE_SIZE (mode);
3890
3891 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3892 mode);
3893 if (target != nullptr)
3894 return target;
3895 rtx src = gen_int_mode (*c, QImode);
3896
3897 if (VECTOR_MODE_P (mode))
3898 {
3899 gcc_assert (GET_MODE_INNER (mode) == QImode);
3900
3901 rtx const_vec = gen_const_vec_duplicate (mode, src);
3902 if (prev == NULL)
3903 /* Return CONST_VECTOR when called by a query function. */
3904 return const_vec;
3905
3906 /* Use the move expander with CONST_VECTOR. */
3907 target = targetm.gen_memset_scratch_rtx (mode);
3908 emit_move_insn (target, const_vec);
3909 return target;
3910 }
3911
3912 char *p = XALLOCAVEC (char, size);
3913
3914 memset (p, *c, size);
3915
3916 /* Vector modes should be handled above. */
3917 return c_readstr (p, as_a <scalar_int_mode> (mode));
3918 }
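
/* E.g. for *C == 0x41 and a 4-byte integer mode this returns the
   constant 0x41414141; for a QImode vector mode it returns (or emits a
   move of) a constant vector whose elements are all 0x41.  */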
3919
3920 /* Callback routine for store_by_pieces. Return the RTL of a register
3921 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3922 char value given in the RTL register data. For example, if mode is
3923 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3924 nullptr, it has the RTL info from the previous iteration. */
3925
3926 static rtx
3927 builtin_memset_gen_str (void *data, void *prev,
3928 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3929 fixed_size_mode mode)
3930 {
3931 rtx target, coeff;
3932 size_t size;
3933 char *p;
3934
3935 size = GET_MODE_SIZE (mode);
3936 if (size == 1)
3937 return (rtx) data;
3938
3939 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3940 if (target != nullptr)
3941 return target;
3942
3943 if (VECTOR_MODE_P (mode))
3944 {
3945 gcc_assert (GET_MODE_INNER (mode) == QImode);
3946
3947 /* vec_duplicate_optab is a precondition to pick a vector mode for
3948 the memset expander. */
3949 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3950
3951 target = targetm.gen_memset_scratch_rtx (mode);
3952 class expand_operand ops[2];
3953 create_output_operand (&ops[0], target, mode);
3954 create_input_operand (&ops[1], (rtx) data, QImode);
3955 expand_insn (icode, 2, ops);
3956 if (!rtx_equal_p (target, ops[0].value))
3957 emit_move_insn (target, ops[0].value);
3958
3959 return target;
3960 }
3961
3962 p = XALLOCAVEC (char, size);
3963 memset (p, 1, size);
3964 /* Vector modes should be handled above. */
3965 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3966
3967 target = convert_to_mode (mode, (rtx) data, 1);
3968 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3969 return force_reg (mode, target);
3970 }
3971
3972 /* Expand expression EXP, which is a call to the memset builtin.  Return
3973    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3974    try to get the result in TARGET, if convenient (and in mode MODE if that's
3975    convenient).  */
3976
3977 rtx
3978 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3979 {
3980 if (!validate_arglist (exp,
3981 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3982 return NULL_RTX;
3983
3984 tree dest = CALL_EXPR_ARG (exp, 0);
3985 tree val = CALL_EXPR_ARG (exp, 1);
3986 tree len = CALL_EXPR_ARG (exp, 2);
3987
3988 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3989 }
3990
3991 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3992 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3993 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3994 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3995
3996 The strategy is to issue one store_by_pieces for each power of two,
3997 from most to least significant, guarded by a test on whether there
3998 are at least that many bytes left to copy in LEN.
3999
4000 ??? Should we skip some powers of two in favor of loops? Maybe start
4001 at the max of TO/LEN/word alignment, at least when optimizing for
4002 size, instead of ensuring O(log len) dynamic compares? */
4003
4004 bool
4005 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4006 unsigned HOST_WIDE_INT min_len,
4007 unsigned HOST_WIDE_INT max_len,
4008 rtx val, char valc, unsigned int align)
4009 {
4010 int max_bits = floor_log2 (max_len);
4011 int min_bits = floor_log2 (min_len);
4012 int sctz_len = ctz_len;
4013
4014 gcc_checking_assert (sctz_len >= 0);
4015
4016 if (val)
4017 valc = 1;
4018
4019 /* Bits more significant than TST_BITS are part of the shared prefix
4020 in the binary representation of both min_len and max_len. Since
4021 they're identical, we don't need to test them in the loop. */
4022 int tst_bits = (max_bits != min_bits ? max_bits
4023 : floor_log2 (max_len ^ min_len));
4024
4025 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4026 bytes, to lower max_bits. In the unlikely case of a constant LEN
4027 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4028    single store_by_pieces, but otherwise, select the minimum multiple
4029    of the ALIGN (in bytes) and of the GCD of the possible LENs that
4030    brings MAX_LEN below TST_BITS, if that's no greater than MIN_LEN.  */
4031 unsigned HOST_WIDE_INT blksize;
4032 if (max_len > min_len)
4033 {
4034 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4035 align / BITS_PER_UNIT);
4036 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4037 blksize &= ~(alrng - 1);
4038 }
4039 else if (max_len == min_len)
4040 blksize = max_len;
4041 else
4042 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4043 return false;
4044 if (min_len >= blksize)
4045 {
4046 min_len -= blksize;
4047 min_bits = floor_log2 (min_len);
4048 max_len -= blksize;
4049 max_bits = floor_log2 (max_len);
4050
4051 tst_bits = (max_bits != min_bits ? max_bits
4052 : floor_log2 (max_len ^ min_len));
4053 }
4054 else
4055 blksize = 0;
4056
4057 /* Check that we can use store by pieces for the maximum store count
4058    we may issue (initial fixed-size block, plus conditional
4059    power-of-two-sized stores from max_bits down to ctz_len).  */
4060 unsigned HOST_WIDE_INT xlenest = blksize;
4061 if (max_bits >= 0)
4062 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4063 - (HOST_WIDE_INT_1U << ctz_len));
4064 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4065 &valc, align, true))
4066 return false;
4067
4068 by_pieces_constfn constfun;
4069 void *constfundata;
4070 if (val)
4071 {
4072 constfun = builtin_memset_gen_str;
4073 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4074 val);
4075 }
4076 else
4077 {
4078 constfun = builtin_memset_read_str;
4079 constfundata = &valc;
4080 }
4081
4082 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4083 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4084 to = replace_equiv_address (to, ptr);
4085 set_mem_align (to, align);
4086
4087 if (blksize)
4088 {
4089 to = store_by_pieces (to, blksize,
4090 constfun, constfundata,
4091 align, true,
4092 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4093 if (max_len == 0)
4094 return true;
4095
4096 /* Adjust PTR, TO and REM. Since TO's address is likely
4097 PTR+offset, we have to replace it. */
4098 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4099 to = replace_equiv_address (to, ptr);
4100 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4101 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4102 }
4103
4104 /* Iterate over power-of-two block sizes from the maximum length to
4105 the least significant bit possibly set in the length. */
4106 for (int i = max_bits; i >= sctz_len; i--)
4107 {
4108 rtx_code_label *label = NULL;
4109 blksize = HOST_WIDE_INT_1U << i;
4110
4111 /* If we're past the bits shared between min_ and max_len, expand
4112 a test on the dynamic length, comparing it with the
4113 BLKSIZE. */
4114 if (i <= tst_bits)
4115 {
4116 label = gen_label_rtx ();
4117 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4118 ptr_mode, 1, label,
4119 profile_probability::even ());
4120 }
4121 /* If we are at a bit that is in the prefix shared by min_ and
4122 max_len, skip this BLKSIZE if the bit is clear. */
4123 else if ((max_len & blksize) == 0)
4124 continue;
4125
4126 /* Issue a store of BLKSIZE bytes. */
4127 to = store_by_pieces (to, blksize,
4128 constfun, constfundata,
4129 align, true,
4130 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4131
4132 /* Adjust REM and PTR, unless this is the last iteration. */
4133 if (i != sctz_len)
4134 {
4135 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4136 to = replace_equiv_address (to, ptr);
4137 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4138 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4139 }
4140
4141 if (label)
4142 {
4143 emit_label (label);
4144
4145 /* Given conditional stores, the offset can no longer be
4146 known, so clear it. */
4147 clear_mem_offset (to);
4148 }
4149 }
4150
4151 return true;
4152 }
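
/* A rough example of the strategy above: for a memset whose length is
   only known to lie in [0, 100] with CTZ_LEN 0, the loop issues guarded
   stores of 64, 32, 16, 8, 4, 2 and 1 bytes, each preceded by a compare
   of the remaining length, so only O(log len) compares execute at run
   time.  When MIN_LEN is large enough, an unconditional fixed-size block
   is stored first to reduce the number of guarded stores.  */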
4153
4154 /* Helper function to do the actual work for expand_builtin_memset. The
4155 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4156 so that this can also be called without constructing an actual CALL_EXPR.
4157 The other arguments and return value are the same as for
4158 expand_builtin_memset. */
4159
4160 static rtx
4161 expand_builtin_memset_args (tree dest, tree val, tree len,
4162 rtx target, machine_mode mode, tree orig_exp)
4163 {
4164 tree fndecl, fn;
4165 enum built_in_function fcode;
4166 machine_mode val_mode;
4167 char c;
4168 unsigned int dest_align;
4169 rtx dest_mem, dest_addr, len_rtx;
4170 HOST_WIDE_INT expected_size = -1;
4171 unsigned int expected_align = 0;
4172 unsigned HOST_WIDE_INT min_size;
4173 unsigned HOST_WIDE_INT max_size;
4174 unsigned HOST_WIDE_INT probable_max_size;
4175
4176 dest_align = get_pointer_alignment (dest);
4177
4178 /* If DEST is not a pointer type, don't do this operation in-line. */
4179 if (dest_align == 0)
4180 return NULL_RTX;
4181
4182 if (currently_expanding_gimple_stmt)
4183 stringop_block_profile (currently_expanding_gimple_stmt,
4184 &expected_align, &expected_size);
4185
4186 if (expected_align < dest_align)
4187 expected_align = dest_align;
4188
4189 /* If the LEN parameter is zero, return DEST. */
4190 if (integer_zerop (len))
4191 {
4192 /* Evaluate and ignore VAL in case it has side-effects. */
4193 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4194 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4195 }
4196
4197 /* Stabilize the arguments in case we fail. */
4198 dest = builtin_save_expr (dest);
4199 val = builtin_save_expr (val);
4200 len = builtin_save_expr (len);
4201
4202 len_rtx = expand_normal (len);
4203 determine_block_size (len, len_rtx, &min_size, &max_size,
4204 &probable_max_size);
4205 dest_mem = get_memory_rtx (dest, len);
4206 val_mode = TYPE_MODE (unsigned_char_type_node);
4207
4208 if (TREE_CODE (val) != INTEGER_CST
4209 || target_char_cast (val, &c))
4210 {
4211 rtx val_rtx;
4212
4213 val_rtx = expand_normal (val);
4214 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4215
4216 /* Assume that we can memset by pieces if we can store
4217    the coefficients by pieces (in the required modes).
4218    We can't pass builtin_memset_gen_str as that emits RTL.  */
4219 c = 1;
4220 if (tree_fits_uhwi_p (len)
4221 && can_store_by_pieces (tree_to_uhwi (len),
4222 builtin_memset_read_str, &c, dest_align,
4223 true))
4224 {
4225 val_rtx = force_reg (val_mode, val_rtx);
4226 store_by_pieces (dest_mem, tree_to_uhwi (len),
4227 builtin_memset_gen_str, val_rtx, dest_align,
4228 true, RETURN_BEGIN);
4229 }
4230 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4231 dest_align, expected_align,
4232 expected_size, min_size, max_size,
4233 probable_max_size)
4234 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4235 tree_ctz (len),
4236 min_size, max_size,
4237 val_rtx, 0,
4238 dest_align))
4239 goto do_libcall;
4240
4241 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4242 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4243 return dest_mem;
4244 }
4245
4246 if (c)
4247 {
4248 if (tree_fits_uhwi_p (len)
4249 && can_store_by_pieces (tree_to_uhwi (len),
4250 builtin_memset_read_str, &c, dest_align,
4251 true))
4252 store_by_pieces (dest_mem, tree_to_uhwi (len),
4253 builtin_memset_read_str, &c, dest_align, true,
4254 RETURN_BEGIN);
4255 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4256 gen_int_mode (c, val_mode),
4257 dest_align, expected_align,
4258 expected_size, min_size, max_size,
4259 probable_max_size)
4260 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4261 tree_ctz (len),
4262 min_size, max_size,
4263 NULL_RTX, c,
4264 dest_align))
4265 goto do_libcall;
4266
4267 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4268 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4269 return dest_mem;
4270 }
4271
4272 set_mem_align (dest_mem, dest_align);
4273 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4274 CALL_EXPR_TAILCALL (orig_exp)
4275 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4276 expected_align, expected_size,
4277 min_size, max_size,
4278 probable_max_size, tree_ctz (len));
4279
4280 if (dest_addr == 0)
4281 {
4282 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4283 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4284 }
4285
4286 return dest_addr;
4287
4288 do_libcall:
4289 fndecl = get_callee_fndecl (orig_exp);
4290 fcode = DECL_FUNCTION_CODE (fndecl);
4291 if (fcode == BUILT_IN_MEMSET)
4292 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4293 dest, val, len);
4294 else if (fcode == BUILT_IN_BZERO)
4295 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4296 dest, len);
4297 else
4298 gcc_unreachable ();
4299 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4300 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4301 return expand_call (fn, target, target == const0_rtx);
4302 }
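
/* To illustrate the two inline paths above (simplified):

     memset (p, 0, 32)   constant value: store_by_pieces reads the
                         repeated byte via builtin_memset_read_str;
     memset (p, c, 32)   variable value: store_by_pieces builds the wide
                         pattern (e.g. C * 0x0101010101010101 for an
                         8-byte mode) via builtin_memset_gen_str.

   If neither store_by_pieces, the setmem pattern, nor
   try_store_by_multiple_pieces applies, the code falls back to a
   library call.  */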
4303
4304 /* Expand expression EXP, which is a call to the bzero builtin.  Return
4305    NULL_RTX if we failed; the caller should emit a normal call.  */
4306
4307 static rtx
4308 expand_builtin_bzero (tree exp)
4309 {
4310 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4311 return NULL_RTX;
4312
4313 tree dest = CALL_EXPR_ARG (exp, 0);
4314 tree size = CALL_EXPR_ARG (exp, 1);
4315
4316 /* New argument list transforming bzero(ptr x, int y) to
4317    memset(ptr x, int 0, size_t y).  This is done this way
4318    so that if it isn't expanded inline, we fall back to
4319    calling bzero instead of memset.  */
4320
4321 location_t loc = EXPR_LOCATION (exp);
4322
4323 return expand_builtin_memset_args (dest, integer_zero_node,
4324 fold_convert_loc (loc,
4325 size_type_node, size),
4326 const0_rtx, VOIDmode, exp);
4327 }
4328
4329 /* Try to expand cmpstr operation ICODE with the given operands.
4330 Return the result rtx on success, otherwise return null. */
4331
4332 static rtx
4333 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4334 HOST_WIDE_INT align)
4335 {
4336 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4337
4338 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4339 target = NULL_RTX;
4340
4341 class expand_operand ops[4];
4342 create_output_operand (&ops[0], target, insn_mode);
4343 create_fixed_operand (&ops[1], arg1_rtx);
4344 create_fixed_operand (&ops[2], arg2_rtx);
4345 create_integer_operand (&ops[3], align);
4346 if (maybe_expand_insn (icode, 4, ops))
4347 return ops[0].value;
4348 return NULL_RTX;
4349 }
4350
4351 /* Expand expression EXP, which is a call to the memcmp built-in function.
4352 Return NULL_RTX if we failed and the caller should emit a normal call,
4353 otherwise try to get the result in TARGET, if convenient.
4354 RESULT_EQ is true if we can relax the returned value to be either zero
4355 or nonzero, without caring about the sign. */
4356
4357 static rtx
4358 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4359 {
4360 if (!validate_arglist (exp,
4361 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4362 return NULL_RTX;
4363
4364 tree arg1 = CALL_EXPR_ARG (exp, 0);
4365 tree arg2 = CALL_EXPR_ARG (exp, 1);
4366 tree len = CALL_EXPR_ARG (exp, 2);
4367
4368 /* Due to the performance benefit, always inline the calls first
4369 when result_eq is false. */
4370 rtx result = NULL_RTX;
4371 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4372 if (!result_eq && fcode != BUILT_IN_BCMP)
4373 {
4374 result = inline_expand_builtin_bytecmp (exp, target);
4375 if (result)
4376 return result;
4377 }
4378
4379 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4380 location_t loc = EXPR_LOCATION (exp);
4381
4382 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4383 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4384
4385 /* If we don't have POINTER_TYPE, call the function. */
4386 if (arg1_align == 0 || arg2_align == 0)
4387 return NULL_RTX;
4388
4389 rtx arg1_rtx = get_memory_rtx (arg1, len);
4390 rtx arg2_rtx = get_memory_rtx (arg2, len);
4391 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4392
4393 /* Set MEM_SIZE as appropriate. */
4394 if (CONST_INT_P (len_rtx))
4395 {
4396 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4397 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4398 }
4399
4400 by_pieces_constfn constfn = NULL;
4401
4402 /* Try to get the byte representation of the constant ARG2 (or, only
4403 when the function's result is used for equality to zero, ARG1)
4404 points to, with its byte size in NBYTES. */
4405 unsigned HOST_WIDE_INT nbytes;
4406 const char *rep = getbyterep (arg2, &nbytes);
4407 if (result_eq && rep == NULL)
4408 {
4409 /* For equality to zero the arguments are interchangeable. */
4410 rep = getbyterep (arg1, &nbytes);
4411 if (rep != NULL)
4412 std::swap (arg1_rtx, arg2_rtx);
4413 }
4414
4415 /* If the function's constant bound LEN_RTX is less than or equal
4416 to the byte size of the representation of the constant argument,
4417 and if block move would be done by pieces, we can avoid loading
4418 the bytes from memory and only store the computed constant result. */
4419 if (rep
4420 && CONST_INT_P (len_rtx)
4421 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4422 constfn = builtin_memcpy_read_str;
4423
4424 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4425 TREE_TYPE (len), target,
4426 result_eq, constfn,
4427 CONST_CAST (char *, rep));
4428
4429 if (result)
4430 {
4431 /* Return the value in the proper mode for this function. */
4432 if (GET_MODE (result) == mode)
4433 return result;
4434
4435 if (target != 0)
4436 {
4437 convert_move (target, result, 0);
4438 return target;
4439 }
4440
4441 return convert_to_mode (mode, result, 0);
4442 }
4443
4444 return NULL_RTX;
4445 }
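
/* For example, when the second argument points to known constant bytes,

     memcmp (buf, "\1\2\3\4", 4)

   can be compared by pieces against those bytes instead of loading them
   from memory; with RESULT_EQ the arguments may also be swapped, since
   only the zero/nonzero outcome matters.  Illustrative sketch only.  */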
4446
4447 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4448    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4449    try to get the result in TARGET, if convenient.  */
4450
4451 static rtx
4452 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4453 {
4454 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4455 return NULL_RTX;
4456
4457 tree arg1 = CALL_EXPR_ARG (exp, 0);
4458 tree arg2 = CALL_EXPR_ARG (exp, 1);
4459
4460 /* Due to the performance benefit, always inline the calls first. */
4461 rtx result = NULL_RTX;
4462 result = inline_expand_builtin_bytecmp (exp, target);
4463 if (result)
4464 return result;
4465
4466 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4467 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4468 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4469 return NULL_RTX;
4470
4471 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4472 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4473
4474 /* If we don't have POINTER_TYPE, call the function. */
4475 if (arg1_align == 0 || arg2_align == 0)
4476 return NULL_RTX;
4477
4478 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4479 arg1 = builtin_save_expr (arg1);
4480 arg2 = builtin_save_expr (arg2);
4481
4482 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4483 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4484
4485 /* Try to call cmpstrsi. */
4486 if (cmpstr_icode != CODE_FOR_nothing)
4487 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4488 MIN (arg1_align, arg2_align));
4489
4490 /* Try to determine at least one length and call cmpstrnsi. */
4491 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4492 {
4493 tree len;
4494 rtx arg3_rtx;
4495
4496 tree len1 = c_strlen (arg1, 1);
4497 tree len2 = c_strlen (arg2, 1);
4498
4499 if (len1)
4500 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4501 if (len2)
4502 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4503
4504 /* If we don't have a constant length for the first, use the length
4505 of the second, if we know it. We don't require a constant for
4506 this case; some cost analysis could be done if both are available
4507 but neither is constant. For now, assume they're equally cheap,
4508 unless one has side effects. If both strings have constant lengths,
4509 use the smaller. */
4510
4511 if (!len1)
4512 len = len2;
4513 else if (!len2)
4514 len = len1;
4515 else if (TREE_SIDE_EFFECTS (len1))
4516 len = len2;
4517 else if (TREE_SIDE_EFFECTS (len2))
4518 len = len1;
4519 else if (TREE_CODE (len1) != INTEGER_CST)
4520 len = len2;
4521 else if (TREE_CODE (len2) != INTEGER_CST)
4522 len = len1;
4523 else if (tree_int_cst_lt (len1, len2))
4524 len = len1;
4525 else
4526 len = len2;
4527
4528 /* If both arguments have side effects, we cannot optimize. */
4529 if (len && !TREE_SIDE_EFFECTS (len))
4530 {
4531 arg3_rtx = expand_normal (len);
4532 result = expand_cmpstrn_or_cmpmem
4533 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4534 arg3_rtx, MIN (arg1_align, arg2_align));
4535 }
4536 }
4537
4538 tree fndecl = get_callee_fndecl (exp);
4539 if (result)
4540 {
4541 /* Return the value in the proper mode for this function. */
4542 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4543 if (GET_MODE (result) == mode)
4544 return result;
4545 if (target == 0)
4546 return convert_to_mode (mode, result, 0);
4547 convert_move (target, result, 0);
4548 return target;
4549 }
4550
4551 /* Expand the library call ourselves using a stabilized argument
4552 list to avoid re-evaluating the function's arguments twice. */
4553 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4554 copy_warning (fn, exp);
4555 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4556 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4557 return expand_call (fn, target, target == const0_rtx);
4558 }
4559
4560 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4561    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4562    try to get the result in TARGET, if convenient.  */
4563
4564 static rtx
4565 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4566 ATTRIBUTE_UNUSED machine_mode mode)
4567 {
4568 if (!validate_arglist (exp,
4569 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4570 return NULL_RTX;
4571
4572 tree arg1 = CALL_EXPR_ARG (exp, 0);
4573 tree arg2 = CALL_EXPR_ARG (exp, 1);
4574 tree arg3 = CALL_EXPR_ARG (exp, 2);
4575
4576 location_t loc = EXPR_LOCATION (exp);
4577 tree len1 = c_strlen (arg1, 1);
4578 tree len2 = c_strlen (arg2, 1);
4579
4580 /* Due to the performance benefit, always inline the calls first. */
4581 rtx result = NULL_RTX;
4582 result = inline_expand_builtin_bytecmp (exp, target);
4583 if (result)
4584 return result;
4585
4586 /* If c_strlen can determine an expression for one of the string
4587 lengths, and it doesn't have side effects, then emit cmpstrnsi
4588 using length MIN(strlen(string)+1, arg3). */
4589 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4590 if (cmpstrn_icode == CODE_FOR_nothing)
4591 return NULL_RTX;
4592
4593 tree len;
4594
4595 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4596 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4597
4598 if (len1)
4599 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4600 if (len2)
4601 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4602
4603 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4604
4605 /* If we don't have a constant length for the first, use the length
4606 of the second, if we know it. If neither string is constant length,
4607 use the given length argument. We don't require a constant for
4608 this case; some cost analysis could be done if both are available
4609 but neither is constant. For now, assume they're equally cheap,
4610 unless one has side effects. If both strings have constant lengths,
4611 use the smaller. */
4612
4613 if (!len1 && !len2)
4614 len = len3;
4615 else if (!len1)
4616 len = len2;
4617 else if (!len2)
4618 len = len1;
4619 else if (TREE_SIDE_EFFECTS (len1))
4620 len = len2;
4621 else if (TREE_SIDE_EFFECTS (len2))
4622 len = len1;
4623 else if (TREE_CODE (len1) != INTEGER_CST)
4624 len = len2;
4625 else if (TREE_CODE (len2) != INTEGER_CST)
4626 len = len1;
4627 else if (tree_int_cst_lt (len1, len2))
4628 len = len1;
4629 else
4630 len = len2;
4631
4632 /* If we are not using the given length, we must incorporate it here.
4633 The actual new length parameter will be MIN(len,arg3) in this case. */
4634 if (len != len3)
4635 {
4636 len = fold_convert_loc (loc, sizetype, len);
4637 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4638 }
4639 rtx arg1_rtx = get_memory_rtx (arg1, len);
4640 rtx arg2_rtx = get_memory_rtx (arg2, len);
4641 rtx arg3_rtx = expand_normal (len);
4642 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4643 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4644 MIN (arg1_align, arg2_align));
4645
4646 tree fndecl = get_callee_fndecl (exp);
4647 if (result)
4648 {
4649 /* Return the value in the proper mode for this function. */
4650 mode = TYPE_MODE (TREE_TYPE (exp));
4651 if (GET_MODE (result) == mode)
4652 return result;
4653 if (target == 0)
4654 return convert_to_mode (mode, result, 0);
4655 convert_move (target, result, 0);
4656 return target;
4657 }
4658
4659 /* Expand the library call ourselves using a stabilized argument
4660 list to avoid re-evaluating the function's arguments twice. */
4661 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4662 copy_warning (call, exp);
4663 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4664 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4665 return expand_call (call, target, target == const0_rtx);
4666 }
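
/* As a sketch of the length selection above: for

     strncmp (s, "abc", 10)

   the constant string gives LEN2 = 4 (including the nul), which has no
   side effects and is smaller than the bound, so the cmpstrn pattern
   (when available) is used with length MIN (4, 10) = 4.  */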
4667
4668 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4669 if that's convenient. */
4670
4671 rtx
4672 expand_builtin_saveregs (void)
4673 {
4674 rtx val;
4675 rtx_insn *seq;
4676
4677 /* Don't do __builtin_saveregs more than once in a function.
4678 Save the result of the first call and reuse it. */
4679 if (saveregs_value != 0)
4680 return saveregs_value;
4681
4682 /* When this function is called, it means that registers must be
4683 saved on entry to this function. So we migrate the call to the
4684 first insn of this function. */
4685
4686 start_sequence ();
4687
4688 /* Do whatever the machine needs done in this case. */
4689 val = targetm.calls.expand_builtin_saveregs ();
4690
4691 seq = get_insns ();
4692 end_sequence ();
4693
4694 saveregs_value = val;
4695
4696 /* Put the insns after the NOTE that starts the function. If this
4697 is inside a start_sequence, make the outer-level insn chain current, so
4698 the code is placed at the start of the function. */
4699 push_topmost_sequence ();
4700 emit_insn_after (seq, entry_of_function ());
4701 pop_topmost_sequence ();
4702
4703 return val;
4704 }
4705
4706 /* Expand a call to __builtin_next_arg. */
4707
4708 static rtx
4709 expand_builtin_next_arg (void)
4710 {
4711 /* Checking arguments is already done in fold_builtin_next_arg
4712 that must be called before this function. */
4713 return expand_binop (ptr_mode, add_optab,
4714 crtl->args.internal_arg_pointer,
4715 crtl->args.arg_offset_rtx,
4716 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4717 }
4718
4719 /* Make it easier for the backends by protecting the valist argument
4720 from multiple evaluations. */
4721
4722 static tree
4723 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4724 {
4725 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4726
4727 /* The current way of determining the type of valist is completely
4728 bogus. We should have the information on the va builtin instead. */
4729 if (!vatype)
4730 vatype = targetm.fn_abi_va_list (cfun->decl);
4731
4732 if (TREE_CODE (vatype) == ARRAY_TYPE)
4733 {
4734 if (TREE_SIDE_EFFECTS (valist))
4735 valist = save_expr (valist);
4736
4737 /* For this case, the backends will be expecting a pointer to
4738 vatype, but it's possible we've actually been given an array
4739 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4740 So fix it. */
4741 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4742 {
4743 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4744 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4745 }
4746 }
4747 else
4748 {
4749 tree pt = build_pointer_type (vatype);
4750
4751 if (! needs_lvalue)
4752 {
4753 if (! TREE_SIDE_EFFECTS (valist))
4754 return valist;
4755
4756 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4757 TREE_SIDE_EFFECTS (valist) = 1;
4758 }
4759
4760 if (TREE_SIDE_EFFECTS (valist))
4761 valist = save_expr (valist);
4762 valist = fold_build2_loc (loc, MEM_REF,
4763 vatype, valist, build_int_cst (pt, 0));
4764 }
4765
4766 return valist;
4767 }
4768
4769 /* The "standard" definition of va_list is void*. */
4770
4771 tree
4772 std_build_builtin_va_list (void)
4773 {
4774 return ptr_type_node;
4775 }
4776
4777 /* The "standard" abi va_list is va_list_type_node. */
4778
4779 tree
4780 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4781 {
4782 return va_list_type_node;
4783 }
4784
4785 /* The "standard" type of va_list is va_list_type_node. */
4786
4787 tree
4788 std_canonical_va_list_type (tree type)
4789 {
4790 tree wtype, htype;
4791
4792 wtype = va_list_type_node;
4793 htype = type;
4794
4795 if (TREE_CODE (wtype) == ARRAY_TYPE)
4796 {
4797 /* If va_list is an array type, the argument may have decayed
4798 to a pointer type, e.g. by being passed to another function.
4799 In that case, unwrap both types so that we can compare the
4800 underlying records. */
4801 if (TREE_CODE (htype) == ARRAY_TYPE
4802 || POINTER_TYPE_P (htype))
4803 {
4804 wtype = TREE_TYPE (wtype);
4805 htype = TREE_TYPE (htype);
4806 }
4807 }
4808 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4809 return va_list_type_node;
4810
4811 return NULL_TREE;
4812 }
4813
4814 /* The "standard" implementation of va_start: just assign `nextarg' to
4815 the variable. */
4816
4817 void
4818 std_expand_builtin_va_start (tree valist, rtx nextarg)
4819 {
4820 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4821 convert_move (va_r, nextarg, 0);
4822 }
4823
4824 /* Expand EXP, a call to __builtin_va_start. */
4825
4826 static rtx
4827 expand_builtin_va_start (tree exp)
4828 {
4829 rtx nextarg;
4830 tree valist;
4831 location_t loc = EXPR_LOCATION (exp);
4832
4833 if (call_expr_nargs (exp) < 2)
4834 {
4835 error_at (loc, "too few arguments to function %<va_start%>");
4836 return const0_rtx;
4837 }
4838
4839 if (fold_builtin_next_arg (exp, true))
4840 return const0_rtx;
4841
4842 nextarg = expand_builtin_next_arg ();
4843 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4844
4845 if (targetm.expand_builtin_va_start)
4846 targetm.expand_builtin_va_start (valist, nextarg);
4847 else
4848 std_expand_builtin_va_start (valist, nextarg);
4849
4850 return const0_rtx;
4851 }
4852
4853 /* Expand EXP, a call to __builtin_va_end. */
4854
4855 static rtx
4856 expand_builtin_va_end (tree exp)
4857 {
4858 tree valist = CALL_EXPR_ARG (exp, 0);
4859
4860 /* Evaluate for side effects, if needed. I hate macros that don't
4861 do that. */
4862 if (TREE_SIDE_EFFECTS (valist))
4863 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4864
4865 return const0_rtx;
4866 }
4867
4868 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4869 builtin rather than just as an assignment in stdarg.h because of the
4870 nastiness of array-type va_list types. */
4871
4872 static rtx
4873 expand_builtin_va_copy (tree exp)
4874 {
4875 tree dst, src, t;
4876 location_t loc = EXPR_LOCATION (exp);
4877
4878 dst = CALL_EXPR_ARG (exp, 0);
4879 src = CALL_EXPR_ARG (exp, 1);
4880
4881 dst = stabilize_va_list_loc (loc, dst, 1);
4882 src = stabilize_va_list_loc (loc, src, 0);
4883
4884 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4885
4886 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4887 {
4888 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4889 TREE_SIDE_EFFECTS (t) = 1;
4890 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4891 }
4892 else
4893 {
4894 rtx dstb, srcb, size;
4895
4896 /* Evaluate to pointers. */
4897 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4898 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4899 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4900 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4901
4902 dstb = convert_memory_address (Pmode, dstb);
4903 srcb = convert_memory_address (Pmode, srcb);
4904
4905 /* "Dereference" to BLKmode memories. */
4906 dstb = gen_rtx_MEM (BLKmode, dstb);
4907 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4908 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4909 srcb = gen_rtx_MEM (BLKmode, srcb);
4910 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4911 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4912
4913 /* Copy. */
4914 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4915 }
4916
4917 return const0_rtx;
4918 }
4919
4920 /* Expand a call to one of the builtin functions __builtin_frame_address or
4921 __builtin_return_address. */
4922
4923 static rtx
4924 expand_builtin_frame_address (tree fndecl, tree exp)
4925 {
4926 /* The argument must be a nonnegative integer constant.
4927 It counts the number of frames to scan up the stack.
4928 The value is either the frame pointer value or the return
4929 address saved in that frame. */
4930 if (call_expr_nargs (exp) == 0)
4931 /* Warning about missing arg was already issued. */
4932 return const0_rtx;
4933 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4934 {
4935 error ("invalid argument to %qD", fndecl);
4936 return const0_rtx;
4937 }
4938 else
4939 {
4940 /* Number of frames to scan up the stack. */
4941 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4942
4943 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4944
4945 /* Some ports cannot access arbitrary stack frames. */
4946 if (tem == NULL)
4947 {
4948 warning (0, "unsupported argument to %qD", fndecl);
4949 return const0_rtx;
4950 }
4951
4952 if (count)
4953 {
4954 /* Warn since no effort is made to ensure that any frame
4955 beyond the current one exists or can be safely reached. */
4956 warning (OPT_Wframe_address, "calling %qD with "
4957 "a nonzero argument is unsafe", fndecl);
4958 }
4959
4960 /* For __builtin_frame_address, return what we've got. */
4961 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4962 return tem;
4963
4964 if (!REG_P (tem)
4965 && ! CONSTANT_P (tem))
4966 tem = copy_addr_to_reg (tem);
4967 return tem;
4968 }
4969 }
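/* Illustrative user-level usage (a sketch, not taken from this file):

     void *fp = __builtin_frame_address (0);    // this frame
     void *ra = __builtin_return_address (0);   // this frame's return address

   A nonzero count asks for an outer frame and triggers the -Wframe-address
   warning above, since nothing guarantees that such a frame exists or can
   be reached on the current target.  */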
4970
4971 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4972 failed and the caller should emit a normal call. */
4973
4974 static rtx
4975 expand_builtin_alloca (tree exp)
4976 {
4977 rtx op0;
4978 rtx result;
4979 unsigned int align;
4980 tree fndecl = get_callee_fndecl (exp);
4981 HOST_WIDE_INT max_size;
4982 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4983 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4984 bool valid_arglist
4985 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4986 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4987 VOID_TYPE)
4988 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4989 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4990 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4991
4992 if (!valid_arglist)
4993 return NULL_RTX;
4994
4995 /* Compute the argument. */
4996 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4997
4998 /* Compute the alignment. */
4999 align = (fcode == BUILT_IN_ALLOCA
5000 ? BIGGEST_ALIGNMENT
5001 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5002
5003 /* Compute the maximum size. */
5004 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5005 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5006 : -1);
5007
5008 /* Allocate the desired space. If the allocation stems from the declaration
5009 of a variable-sized object, it cannot accumulate. */
5010 result
5011 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5012 result = convert_memory_address (ptr_mode, result);
5013
5014 /* Dynamic allocations for variables are recorded during gimplification. */
5015 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5016 record_dynamic_alloc (exp);
5017
5018 return result;
5019 }
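/* Illustrative sketch of the three user-level forms handled above (not
   from this file); per the GCC documentation the alignment argument is
   given in bits:

     p = __builtin_alloca (n);
     p = __builtin_alloca_with_align (n, 128);              // 16-byte aligned
     p = __builtin_alloca_with_align_and_max (n, 128, 4096);

   which map to the BUILT_IN_ALLOCA* function codes checked above.  */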
5020
5021 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5022 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5023 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5024 handle_builtin_stack_restore function. */
5025
5026 static rtx
5027 expand_asan_emit_allocas_unpoison (tree exp)
5028 {
5029 tree arg0 = CALL_EXPR_ARG (exp, 0);
5030 tree arg1 = CALL_EXPR_ARG (exp, 1);
5031 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5032 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5033 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5034 stack_pointer_rtx, NULL_RTX, 0,
5035 OPTAB_LIB_WIDEN);
5036 off = convert_modes (ptr_mode, Pmode, off, 0);
5037 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5038 OPTAB_LIB_WIDEN);
5039 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5040 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5041 top, ptr_mode, bot, ptr_mode);
5042 return ret;
5043 }
5044
5045 /* Expand a call to bswap builtin in EXP.
5046 Return NULL_RTX if a normal call should be emitted rather than expanding the
5047 function in-line. If convenient, the result should be placed in TARGET.
5048 SUBTARGET may be used as the target for computing one of EXP's operands. */
5049
5050 static rtx
5051 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5052 rtx subtarget)
5053 {
5054 tree arg;
5055 rtx op0;
5056
5057 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5058 return NULL_RTX;
5059
5060 arg = CALL_EXPR_ARG (exp, 0);
5061 op0 = expand_expr (arg,
5062 subtarget && GET_MODE (subtarget) == target_mode
5063 ? subtarget : NULL_RTX,
5064 target_mode, EXPAND_NORMAL);
5065 if (GET_MODE (op0) != target_mode)
5066 op0 = convert_to_mode (target_mode, op0, 1);
5067
5068 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5069
5070 gcc_assert (target);
5071
5072 return convert_to_mode (target_mode, target, 1);
5073 }
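/* Illustrative example (not from this file): __builtin_bswap32 reverses
   the byte order of its operand, e.g.

     __builtin_bswap32 (0x12345678) == 0x78563412

   and the expansion above simply maps the call onto bswap_optab in the
   operand's mode.  */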
5074
5075 /* Expand a call to a unary builtin in EXP.
5076 Return NULL_RTX if a normal call should be emitted rather than expanding the
5077 function in-line. If convenient, the result should be placed in TARGET.
5078 SUBTARGET may be used as the target for computing one of EXP's operands. */
5079
5080 static rtx
5081 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5082 rtx subtarget, optab op_optab)
5083 {
5084 rtx op0;
5085
5086 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5087 return NULL_RTX;
5088
5089 /* Compute the argument. */
5090 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5091 (subtarget
5092 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5093 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5094 VOIDmode, EXPAND_NORMAL);
5095 /* Compute op, into TARGET if possible.
5096 Set TARGET to wherever the result comes back. */
5097 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5098 op_optab, op0, target, op_optab != clrsb_optab);
5099 gcc_assert (target);
5100
5101 return convert_to_mode (target_mode, target, 0);
5102 }
5103
5104 /* Expand a call to __builtin_expect. We just return our argument
5105 as the builtin_expect semantics should already have been applied by
5106 the tree branch prediction pass. */
5107
5108 static rtx
5109 expand_builtin_expect (tree exp, rtx target)
5110 {
5111 tree arg;
5112
5113 if (call_expr_nargs (exp) < 2)
5114 return const0_rtx;
5115 arg = CALL_EXPR_ARG (exp, 0);
5116
5117 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5118 /* When guessing was done, the hints should be already stripped away. */
5119 gcc_assert (!flag_guess_branch_prob
5120 || optimize == 0 || seen_error ());
5121 return target;
5122 }
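/* Illustrative user-level usage (a sketch, not from this file;
   handle_error is a placeholder):

     if (__builtin_expect (ptr == NULL, 0))   // "unlikely" hint
       handle_error ();

   By the time expansion runs, the hint has already been consumed by the
   branch prediction pass, so only the first argument is returned here.  */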
5123
5124 /* Expand a call to __builtin_expect_with_probability. We just return our
5125 argument as the builtin_expect semantics should already have been
5126 applied by the tree branch prediction pass. */
5127
5128 static rtx
5129 expand_builtin_expect_with_probability (tree exp, rtx target)
5130 {
5131 tree arg;
5132
5133 if (call_expr_nargs (exp) < 3)
5134 return const0_rtx;
5135 arg = CALL_EXPR_ARG (exp, 0);
5136
5137 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5138 /* When guessing was done, the hints should be already stripped away. */
5139 gcc_assert (!flag_guess_branch_prob
5140 || optimize == 0 || seen_error ());
5141 return target;
5142 }
5143
5144
5145 /* Expand a call to __builtin_assume_aligned. We just return our first
5146 argument as the builtin_assume_aligned semantics should already have
5147 been applied by CCP. */
5148
5149 static rtx
5150 expand_builtin_assume_aligned (tree exp, rtx target)
5151 {
5152 if (call_expr_nargs (exp) < 2)
5153 return const0_rtx;
5154 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5155 EXPAND_NORMAL);
5156 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5157 && (call_expr_nargs (exp) < 3
5158 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5159 return target;
5160 }
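/* Illustrative user-level usage (a sketch, not from this file):

     double *q = __builtin_assume_aligned (p, 32);   // promise 32-byte alignment

   The alignment promise itself is exploited earlier, by CCP; at this
   point only the first argument remains to be expanded.  */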
5161
5162 void
5163 expand_builtin_trap (void)
5164 {
5165 if (targetm.have_trap ())
5166 {
5167 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5168 /* For trap insns when not accumulating outgoing args force
5169 REG_ARGS_SIZE note to prevent crossjumping of calls with
5170 different args sizes. */
5171 if (!ACCUMULATE_OUTGOING_ARGS)
5172 add_args_size_note (insn, stack_pointer_delta);
5173 }
5174 else
5175 {
5176 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5177 tree call_expr = build_call_expr (fn, 0);
5178 expand_call (call_expr, NULL_RTX, false);
5179 }
5180
5181 emit_barrier ();
5182 }
5183
5184 /* Expand a call to __builtin_unreachable. We do nothing except emit
5185 a barrier saying that control flow will not pass here.
5186
5187 It is the responsibility of the program being compiled to ensure
5188 that control flow never reaches __builtin_unreachable. */
5189 static void
5190 expand_builtin_unreachable (void)
5191 {
5192 emit_barrier ();
5193 }
5194
5195 /* Expand EXP, a call to fabs, fabsf or fabsl.
5196 Return NULL_RTX if a normal call should be emitted rather than expanding
5197 the function inline. If convenient, the result should be placed
5198 in TARGET. SUBTARGET may be used as the target for computing
5199 the operand. */
5200
5201 static rtx
5202 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5203 {
5204 machine_mode mode;
5205 tree arg;
5206 rtx op0;
5207
5208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5209 return NULL_RTX;
5210
5211 arg = CALL_EXPR_ARG (exp, 0);
5212 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5213 mode = TYPE_MODE (TREE_TYPE (arg));
5214 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5215 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5216 }
5217
5218 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5219 Return NULL if a normal call should be emitted rather than expanding the
5220 function inline. If convenient, the result should be placed in TARGET.
5221 SUBTARGET may be used as the target for computing the operand. */
5222
5223 static rtx
5224 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5225 {
5226 rtx op0, op1;
5227 tree arg;
5228
5229 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5230 return NULL_RTX;
5231
5232 arg = CALL_EXPR_ARG (exp, 0);
5233 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5234
5235 arg = CALL_EXPR_ARG (exp, 1);
5236 op1 = expand_normal (arg);
5237
5238 return expand_copysign (op0, op1, target);
5239 }
5240
5241 /* Emit a call to __builtin___clear_cache. */
5242
5243 void
5244 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5245 {
5246 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5247 BUILTIN_ASM_NAME_PTR
5248 (BUILT_IN_CLEAR_CACHE));
5249
5250 emit_library_call (callee,
5251 LCT_NORMAL, VOIDmode,
5252 convert_memory_address (ptr_mode, begin), ptr_mode,
5253 convert_memory_address (ptr_mode, end), ptr_mode);
5254 }
5255
5256 /* Emit a call to __builtin___clear_cache, unless the target specifies
5257 it as do-nothing. This function can be used by trampoline
5258 finalizers to duplicate the effects of expanding a call to the
5259 clear_cache builtin. */
5260
5261 void
5262 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5263 {
5264 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5265 || CONST_INT_P (begin))
5266 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5267 || CONST_INT_P (end)));
5268
5269 if (targetm.have_clear_cache ())
5270 {
5271 /* We have a "clear_cache" insn, and it will handle everything. */
5272 class expand_operand ops[2];
5273
5274 create_address_operand (&ops[0], begin);
5275 create_address_operand (&ops[1], end);
5276
5277 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5278 return;
5279 }
5280 else
5281 {
5282 #ifndef CLEAR_INSN_CACHE
5283 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5284 does nothing. There is no need to call it. Do nothing. */
5285 return;
5286 #endif /* CLEAR_INSN_CACHE */
5287 }
5288
5289 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5290 }
5291
5292 /* Expand a call to __builtin___clear_cache. */
5293
5294 static void
5295 expand_builtin___clear_cache (tree exp)
5296 {
5297 tree begin, end;
5298 rtx begin_rtx, end_rtx;
5299
5300 /* We must not expand to a library call. If we did, any
5301 fallback library function in libgcc that might contain a call to
5302 __builtin___clear_cache() would recurse infinitely. */
5303 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5304 {
5305 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5306 return;
5307 }
5308
5309 begin = CALL_EXPR_ARG (exp, 0);
5310 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5311
5312 end = CALL_EXPR_ARG (exp, 1);
5313 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5314
5315 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5316 }
5317
5318 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5319
5320 static rtx
5321 round_trampoline_addr (rtx tramp)
5322 {
5323 rtx temp, addend, mask;
5324
5325 /* If we don't need too much alignment, we'll have been guaranteed
5326 proper alignment by get_trampoline_type. */
5327 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5328 return tramp;
5329
5330 /* Round address up to desired boundary. */
5331 temp = gen_reg_rtx (Pmode);
5332 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5333 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5334
5335 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5336 temp, 0, OPTAB_LIB_WIDEN);
5337 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5338 temp, 0, OPTAB_LIB_WIDEN);
5339
5340 return tramp;
5341 }
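/* Worked example of the rounding above (illustrative numbers): with a
   TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes) and tramp == 0x1003,

     temp  = 0x1003 + 7   == 0x100a
     tramp = 0x100a & -8  == 0x1008

   i.e. the usual (addr + align - 1) & -align round-up.  */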
5342
5343 static rtx
5344 expand_builtin_init_trampoline (tree exp, bool onstack)
5345 {
5346 tree t_tramp, t_func, t_chain;
5347 rtx m_tramp, r_tramp, r_chain, tmp;
5348
5349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5350 POINTER_TYPE, VOID_TYPE))
5351 return NULL_RTX;
5352
5353 t_tramp = CALL_EXPR_ARG (exp, 0);
5354 t_func = CALL_EXPR_ARG (exp, 1);
5355 t_chain = CALL_EXPR_ARG (exp, 2);
5356
5357 r_tramp = expand_normal (t_tramp);
5358 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5359 MEM_NOTRAP_P (m_tramp) = 1;
5360
5361 /* If ONSTACK, the TRAMP argument should be the address of a field
5362 within the local function's FRAME decl. Either way, let's see if
5363 we can fill in the MEM_ATTRs for this memory. */
5364 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5365 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5366
5367 /* Creator of a heap trampoline is responsible for making sure the
5368 address is aligned to at least STACK_BOUNDARY. Normally malloc
5369 will ensure this anyhow. */
5370 tmp = round_trampoline_addr (r_tramp);
5371 if (tmp != r_tramp)
5372 {
5373 m_tramp = change_address (m_tramp, BLKmode, tmp);
5374 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5375 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5376 }
5377
5378 /* The FUNC argument should be the address of the nested function.
5379 Extract the actual function decl to pass to the hook. */
5380 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5381 t_func = TREE_OPERAND (t_func, 0);
5382 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5383
5384 r_chain = expand_normal (t_chain);
5385
5386 /* Generate insns to initialize the trampoline. */
5387 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5388
5389 if (onstack)
5390 {
5391 trampolines_created = 1;
5392
5393 if (targetm.calls.custom_function_descriptors != 0)
5394 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5395 "trampoline generated for nested function %qD", t_func);
5396 }
5397
5398 return const0_rtx;
5399 }
5400
5401 static rtx
5402 expand_builtin_adjust_trampoline (tree exp)
5403 {
5404 rtx tramp;
5405
5406 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5407 return NULL_RTX;
5408
5409 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5410 tramp = round_trampoline_addr (tramp);
5411 if (targetm.calls.trampoline_adjust_address)
5412 tramp = targetm.calls.trampoline_adjust_address (tramp);
5413
5414 return tramp;
5415 }
5416
5417 /* Expand a call to the builtin descriptor initialization routine.
5418 A descriptor is made up of a couple of pointers to the static
5419 chain and the code entry in this order. */
5420
5421 static rtx
5422 expand_builtin_init_descriptor (tree exp)
5423 {
5424 tree t_descr, t_func, t_chain;
5425 rtx m_descr, r_descr, r_func, r_chain;
5426
5427 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5428 VOID_TYPE))
5429 return NULL_RTX;
5430
5431 t_descr = CALL_EXPR_ARG (exp, 0);
5432 t_func = CALL_EXPR_ARG (exp, 1);
5433 t_chain = CALL_EXPR_ARG (exp, 2);
5434
5435 r_descr = expand_normal (t_descr);
5436 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5437 MEM_NOTRAP_P (m_descr) = 1;
5438 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5439
5440 r_func = expand_normal (t_func);
5441 r_chain = expand_normal (t_chain);
5442
5443 /* Generate insns to initialize the descriptor. */
5444 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5445 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5446 POINTER_SIZE / BITS_PER_UNIT), r_func);
5447
5448 return const0_rtx;
5449 }
5450
5451 /* Expand a call to the builtin descriptor adjustment routine. */
5452
5453 static rtx
5454 expand_builtin_adjust_descriptor (tree exp)
5455 {
5456 rtx tramp;
5457
5458 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5459 return NULL_RTX;
5460
5461 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5462
5463 /* Unalign the descriptor to allow runtime identification. */
5464 tramp = plus_constant (ptr_mode, tramp,
5465 targetm.calls.custom_function_descriptors);
5466
5467 return force_operand (tramp, NULL_RTX);
5468 }
5469
5470 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5471 function. The function first checks whether the back end provides
5472 an insn to implement signbit for the respective mode. If not, it
5473 checks whether the floating point format of the value is such that
5474 the sign bit can be extracted. If that is not the case, error out.
5475 EXP is the expression that is a call to the builtin function; if
5476 convenient, the result should be placed in TARGET. */
5477 static rtx
5478 expand_builtin_signbit (tree exp, rtx target)
5479 {
5480 const struct real_format *fmt;
5481 scalar_float_mode fmode;
5482 scalar_int_mode rmode, imode;
5483 tree arg;
5484 int word, bitpos;
5485 enum insn_code icode;
5486 rtx temp;
5487 location_t loc = EXPR_LOCATION (exp);
5488
5489 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5490 return NULL_RTX;
5491
5492 arg = CALL_EXPR_ARG (exp, 0);
5493 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5494 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5495 fmt = REAL_MODE_FORMAT (fmode);
5496
5497 arg = builtin_save_expr (arg);
5498
5499 /* Expand the argument yielding a RTX expression. */
5500 temp = expand_normal (arg);
5501
5502 /* Check if the back end provides an insn that handles signbit for the
5503 argument's mode. */
5504 icode = optab_handler (signbit_optab, fmode);
5505 if (icode != CODE_FOR_nothing)
5506 {
5507 rtx_insn *last = get_last_insn ();
5508 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5509 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5510 return target;
5511 delete_insns_since (last);
5512 }
5513
5514 /* For floating point formats without a sign bit, implement signbit
5515 as "ARG < 0.0". */
5516 bitpos = fmt->signbit_ro;
5517 if (bitpos < 0)
5518 {
5519 /* But we can't do this if the format supports signed zero. */
5520 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5521
5522 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5523 build_real (TREE_TYPE (arg), dconst0));
5524 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5525 }
5526
5527 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5528 {
5529 imode = int_mode_for_mode (fmode).require ();
5530 temp = gen_lowpart (imode, temp);
5531 }
5532 else
5533 {
5534 imode = word_mode;
5535 /* Handle targets with different FP word orders. */
5536 if (FLOAT_WORDS_BIG_ENDIAN)
5537 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5538 else
5539 word = bitpos / BITS_PER_WORD;
5540 temp = operand_subword_force (temp, word, fmode);
5541 bitpos = bitpos % BITS_PER_WORD;
5542 }
5543
5544 /* Force the intermediate word_mode (or narrower) result into a
5545 register. This avoids attempting to create paradoxical SUBREGs
5546 of floating point modes below. */
5547 temp = force_reg (imode, temp);
5548
5549 /* If the bitpos is within the "result mode" lowpart, the operation
5550 can be implemented with a single bitwise AND. Otherwise, we need
5551 a right shift and an AND. */
5552
5553 if (bitpos < GET_MODE_BITSIZE (rmode))
5554 {
5555 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5556
5557 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5558 temp = gen_lowpart (rmode, temp);
5559 temp = expand_binop (rmode, and_optab, temp,
5560 immed_wide_int_const (mask, rmode),
5561 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5562 }
5563 else
5564 {
5565 /* Perform a logical right shift to place the signbit in the least
5566 significant bit, then truncate the result to the desired mode
5567 and mask just this bit. */
5568 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5569 temp = gen_lowpart (rmode, temp);
5570 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5571 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5572 }
5573
5574 return temp;
5575 }
5576
5577 /* Expand fork or exec calls. TARGET is the desired target of the
5578 call. EXP is the call. FN is the
5579 identifier of the actual function. IGNORE is nonzero if the
5580 value is to be ignored. */
5581
5582 static rtx
5583 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5584 {
5585 tree id, decl;
5586 tree call;
5587
5588 /* If we are not profiling, just call the function. */
5589 if (!profile_arc_flag)
5590 return NULL_RTX;
5591
5592 /* Otherwise call the wrapper. This should be equivalent for the rest of
5593 the compiler, so the code does not diverge, and the wrapper may run the
5594 code necessary to keep the profiling sane. */
5595
5596 switch (DECL_FUNCTION_CODE (fn))
5597 {
5598 case BUILT_IN_FORK:
5599 id = get_identifier ("__gcov_fork");
5600 break;
5601
5602 case BUILT_IN_EXECL:
5603 id = get_identifier ("__gcov_execl");
5604 break;
5605
5606 case BUILT_IN_EXECV:
5607 id = get_identifier ("__gcov_execv");
5608 break;
5609
5610 case BUILT_IN_EXECLP:
5611 id = get_identifier ("__gcov_execlp");
5612 break;
5613
5614 case BUILT_IN_EXECLE:
5615 id = get_identifier ("__gcov_execle");
5616 break;
5617
5618 case BUILT_IN_EXECVP:
5619 id = get_identifier ("__gcov_execvp");
5620 break;
5621
5622 case BUILT_IN_EXECVE:
5623 id = get_identifier ("__gcov_execve");
5624 break;
5625
5626 default:
5627 gcc_unreachable ();
5628 }
5629
5630 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5631 FUNCTION_DECL, id, TREE_TYPE (fn));
5632 DECL_EXTERNAL (decl) = 1;
5633 TREE_PUBLIC (decl) = 1;
5634 DECL_ARTIFICIAL (decl) = 1;
5635 TREE_NOTHROW (decl) = 1;
5636 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5637 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5638 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5639 return expand_call (call, target, ignore);
5640 }
5641
5642
5643
5644 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5645 the pointer in these functions is void*, the tree optimizers may remove
5646 casts. The mode computed in expand_builtin isn't reliable either, due
5647 to __sync_bool_compare_and_swap.
5648
5649 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5650 group of builtins. This gives us log2 of the mode size. */
5651
5652 static inline machine_mode
5653 get_builtin_sync_mode (int fcode_diff)
5654 {
5655 /* The size is not negotiable, so ask not to get BLKmode in return
5656 if the target indicates that a smaller size would be better. */
5657 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5658 }
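/* Illustrative mapping (assuming BITS_PER_UNIT == 8): FCODE_DIFF values
   0..4 request integer modes of 8, 16, 32, 64 and 128 bits, i.e. the
   _1, _2, _4, _8 and _16 variants of the __sync/__atomic builtins.  */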
5659
5660 /* Expand the memory expression LOC and return the appropriate memory operand
5661 for the builtin_sync operations. */
5662
5663 static rtx
5664 get_builtin_sync_mem (tree loc, machine_mode mode)
5665 {
5666 rtx addr, mem;
5667 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5668 ? TREE_TYPE (TREE_TYPE (loc))
5669 : TREE_TYPE (loc));
5670 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5671
5672 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5673 addr = convert_memory_address (addr_mode, addr);
5674
5675 /* Note that we explicitly do not want any alias information for this
5676 memory, so that we kill all other live memories. Otherwise we don't
5677 satisfy the full barrier semantics of the intrinsic. */
5678 mem = gen_rtx_MEM (mode, addr);
5679
5680 set_mem_addr_space (mem, addr_space);
5681
5682 mem = validize_mem (mem);
5683
5684 /* The alignment needs to be at least that of the mode. */
5685 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5686 get_pointer_alignment (loc)));
5687 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5688 MEM_VOLATILE_P (mem) = 1;
5689
5690 return mem;
5691 }
5692
5693 /* Make sure an argument is in the right mode.
5694 EXP is the tree argument.
5695 MODE is the mode it should be in. */
5696
5697 static rtx
5698 expand_expr_force_mode (tree exp, machine_mode mode)
5699 {
5700 rtx val;
5701 machine_mode old_mode;
5702
5703 if (TREE_CODE (exp) == SSA_NAME
5704 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5705 {
5706 /* Undo argument promotion if possible, as combine might not
5707 be able to do it later due to MEM_VOLATILE_P uses in the
5708 patterns. */
5709 gimple *g = get_gimple_for_ssa_name (exp);
5710 if (g && gimple_assign_cast_p (g))
5711 {
5712 tree rhs = gimple_assign_rhs1 (g);
5713 tree_code code = gimple_assign_rhs_code (g);
5714 if (CONVERT_EXPR_CODE_P (code)
5715 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5716 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5717 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5718 && (TYPE_PRECISION (TREE_TYPE (exp))
5719 > TYPE_PRECISION (TREE_TYPE (rhs))))
5720 exp = rhs;
5721 }
5722 }
5723
5724 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5725 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5726 of CONST_INTs, where we know the old_mode only from the call argument. */
5727
5728 old_mode = GET_MODE (val);
5729 if (old_mode == VOIDmode)
5730 old_mode = TYPE_MODE (TREE_TYPE (exp));
5731 val = convert_modes (mode, old_mode, val, 1);
5732 return val;
5733 }
5734
5735
5736 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5737 EXP is the CALL_EXPR. CODE is the rtx code
5738 that corresponds to the arithmetic or logical operation from the name;
5739 an exception here is that NOT actually means NAND. TARGET is an optional
5740 place for us to store the results; AFTER is true if this is the
5741 fetch_and_xxx form. */
5742
5743 static rtx
5744 expand_builtin_sync_operation (machine_mode mode, tree exp,
5745 enum rtx_code code, bool after,
5746 rtx target)
5747 {
5748 rtx val, mem;
5749 location_t loc = EXPR_LOCATION (exp);
5750
5751 if (code == NOT && warn_sync_nand)
5752 {
5753 tree fndecl = get_callee_fndecl (exp);
5754 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5755
5756 static bool warned_f_a_n, warned_n_a_f;
5757
5758 switch (fcode)
5759 {
5760 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5761 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5762 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5763 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5764 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5765 if (warned_f_a_n)
5766 break;
5767
5768 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5769 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5770 warned_f_a_n = true;
5771 break;
5772
5773 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5774 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5775 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5776 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5777 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5778 if (warned_n_a_f)
5779 break;
5780
5781 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5782 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5783 warned_n_a_f = true;
5784 break;
5785
5786 default:
5787 gcc_unreachable ();
5788 }
5789 }
5790
5791 /* Expand the operands. */
5792 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5793 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5794
5795 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5796 after);
5797 }
5798
5799 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5800 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5801 true if this is the boolean form. TARGET is a place for us to store the
5802 results; this is NOT optional if IS_BOOL is true. */
5803
5804 static rtx
5805 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5806 bool is_bool, rtx target)
5807 {
5808 rtx old_val, new_val, mem;
5809 rtx *pbool, *poval;
5810
5811 /* Expand the operands. */
5812 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5813 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5814 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5815
5816 pbool = poval = NULL;
5817 if (target != const0_rtx)
5818 {
5819 if (is_bool)
5820 pbool = ⌖
5821 else
5822 poval = ⌖
5823 }
5824 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5825 false, MEMMODEL_SYNC_SEQ_CST,
5826 MEMMODEL_SYNC_SEQ_CST))
5827 return NULL_RTX;
5828
5829 return target;
5830 }
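/* Illustrative user-level forms of the two intrinsics handled above
   (a sketch, not from this file):

     ok  = __sync_bool_compare_and_swap (&x, oldv, newv);  // IS_BOOL form
     old = __sync_val_compare_and_swap (&x, oldv, newv);   // value form

   Both expand to the same compare-and-swap; they differ only in whether
   the success flag or the previous value ends up in TARGET.  */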
5831
5832 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5833 general form is actually an atomic exchange, and some targets only
5834 support a reduced form with the second argument being a constant 1.
5835 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5836 the results. */
5837
5838 static rtx
5839 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5840 rtx target)
5841 {
5842 rtx val, mem;
5843
5844 /* Expand the operands. */
5845 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5846 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5847
5848 return expand_sync_lock_test_and_set (target, mem, val);
5849 }
5850
5851 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5852
5853 static void
5854 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5855 {
5856 rtx mem;
5857
5858 /* Expand the operands. */
5859 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5860
5861 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5862 }
5863
5864 /* Given an integer representing an ``enum memmodel'', verify its
5865 correctness and return the memory model enum. */
5866
5867 static enum memmodel
5868 get_memmodel (tree exp)
5869 {
5870 /* If the parameter is not a constant, it's a run time value so we'll just
5871 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5872 if (TREE_CODE (exp) != INTEGER_CST)
5873 return MEMMODEL_SEQ_CST;
5874
5875 rtx op = expand_normal (exp);
5876
5877 unsigned HOST_WIDE_INT val = INTVAL (op);
5878 if (targetm.memmodel_check)
5879 val = targetm.memmodel_check (val);
5880 else if (val & ~MEMMODEL_MASK)
5881 return MEMMODEL_SEQ_CST;
5882
5883 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5884 if (memmodel_base (val) >= MEMMODEL_LAST)
5885 return MEMMODEL_SEQ_CST;
5886
5887 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5888 be conservative and promote consume to acquire. */
5889 if (val == MEMMODEL_CONSUME)
5890 val = MEMMODEL_ACQUIRE;
5891
5892 return (enum memmodel) val;
5893 }
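/* Illustrative note (user-level view, not from this file): the memmodel
   argument normally arrives as one of the __ATOMIC_* constants, e.g.

     __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   Non-constant or out-of-range values are conservatively treated as
   sequentially consistent, and consume is promoted to acquire as noted
   above.  */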
5894
5895 /* Expand the __atomic_exchange intrinsic:
5896 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5897 EXP is the CALL_EXPR.
5898 TARGET is an optional place for us to store the results. */
5899
5900 static rtx
5901 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5902 {
5903 rtx val, mem;
5904 enum memmodel model;
5905
5906 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5907
5908 if (!flag_inline_atomics)
5909 return NULL_RTX;
5910
5911 /* Expand the operands. */
5912 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5913 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5914
5915 return expand_atomic_exchange (target, mem, val, model);
5916 }
5917
5918 /* Expand the __atomic_compare_exchange intrinsic:
5919 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5920 TYPE desired, BOOL weak,
5921 enum memmodel success,
5922 enum memmodel failure)
5923 EXP is the CALL_EXPR.
5924 TARGET is an optional place for us to store the results. */
5925
5926 static rtx
5927 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5928 rtx target)
5929 {
5930 rtx expect, desired, mem, oldval;
5931 rtx_code_label *label;
5932 tree weak;
5933 bool is_weak;
5934
5935 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5936 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5937
5938 if (failure > success)
5939 success = MEMMODEL_SEQ_CST;
5940
5941 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5942 {
5943 failure = MEMMODEL_SEQ_CST;
5944 success = MEMMODEL_SEQ_CST;
5945 }
5946
5947
5948 if (!flag_inline_atomics)
5949 return NULL_RTX;
5950
5951 /* Expand the operands. */
5952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5953
5954 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5955 expect = convert_memory_address (Pmode, expect);
5956 expect = gen_rtx_MEM (mode, expect);
5957 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5958
5959 weak = CALL_EXPR_ARG (exp, 3);
5960 is_weak = false;
5961 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5962 is_weak = true;
5963
5964 if (target == const0_rtx)
5965 target = NULL;
5966
5967 /* Lest the rtl backend create a race condition with an improper store
5968 to memory, always create a new pseudo for OLDVAL. */
5969 oldval = NULL;
5970
5971 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5972 is_weak, success, failure))
5973 return NULL_RTX;
5974
5975 /* Conditionally store back to EXPECT, lest we create a race condition
5976 with an improper store to memory. */
5977 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5978 the normal case where EXPECT is totally private, i.e. a register. At
5979 which point the store can be unconditional. */
5980 label = gen_label_rtx ();
5981 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5982 GET_MODE (target), 1, label);
5983 emit_move_insn (expect, oldval);
5984 emit_label (label);
5985
5986 return target;
5987 }
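/* Illustrative user-level usage of the intrinsic expanded above (a
   sketch, not from this file):

     long expected = old;
     bool ok = __atomic_compare_exchange_n (&x, &expected, newv,
                                            false,   // weak
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure EXPECTED is updated with the value actually observed, which
   is what the conditional store back to EXPECT above implements.  */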
5988
5989 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5990 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5991 call. The weak parameter must be dropped to match the expected parameter
5992 list and the expected argument changed from value to pointer to memory
5993 slot. */
5994
5995 static void
5996 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5997 {
5998 unsigned int z;
5999 vec<tree, va_gc> *vec;
6000
6001 vec_alloc (vec, 5);
6002 vec->quick_push (gimple_call_arg (call, 0));
6003 tree expected = gimple_call_arg (call, 1);
6004 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6005 TREE_TYPE (expected));
6006 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6007 if (expd != x)
6008 emit_move_insn (x, expd);
6009 tree v = make_tree (TREE_TYPE (expected), x);
6010 vec->quick_push (build1 (ADDR_EXPR,
6011 build_pointer_type (TREE_TYPE (expected)), v));
6012 vec->quick_push (gimple_call_arg (call, 2));
6013 /* Skip the boolean weak parameter. */
6014 for (z = 4; z < 6; z++)
6015 vec->quick_push (gimple_call_arg (call, z));
6016 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6017 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6018 gcc_assert (bytes_log2 < 5);
6019 built_in_function fncode
6020 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6021 + bytes_log2);
6022 tree fndecl = builtin_decl_explicit (fncode);
6023 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6024 fndecl);
6025 tree exp = build_call_vec (boolean_type_node, fn, vec);
6026 tree lhs = gimple_call_lhs (call);
6027 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6028 if (lhs)
6029 {
6030 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6031 if (GET_MODE (boolret) != mode)
6032 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6033 x = force_reg (mode, x);
6034 write_complex_part (target, boolret, true);
6035 write_complex_part (target, x, false);
6036 }
6037 }
6038
6039 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6040
6041 void
6042 expand_ifn_atomic_compare_exchange (gcall *call)
6043 {
6044 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6045 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6046 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6047
6048 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6049 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6050
6051 if (failure > success)
6052 success = MEMMODEL_SEQ_CST;
6053
6054 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6055 {
6056 failure = MEMMODEL_SEQ_CST;
6057 success = MEMMODEL_SEQ_CST;
6058 }
6059
6060 if (!flag_inline_atomics)
6061 {
6062 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6063 return;
6064 }
6065
6066 /* Expand the operands. */
6067 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6068
6069 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6070 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6071
6072 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6073
6074 rtx boolret = NULL;
6075 rtx oldval = NULL;
6076
6077 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6078 is_weak, success, failure))
6079 {
6080 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6081 return;
6082 }
6083
6084 tree lhs = gimple_call_lhs (call);
6085 if (lhs)
6086 {
6087 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6088 if (GET_MODE (boolret) != mode)
6089 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6090 write_complex_part (target, boolret, true);
6091 write_complex_part (target, oldval, false);
6092 }
6093 }
6094
6095 /* Expand the __atomic_load intrinsic:
6096 TYPE __atomic_load (TYPE *object, enum memmodel)
6097 EXP is the CALL_EXPR.
6098 TARGET is an optional place for us to store the results. */
6099
6100 static rtx
6101 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6102 {
6103 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6104 if (is_mm_release (model) || is_mm_acq_rel (model))
6105 model = MEMMODEL_SEQ_CST;
6106
6107 if (!flag_inline_atomics)
6108 return NULL_RTX;
6109
6110 /* Expand the operand. */
6111 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6112
6113 return expand_atomic_load (target, mem, model);
6114 }
6115
6116
6117 /* Expand the __atomic_store intrinsic:
6118 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6119 EXP is the CALL_EXPR.
6120 TARGET is an optional place for us to store the results. */
6121
6122 static rtx
6123 expand_builtin_atomic_store (machine_mode mode, tree exp)
6124 {
6125 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6126 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6127 || is_mm_release (model)))
6128 model = MEMMODEL_SEQ_CST;
6129
6130 if (!flag_inline_atomics)
6131 return NULL_RTX;
6132
6133 /* Expand the operands. */
6134 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6135 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6136
6137 return expand_atomic_store (mem, val, model, false);
6138 }
6139
6140 /* Expand the __atomic_fetch_XXX intrinsic:
6141 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6142 EXP is the CALL_EXPR.
6143 TARGET is an optional place for us to store the results.
6144 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6145 FETCH_AFTER is true if returning the result of the operation.
6146 FETCH_AFTER is false if returning the value before the operation.
6147 IGNORE is true if the result is not used.
6148 EXT_CALL is the correct builtin for an external call if this cannot be
6149 resolved to an instruction sequence. */
6150
6151 static rtx
6152 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6153 enum rtx_code code, bool fetch_after,
6154 bool ignore, enum built_in_function ext_call)
6155 {
6156 rtx val, mem, ret;
6157 enum memmodel model;
6158 tree fndecl;
6159 tree addr;
6160
6161 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6162
6163 /* Expand the operands. */
6164 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6165 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6166
6167 /* Only try generating instructions if inlining is turned on. */
6168 if (flag_inline_atomics)
6169 {
6170 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6171 if (ret)
6172 return ret;
6173 }
6174
6175 /* Return if a different routine isn't needed for the library call. */
6176 if (ext_call == BUILT_IN_NONE)
6177 return NULL_RTX;
6178
6179 /* Change the call to the specified function. */
6180 fndecl = get_callee_fndecl (exp);
6181 addr = CALL_EXPR_FN (exp);
6182 STRIP_NOPS (addr);
6183
6184 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6185 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6186
6187 /* If we will emit code after the call, the call cannot be a tail call.
6188 If it is emitted as a tail call, a barrier is emitted after it, and
6189 then all trailing code is removed. */
6190 if (!ignore)
6191 CALL_EXPR_TAILCALL (exp) = 0;
6192
6193 /* Expand the call here so we can emit trailing code. */
6194 ret = expand_call (exp, target, ignore);
6195
6196 /* Replace the original function just in case it matters. */
6197 TREE_OPERAND (addr, 0) = fndecl;
6198
6199 /* Then issue the arithmetic correction to return the right result. */
6200 if (!ignore)
6201 {
6202 if (code == NOT)
6203 {
6204 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6205 OPTAB_LIB_WIDEN);
6206 ret = expand_simple_unop (mode, NOT, ret, target, true);
6207 }
6208 else
6209 ret = expand_simple_binop (mode, code, ret, val, target, true,
6210 OPTAB_LIB_WIDEN);
6211 }
6212 return ret;
6213 }
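/* Example of the arithmetic correction above (illustrative): the library
   routine named by EXT_CALL returns the pre-operation value, so when the
   post-operation value is wanted it is recomputed, e.g. for
   __atomic_add_fetch

     ret = <old value returned by the __atomic_fetch_add call>
     ret = ret + val

   and for the NAND case the fixup is ret = ~(ret & val).  */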
6214
6215 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6216
6217 void
6218 expand_ifn_atomic_bit_test_and (gcall *call)
6219 {
6220 tree ptr = gimple_call_arg (call, 0);
6221 tree bit = gimple_call_arg (call, 1);
6222 tree flag = gimple_call_arg (call, 2);
6223 tree lhs = gimple_call_lhs (call);
6224 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6225 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6226 enum rtx_code code;
6227 optab optab;
6228 class expand_operand ops[5];
6229
6230 gcc_assert (flag_inline_atomics);
6231
6232 if (gimple_call_num_args (call) == 5)
6233 model = get_memmodel (gimple_call_arg (call, 3));
6234
6235 rtx mem = get_builtin_sync_mem (ptr, mode);
6236 rtx val = expand_expr_force_mode (bit, mode);
6237
6238 switch (gimple_call_internal_fn (call))
6239 {
6240 case IFN_ATOMIC_BIT_TEST_AND_SET:
6241 code = IOR;
6242 optab = atomic_bit_test_and_set_optab;
6243 break;
6244 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6245 code = XOR;
6246 optab = atomic_bit_test_and_complement_optab;
6247 break;
6248 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6249 code = AND;
6250 optab = atomic_bit_test_and_reset_optab;
6251 break;
6252 default:
6253 gcc_unreachable ();
6254 }
6255
6256 if (lhs == NULL_TREE)
6257 {
6258 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6259 val, NULL_RTX, true, OPTAB_DIRECT);
6260 if (code == AND)
6261 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6262 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6263 return;
6264 }
6265
6266 rtx target;
6267 if (lhs)
6268 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6269 else
6270 target = gen_reg_rtx (mode);
6271 enum insn_code icode = direct_optab_handler (optab, mode);
6272 gcc_assert (icode != CODE_FOR_nothing);
6273 create_output_operand (&ops[0], target, mode);
6274 create_fixed_operand (&ops[1], mem);
6275 create_convert_operand_to (&ops[2], val, mode, true);
6276 create_integer_operand (&ops[3], model);
6277 create_integer_operand (&ops[4], integer_onep (flag));
6278 if (maybe_expand_insn (icode, 5, ops))
6279 return;
6280
6281 rtx bitval = val;
6282 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6283 val, NULL_RTX, true, OPTAB_DIRECT);
6284 rtx maskval = val;
6285 if (code == AND)
6286 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6287 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6288 code, model, false);
6289 if (!result)
6290 {
6291 bool is_atomic = gimple_call_num_args (call) == 5;
6292 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6293 tree fndecl = gimple_call_addr_fndecl (tcall);
6294 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6295 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6296 make_tree (type, val),
6297 is_atomic
6298 ? gimple_call_arg (call, 3)
6299 : integer_zero_node);
6300 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6301 mode, !lhs);
6302 }
6303 if (!lhs)
6304 return;
6305 if (integer_onep (flag))
6306 {
6307 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6308 NULL_RTX, true, OPTAB_DIRECT);
6309 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6310 true, OPTAB_DIRECT);
6311 }
6312 else
6313 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6314 OPTAB_DIRECT);
6315 if (result != target)
6316 emit_move_insn (target, result);
6317 }
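/* Illustrative source pattern behind this internal function (a sketch,
   not from this file): a test such as

     if (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
         & (1u << bit))

   is recognized during gimple-level folding and turned into
   IFN_ATOMIC_BIT_TEST_AND_SET, which is then expanded here either via
   the direct optab or the fallback fetch-op sequence above.  */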
6318
6319 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6320
6321 void
6322 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6323 {
6324 tree cmp = gimple_call_arg (call, 0);
6325 tree ptr = gimple_call_arg (call, 1);
6326 tree arg = gimple_call_arg (call, 2);
6327 tree lhs = gimple_call_lhs (call);
6328 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6329 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6330 optab optab;
6331 rtx_code code;
6332 class expand_operand ops[5];
6333
6334 gcc_assert (flag_inline_atomics);
6335
6336 if (gimple_call_num_args (call) == 5)
6337 model = get_memmodel (gimple_call_arg (call, 3));
6338
6339 rtx mem = get_builtin_sync_mem (ptr, mode);
6340 rtx op = expand_expr_force_mode (arg, mode);
6341
6342 switch (gimple_call_internal_fn (call))
6343 {
6344 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6345 code = PLUS;
6346 optab = atomic_add_fetch_cmp_0_optab;
6347 break;
6348 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6349 code = MINUS;
6350 optab = atomic_sub_fetch_cmp_0_optab;
6351 break;
6352 case IFN_ATOMIC_AND_FETCH_CMP_0:
6353 code = AND;
6354 optab = atomic_and_fetch_cmp_0_optab;
6355 break;
6356 case IFN_ATOMIC_OR_FETCH_CMP_0:
6357 code = IOR;
6358 optab = atomic_or_fetch_cmp_0_optab;
6359 break;
6360 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6361 code = XOR;
6362 optab = atomic_xor_fetch_cmp_0_optab;
6363 break;
6364 default:
6365 gcc_unreachable ();
6366 }
6367
6368 enum rtx_code comp = UNKNOWN;
6369 switch (tree_to_uhwi (cmp))
6370 {
6371 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6372 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6373 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6374 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6375 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6376 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6377 default: gcc_unreachable ();
6378 }
6379
6380 rtx target;
6381 if (lhs == NULL_TREE)
6382 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6383 else
6384 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6385 enum insn_code icode = direct_optab_handler (optab, mode);
6386 gcc_assert (icode != CODE_FOR_nothing);
6387 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6388 create_fixed_operand (&ops[1], mem);
6389 create_convert_operand_to (&ops[2], op, mode, true);
6390 create_integer_operand (&ops[3], model);
6391 create_integer_operand (&ops[4], comp);
6392 if (maybe_expand_insn (icode, 5, ops))
6393 return;
6394
6395 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6396 code, model, true);
6397 if (!result)
6398 {
6399 bool is_atomic = gimple_call_num_args (call) == 5;
6400 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6401 tree fndecl = gimple_call_addr_fndecl (tcall);
6402 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6403 tree exp = build_call_nary (type, tcall,
6404 2 + is_atomic, ptr, arg,
6405 is_atomic
6406 ? gimple_call_arg (call, 3)
6407 : integer_zero_node);
6408 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6409 mode, !lhs);
6410 }
6411
6412 if (lhs)
6413 {
6414 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6415 0, 1);
6416 if (result != target)
6417 emit_move_insn (target, result);
6418 }
6419 }
6420
6421 /* Expand an atomic clear operation.
6422 void __atomic_clear (BOOL *obj, enum memmodel)
6423 EXP is the call expression. */
6424
6425 static rtx
6426 expand_builtin_atomic_clear (tree exp)
6427 {
6428 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6429 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6430 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6431
6432 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6433 model = MEMMODEL_SEQ_CST;
6434
6435 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6436 The only way this can fail is if the bool type is larger than a word size.
6437 Unlikely, but handle it anyway for completeness: assume a single-threaded
6438 model, since there is no atomic support in this case and no barriers are
6439 required, and fall back to a plain store. */
6440 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6441 if (!ret)
6442 emit_move_insn (mem, const0_rtx);
6443 return const0_rtx;
6444 }
6445
6446 /* Expand an atomic test_and_set operation.
6447 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6448 EXP is the call expression. */
6449
6450 static rtx
6451 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6452 {
6453 rtx mem;
6454 enum memmodel model;
6455 machine_mode mode;
6456
6457 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6458 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6459 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6460
6461 return expand_atomic_test_and_set (target, mem, model);
6462 }
6463
6464
6465 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6466 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6467
6468 static tree
6469 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6470 {
6471 int size;
6472 machine_mode mode;
6473 unsigned int mode_align, type_align;
6474
6475 if (TREE_CODE (arg0) != INTEGER_CST)
6476 return NULL_TREE;
6477
6478 /* We need a corresponding integer mode for the access to be lock-free. */
6479 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6480 if (!int_mode_for_size (size, 0).exists (&mode))
6481 return boolean_false_node;
6482
6483 mode_align = GET_MODE_ALIGNMENT (mode);
6484
6485 if (TREE_CODE (arg1) == INTEGER_CST)
6486 {
6487 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6488
6489 /* Either this argument is null, or it's a fake pointer encoding
6490 the alignment of the object. */
6491 val = least_bit_hwi (val);
6492 val *= BITS_PER_UNIT;
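/* For example, a fake pointer value of 8 encodes an assumed 8-byte
alignment, so VAL becomes 64 bits here. (Illustrative note.) */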
6493
6494 if (val == 0 || mode_align < val)
6495 type_align = mode_align;
6496 else
6497 type_align = val;
6498 }
6499 else
6500 {
6501 tree ttype = TREE_TYPE (arg1);
6502
6503 /* This function is usually invoked and folded immediately by the front
6504 end before anything else has a chance to look at it. The pointer
6505 parameter at this point is usually cast to a void *, so check for that
6506 and look past the cast. */
6507 if (CONVERT_EXPR_P (arg1)
6508 && POINTER_TYPE_P (ttype)
6509 && VOID_TYPE_P (TREE_TYPE (ttype))
6510 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6511 arg1 = TREE_OPERAND (arg1, 0);
6512
6513 ttype = TREE_TYPE (arg1);
6514 gcc_assert (POINTER_TYPE_P (ttype));
6515
6516 /* Get the underlying type of the object. */
6517 ttype = TREE_TYPE (ttype);
6518 type_align = TYPE_ALIGN (ttype);
6519 }
6520
6521 /* If the object has smaller alignment, the lock free routines cannot
6522 be used. */
6523 if (type_align < mode_align)
6524 return boolean_false_node;
6525
6526 /* Check if a compare_and_swap pattern exists for the mode which represents
6527 the required size. The pattern is not allowed to fail, so the existence
6528 of the pattern indicates support is present. Also require that an
6529 atomic load exists for the required size. */
6530 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6531 return boolean_true_node;
6532 else
6533 return boolean_false_node;
6534 }
6535
6536 /* Return true if the parameters to call EXP represent an object which will
6537 always generate lock free instructions. The first argument represents the
6538 size of the object, and the second parameter is a pointer to the object
6539 itself. If NULL is passed for the object, then the result is based on
6540 typical alignment for an object of the specified size. Otherwise return
6541 false. */
6542
6543 static rtx
6544 expand_builtin_atomic_always_lock_free (tree exp)
6545 {
6546 tree size;
6547 tree arg0 = CALL_EXPR_ARG (exp, 0);
6548 tree arg1 = CALL_EXPR_ARG (exp, 1);
6549
6550 if (TREE_CODE (arg0) != INTEGER_CST)
6551 {
6552 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6553 return const0_rtx;
6554 }
6555
6556 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6557 if (size == boolean_true_node)
6558 return const1_rtx;
6559 return const0_rtx;
6560 }
6561
6562 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6563 is lock free on this architecture. */
6564
6565 static tree
6566 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6567 {
6568 if (!flag_inline_atomics)
6569 return NULL_TREE;
6570
6571 /* If it isn't always lock free, don't generate a result. */
6572 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6573 return boolean_true_node;
6574
6575 return NULL_TREE;
6576 }
6577
6578 /* Return true if the parameters to call EXP represent an object which will
6579 always generate lock free instructions. The first argument represents the
6580 size of the object, and the second parameter is a pointer to the object
6581 itself. If NULL is passed for the object, then the result is based on
6582 typical alignment for an object of the specified size. Otherwise return
6583 NULL_RTX. */
6584
6585 static rtx
6586 expand_builtin_atomic_is_lock_free (tree exp)
6587 {
6588 tree size;
6589 tree arg0 = CALL_EXPR_ARG (exp, 0);
6590 tree arg1 = CALL_EXPR_ARG (exp, 1);
6591
6592 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6593 {
6594 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6595 return NULL_RTX;
6596 }
6597
6598 if (!flag_inline_atomics)
6599 return NULL_RTX;
6600
6601 /* If the value is known at compile time, return the RTX for it. */
6602 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6603 if (size == boolean_true_node)
6604 return const1_rtx;
6605
6606 return NULL_RTX;
6607 }
6608
6609 /* Expand the __atomic_thread_fence intrinsic:
6610 void __atomic_thread_fence (enum memmodel)
6611 EXP is the CALL_EXPR. */
6612
6613 static void
6614 expand_builtin_atomic_thread_fence (tree exp)
6615 {
6616 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6617 expand_mem_thread_fence (model);
6618 }
6619
6620 /* Expand the __atomic_signal_fence intrinsic:
6621 void __atomic_signal_fence (enum memmodel)
6622 EXP is the CALL_EXPR. */
6623
6624 static void
6625 expand_builtin_atomic_signal_fence (tree exp)
6626 {
6627 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6628 expand_mem_signal_fence (model);
6629 }
6630
6631 /* Expand the __sync_synchronize intrinsic. */
6632
6633 static void
6634 expand_builtin_sync_synchronize (void)
6635 {
6636 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6637 }
6638
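/* Expand a call to __builtin_thread_pointer. Emit the target's
thread-pointer read pattern if one exists, putting the result in TARGET
when TARGET is a suitable Pmode register; otherwise report an error. */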
6639 static rtx
6640 expand_builtin_thread_pointer (tree exp, rtx target)
6641 {
6642 enum insn_code icode;
6643 if (!validate_arglist (exp, VOID_TYPE))
6644 return const0_rtx;
6645 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6646 if (icode != CODE_FOR_nothing)
6647 {
6648 class expand_operand op;
6649 /* If the target is not suitable then create a new target. */
6650 if (target == NULL_RTX
6651 || !REG_P (target)
6652 || GET_MODE (target) != Pmode)
6653 target = gen_reg_rtx (Pmode);
6654 create_output_operand (&op, target, Pmode);
6655 expand_insn (icode, 1, &op);
6656 return target;
6657 }
6658 error ("%<__builtin_thread_pointer%> is not supported on this target");
6659 return const0_rtx;
6660 }
6661
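/* Expand a call to __builtin_set_thread_pointer. EXP is the call
expression; its single pointer argument becomes the new thread pointer
when the target provides a set_thread_pointer pattern, otherwise an
error is reported. */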
6662 static void
6663 expand_builtin_set_thread_pointer (tree exp)
6664 {
6665 enum insn_code icode;
6666 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6667 return;
6668 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6669 if (icode != CODE_FOR_nothing)
6670 {
6671 class expand_operand op;
6672 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6673 Pmode, EXPAND_NORMAL);
6674 create_input_operand (&op, val, Pmode);
6675 expand_insn (icode, 1, &op);
6676 return;
6677 }
6678 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6679 }
6680
6681
6682 /* Emit code to restore the stack pointer from the value saved in VAR. */
6683
6684 static void
6685 expand_stack_restore (tree var)
6686 {
6687 rtx_insn *prev;
6688 rtx sa = expand_normal (var);
6689
6690 sa = convert_memory_address (Pmode, sa);
6691
6692 prev = get_last_insn ();
6693 emit_stack_restore (SAVE_BLOCK, sa);
6694
6695 record_new_stack_level ();
6696
6697 fixup_args_size_notes (prev, get_last_insn (), 0);
6698 }
6699
6700 /* Emit code to save the current value of the stack pointer. */
6701
6702 static rtx
6703 expand_stack_save (void)
6704 {
6705 rtx ret = NULL_RTX;
6706
6707 emit_stack_save (SAVE_BLOCK, &ret);
6708 return ret;
6709 }
6710
6711 /* Emit code to get the openacc gang, worker or vector id or size. */
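/* For example, __builtin_goacc_parlevel_id (GOMP_DIM_WORKER) yields the
current worker index inside an OpenACC offload region, and
__builtin_goacc_parlevel_size (GOMP_DIM_VECTOR) yields the vector length;
outside OpenACC code the calls are diagnosed below. (Illustrative use
only.) */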
6712
6713 static rtx
6714 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6715 {
6716 const char *name;
6717 rtx fallback_retval;
6718 rtx_insn *(*gen_fn) (rtx, rtx);
6719 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6720 {
6721 case BUILT_IN_GOACC_PARLEVEL_ID:
6722 name = "__builtin_goacc_parlevel_id";
6723 fallback_retval = const0_rtx;
6724 gen_fn = targetm.gen_oacc_dim_pos;
6725 break;
6726 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6727 name = "__builtin_goacc_parlevel_size";
6728 fallback_retval = const1_rtx;
6729 gen_fn = targetm.gen_oacc_dim_size;
6730 break;
6731 default:
6732 gcc_unreachable ();
6733 }
6734
6735 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6736 {
6737 error ("%qs only supported in OpenACC code", name);
6738 return const0_rtx;
6739 }
6740
6741 tree arg = CALL_EXPR_ARG (exp, 0);
6742 if (TREE_CODE (arg) != INTEGER_CST)
6743 {
6744 error ("non-constant argument 0 to %qs", name);
6745 return const0_rtx;
6746 }
6747
6748 int dim = TREE_INT_CST_LOW (arg);
6749 switch (dim)
6750 {
6751 case GOMP_DIM_GANG:
6752 case GOMP_DIM_WORKER:
6753 case GOMP_DIM_VECTOR:
6754 break;
6755 default:
6756 error ("illegal argument 0 to %qs", name);
6757 return const0_rtx;
6758 }
6759
6760 if (ignore)
6761 return target;
6762
6763 if (target == NULL_RTX)
6764 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6765
6766 if (!targetm.have_oacc_dim_size ())
6767 {
6768 emit_move_insn (target, fallback_retval);
6769 return target;
6770 }
6771
6772 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6773 emit_insn (gen_fn (reg, GEN_INT (dim)));
6774 if (reg != target)
6775 emit_move_insn (target, reg);
6776
6777 return target;
6778 }
6779
6780 /* Expand a string compare operation using a sequence of char comparisons
6781 to get rid of the calling overhead, with result going to TARGET if
6782 that's convenient.
6783
6784 VAR_STR is the variable string source;
6785 CONST_STR is the constant string source;
6786 LENGTH is the number of chars to compare;
6787 CONST_STR_N indicates which source string is the constant string;
6788 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6789
6790 The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6791
6792 target = (int) (unsigned char) var_str[0]
6793 - (int) (unsigned char) const_str[0];
6794 if (target != 0)
6795 goto ne_label;
6796 ...
6797 target = (int) (unsigned char) var_str[length - 2]
6798 - (int) (unsigned char) const_str[length - 2];
6799 if (target != 0)
6800 goto ne_label;
6801 target = (int) (unsigned char) var_str[length - 1]
6802 - (int) (unsigned char) const_str[length - 1];
6803 ne_label:
6804 */
6805
6806 static rtx
6807 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6808 unsigned HOST_WIDE_INT length,
6809 int const_str_n, machine_mode mode)
6810 {
6811 HOST_WIDE_INT offset = 0;
6812 rtx var_rtx_array
6813 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6814 rtx var_rtx = NULL_RTX;
6815 rtx const_rtx = NULL_RTX;
6816 rtx result = target ? target : gen_reg_rtx (mode);
6817 rtx_code_label *ne_label = gen_label_rtx ();
6818 tree unit_type_node = unsigned_char_type_node;
6819 scalar_int_mode unit_mode
6820 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6821
6822 start_sequence ();
6823
6824 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6825 {
6826 var_rtx
6827 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6828 const_rtx = c_readstr (const_str + offset, unit_mode);
6829 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6830 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6831
6832 op0 = convert_modes (mode, unit_mode, op0, 1);
6833 op1 = convert_modes (mode, unit_mode, op1, 1);
6834 result = expand_simple_binop (mode, MINUS, op0, op1,
6835 result, 1, OPTAB_WIDEN);
6836 if (i < length - 1)
6837 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6838 mode, true, ne_label);
6839 offset += GET_MODE_SIZE (unit_mode);
6840 }
6841
6842 emit_label (ne_label);
6843 rtx_insn *insns = get_insns ();
6844 end_sequence ();
6845 emit_insn (insns);
6846
6847 return result;
6848 }
6849
6850 /* Inline expansion of a call to str(n)cmp or memcmp, with result going
6851 to TARGET if that's convenient.
6852 If the call is not inlined, return NULL_RTX. */
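/* For example, with the default value of the
--param builtin-string-cmp-inline-length parameter, a call such as
strcmp (s, "hi") can be expanded into the byte-comparison sequence shown
above for inline_string_cmp, whereas a memcmp whose bound is not a
compile-time constant is left as a library call. (Illustrative note.) */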
6853
6854 static rtx
6855 inline_expand_builtin_bytecmp (tree exp, rtx target)
6856 {
6857 tree fndecl = get_callee_fndecl (exp);
6858 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6859 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6860
6861 /* Do NOT apply this inlining expansion when optimizing for size or
6862 optimization level below 2. */
6863 if (optimize < 2 || optimize_insn_for_size_p ())
6864 return NULL_RTX;
6865
6866 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6867 || fcode == BUILT_IN_STRNCMP
6868 || fcode == BUILT_IN_MEMCMP);
6869
6870 /* On a target where the type of the call (int) has the same or narrower
6871 precision than unsigned char, give up on the inline expansion. */
6872 if (TYPE_PRECISION (unsigned_char_type_node)
6873 >= TYPE_PRECISION (TREE_TYPE (exp)))
6874 return NULL_RTX;
6875
6876 tree arg1 = CALL_EXPR_ARG (exp, 0);
6877 tree arg2 = CALL_EXPR_ARG (exp, 1);
6878 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6879
6880 unsigned HOST_WIDE_INT len1 = 0;
6881 unsigned HOST_WIDE_INT len2 = 0;
6882 unsigned HOST_WIDE_INT len3 = 0;
6883
6884 /* Get the object representation of the initializers of ARG1 and ARG2
6885 as strings, provided they refer to constant objects, with their byte
6886 sizes in LEN1 and LEN2, respectively. */
6887 const char *bytes1 = getbyterep (arg1, &len1);
6888 const char *bytes2 = getbyterep (arg2, &len2);
6889
6890 /* Fail if neither argument refers to an initialized constant. */
6891 if (!bytes1 && !bytes2)
6892 return NULL_RTX;
6893
6894 if (is_ncmp)
6895 {
6896 /* Fail if the memcmp/strncmp bound is not a constant. */
6897 if (!tree_fits_uhwi_p (len3_tree))
6898 return NULL_RTX;
6899
6900 len3 = tree_to_uhwi (len3_tree);
6901
6902 if (fcode == BUILT_IN_MEMCMP)
6903 {
6904 /* Fail if the memcmp bound is greater than the size of either
6905 of the two constant objects. */
6906 if ((bytes1 && len1 < len3)
6907 || (bytes2 && len2 < len3))
6908 return NULL_RTX;
6909 }
6910 }
6911
6912 if (fcode != BUILT_IN_MEMCMP)
6913 {
6914 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6915 and LEN2 to the length of the nul-terminated string stored
6916 in each. */
6917 if (bytes1 != NULL)
6918 len1 = strnlen (bytes1, len1) + 1;
6919 if (bytes2 != NULL)
6920 len2 = strnlen (bytes2, len2) + 1;
6921 }
6922
6923 /* See inline_string_cmp. */
6924 int const_str_n;
6925 if (!len1)
6926 const_str_n = 2;
6927 else if (!len2)
6928 const_str_n = 1;
6929 else if (len2 > len1)
6930 const_str_n = 1;
6931 else
6932 const_str_n = 2;
6933
6934 /* For strncmp only, compute the new bound as the smallest of
6935 the lengths of the two strings (plus 1) and the bound provided
6936 to the function. */
6937 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6938 if (is_ncmp && len3 < bound)
6939 bound = len3;
6940
6941 /* If the bound of the comparison is larger than the threshold,
6942 do nothing. */
6943 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6944 return NULL_RTX;
6945
6946 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6947
6948 /* Now, start inline expansion of the call. */
6949 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6950 (const_str_n == 1) ? bytes1 : bytes2, bound,
6951 const_str_n, mode);
6952 }
6953
6954 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6955 represents the size of the first argument to that call, or VOIDmode
6956 if the argument is a pointer. IGNORE will be true if the result
6957 isn't used. */
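/* Conceptually, __builtin_speculation_safe_value (v) evaluates to V on the
architecturally executed path, while the target hook arranges for the
failsafe value (0 unless a second argument is given) to be used instead
under incorrect speculative execution, where the target supports that.
(Illustrative summary.) */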
6958 static rtx
6959 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6960 bool ignore)
6961 {
6962 rtx val, failsafe;
6963 unsigned nargs = call_expr_nargs (exp);
6964
6965 tree arg0 = CALL_EXPR_ARG (exp, 0);
6966
6967 if (mode == VOIDmode)
6968 {
6969 mode = TYPE_MODE (TREE_TYPE (arg0));
6970 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6971 }
6972
6973 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6974
6975 /* An optional second argument can be used as a failsafe value on
6976 some machines. If it isn't present, then the failsafe value is
6977 assumed to be 0. */
6978 if (nargs > 1)
6979 {
6980 tree arg1 = CALL_EXPR_ARG (exp, 1);
6981 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6982 }
6983 else
6984 failsafe = const0_rtx;
6985
6986 /* If the result isn't used, the behavior is undefined. It would be
6987 nice to emit a warning here, but path splitting means this might
6988 happen with legitimate code. So simply drop the builtin
6989 expansion in that case; we've handled any side-effects above. */
6990 if (ignore)
6991 return const0_rtx;
6992
6993 /* If we don't have a suitable target, create one to hold the result. */
6994 if (target == NULL || GET_MODE (target) != mode)
6995 target = gen_reg_rtx (mode);
6996
6997 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6998 val = convert_modes (mode, VOIDmode, val, false);
6999
7000 return targetm.speculation_safe_value (mode, target, val, failsafe);
7001 }
7002
7003 /* Expand an expression EXP that calls a built-in function,
7004 with result going to TARGET if that's convenient
7005 (and in mode MODE if that's convenient).
7006 SUBTARGET may be used as the target for computing one of EXP's operands.
7007 IGNORE is nonzero if the value is to be ignored. */
7008
7009 rtx
7010 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7011 int ignore)
7012 {
7013 tree fndecl = get_callee_fndecl (exp);
7014 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7015 int flags;
7016
7017 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7018 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7019
7020 /* When ASan is enabled, we don't want to expand some memory/string
7021 builtins and rely on libsanitizer's hooks. This allows us to avoid
7022 redundant checks and be sure that possible overflow will be detected
7023 by ASan. */
7024
7025 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7026 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7027 return expand_call (exp, target, ignore);
7028
7029 /* When not optimizing, generate calls to library functions for a certain
7030 set of builtins. */
7031 if (!optimize
7032 && !called_as_built_in (fndecl)
7033 && fcode != BUILT_IN_FORK
7034 && fcode != BUILT_IN_EXECL
7035 && fcode != BUILT_IN_EXECV
7036 && fcode != BUILT_IN_EXECLP
7037 && fcode != BUILT_IN_EXECLE
7038 && fcode != BUILT_IN_EXECVP
7039 && fcode != BUILT_IN_EXECVE
7040 && fcode != BUILT_IN_CLEAR_CACHE
7041 && !ALLOCA_FUNCTION_CODE_P (fcode)
7042 && fcode != BUILT_IN_FREE)
7043 return expand_call (exp, target, ignore);
7044
7045 /* The built-in function expanders test for target == const0_rtx
7046 to determine whether the function's result will be ignored. */
7047 if (ignore)
7048 target = const0_rtx;
7049
7050 /* If the result of a pure or const built-in function is ignored, and
7051 none of its arguments are volatile, we can avoid expanding the
7052 built-in call and just evaluate the arguments for side-effects. */
7053 if (target == const0_rtx
7054 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7055 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7056 {
7057 bool volatilep = false;
7058 tree arg;
7059 call_expr_arg_iterator iter;
7060
7061 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7062 if (TREE_THIS_VOLATILE (arg))
7063 {
7064 volatilep = true;
7065 break;
7066 }
7067
7068 if (! volatilep)
7069 {
7070 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7071 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7072 return const0_rtx;
7073 }
7074 }
7075
7076 switch (fcode)
7077 {
7078 CASE_FLT_FN (BUILT_IN_FABS):
7079 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7080 case BUILT_IN_FABSD32:
7081 case BUILT_IN_FABSD64:
7082 case BUILT_IN_FABSD128:
7083 target = expand_builtin_fabs (exp, target, subtarget);
7084 if (target)
7085 return target;
7086 break;
7087
7088 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7089 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7090 target = expand_builtin_copysign (exp, target, subtarget);
7091 if (target)
7092 return target;
7093 break;
7094
7095 /* Just do a normal library call if we were unable to fold
7096 the values. */
7097 CASE_FLT_FN (BUILT_IN_CABS):
7098 break;
7099
7100 CASE_FLT_FN (BUILT_IN_FMA):
7101 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7102 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7103 if (target)
7104 return target;
7105 break;
7106
7107 CASE_FLT_FN (BUILT_IN_ILOGB):
7108 if (! flag_unsafe_math_optimizations)
7109 break;
7110 gcc_fallthrough ();
7111 CASE_FLT_FN (BUILT_IN_ISINF):
7112 CASE_FLT_FN (BUILT_IN_FINITE):
7113 case BUILT_IN_ISFINITE:
7114 case BUILT_IN_ISNORMAL:
7115 target = expand_builtin_interclass_mathfn (exp, target);
7116 if (target)
7117 return target;
7118 break;
7119
7120 CASE_FLT_FN (BUILT_IN_ICEIL):
7121 CASE_FLT_FN (BUILT_IN_LCEIL):
7122 CASE_FLT_FN (BUILT_IN_LLCEIL):
7123 CASE_FLT_FN (BUILT_IN_LFLOOR):
7124 CASE_FLT_FN (BUILT_IN_IFLOOR):
7125 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7126 target = expand_builtin_int_roundingfn (exp, target);
7127 if (target)
7128 return target;
7129 break;
7130
7131 CASE_FLT_FN (BUILT_IN_IRINT):
7132 CASE_FLT_FN (BUILT_IN_LRINT):
7133 CASE_FLT_FN (BUILT_IN_LLRINT):
7134 CASE_FLT_FN (BUILT_IN_IROUND):
7135 CASE_FLT_FN (BUILT_IN_LROUND):
7136 CASE_FLT_FN (BUILT_IN_LLROUND):
7137 target = expand_builtin_int_roundingfn_2 (exp, target);
7138 if (target)
7139 return target;
7140 break;
7141
7142 CASE_FLT_FN (BUILT_IN_POWI):
7143 target = expand_builtin_powi (exp, target);
7144 if (target)
7145 return target;
7146 break;
7147
7148 CASE_FLT_FN (BUILT_IN_CEXPI):
7149 target = expand_builtin_cexpi (exp, target);
7150 gcc_assert (target);
7151 return target;
7152
7153 CASE_FLT_FN (BUILT_IN_SIN):
7154 CASE_FLT_FN (BUILT_IN_COS):
7155 if (! flag_unsafe_math_optimizations)
7156 break;
7157 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7158 if (target)
7159 return target;
7160 break;
7161
7162 CASE_FLT_FN (BUILT_IN_SINCOS):
7163 if (! flag_unsafe_math_optimizations)
7164 break;
7165 target = expand_builtin_sincos (exp);
7166 if (target)
7167 return target;
7168 break;
7169
7170 case BUILT_IN_FEGETROUND:
7171 target = expand_builtin_fegetround (exp, target, target_mode);
7172 if (target)
7173 return target;
7174 break;
7175
7176 case BUILT_IN_FECLEAREXCEPT:
7177 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7178 feclearexcept_optab);
7179 if (target)
7180 return target;
7181 break;
7182
7183 case BUILT_IN_FERAISEEXCEPT:
7184 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7185 feraiseexcept_optab);
7186 if (target)
7187 return target;
7188 break;
7189
7190 case BUILT_IN_APPLY_ARGS:
7191 return expand_builtin_apply_args ();
7192
7193 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7194 FUNCTION with a copy of the parameters described by
7195 ARGUMENTS, and ARGSIZE. It returns a block of memory
7196 allocated on the stack into which is stored all the registers
7197 that might possibly be used for returning the result of a
7198 function. ARGUMENTS is the value returned by
7199 __builtin_apply_args. ARGSIZE is the number of bytes of
7200 arguments that must be copied. ??? How should this value be
7201 computed? We'll also need a safe worst case value for varargs
7202 functions. */
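/* A typical use (a GCC extension) pairs these builtins to forward a call:

void *args = __builtin_apply_args ();
void *ret = __builtin_apply ((void (*) ()) other_fn, args, 64);
__builtin_return (ret);

where 64 is an illustrative worst-case argument size and other_fn is a
placeholder; see the ??? note above. */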
7203 case BUILT_IN_APPLY:
7204 if (!validate_arglist (exp, POINTER_TYPE,
7205 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7206 && !validate_arglist (exp, REFERENCE_TYPE,
7207 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7208 return const0_rtx;
7209 else
7210 {
7211 rtx ops[3];
7212
7213 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7214 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7215 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7216
7217 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7218 }
7219
7220 /* __builtin_return (RESULT) causes the function to return the
7221 value described by RESULT. RESULT is address of the block of
7222 memory returned by __builtin_apply. */
7223 case BUILT_IN_RETURN:
7224 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7225 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7226 return const0_rtx;
7227
7228 case BUILT_IN_SAVEREGS:
7229 return expand_builtin_saveregs ();
7230
7231 case BUILT_IN_VA_ARG_PACK:
7232 /* All valid uses of __builtin_va_arg_pack () are removed during
7233 inlining. */
7234 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7235 return const0_rtx;
7236
7237 case BUILT_IN_VA_ARG_PACK_LEN:
7238 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7239 inlining. */
7240 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7241 return const0_rtx;
7242
7243 /* Return the address of the first anonymous stack arg. */
7244 case BUILT_IN_NEXT_ARG:
7245 if (fold_builtin_next_arg (exp, false))
7246 return const0_rtx;
7247 return expand_builtin_next_arg ();
7248
7249 case BUILT_IN_CLEAR_CACHE:
7250 expand_builtin___clear_cache (exp);
7251 return const0_rtx;
7252
7253 case BUILT_IN_CLASSIFY_TYPE:
7254 return expand_builtin_classify_type (exp);
7255
7256 case BUILT_IN_CONSTANT_P:
7257 return const0_rtx;
7258
7259 case BUILT_IN_FRAME_ADDRESS:
7260 case BUILT_IN_RETURN_ADDRESS:
7261 return expand_builtin_frame_address (fndecl, exp);
7262
7263 /* Returns the address of the area where the structure is returned.
7264 0 otherwise. */
7265 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7266 if (call_expr_nargs (exp) != 0
7267 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7268 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7269 return const0_rtx;
7270 else
7271 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7272
7273 CASE_BUILT_IN_ALLOCA:
7274 target = expand_builtin_alloca (exp);
7275 if (target)
7276 return target;
7277 break;
7278
7279 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7280 return expand_asan_emit_allocas_unpoison (exp);
7281
7282 case BUILT_IN_STACK_SAVE:
7283 return expand_stack_save ();
7284
7285 case BUILT_IN_STACK_RESTORE:
7286 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7287 return const0_rtx;
7288
7289 case BUILT_IN_BSWAP16:
7290 case BUILT_IN_BSWAP32:
7291 case BUILT_IN_BSWAP64:
7292 case BUILT_IN_BSWAP128:
7293 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7294 if (target)
7295 return target;
7296 break;
7297
7298 CASE_INT_FN (BUILT_IN_FFS):
7299 target = expand_builtin_unop (target_mode, exp, target,
7300 subtarget, ffs_optab);
7301 if (target)
7302 return target;
7303 break;
7304
7305 CASE_INT_FN (BUILT_IN_CLZ):
7306 target = expand_builtin_unop (target_mode, exp, target,
7307 subtarget, clz_optab);
7308 if (target)
7309 return target;
7310 break;
7311
7312 CASE_INT_FN (BUILT_IN_CTZ):
7313 target = expand_builtin_unop (target_mode, exp, target,
7314 subtarget, ctz_optab);
7315 if (target)
7316 return target;
7317 break;
7318
7319 CASE_INT_FN (BUILT_IN_CLRSB):
7320 target = expand_builtin_unop (target_mode, exp, target,
7321 subtarget, clrsb_optab);
7322 if (target)
7323 return target;
7324 break;
7325
7326 CASE_INT_FN (BUILT_IN_POPCOUNT):
7327 target = expand_builtin_unop (target_mode, exp, target,
7328 subtarget, popcount_optab);
7329 if (target)
7330 return target;
7331 break;
7332
7333 CASE_INT_FN (BUILT_IN_PARITY):
7334 target = expand_builtin_unop (target_mode, exp, target,
7335 subtarget, parity_optab);
7336 if (target)
7337 return target;
7338 break;
7339
7340 case BUILT_IN_STRLEN:
7341 target = expand_builtin_strlen (exp, target, target_mode);
7342 if (target)
7343 return target;
7344 break;
7345
7346 case BUILT_IN_STRNLEN:
7347 target = expand_builtin_strnlen (exp, target, target_mode);
7348 if (target)
7349 return target;
7350 break;
7351
7352 case BUILT_IN_STRCPY:
7353 target = expand_builtin_strcpy (exp, target);
7354 if (target)
7355 return target;
7356 break;
7357
7358 case BUILT_IN_STRNCPY:
7359 target = expand_builtin_strncpy (exp, target);
7360 if (target)
7361 return target;
7362 break;
7363
7364 case BUILT_IN_STPCPY:
7365 target = expand_builtin_stpcpy (exp, target, mode);
7366 if (target)
7367 return target;
7368 break;
7369
7370 case BUILT_IN_MEMCPY:
7371 target = expand_builtin_memcpy (exp, target);
7372 if (target)
7373 return target;
7374 break;
7375
7376 case BUILT_IN_MEMMOVE:
7377 target = expand_builtin_memmove (exp, target);
7378 if (target)
7379 return target;
7380 break;
7381
7382 case BUILT_IN_MEMPCPY:
7383 target = expand_builtin_mempcpy (exp, target);
7384 if (target)
7385 return target;
7386 break;
7387
7388 case BUILT_IN_MEMSET:
7389 target = expand_builtin_memset (exp, target, mode);
7390 if (target)
7391 return target;
7392 break;
7393
7394 case BUILT_IN_BZERO:
7395 target = expand_builtin_bzero (exp);
7396 if (target)
7397 return target;
7398 break;
7399
7400 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7401 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7402 when changing it to a strcmp call. */
7403 case BUILT_IN_STRCMP_EQ:
7404 target = expand_builtin_memcmp (exp, target, true);
7405 if (target)
7406 return target;
7407
7408 /* Change this call back to a BUILT_IN_STRCMP. */
7409 TREE_OPERAND (exp, 1)
7410 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7411
7412 /* Delete the last parameter. */
7413 unsigned int i;
7414 vec<tree, va_gc> *arg_vec;
7415 vec_alloc (arg_vec, 2);
7416 for (i = 0; i < 2; i++)
7417 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7418 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7419 /* FALLTHROUGH */
7420
7421 case BUILT_IN_STRCMP:
7422 target = expand_builtin_strcmp (exp, target);
7423 if (target)
7424 return target;
7425 break;
7426
7427 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7428 back to a BUILT_IN_STRNCMP. */
7429 case BUILT_IN_STRNCMP_EQ:
7430 target = expand_builtin_memcmp (exp, target, true);
7431 if (target)
7432 return target;
7433
7434 /* Change it back to a BUILT_IN_STRNCMP. */
7435 TREE_OPERAND (exp, 1)
7436 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7437 /* FALLTHROUGH */
7438
7439 case BUILT_IN_STRNCMP:
7440 target = expand_builtin_strncmp (exp, target, mode);
7441 if (target)
7442 return target;
7443 break;
7444
7445 case BUILT_IN_BCMP:
7446 case BUILT_IN_MEMCMP:
7447 case BUILT_IN_MEMCMP_EQ:
7448 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7449 if (target)
7450 return target;
7451 if (fcode == BUILT_IN_MEMCMP_EQ)
7452 {
7453 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7454 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7455 }
7456 break;
7457
7458 case BUILT_IN_SETJMP:
7459 /* This should have been lowered to the builtins below. */
7460 gcc_unreachable ();
7461
7462 case BUILT_IN_SETJMP_SETUP:
7463 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7464 and the receiver label. */
7465 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7466 {
7467 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7468 VOIDmode, EXPAND_NORMAL);
7469 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7470 rtx_insn *label_r = label_rtx (label);
7471
7472 /* This is copied from the handling of non-local gotos. */
7473 expand_builtin_setjmp_setup (buf_addr, label_r);
7474 nonlocal_goto_handler_labels
7475 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7476 nonlocal_goto_handler_labels);
7477 /* ??? Do not let expand_label treat us as such since we would
7478 not want to be both on the list of non-local labels and on
7479 the list of forced labels. */
7480 FORCED_LABEL (label) = 0;
7481 return const0_rtx;
7482 }
7483 break;
7484
7485 case BUILT_IN_SETJMP_RECEIVER:
7486 /* __builtin_setjmp_receiver is passed the receiver label. */
7487 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7488 {
7489 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7490 rtx_insn *label_r = label_rtx (label);
7491
7492 expand_builtin_setjmp_receiver (label_r);
7493 return const0_rtx;
7494 }
7495 break;
7496
7497 /* __builtin_longjmp is passed a pointer to an array of five words.
7498 It's similar to the C library longjmp function but works with
7499 __builtin_setjmp above. */
7500 case BUILT_IN_LONGJMP:
7501 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7502 {
7503 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7504 VOIDmode, EXPAND_NORMAL);
7505 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7506
7507 if (value != const1_rtx)
7508 {
7509 error ("%<__builtin_longjmp%> second argument must be 1");
7510 return const0_rtx;
7511 }
7512
7513 expand_builtin_longjmp (buf_addr, value);
7514 return const0_rtx;
7515 }
7516 break;
7517
7518 case BUILT_IN_NONLOCAL_GOTO:
7519 target = expand_builtin_nonlocal_goto (exp);
7520 if (target)
7521 return target;
7522 break;
7523
7524 /* This updates the setjmp buffer that is its argument with the value
7525 of the current stack pointer. */
7526 case BUILT_IN_UPDATE_SETJMP_BUF:
7527 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7528 {
7529 rtx buf_addr
7530 = expand_normal (CALL_EXPR_ARG (exp, 0));
7531
7532 expand_builtin_update_setjmp_buf (buf_addr);
7533 return const0_rtx;
7534 }
7535 break;
7536
7537 case BUILT_IN_TRAP:
7538 expand_builtin_trap ();
7539 return const0_rtx;
7540
7541 case BUILT_IN_UNREACHABLE:
7542 expand_builtin_unreachable ();
7543 return const0_rtx;
7544
7545 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7546 case BUILT_IN_SIGNBITD32:
7547 case BUILT_IN_SIGNBITD64:
7548 case BUILT_IN_SIGNBITD128:
7549 target = expand_builtin_signbit (exp, target);
7550 if (target)
7551 return target;
7552 break;
7553
7554 /* Various hooks for the DWARF 2 __throw routine. */
7555 case BUILT_IN_UNWIND_INIT:
7556 expand_builtin_unwind_init ();
7557 return const0_rtx;
7558 case BUILT_IN_DWARF_CFA:
7559 return virtual_cfa_rtx;
7560 #ifdef DWARF2_UNWIND_INFO
7561 case BUILT_IN_DWARF_SP_COLUMN:
7562 return expand_builtin_dwarf_sp_column ();
7563 case BUILT_IN_INIT_DWARF_REG_SIZES:
7564 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7565 return const0_rtx;
7566 #endif
7567 case BUILT_IN_FROB_RETURN_ADDR:
7568 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7569 case BUILT_IN_EXTRACT_RETURN_ADDR:
7570 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7571 case BUILT_IN_EH_RETURN:
7572 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7573 CALL_EXPR_ARG (exp, 1));
7574 return const0_rtx;
7575 case BUILT_IN_EH_RETURN_DATA_REGNO:
7576 return expand_builtin_eh_return_data_regno (exp);
7577 case BUILT_IN_EXTEND_POINTER:
7578 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7579 case BUILT_IN_EH_POINTER:
7580 return expand_builtin_eh_pointer (exp);
7581 case BUILT_IN_EH_FILTER:
7582 return expand_builtin_eh_filter (exp);
7583 case BUILT_IN_EH_COPY_VALUES:
7584 return expand_builtin_eh_copy_values (exp);
7585
7586 case BUILT_IN_VA_START:
7587 return expand_builtin_va_start (exp);
7588 case BUILT_IN_VA_END:
7589 return expand_builtin_va_end (exp);
7590 case BUILT_IN_VA_COPY:
7591 return expand_builtin_va_copy (exp);
7592 case BUILT_IN_EXPECT:
7593 return expand_builtin_expect (exp, target);
7594 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7595 return expand_builtin_expect_with_probability (exp, target);
7596 case BUILT_IN_ASSUME_ALIGNED:
7597 return expand_builtin_assume_aligned (exp, target);
7598 case BUILT_IN_PREFETCH:
7599 expand_builtin_prefetch (exp);
7600 return const0_rtx;
7601
7602 case BUILT_IN_INIT_TRAMPOLINE:
7603 return expand_builtin_init_trampoline (exp, true);
7604 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7605 return expand_builtin_init_trampoline (exp, false);
7606 case BUILT_IN_ADJUST_TRAMPOLINE:
7607 return expand_builtin_adjust_trampoline (exp);
7608
7609 case BUILT_IN_INIT_DESCRIPTOR:
7610 return expand_builtin_init_descriptor (exp);
7611 case BUILT_IN_ADJUST_DESCRIPTOR:
7612 return expand_builtin_adjust_descriptor (exp);
7613
7614 case BUILT_IN_FORK:
7615 case BUILT_IN_EXECL:
7616 case BUILT_IN_EXECV:
7617 case BUILT_IN_EXECLP:
7618 case BUILT_IN_EXECLE:
7619 case BUILT_IN_EXECVP:
7620 case BUILT_IN_EXECVE:
7621 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7622 if (target)
7623 return target;
7624 break;
7625
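/* The _1/_2/_4/_8/_16 variants of each __sync/__atomic builtin below are
consecutive enum values, so subtracting the _1 code yields an index
(0..4) that get_builtin_sync_mode maps to the integer mode of the
corresponding byte width. */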
7626 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7627 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7628 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7629 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7630 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7631 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7632 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7633 if (target)
7634 return target;
7635 break;
7636
7637 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7638 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7639 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7640 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7641 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7642 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7643 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7644 if (target)
7645 return target;
7646 break;
7647
7648 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7649 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7650 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7651 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7652 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7653 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7654 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7655 if (target)
7656 return target;
7657 break;
7658
7659 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7660 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7661 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7662 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7663 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7664 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7665 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7666 if (target)
7667 return target;
7668 break;
7669
7670 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7671 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7672 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7673 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7674 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7675 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7676 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7677 if (target)
7678 return target;
7679 break;
7680
7681 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7682 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7683 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7684 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7685 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7686 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7687 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7688 if (target)
7689 return target;
7690 break;
7691
7692 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7693 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7694 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7695 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7696 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7697 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7698 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7699 if (target)
7700 return target;
7701 break;
7702
7703 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7704 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7705 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7706 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7707 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7708 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7709 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7710 if (target)
7711 return target;
7712 break;
7713
7714 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7715 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7716 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7717 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7718 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7719 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7720 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7721 if (target)
7722 return target;
7723 break;
7724
7725 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7726 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7727 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7728 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7729 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7730 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7731 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7732 if (target)
7733 return target;
7734 break;
7735
7736 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7737 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7738 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7739 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7740 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7741 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7742 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7743 if (target)
7744 return target;
7745 break;
7746
7747 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7748 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7749 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7750 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7751 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7752 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7753 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7754 if (target)
7755 return target;
7756 break;
7757
7758 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7759 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7760 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7761 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7762 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7763 if (mode == VOIDmode)
7764 mode = TYPE_MODE (boolean_type_node);
7765 if (!target || !register_operand (target, mode))
7766 target = gen_reg_rtx (mode);
7767
7768 mode = get_builtin_sync_mode
7769 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7770 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7771 if (target)
7772 return target;
7773 break;
7774
7775 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7776 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7777 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7778 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7779 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7780 mode = get_builtin_sync_mode
7781 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7782 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7783 if (target)
7784 return target;
7785 break;
7786
7787 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7788 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7789 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7790 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7791 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7792 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7793 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7794 if (target)
7795 return target;
7796 break;
7797
7798 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7799 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7800 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7801 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7802 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7803 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7804 expand_builtin_sync_lock_release (mode, exp);
7805 return const0_rtx;
7806
7807 case BUILT_IN_SYNC_SYNCHRONIZE:
7808 expand_builtin_sync_synchronize ();
7809 return const0_rtx;
7810
7811 case BUILT_IN_ATOMIC_EXCHANGE_1:
7812 case BUILT_IN_ATOMIC_EXCHANGE_2:
7813 case BUILT_IN_ATOMIC_EXCHANGE_4:
7814 case BUILT_IN_ATOMIC_EXCHANGE_8:
7815 case BUILT_IN_ATOMIC_EXCHANGE_16:
7816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7817 target = expand_builtin_atomic_exchange (mode, exp, target);
7818 if (target)
7819 return target;
7820 break;
7821
7822 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7823 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7824 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7825 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7826 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7827 {
7828 unsigned int nargs, z;
7829 vec<tree, va_gc> *vec;
7830
7831 mode =
7832 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7833 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7834 if (target)
7835 return target;
7836
7837 /* If this is turned into an external library call, the weak parameter
7838 must be dropped to match the expected parameter list. */
7839 nargs = call_expr_nargs (exp);
7840 vec_alloc (vec, nargs - 1);
7841 for (z = 0; z < 3; z++)
7842 vec->quick_push (CALL_EXPR_ARG (exp, z));
7843 /* Skip the boolean weak parameter. */
7844 for (z = 4; z < 6; z++)
7845 vec->quick_push (CALL_EXPR_ARG (exp, z));
7846 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7847 break;
7848 }
7849
7850 case BUILT_IN_ATOMIC_LOAD_1:
7851 case BUILT_IN_ATOMIC_LOAD_2:
7852 case BUILT_IN_ATOMIC_LOAD_4:
7853 case BUILT_IN_ATOMIC_LOAD_8:
7854 case BUILT_IN_ATOMIC_LOAD_16:
7855 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7856 target = expand_builtin_atomic_load (mode, exp, target);
7857 if (target)
7858 return target;
7859 break;
7860
7861 case BUILT_IN_ATOMIC_STORE_1:
7862 case BUILT_IN_ATOMIC_STORE_2:
7863 case BUILT_IN_ATOMIC_STORE_4:
7864 case BUILT_IN_ATOMIC_STORE_8:
7865 case BUILT_IN_ATOMIC_STORE_16:
7866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7867 target = expand_builtin_atomic_store (mode, exp);
7868 if (target)
7869 return const0_rtx;
7870 break;
7871
7872 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7873 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7874 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7875 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7876 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7877 {
7878 enum built_in_function lib;
7879 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7880 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7881 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7882 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7883 ignore, lib);
7884 if (target)
7885 return target;
7886 break;
7887 }
7888 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7889 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7890 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7891 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7892 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7893 {
7894 enum built_in_function lib;
7895 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7896 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7897 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7898 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7899 ignore, lib);
7900 if (target)
7901 return target;
7902 break;
7903 }
7904 case BUILT_IN_ATOMIC_AND_FETCH_1:
7905 case BUILT_IN_ATOMIC_AND_FETCH_2:
7906 case BUILT_IN_ATOMIC_AND_FETCH_4:
7907 case BUILT_IN_ATOMIC_AND_FETCH_8:
7908 case BUILT_IN_ATOMIC_AND_FETCH_16:
7909 {
7910 enum built_in_function lib;
7911 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7912 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7913 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7914 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7915 ignore, lib);
7916 if (target)
7917 return target;
7918 break;
7919 }
7920 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7921 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7922 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7923 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7924 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7925 {
7926 enum built_in_function lib;
7927 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7928 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7929 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7930 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7931 ignore, lib);
7932 if (target)
7933 return target;
7934 break;
7935 }
7936 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7937 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7938 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7939 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7940 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7941 {
7942 enum built_in_function lib;
7943 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7944 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7945 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7946 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7947 ignore, lib);
7948 if (target)
7949 return target;
7950 break;
7951 }
7952 case BUILT_IN_ATOMIC_OR_FETCH_1:
7953 case BUILT_IN_ATOMIC_OR_FETCH_2:
7954 case BUILT_IN_ATOMIC_OR_FETCH_4:
7955 case BUILT_IN_ATOMIC_OR_FETCH_8:
7956 case BUILT_IN_ATOMIC_OR_FETCH_16:
7957 {
7958 enum built_in_function lib;
7959 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7960 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7961 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7962 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7963 ignore, lib);
7964 if (target)
7965 return target;
7966 break;
7967 }
7968 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7969 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7970 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7971 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7972 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7973 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7974 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7975 ignore, BUILT_IN_NONE);
7976 if (target)
7977 return target;
7978 break;
7979
7980 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7981 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7982 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7983 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7984 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7985 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7986 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7987 ignore, BUILT_IN_NONE);
7988 if (target)
7989 return target;
7990 break;
7991
7992 case BUILT_IN_ATOMIC_FETCH_AND_1:
7993 case BUILT_IN_ATOMIC_FETCH_AND_2:
7994 case BUILT_IN_ATOMIC_FETCH_AND_4:
7995 case BUILT_IN_ATOMIC_FETCH_AND_8:
7996 case BUILT_IN_ATOMIC_FETCH_AND_16:
7997 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7998 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7999 ignore, BUILT_IN_NONE);
8000 if (target)
8001 return target;
8002 break;
8003
8004 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8005 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8006 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8007 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8008 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8009 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8010 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8011 ignore, BUILT_IN_NONE);
8012 if (target)
8013 return target;
8014 break;
8015
8016 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8017 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8018 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8019 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8020 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8021 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8022 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8023 ignore, BUILT_IN_NONE);
8024 if (target)
8025 return target;
8026 break;
8027
8028 case BUILT_IN_ATOMIC_FETCH_OR_1:
8029 case BUILT_IN_ATOMIC_FETCH_OR_2:
8030 case BUILT_IN_ATOMIC_FETCH_OR_4:
8031 case BUILT_IN_ATOMIC_FETCH_OR_8:
8032 case BUILT_IN_ATOMIC_FETCH_OR_16:
8033 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8034 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8035 ignore, BUILT_IN_NONE);
8036 if (target)
8037 return target;
8038 break;
8039
8040 case BUILT_IN_ATOMIC_TEST_AND_SET:
8041 return expand_builtin_atomic_test_and_set (exp, target);
8042
8043 case BUILT_IN_ATOMIC_CLEAR:
8044 return expand_builtin_atomic_clear (exp);
8045
8046 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8047 return expand_builtin_atomic_always_lock_free (exp);
8048
8049 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8050 target = expand_builtin_atomic_is_lock_free (exp);
8051 if (target)
8052 return target;
8053 break;
8054
8055 case BUILT_IN_ATOMIC_THREAD_FENCE:
8056 expand_builtin_atomic_thread_fence (exp);
8057 return const0_rtx;
8058
8059 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8060 expand_builtin_atomic_signal_fence (exp);
8061 return const0_rtx;
8062
8063 case BUILT_IN_OBJECT_SIZE:
8064 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8065 return expand_builtin_object_size (exp);
8066
8067 case BUILT_IN_MEMCPY_CHK:
8068 case BUILT_IN_MEMPCPY_CHK:
8069 case BUILT_IN_MEMMOVE_CHK:
8070 case BUILT_IN_MEMSET_CHK:
8071 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8072 if (target)
8073 return target;
8074 break;
8075
8076 case BUILT_IN_STRCPY_CHK:
8077 case BUILT_IN_STPCPY_CHK:
8078 case BUILT_IN_STRNCPY_CHK:
8079 case BUILT_IN_STPNCPY_CHK:
8080 case BUILT_IN_STRCAT_CHK:
8081 case BUILT_IN_STRNCAT_CHK:
8082 case BUILT_IN_SNPRINTF_CHK:
8083 case BUILT_IN_VSNPRINTF_CHK:
8084 maybe_emit_chk_warning (exp, fcode);
8085 break;
8086
8087 case BUILT_IN_SPRINTF_CHK:
8088 case BUILT_IN_VSPRINTF_CHK:
8089 maybe_emit_sprintf_chk_warning (exp, fcode);
8090 break;
8091
8092 case BUILT_IN_THREAD_POINTER:
8093 return expand_builtin_thread_pointer (exp, target);
8094
8095 case BUILT_IN_SET_THREAD_POINTER:
8096 expand_builtin_set_thread_pointer (exp);
8097 return const0_rtx;
8098
8099 case BUILT_IN_ACC_ON_DEVICE:
8100 /* Do a library call if we failed to expand the builtin when
8101 folding. */
8102 break;
8103
8104 case BUILT_IN_GOACC_PARLEVEL_ID:
8105 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8106 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8107
8108 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8109 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8110
8111 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8112 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8113 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8114 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8115 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8116 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8117 return expand_speculation_safe_value (mode, exp, target, ignore);
8118
8119 default: /* just do library call, if unknown builtin */
8120 break;
8121 }
8122
8123 /* The switch statement above can drop through to cause the function
8124 to be called normally. */
8125 return expand_call (exp, target, ignore);
8126 }
8127
8128 /* Determine whether a tree node represents a call to a built-in
8129 function. If the tree T is a call to a built-in function with
8130 the right number of arguments of the appropriate types, return
8131 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8132 Otherwise the return value is END_BUILTINS. */
8133
8134 enum built_in_function
8135 builtin_mathfn_code (const_tree t)
8136 {
8137 const_tree fndecl, arg, parmlist;
8138 const_tree argtype, parmtype;
8139 const_call_expr_arg_iterator iter;
8140
8141 if (TREE_CODE (t) != CALL_EXPR)
8142 return END_BUILTINS;
8143
8144 fndecl = get_callee_fndecl (t);
8145 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8146 return END_BUILTINS;
8147
8148 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8149 init_const_call_expr_arg_iterator (t, &iter);
8150 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8151 {
8152 /* If a function doesn't take a variable number of arguments,
8153 the last element in the list will have type `void'. */
8154 parmtype = TREE_VALUE (parmlist);
8155 if (VOID_TYPE_P (parmtype))
8156 {
8157 if (more_const_call_expr_args_p (&iter))
8158 return END_BUILTINS;
8159 return DECL_FUNCTION_CODE (fndecl);
8160 }
8161
8162 if (! more_const_call_expr_args_p (&iter))
8163 return END_BUILTINS;
8164
8165 arg = next_const_call_expr_arg (&iter);
8166 argtype = TREE_TYPE (arg);
8167
8168 if (SCALAR_FLOAT_TYPE_P (parmtype))
8169 {
8170 if (! SCALAR_FLOAT_TYPE_P (argtype))
8171 return END_BUILTINS;
8172 }
8173 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8174 {
8175 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8176 return END_BUILTINS;
8177 }
8178 else if (POINTER_TYPE_P (parmtype))
8179 {
8180 if (! POINTER_TYPE_P (argtype))
8181 return END_BUILTINS;
8182 }
8183 else if (INTEGRAL_TYPE_P (parmtype))
8184 {
8185 if (! INTEGRAL_TYPE_P (argtype))
8186 return END_BUILTINS;
8187 }
8188 else
8189 return END_BUILTINS;
8190 }
8191
8192 /* Variable-length argument list. */
8193 return DECL_FUNCTION_CODE (fndecl);
8194 }
8195
8196 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8197 evaluate to a constant. */
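/* For example, __builtin_constant_p (3) folds to 1 immediately, a pointer
or aggregate argument that is not a literal folds to 0, and a plain
scalar variable is left unfolded here so later optimizations may still
resolve it. (Illustrative summary of the cases below.) */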
8198
8199 static tree
8200 fold_builtin_constant_p (tree arg)
8201 {
8202 /* We return 1 for a numeric type that's known to be a constant
8203 value at compile-time or for an aggregate type that's a
8204 literal constant. */
8205 STRIP_NOPS (arg);
8206
8207 /* If we know this is a constant, emit the constant of one. */
8208 if (CONSTANT_CLASS_P (arg)
8209 || (TREE_CODE (arg) == CONSTRUCTOR
8210 && TREE_CONSTANT (arg)))
8211 return integer_one_node;
8212 if (TREE_CODE (arg) == ADDR_EXPR)
8213 {
8214 tree op = TREE_OPERAND (arg, 0);
8215 if (TREE_CODE (op) == STRING_CST
8216 || (TREE_CODE (op) == ARRAY_REF
8217 && integer_zerop (TREE_OPERAND (op, 1))
8218 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8219 return integer_one_node;
8220 }
8221
8222 /* If this expression has side effects, show we don't know it to be a
8223 constant. Likewise if it's a pointer or aggregate type since in
8224      those cases we only want literals, since those are only optimized
8225 when generating RTL, not later.
8226 And finally, if we are compiling an initializer, not code, we
8227 need to return a definite result now; there's not going to be any
8228 more optimization done. */
8229 if (TREE_SIDE_EFFECTS (arg)
8230 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8231 || POINTER_TYPE_P (TREE_TYPE (arg))
8232 || cfun == 0
8233 || folding_initializer
8234 || force_folding_builtin_constant_p)
8235 return integer_zero_node;
8236
8237 return NULL_TREE;
8238 }
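
/* Illustrative sketch, not part of the GCC sources: the effect of the
   folding above can be seen in user code such as

     int
     check (char *p)
     {
       int a = __builtin_constant_p (3 * 7);   // literal arithmetic -> 1
       int b = __builtin_constant_p ("abc");   // address of a string literal -> 1
       int c = __builtin_constant_p (p);       // pointer variable -> 0
       return a * 4 + b * 2 + c;               // folds to 6
     }

   A and B hit the CONSTANT_CLASS_P / STRING_CST cases, while C is forced
   to zero by the POINTER_TYPE_P check.  */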
8239
8240 /* Create builtin_expect or builtin_expect_with_probability
8241 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8242 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8243    builtin_expect_with_probability instead uses the third argument as the PROBABILITY
8244 value. */
8245
8246 static tree
8247 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8248 tree predictor, tree probability)
8249 {
8250 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8251
8252 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8253 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8254 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8255 ret_type = TREE_TYPE (TREE_TYPE (fn));
8256 pred_type = TREE_VALUE (arg_types);
8257 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8258
8259 pred = fold_convert_loc (loc, pred_type, pred);
8260 expected = fold_convert_loc (loc, expected_type, expected);
8261
8262 if (probability)
8263 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8264 else
8265 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8266 predictor);
8267
8268 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8269 build_int_cst (ret_type, 0));
8270 }
8271
8272 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8273 NULL_TREE if no simplification is possible. */
8274
8275 tree
8276 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8277 tree arg3)
8278 {
8279 tree inner, fndecl, inner_arg0;
8280 enum tree_code code;
8281
8282 /* Distribute the expected value over short-circuiting operators.
8283 See through the cast from truthvalue_type_node to long. */
8284 inner_arg0 = arg0;
8285 while (CONVERT_EXPR_P (inner_arg0)
8286 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8287 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8288 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8289
8290 /* If this is a builtin_expect within a builtin_expect keep the
8291 inner one. See through a comparison against a constant. It
8292      might have been added to create a truthvalue.  */
8293 inner = inner_arg0;
8294
8295 if (COMPARISON_CLASS_P (inner)
8296 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8297 inner = TREE_OPERAND (inner, 0);
8298
8299 if (TREE_CODE (inner) == CALL_EXPR
8300 && (fndecl = get_callee_fndecl (inner))
8301 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8302 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8303 return arg0;
8304
8305 inner = inner_arg0;
8306 code = TREE_CODE (inner);
8307 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8308 {
8309 tree op0 = TREE_OPERAND (inner, 0);
8310 tree op1 = TREE_OPERAND (inner, 1);
8311 arg1 = save_expr (arg1);
8312
8313 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8314 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8315 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8316
8317 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8318 }
8319
8320 /* If the argument isn't invariant then there's nothing else we can do. */
8321 if (!TREE_CONSTANT (inner_arg0))
8322 return NULL_TREE;
8323
8324 /* If we expect that a comparison against the argument will fold to
8325 a constant return the constant. In practice, this means a true
8326 constant or the address of a non-weak symbol. */
8327 inner = inner_arg0;
8328 STRIP_NOPS (inner);
8329 if (TREE_CODE (inner) == ADDR_EXPR)
8330 {
8331 do
8332 {
8333 inner = TREE_OPERAND (inner, 0);
8334 }
8335 while (TREE_CODE (inner) == COMPONENT_REF
8336 || TREE_CODE (inner) == ARRAY_REF);
8337 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8338 return NULL_TREE;
8339 }
8340
8341 /* Otherwise, ARG0 already has the proper type for the return value. */
8342 return arg0;
8343 }
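
/* Illustrative sketch, not part of the GCC sources: the distribution over
   TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR above means that

     extern int f (void), g (void);

     int
     fast_path (void)
     {
       if (__builtin_expect (f () == 0 && g () == 0, 1))
         return 1;
       return 0;
     }

   is folded as if it had been written
   __builtin_expect (f () == 0, 1) && __builtin_expect (g () == 0, 1),
   so each short-circuited operand carries the expected value.  */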
8344
8345 /* Fold a call to __builtin_classify_type with argument ARG. */
8346
8347 static tree
8348 fold_builtin_classify_type (tree arg)
8349 {
8350 if (arg == 0)
8351 return build_int_cst (integer_type_node, no_type_class);
8352
8353 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8354 }
8355
8356 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8357 ARG. */
8358
8359 static tree
8360 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8361 {
8362 if (!validate_arg (arg, POINTER_TYPE))
8363 return NULL_TREE;
8364 else
8365 {
8366 c_strlen_data lendata = { };
8367 tree len = c_strlen (arg, 0, &lendata);
8368
8369 if (len)
8370 return fold_convert_loc (loc, type, len);
8371
8372 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8373 	 also early enough to detect invalid reads in multidimensional
8374 arrays and struct members. */
8375 if (!lendata.decl)
8376 c_strlen (arg, 1, &lendata);
8377
8378 if (lendata.decl)
8379 {
8380 if (EXPR_HAS_LOCATION (arg))
8381 loc = EXPR_LOCATION (arg);
8382 else if (loc == UNKNOWN_LOCATION)
8383 loc = input_location;
8384 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8385 }
8386
8387 return NULL_TREE;
8388 }
8389 }
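
/* Illustrative sketch, not part of the GCC sources: with the folding above,
   a strlen of a known string constant never reaches the library:

     #include <string.h>

     size_t
     greeting_len (void)
     {
       return strlen ("hello");   // c_strlen computes 5 at compile time
     }

   whereas strlen of an unterminated constant array triggers the
   warn_string_no_nul diagnostic path instead of being folded.  */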
8390
8391 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8392
8393 static tree
8394 fold_builtin_inf (location_t loc, tree type, int warn)
8395 {
8396 REAL_VALUE_TYPE real;
8397
8398 /* __builtin_inff is intended to be usable to define INFINITY on all
8399 targets. If an infinity is not available, INFINITY expands "to a
8400 positive constant of type float that overflows at translation
8401 time", footnote "In this case, using INFINITY will violate the
8402 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8403 Thus we pedwarn to ensure this constraint violation is
8404 diagnosed. */
8405 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8406 pedwarn (loc, 0, "target format does not support infinity");
8407
8408 real_inf (&real);
8409 return build_real (type, real);
8410 }
8411
8412 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8413 NULL_TREE if no simplification can be made. */
8414
8415 static tree
8416 fold_builtin_sincos (location_t loc,
8417 tree arg0, tree arg1, tree arg2)
8418 {
8419 tree type;
8420 tree fndecl, call = NULL_TREE;
8421
8422 if (!validate_arg (arg0, REAL_TYPE)
8423 || !validate_arg (arg1, POINTER_TYPE)
8424 || !validate_arg (arg2, POINTER_TYPE))
8425 return NULL_TREE;
8426
8427 type = TREE_TYPE (arg0);
8428
8429 /* Calculate the result when the argument is a constant. */
8430 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8431 if (fn == END_BUILTINS)
8432 return NULL_TREE;
8433
8434 /* Canonicalize sincos to cexpi. */
8435 if (TREE_CODE (arg0) == REAL_CST)
8436 {
8437 tree complex_type = build_complex_type (type);
8438 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8439 }
8440 if (!call)
8441 {
8442 if (!targetm.libc_has_function (function_c99_math_complex, type)
8443 || !builtin_decl_implicit_p (fn))
8444 return NULL_TREE;
8445 fndecl = builtin_decl_explicit (fn);
8446 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8447 call = builtin_save_expr (call);
8448 }
8449
8450 tree ptype = build_pointer_type (type);
8451 arg1 = fold_convert (ptype, arg1);
8452 arg2 = fold_convert (ptype, arg2);
8453 return build2 (COMPOUND_EXPR, void_type_node,
8454 build2 (MODIFY_EXPR, void_type_node,
8455 build_fold_indirect_ref_loc (loc, arg1),
8456 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8457 build2 (MODIFY_EXPR, void_type_node,
8458 build_fold_indirect_ref_loc (loc, arg2),
8459 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8460 }
8461
8462 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8463 Return NULL_TREE if no simplification can be made. */
8464
8465 static tree
8466 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8467 {
8468 if (!validate_arg (arg1, POINTER_TYPE)
8469 || !validate_arg (arg2, POINTER_TYPE)
8470 || !validate_arg (len, INTEGER_TYPE))
8471 return NULL_TREE;
8472
8473 /* If the LEN parameter is zero, return zero. */
8474 if (integer_zerop (len))
8475 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8476 arg1, arg2);
8477
8478 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8479 if (operand_equal_p (arg1, arg2, 0))
8480 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8481
8482   /* If the LEN parameter is one, return an expression corresponding to
8483 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8484 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8485 {
8486 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8487 tree cst_uchar_ptr_node
8488 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8489
8490 tree ind1
8491 = fold_convert_loc (loc, integer_type_node,
8492 build1 (INDIRECT_REF, cst_uchar_node,
8493 fold_convert_loc (loc,
8494 cst_uchar_ptr_node,
8495 arg1)));
8496 tree ind2
8497 = fold_convert_loc (loc, integer_type_node,
8498 build1 (INDIRECT_REF, cst_uchar_node,
8499 fold_convert_loc (loc,
8500 cst_uchar_ptr_node,
8501 arg2)));
8502 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8503 }
8504
8505 return NULL_TREE;
8506 }
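
/* Illustrative sketch, not part of the GCC sources: the LEN == 1 case above
   turns a call like

     #include <string.h>

     int
     first_byte_diff (const void *a, const void *b)
     {
       return memcmp (a, b, 1);
     }

   into the equivalent of
   (int) *(const unsigned char *) a - (int) *(const unsigned char *) b,
   and memcmp (a, b, 0) folds directly to 0.  */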
8507
8508 /* Fold a call to builtin isascii with argument ARG. */
8509
8510 static tree
8511 fold_builtin_isascii (location_t loc, tree arg)
8512 {
8513 if (!validate_arg (arg, INTEGER_TYPE))
8514 return NULL_TREE;
8515 else
8516 {
8517 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8518 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8519 build_int_cst (integer_type_node,
8520 ~ (unsigned HOST_WIDE_INT) 0x7f));
8521 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8522 arg, integer_zero_node);
8523 }
8524 }
8525
8526 /* Fold a call to builtin toascii with argument ARG. */
8527
8528 static tree
8529 fold_builtin_toascii (location_t loc, tree arg)
8530 {
8531 if (!validate_arg (arg, INTEGER_TYPE))
8532 return NULL_TREE;
8533
8534 /* Transform toascii(c) -> (c & 0x7f). */
8535 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8536 build_int_cst (integer_type_node, 0x7f));
8537 }
8538
8539 /* Fold a call to builtin isdigit with argument ARG. */
8540
8541 static tree
8542 fold_builtin_isdigit (location_t loc, tree arg)
8543 {
8544 if (!validate_arg (arg, INTEGER_TYPE))
8545 return NULL_TREE;
8546 else
8547 {
8548 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8549 /* According to the C standard, isdigit is unaffected by locale.
8550 However, it definitely is affected by the target character set. */
8551 unsigned HOST_WIDE_INT target_digit0
8552 = lang_hooks.to_target_charset ('0');
8553
8554 if (target_digit0 == 0)
8555 return NULL_TREE;
8556
8557 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8558 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8559 build_int_cst (unsigned_type_node, target_digit0));
8560 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8561 build_int_cst (unsigned_type_node, 9));
8562 }
8563 }
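
/* Illustrative sketch, not part of the GCC sources: the isdigit
   transformation above rewrites

     #include <ctype.h>

     int
     digit_p (int c)
     {
       return isdigit (c);
     }

   into the branch-free test (unsigned) c - '0' <= 9, with '0' taken from
   the target character set, so the builtin form emits no locale-dependent
   library call.  */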
8564
8565 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8566
8567 static tree
8568 fold_builtin_fabs (location_t loc, tree arg, tree type)
8569 {
8570 if (!validate_arg (arg, REAL_TYPE))
8571 return NULL_TREE;
8572
8573 arg = fold_convert_loc (loc, type, arg);
8574 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8575 }
8576
8577 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8578
8579 static tree
8580 fold_builtin_abs (location_t loc, tree arg, tree type)
8581 {
8582 if (!validate_arg (arg, INTEGER_TYPE))
8583 return NULL_TREE;
8584
8585 arg = fold_convert_loc (loc, type, arg);
8586 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8587 }
8588
8589 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8590
8591 static tree
8592 fold_builtin_carg (location_t loc, tree arg, tree type)
8593 {
8594 if (validate_arg (arg, COMPLEX_TYPE)
8595 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8596 {
8597 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8598
8599 if (atan2_fn)
8600 {
8601 tree new_arg = builtin_save_expr (arg);
8602 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8603 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8604 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8605 }
8606 }
8607
8608 return NULL_TREE;
8609 }
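
/* Illustrative sketch, not part of the GCC sources: the carg folding above
   means that

     #include <complex.h>

     double
     phase (double _Complex z)
     {
       return carg (z);   // becomes atan2 (cimag (z), creal (z))
     }

   is expressed through atan2 on the imaginary and real parts, so carg
   needs no libm entry point of its own here.  */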
8610
8611 /* Fold a call to builtin frexp, we can assume the base is 2. */
8612
8613 static tree
8614 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8615 {
8616 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8617 return NULL_TREE;
8618
8619 STRIP_NOPS (arg0);
8620
8621 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8622 return NULL_TREE;
8623
8624 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8625
8626 /* Proceed if a valid pointer type was passed in. */
8627 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8628 {
8629 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8630 tree frac, exp;
8631
8632 switch (value->cl)
8633 {
8634 case rvc_zero:
8635 /* For +-0, return (*exp = 0, +-0). */
8636 exp = integer_zero_node;
8637 frac = arg0;
8638 break;
8639 case rvc_nan:
8640 case rvc_inf:
8641 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8642 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8643 case rvc_normal:
8644 {
8645 /* Since the frexp function always expects base 2, and in
8646 GCC normalized significands are already in the range
8647 [0.5, 1.0), we have exactly what frexp wants. */
8648 REAL_VALUE_TYPE frac_rvt = *value;
8649 SET_REAL_EXP (&frac_rvt, 0);
8650 frac = build_real (rettype, frac_rvt);
8651 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8652 }
8653 break;
8654 default:
8655 gcc_unreachable ();
8656 }
8657
8658       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8659 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8660 TREE_SIDE_EFFECTS (arg1) = 1;
8661 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8662 }
8663
8664 return NULL_TREE;
8665 }
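
/* Illustrative sketch, not part of the GCC sources: with a constant
   argument the frexp folding above evaluates the result at compile time,
   e.g.

     #include <math.h>

     double
     frexp_of_eight (int *e)
     {
       return frexp (8.0, e);   // folds to (*e = 4, 0.5)
     }

   since GCC's normalized significands already lie in [0.5, 1.0).  */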
8666
8667 /* Fold a call to builtin modf. */
8668
8669 static tree
8670 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8671 {
8672 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8673 return NULL_TREE;
8674
8675 STRIP_NOPS (arg0);
8676
8677 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8678 return NULL_TREE;
8679
8680 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8681
8682 /* Proceed if a valid pointer type was passed in. */
8683 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8684 {
8685 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8686 REAL_VALUE_TYPE trunc, frac;
8687
8688 switch (value->cl)
8689 {
8690 case rvc_nan:
8691 case rvc_zero:
8692 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8693 trunc = frac = *value;
8694 break;
8695 case rvc_inf:
8696 /* For +-Inf, return (*arg1 = arg0, +-0). */
8697 frac = dconst0;
8698 frac.sign = value->sign;
8699 trunc = *value;
8700 break;
8701 case rvc_normal:
8702 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8703 real_trunc (&trunc, VOIDmode, value);
8704 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8705 /* If the original number was negative and already
8706 integral, then the fractional part is -0.0. */
8707 if (value->sign && frac.cl == rvc_zero)
8708 frac.sign = value->sign;
8709 break;
8710 }
8711
8712 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8713 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8714 build_real (rettype, trunc));
8715 TREE_SIDE_EFFECTS (arg1) = 1;
8716 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8717 build_real (rettype, frac));
8718 }
8719
8720 return NULL_TREE;
8721 }
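
/* Illustrative sketch, not part of the GCC sources: the modf folding above
   likewise evaluates constant arguments, e.g.

     #include <math.h>

     double
     modf_example (double *ip)
     {
       return modf (2.5, ip);   // folds to (*ip = 2.0, 0.5)
     }

   and for a negative integral input such as -3.0 the fractional part is
   folded to -0.0 so the sign is preserved.  */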
8722
8723 /* Given a location LOC, an interclass builtin function decl FNDECL
8724    and its single argument ARG, return a folded expression computing
8725    the same, or NULL_TREE if we either couldn't or didn't want to fold
8726    (the latter happens if there's an RTL instruction available).  */
8727
8728 static tree
8729 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8730 {
8731 machine_mode mode;
8732
8733 if (!validate_arg (arg, REAL_TYPE))
8734 return NULL_TREE;
8735
8736 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8737 return NULL_TREE;
8738
8739 mode = TYPE_MODE (TREE_TYPE (arg));
8740
8741 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8742
8743 /* If there is no optab, try generic code. */
8744 switch (DECL_FUNCTION_CODE (fndecl))
8745 {
8746 tree result;
8747
8748 CASE_FLT_FN (BUILT_IN_ISINF):
8749 {
8750 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8751 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8752 tree type = TREE_TYPE (arg);
8753 REAL_VALUE_TYPE r;
8754 char buf[128];
8755
8756 if (is_ibm_extended)
8757 {
8758 /* NaN and Inf are encoded in the high-order double value
8759 only. The low-order value is not significant. */
8760 type = double_type_node;
8761 mode = DFmode;
8762 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8763 }
8764 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8765 real_from_string (&r, buf);
8766 result = build_call_expr (isgr_fn, 2,
8767 fold_build1_loc (loc, ABS_EXPR, type, arg),
8768 build_real (type, r));
8769 return result;
8770 }
8771 CASE_FLT_FN (BUILT_IN_FINITE):
8772 case BUILT_IN_ISFINITE:
8773 {
8774 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8775 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8776 tree type = TREE_TYPE (arg);
8777 REAL_VALUE_TYPE r;
8778 char buf[128];
8779
8780 if (is_ibm_extended)
8781 {
8782 /* NaN and Inf are encoded in the high-order double value
8783 only. The low-order value is not significant. */
8784 type = double_type_node;
8785 mode = DFmode;
8786 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8787 }
8788 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8789 real_from_string (&r, buf);
8790 result = build_call_expr (isle_fn, 2,
8791 fold_build1_loc (loc, ABS_EXPR, type, arg),
8792 build_real (type, r));
8793 /*result = fold_build2_loc (loc, UNGT_EXPR,
8794 TREE_TYPE (TREE_TYPE (fndecl)),
8795 fold_build1_loc (loc, ABS_EXPR, type, arg),
8796 build_real (type, r));
8797 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8798 TREE_TYPE (TREE_TYPE (fndecl)),
8799 result);*/
8800 return result;
8801 }
8802 case BUILT_IN_ISNORMAL:
8803 {
8804 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8805 islessequal(fabs(x),DBL_MAX). */
8806 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8807 tree type = TREE_TYPE (arg);
8808 tree orig_arg, max_exp, min_exp;
8809 machine_mode orig_mode = mode;
8810 REAL_VALUE_TYPE rmax, rmin;
8811 char buf[128];
8812
8813 orig_arg = arg = builtin_save_expr (arg);
8814 if (is_ibm_extended)
8815 {
8816 /* Use double to test the normal range of IBM extended
8817 precision. Emin for IBM extended precision is
8818 different to emin for IEEE double, being 53 higher
8819 since the low double exponent is at least 53 lower
8820 than the high double exponent. */
8821 type = double_type_node;
8822 mode = DFmode;
8823 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8824 }
8825 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8826
8827 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8828 real_from_string (&rmax, buf);
8829 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8830 real_from_string (&rmin, buf);
8831 max_exp = build_real (type, rmax);
8832 min_exp = build_real (type, rmin);
8833
8834 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8835 if (is_ibm_extended)
8836 {
8837 /* Testing the high end of the range is done just using
8838 the high double, using the same test as isfinite().
8839 For the subnormal end of the range we first test the
8840 high double, then if its magnitude is equal to the
8841 limit of 0x1p-969, we test whether the low double is
8842 non-zero and opposite sign to the high double. */
8843 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8844 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8845 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8846 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8847 arg, min_exp);
8848 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8849 complex_double_type_node, orig_arg);
8850 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8851 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8852 tree zero = build_real (type, dconst0);
8853 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8854 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8855 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8856 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8857 fold_build3 (COND_EXPR,
8858 integer_type_node,
8859 hilt, logt, lolt));
8860 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8861 eq_min, ok_lo);
8862 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8863 gt_min, eq_min);
8864 }
8865 else
8866 {
8867 tree const isge_fn
8868 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8869 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8870 }
8871 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8872 max_exp, min_exp);
8873 return result;
8874 }
8875 default:
8876 break;
8877 }
8878
8879 return NULL_TREE;
8880 }
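
/* Illustrative sketch, not part of the GCC sources: with no dedicated optab
   the lowering above turns

     #include <math.h>

     int
     in_range (double x)
     {
       return isfinite (x);   // becomes islessequal (fabs (x), DBL_MAX)
     }

   into an unordered comparison of fabs (x) against the largest finite
   value of the argument's mode, so NaN and infinity both fail the test
   without raising spurious invalid-operation exceptions.  */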
8881
8882 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8883 ARG is the argument for the call. */
8884
8885 static tree
8886 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8887 {
8888 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8889
8890 if (!validate_arg (arg, REAL_TYPE))
8891 return NULL_TREE;
8892
8893 switch (builtin_index)
8894 {
8895 case BUILT_IN_ISINF:
8896 if (tree_expr_infinite_p (arg))
8897 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8898 if (!tree_expr_maybe_infinite_p (arg))
8899 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8900 return NULL_TREE;
8901
8902 case BUILT_IN_ISINF_SIGN:
8903 {
8904 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8905 /* In a boolean context, GCC will fold the inner COND_EXPR to
8906 1. So e.g. "if (isinf_sign(x))" would be folded to just
8907 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8908 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8909 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8910 tree tmp = NULL_TREE;
8911
8912 arg = builtin_save_expr (arg);
8913
8914 if (signbit_fn && isinf_fn)
8915 {
8916 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8917 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8918
8919 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8920 signbit_call, integer_zero_node);
8921 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8922 isinf_call, integer_zero_node);
8923
8924 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8925 integer_minus_one_node, integer_one_node);
8926 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8927 isinf_call, tmp,
8928 integer_zero_node);
8929 }
8930
8931 return tmp;
8932 }
8933
8934 case BUILT_IN_ISFINITE:
8935 if (tree_expr_finite_p (arg))
8936 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8937 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8938 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8939 return NULL_TREE;
8940
8941 case BUILT_IN_ISNAN:
8942 if (tree_expr_nan_p (arg))
8943 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8944 if (!tree_expr_maybe_nan_p (arg))
8945 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8946
8947 {
8948 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8949 if (is_ibm_extended)
8950 {
8951 /* NaN and Inf are encoded in the high-order double value
8952 only. The low-order value is not significant. */
8953 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8954 }
8955 }
8956 arg = builtin_save_expr (arg);
8957 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8958
8959 default:
8960 gcc_unreachable ();
8961 }
8962 }
8963
8964 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8965 This builtin will generate code to return the appropriate floating
8966 point classification depending on the value of the floating point
8967 number passed in. The possible return values must be supplied as
8968 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8969    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8970 one floating point argument which is "type generic". */
8971
8972 static tree
8973 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8974 {
8975 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8976 arg, type, res, tmp;
8977 machine_mode mode;
8978 REAL_VALUE_TYPE r;
8979 char buf[128];
8980
8981 /* Verify the required arguments in the original call. */
8982 if (nargs != 6
8983 || !validate_arg (args[0], INTEGER_TYPE)
8984 || !validate_arg (args[1], INTEGER_TYPE)
8985 || !validate_arg (args[2], INTEGER_TYPE)
8986 || !validate_arg (args[3], INTEGER_TYPE)
8987 || !validate_arg (args[4], INTEGER_TYPE)
8988 || !validate_arg (args[5], REAL_TYPE))
8989 return NULL_TREE;
8990
8991 fp_nan = args[0];
8992 fp_infinite = args[1];
8993 fp_normal = args[2];
8994 fp_subnormal = args[3];
8995 fp_zero = args[4];
8996 arg = args[5];
8997 type = TREE_TYPE (arg);
8998 mode = TYPE_MODE (type);
8999 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9000
9001 /* fpclassify(x) ->
9002 isnan(x) ? FP_NAN :
9003 (fabs(x) == Inf ? FP_INFINITE :
9004 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9005 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9006
9007 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9008 build_real (type, dconst0));
9009 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9010 tmp, fp_zero, fp_subnormal);
9011
9012 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9013 real_from_string (&r, buf);
9014 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9015 arg, build_real (type, r));
9016 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9017
9018 if (tree_expr_maybe_infinite_p (arg))
9019 {
9020 real_inf (&r);
9021 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9022 build_real (type, r));
9023 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9024 fp_infinite, res);
9025 }
9026
9027 if (tree_expr_maybe_nan_p (arg))
9028 {
9029 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9030 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9031 }
9032
9033 return res;
9034 }
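
/* Illustrative sketch, not part of the GCC sources: the chain of COND_EXPRs
   built above corresponds to evaluating

     #include <math.h>

     int
     classify (double x)
     {
       return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                    FP_SUBNORMAL, FP_ZERO, x);
     }

   as the nested conditional
   isnan (x) ? FP_NAN
             : fabs (x) == Inf     ? FP_INFINITE
             : fabs (x) >= DBL_MIN ? FP_NORMAL
             : x == 0              ? FP_ZERO : FP_SUBNORMAL,
   with the NaN and infinity arms dropped when the argument provably
   cannot take those values.  */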
9035
9036 /* Fold a call to an unordered comparison function such as
9037 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9038 being called and ARG0 and ARG1 are the arguments for the call.
9039 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9040 the opposite of the desired result. UNORDERED_CODE is used
9041 for modes that can hold NaNs and ORDERED_CODE is used for
9042 the rest. */
9043
9044 static tree
9045 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9046 enum tree_code unordered_code,
9047 enum tree_code ordered_code)
9048 {
9049 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9050 enum tree_code code;
9051 tree type0, type1;
9052 enum tree_code code0, code1;
9053 tree cmp_type = NULL_TREE;
9054
9055 type0 = TREE_TYPE (arg0);
9056 type1 = TREE_TYPE (arg1);
9057
9058 code0 = TREE_CODE (type0);
9059 code1 = TREE_CODE (type1);
9060
9061 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9062 /* Choose the wider of two real types. */
9063 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9064 ? type0 : type1;
9065 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9066 cmp_type = type0;
9067 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9068 cmp_type = type1;
9069
9070 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9071 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9072
9073 if (unordered_code == UNORDERED_EXPR)
9074 {
9075 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9076 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9077 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9078 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9079 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9080 }
9081
9082 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9083 ? unordered_code : ordered_code;
9084 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9085 fold_build2_loc (loc, code, type, arg0, arg1));
9086 }
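
/* Illustrative sketch, not part of the GCC sources: the inversion trick
   above implements, for example,

     int
     greater (double x, double y)
     {
       return __builtin_isgreater (x, y);   // !(x UNLE y)
     }

   as the negation of the "unordered or less-equal" comparison, which gives
   the required result without raising an invalid-operation exception on
   quiet NaNs; when neither operand can be NaN the plain LE_EXPR is negated
   instead.  */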
9087
9088 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9089 arithmetics if it can never overflow, or into internal functions that
9090 return both result of arithmetics and overflowed boolean flag in
9091 a complex integer result, or some other check for overflow.
9092 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9093 checking part of that. */
9094
9095 static tree
9096 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9097 tree arg0, tree arg1, tree arg2)
9098 {
9099 enum internal_fn ifn = IFN_LAST;
9100 /* The code of the expression corresponding to the built-in. */
9101 enum tree_code opcode = ERROR_MARK;
9102 bool ovf_only = false;
9103
9104 switch (fcode)
9105 {
9106 case BUILT_IN_ADD_OVERFLOW_P:
9107 ovf_only = true;
9108 /* FALLTHRU */
9109 case BUILT_IN_ADD_OVERFLOW:
9110 case BUILT_IN_SADD_OVERFLOW:
9111 case BUILT_IN_SADDL_OVERFLOW:
9112 case BUILT_IN_SADDLL_OVERFLOW:
9113 case BUILT_IN_UADD_OVERFLOW:
9114 case BUILT_IN_UADDL_OVERFLOW:
9115 case BUILT_IN_UADDLL_OVERFLOW:
9116 opcode = PLUS_EXPR;
9117 ifn = IFN_ADD_OVERFLOW;
9118 break;
9119 case BUILT_IN_SUB_OVERFLOW_P:
9120 ovf_only = true;
9121 /* FALLTHRU */
9122 case BUILT_IN_SUB_OVERFLOW:
9123 case BUILT_IN_SSUB_OVERFLOW:
9124 case BUILT_IN_SSUBL_OVERFLOW:
9125 case BUILT_IN_SSUBLL_OVERFLOW:
9126 case BUILT_IN_USUB_OVERFLOW:
9127 case BUILT_IN_USUBL_OVERFLOW:
9128 case BUILT_IN_USUBLL_OVERFLOW:
9129 opcode = MINUS_EXPR;
9130 ifn = IFN_SUB_OVERFLOW;
9131 break;
9132 case BUILT_IN_MUL_OVERFLOW_P:
9133 ovf_only = true;
9134 /* FALLTHRU */
9135 case BUILT_IN_MUL_OVERFLOW:
9136 case BUILT_IN_SMUL_OVERFLOW:
9137 case BUILT_IN_SMULL_OVERFLOW:
9138 case BUILT_IN_SMULLL_OVERFLOW:
9139 case BUILT_IN_UMUL_OVERFLOW:
9140 case BUILT_IN_UMULL_OVERFLOW:
9141 case BUILT_IN_UMULLL_OVERFLOW:
9142 opcode = MULT_EXPR;
9143 ifn = IFN_MUL_OVERFLOW;
9144 break;
9145 default:
9146 gcc_unreachable ();
9147 }
9148
9149 /* For the "generic" overloads, the first two arguments can have different
9150 types and the last argument determines the target type to use to check
9151 for overflow. The arguments of the other overloads all have the same
9152 type. */
9153 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9154
9155 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9156 arguments are constant, attempt to fold the built-in call into a constant
9157 expression indicating whether or not it detected an overflow. */
9158 if (ovf_only
9159 && TREE_CODE (arg0) == INTEGER_CST
9160 && TREE_CODE (arg1) == INTEGER_CST)
9161 /* Perform the computation in the target type and check for overflow. */
9162 return omit_one_operand_loc (loc, boolean_type_node,
9163 arith_overflowed_p (opcode, type, arg0, arg1)
9164 ? boolean_true_node : boolean_false_node,
9165 arg2);
9166
9167 tree intres, ovfres;
9168 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9169 {
9170 intres = fold_binary_loc (loc, opcode, type,
9171 fold_convert_loc (loc, type, arg0),
9172 fold_convert_loc (loc, type, arg1));
9173 if (TREE_OVERFLOW (intres))
9174 intres = drop_tree_overflow (intres);
9175 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9176 ? boolean_true_node : boolean_false_node);
9177 }
9178 else
9179 {
9180 tree ctype = build_complex_type (type);
9181 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9182 arg0, arg1);
9183 tree tgt;
9184 if (ovf_only)
9185 {
9186 tgt = call;
9187 intres = NULL_TREE;
9188 }
9189 else
9190 {
9191 /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
9192 as while the call itself is const, the REALPART_EXPR store is
9193 certainly not. And in any case, we want just one call,
9194 not multiple and trying to CSE them later. */
9195 TREE_SIDE_EFFECTS (call) = 1;
9196 tgt = save_expr (call);
9197 }
9198 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9199 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9200 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9201 }
9202
9203 if (ovf_only)
9204 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9205
9206 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9207 tree store
9208 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9209 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9210 }
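
/* Illustrative sketch, not part of the GCC sources: the folding above means
   that with constant operands the _overflow_p form reduces to a boolean
   constant, while the general form becomes an internal-fn call whose
   complex result carries both the sum and the overflow flag:

     #include <limits.h>

     int
     add_checks (int a, int b, int *res)
     {
       int s1 = __builtin_add_overflow_p (INT_MAX, 1, (int) 0);  // folds to 1
       int s2 = __builtin_add_overflow (a, b, res);              // IFN_ADD_OVERFLOW
       return s1 + s2;
     }
*/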
9211
9212 /* Fold a call to __builtin_FILE to a constant string. */
9213
9214 static inline tree
9215 fold_builtin_FILE (location_t loc)
9216 {
9217 if (const char *fname = LOCATION_FILE (loc))
9218 {
9219 /* The documentation says this builtin is equivalent to the preprocessor
9220 __FILE__ macro so it appears appropriate to use the same file prefix
9221 mappings. */
9222 fname = remap_macro_filename (fname);
9223 return build_string_literal (strlen (fname) + 1, fname);
9224 }
9225
9226 return build_string_literal (1, "");
9227 }
9228
9229 /* Fold a call to __builtin_FUNCTION to a constant string. */
9230
9231 static inline tree
9232 fold_builtin_FUNCTION ()
9233 {
9234 const char *name = "";
9235
9236 if (current_function_decl)
9237 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9238
9239 return build_string_literal (strlen (name) + 1, name);
9240 }
9241
9242 /* Fold a call to __builtin_LINE to an integer constant. */
9243
9244 static inline tree
9245 fold_builtin_LINE (location_t loc, tree type)
9246 {
9247 return build_int_cst (type, LOCATION_LINE (loc));
9248 }
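
/* Illustrative sketch, not part of the GCC sources: these three folders make

     const char *
     where (void)
     {
       return __builtin_FILE ();   // string literal, after prefix remapping
     }

     int
     which_line (void)
     {
       return __builtin_LINE ();   // integer constant for this source line
     }

   compile down to constants taken from the call's location, with
   __builtin_FUNCTION () yielding the printable name of the enclosing
   function or "" outside of any function.  */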
9249
9250 /* Fold a call to built-in function FNDECL with 0 arguments.
9251 This function returns NULL_TREE if no simplification was possible. */
9252
9253 static tree
9254 fold_builtin_0 (location_t loc, tree fndecl)
9255 {
9256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9257 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9258 switch (fcode)
9259 {
9260 case BUILT_IN_FILE:
9261 return fold_builtin_FILE (loc);
9262
9263 case BUILT_IN_FUNCTION:
9264 return fold_builtin_FUNCTION ();
9265
9266 case BUILT_IN_LINE:
9267 return fold_builtin_LINE (loc, type);
9268
9269 CASE_FLT_FN (BUILT_IN_INF):
9270 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9271 case BUILT_IN_INFD32:
9272 case BUILT_IN_INFD64:
9273 case BUILT_IN_INFD128:
9274 return fold_builtin_inf (loc, type, true);
9275
9276 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9277 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9278 return fold_builtin_inf (loc, type, false);
9279
9280 case BUILT_IN_CLASSIFY_TYPE:
9281 return fold_builtin_classify_type (NULL_TREE);
9282
9283 default:
9284 break;
9285 }
9286 return NULL_TREE;
9287 }
9288
9289 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9290 This function returns NULL_TREE if no simplification was possible. */
9291
9292 static tree
9293 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9294 {
9295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9297
9298 if (TREE_CODE (arg0) == ERROR_MARK)
9299 return NULL_TREE;
9300
9301 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9302 return ret;
9303
9304 switch (fcode)
9305 {
9306 case BUILT_IN_CONSTANT_P:
9307 {
9308 tree val = fold_builtin_constant_p (arg0);
9309
9310 /* Gimplification will pull the CALL_EXPR for the builtin out of
9311 an if condition. When not optimizing, we'll not CSE it back.
9312 	   To avoid regressions such as link errors, return false now.  */
9313 if (!val && !optimize)
9314 val = integer_zero_node;
9315
9316 return val;
9317 }
9318
9319 case BUILT_IN_CLASSIFY_TYPE:
9320 return fold_builtin_classify_type (arg0);
9321
9322 case BUILT_IN_STRLEN:
9323 return fold_builtin_strlen (loc, expr, type, arg0);
9324
9325 CASE_FLT_FN (BUILT_IN_FABS):
9326 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9327 case BUILT_IN_FABSD32:
9328 case BUILT_IN_FABSD64:
9329 case BUILT_IN_FABSD128:
9330 return fold_builtin_fabs (loc, arg0, type);
9331
9332 case BUILT_IN_ABS:
9333 case BUILT_IN_LABS:
9334 case BUILT_IN_LLABS:
9335 case BUILT_IN_IMAXABS:
9336 return fold_builtin_abs (loc, arg0, type);
9337
9338 CASE_FLT_FN (BUILT_IN_CONJ):
9339 if (validate_arg (arg0, COMPLEX_TYPE)
9340 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9341 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9342 break;
9343
9344 CASE_FLT_FN (BUILT_IN_CREAL):
9345 if (validate_arg (arg0, COMPLEX_TYPE)
9346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9347 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9348 break;
9349
9350 CASE_FLT_FN (BUILT_IN_CIMAG):
9351 if (validate_arg (arg0, COMPLEX_TYPE)
9352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9353 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9354 break;
9355
9356 CASE_FLT_FN (BUILT_IN_CARG):
9357 return fold_builtin_carg (loc, arg0, type);
9358
9359 case BUILT_IN_ISASCII:
9360 return fold_builtin_isascii (loc, arg0);
9361
9362 case BUILT_IN_TOASCII:
9363 return fold_builtin_toascii (loc, arg0);
9364
9365 case BUILT_IN_ISDIGIT:
9366 return fold_builtin_isdigit (loc, arg0);
9367
9368 CASE_FLT_FN (BUILT_IN_FINITE):
9369 case BUILT_IN_FINITED32:
9370 case BUILT_IN_FINITED64:
9371 case BUILT_IN_FINITED128:
9372 case BUILT_IN_ISFINITE:
9373 {
9374 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9375 if (ret)
9376 return ret;
9377 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9378 }
9379
9380 CASE_FLT_FN (BUILT_IN_ISINF):
9381 case BUILT_IN_ISINFD32:
9382 case BUILT_IN_ISINFD64:
9383 case BUILT_IN_ISINFD128:
9384 {
9385 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9386 if (ret)
9387 return ret;
9388 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9389 }
9390
9391 case BUILT_IN_ISNORMAL:
9392 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9393
9394 case BUILT_IN_ISINF_SIGN:
9395 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9396
9397 CASE_FLT_FN (BUILT_IN_ISNAN):
9398 case BUILT_IN_ISNAND32:
9399 case BUILT_IN_ISNAND64:
9400 case BUILT_IN_ISNAND128:
9401 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9402
9403 case BUILT_IN_FREE:
9404 if (integer_zerop (arg0))
9405 return build_empty_stmt (loc);
9406 break;
9407
9408 default:
9409 break;
9410 }
9411
9412 return NULL_TREE;
9413
9414 }
9415
9416 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9417 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9418 if no simplification was possible. */
9419
9420 static tree
9421 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9422 {
9423 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9424 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9425
9426 if (TREE_CODE (arg0) == ERROR_MARK
9427 || TREE_CODE (arg1) == ERROR_MARK)
9428 return NULL_TREE;
9429
9430 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9431 return ret;
9432
9433 switch (fcode)
9434 {
9435 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9436 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9437 if (validate_arg (arg0, REAL_TYPE)
9438 && validate_arg (arg1, POINTER_TYPE))
9439 return do_mpfr_lgamma_r (arg0, arg1, type);
9440 break;
9441
9442 CASE_FLT_FN (BUILT_IN_FREXP):
9443 return fold_builtin_frexp (loc, arg0, arg1, type);
9444
9445 CASE_FLT_FN (BUILT_IN_MODF):
9446 return fold_builtin_modf (loc, arg0, arg1, type);
9447
9448 case BUILT_IN_STRSPN:
9449 return fold_builtin_strspn (loc, expr, arg0, arg1);
9450
9451 case BUILT_IN_STRCSPN:
9452 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9453
9454 case BUILT_IN_STRPBRK:
9455 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9456
9457 case BUILT_IN_EXPECT:
9458 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9459
9460 case BUILT_IN_ISGREATER:
9461 return fold_builtin_unordered_cmp (loc, fndecl,
9462 arg0, arg1, UNLE_EXPR, LE_EXPR);
9463 case BUILT_IN_ISGREATEREQUAL:
9464 return fold_builtin_unordered_cmp (loc, fndecl,
9465 arg0, arg1, UNLT_EXPR, LT_EXPR);
9466 case BUILT_IN_ISLESS:
9467 return fold_builtin_unordered_cmp (loc, fndecl,
9468 arg0, arg1, UNGE_EXPR, GE_EXPR);
9469 case BUILT_IN_ISLESSEQUAL:
9470 return fold_builtin_unordered_cmp (loc, fndecl,
9471 arg0, arg1, UNGT_EXPR, GT_EXPR);
9472 case BUILT_IN_ISLESSGREATER:
9473 return fold_builtin_unordered_cmp (loc, fndecl,
9474 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9475 case BUILT_IN_ISUNORDERED:
9476 return fold_builtin_unordered_cmp (loc, fndecl,
9477 arg0, arg1, UNORDERED_EXPR,
9478 NOP_EXPR);
9479
9480 /* We do the folding for va_start in the expander. */
9481 case BUILT_IN_VA_START:
9482 break;
9483
9484 case BUILT_IN_OBJECT_SIZE:
9485 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
9486 return fold_builtin_object_size (arg0, arg1, fcode);
9487
9488 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9489 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9490
9491 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9492 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9493
9494 default:
9495 break;
9496 }
9497 return NULL_TREE;
9498 }
9499
9500 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9501 and ARG2.
9502 This function returns NULL_TREE if no simplification was possible. */
9503
9504 static tree
9505 fold_builtin_3 (location_t loc, tree fndecl,
9506 tree arg0, tree arg1, tree arg2)
9507 {
9508 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9509 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9510
9511 if (TREE_CODE (arg0) == ERROR_MARK
9512 || TREE_CODE (arg1) == ERROR_MARK
9513 || TREE_CODE (arg2) == ERROR_MARK)
9514 return NULL_TREE;
9515
9516 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9517 arg0, arg1, arg2))
9518 return ret;
9519
9520 switch (fcode)
9521 {
9522
9523 CASE_FLT_FN (BUILT_IN_SINCOS):
9524 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9525
9526 CASE_FLT_FN (BUILT_IN_REMQUO):
9527 if (validate_arg (arg0, REAL_TYPE)
9528 && validate_arg (arg1, REAL_TYPE)
9529 && validate_arg (arg2, POINTER_TYPE))
9530 return do_mpfr_remquo (arg0, arg1, arg2);
9531 break;
9532
9533 case BUILT_IN_MEMCMP:
9534 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9535
9536 case BUILT_IN_EXPECT:
9537 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9538
9539 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9540 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9541
9542 case BUILT_IN_ADD_OVERFLOW:
9543 case BUILT_IN_SUB_OVERFLOW:
9544 case BUILT_IN_MUL_OVERFLOW:
9545 case BUILT_IN_ADD_OVERFLOW_P:
9546 case BUILT_IN_SUB_OVERFLOW_P:
9547 case BUILT_IN_MUL_OVERFLOW_P:
9548 case BUILT_IN_SADD_OVERFLOW:
9549 case BUILT_IN_SADDL_OVERFLOW:
9550 case BUILT_IN_SADDLL_OVERFLOW:
9551 case BUILT_IN_SSUB_OVERFLOW:
9552 case BUILT_IN_SSUBL_OVERFLOW:
9553 case BUILT_IN_SSUBLL_OVERFLOW:
9554 case BUILT_IN_SMUL_OVERFLOW:
9555 case BUILT_IN_SMULL_OVERFLOW:
9556 case BUILT_IN_SMULLL_OVERFLOW:
9557 case BUILT_IN_UADD_OVERFLOW:
9558 case BUILT_IN_UADDL_OVERFLOW:
9559 case BUILT_IN_UADDLL_OVERFLOW:
9560 case BUILT_IN_USUB_OVERFLOW:
9561 case BUILT_IN_USUBL_OVERFLOW:
9562 case BUILT_IN_USUBLL_OVERFLOW:
9563 case BUILT_IN_UMUL_OVERFLOW:
9564 case BUILT_IN_UMULL_OVERFLOW:
9565 case BUILT_IN_UMULLL_OVERFLOW:
9566 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9567
9568 default:
9569 break;
9570 }
9571 return NULL_TREE;
9572 }
9573
9574 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9575 ARGS is an array of NARGS arguments. IGNORE is true if the result
9576 of the function call is ignored. This function returns NULL_TREE
9577 if no simplification was possible. */
9578
9579 static tree
9580 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9581 int nargs, bool)
9582 {
9583 tree ret = NULL_TREE;
9584
9585 switch (nargs)
9586 {
9587 case 0:
9588 ret = fold_builtin_0 (loc, fndecl);
9589 break;
9590 case 1:
9591 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9592 break;
9593 case 2:
9594 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9595 break;
9596 case 3:
9597 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9598 break;
9599 default:
9600 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9601 break;
9602 }
9603 if (ret)
9604 {
9605 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9606 SET_EXPR_LOCATION (ret, loc);
9607 return ret;
9608 }
9609 return NULL_TREE;
9610 }
9611
9612 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9613 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9614 of arguments in ARGS to be omitted. OLDNARGS is the number of
9615 elements in ARGS. */
9616
9617 static tree
9618 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9619 int skip, tree fndecl, int n, va_list newargs)
9620 {
9621 int nargs = oldnargs - skip + n;
9622 tree *buffer;
9623
9624 if (n > 0)
9625 {
9626 int i, j;
9627
9628 buffer = XALLOCAVEC (tree, nargs);
9629 for (i = 0; i < n; i++)
9630 buffer[i] = va_arg (newargs, tree);
9631 for (j = skip; j < oldnargs; j++, i++)
9632 buffer[i] = args[j];
9633 }
9634 else
9635 buffer = args + skip;
9636
9637 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9638 }
9639
9640 /* Return true if FNDECL shouldn't be folded right now.
9641 If a built-in function has an inline attribute always_inline
9642    wrapper, defer folding it until after always_inline functions have
9643 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9644 might not be performed. */
9645
9646 bool
9647 avoid_folding_inline_builtin (tree fndecl)
9648 {
9649 return (DECL_DECLARED_INLINE_P (fndecl)
9650 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9651 && cfun
9652 && !cfun->always_inline_functions_inlined
9653 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9654 }
9655
9656 /* A wrapper function for builtin folding that prevents warnings for
9657 "statement without effect" and the like, caused by removing the
9658 call node earlier than the warning is generated. */
9659
9660 tree
9661 fold_call_expr (location_t loc, tree exp, bool ignore)
9662 {
9663 tree ret = NULL_TREE;
9664 tree fndecl = get_callee_fndecl (exp);
9665 if (fndecl && fndecl_built_in_p (fndecl)
9666 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9667 yet. Defer folding until we see all the arguments
9668 (after inlining). */
9669 && !CALL_EXPR_VA_ARG_PACK (exp))
9670 {
9671 int nargs = call_expr_nargs (exp);
9672
9673 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9674 instead last argument is __builtin_va_arg_pack (). Defer folding
9675 even in that case, until arguments are finalized. */
9676 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9677 {
9678 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9679 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9680 return NULL_TREE;
9681 }
9682
9683 if (avoid_folding_inline_builtin (fndecl))
9684 return NULL_TREE;
9685
9686 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9687 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9688 CALL_EXPR_ARGP (exp), ignore);
9689 else
9690 {
9691 tree *args = CALL_EXPR_ARGP (exp);
9692 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9693 if (ret)
9694 return ret;
9695 }
9696 }
9697 return NULL_TREE;
9698 }
9699
9700 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9701 N arguments are passed in the array ARGARRAY. Return a folded
9702 expression or NULL_TREE if no simplification was possible. */
9703
9704 tree
9705 fold_builtin_call_array (location_t loc, tree,
9706 tree fn,
9707 int n,
9708 tree *argarray)
9709 {
9710 if (TREE_CODE (fn) != ADDR_EXPR)
9711 return NULL_TREE;
9712
9713 tree fndecl = TREE_OPERAND (fn, 0);
9714 if (TREE_CODE (fndecl) == FUNCTION_DECL
9715 && fndecl_built_in_p (fndecl))
9716 {
9717 /* If last argument is __builtin_va_arg_pack (), arguments to this
9718 function are not finalized yet. Defer folding until they are. */
9719 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9720 {
9721 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9722 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9723 return NULL_TREE;
9724 }
9725 if (avoid_folding_inline_builtin (fndecl))
9726 return NULL_TREE;
9727 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9728 return targetm.fold_builtin (fndecl, n, argarray, false);
9729 else
9730 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9731 }
9732
9733 return NULL_TREE;
9734 }
9735
9736 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9737 along with N new arguments specified as the "..." parameters. SKIP
9738 is the number of arguments in EXP to be omitted. This function is used
9739 to do varargs-to-varargs transformations. */
9740
9741 static tree
9742 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9743 {
9744 va_list ap;
9745 tree t;
9746
9747 va_start (ap, n);
9748 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9749 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9750 va_end (ap);
9751
9752 return t;
9753 }
9754
9755 /* Validate a single argument ARG against a tree code CODE representing
9756 a type. Return true when argument is valid. */
9757
9758 static bool
9759 validate_arg (const_tree arg, enum tree_code code)
9760 {
9761 if (!arg)
9762 return false;
9763 else if (code == POINTER_TYPE)
9764 return POINTER_TYPE_P (TREE_TYPE (arg));
9765 else if (code == INTEGER_TYPE)
9766 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9767 return code == TREE_CODE (TREE_TYPE (arg));
9768 }
9769
9770 /* This function validates the types of a function call argument list
9771 against a specified list of tree_codes. If the last specifier is a 0,
9772 that represents an ellipses, otherwise the last specifier must be a
9773 VOID_TYPE.
9774
9775 This is the GIMPLE version of validate_arglist. Eventually we want to
9776 completely convert builtins.cc to work from GIMPLEs and the tree based
9777 validate_arglist will then be removed. */
9778
9779 bool
9780 validate_gimple_arglist (const gcall *call, ...)
9781 {
9782 enum tree_code code;
9783 bool res = 0;
9784 va_list ap;
9785 const_tree arg;
9786 size_t i;
9787
9788 va_start (ap, call);
9789 i = 0;
9790
9791 do
9792 {
9793 code = (enum tree_code) va_arg (ap, int);
9794 switch (code)
9795 {
9796 case 0:
9797 	  /* This signifies an ellipsis; any further arguments are all ok.  */
9798 res = true;
9799 goto end;
9800 case VOID_TYPE:
9801 /* This signifies an endlink, if no arguments remain, return
9802 true, otherwise return false. */
9803 res = (i == gimple_call_num_args (call));
9804 goto end;
9805 default:
9806 /* If no parameters remain or the parameter's code does not
9807 match the specified code, return false. Otherwise continue
9808 checking any remaining arguments. */
9809 arg = gimple_call_arg (call, i++);
9810 if (!validate_arg (arg, code))
9811 goto end;
9812 break;
9813 }
9814 }
9815 while (1);
9816
9817 /* We need gotos here since we can only have one VA_CLOSE in a
9818 function. */
9819 end: ;
9820 va_end (ap);
9821
9822 return res;
9823 }
9824
9825 /* Default target-specific builtin expander that does nothing. */
9826
9827 rtx
9828 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9829 rtx target ATTRIBUTE_UNUSED,
9830 rtx subtarget ATTRIBUTE_UNUSED,
9831 machine_mode mode ATTRIBUTE_UNUSED,
9832 int ignore ATTRIBUTE_UNUSED)
9833 {
9834 return NULL_RTX;
9835 }
9836
9837 /* Returns true if EXP represents data that would potentially reside
9838 in a readonly section. */
9839
9840 bool
9841 readonly_data_expr (tree exp)
9842 {
9843 STRIP_NOPS (exp);
9844
9845 if (TREE_CODE (exp) != ADDR_EXPR)
9846 return false;
9847
9848 exp = get_base_address (TREE_OPERAND (exp, 0));
9849 if (!exp)
9850 return false;
9851
9852 /* Make sure we call decl_readonly_section only for trees it
9853 can handle (since it returns true for everything it doesn't
9854 understand). */
9855 if (TREE_CODE (exp) == STRING_CST
9856 || TREE_CODE (exp) == CONSTRUCTOR
9857 || (VAR_P (exp) && TREE_STATIC (exp)))
9858 return decl_readonly_section (exp, 0);
9859 else
9860 return false;
9861 }
9862
9863 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9864 to the call, and TYPE is its return type.
9865
9866 Return NULL_TREE if no simplification was possible, otherwise return the
9867 simplified form of the call as a tree.
9868
9869 The simplified form may be a constant or other expression which
9870 computes the same value, but in a more efficient manner (including
9871 calls to other builtin functions).
9872
9873 The call may contain arguments which need to be evaluated, but
9874 which are not useful to determine the result of the call. In
9875 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9876 COMPOUND_EXPR will be an argument which must be evaluated.
9877 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9878 COMPOUND_EXPR in the chain will contain the tree for the simplified
9879 form of the builtin function call. */
9880
9881 static tree
9882 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9883 {
9884 if (!validate_arg (s1, POINTER_TYPE)
9885 || !validate_arg (s2, POINTER_TYPE))
9886 return NULL_TREE;
9887
9888 tree fn;
9889 const char *p1, *p2;
9890
9891 p2 = c_getstr (s2);
9892 if (p2 == NULL)
9893 return NULL_TREE;
9894
9895 p1 = c_getstr (s1);
9896 if (p1 != NULL)
9897 {
9898 const char *r = strpbrk (p1, p2);
9899 tree tem;
9900
9901 if (r == NULL)
9902 return build_int_cst (TREE_TYPE (s1), 0);
9903
9904 /* Return an offset into the constant string argument. */
9905 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9906 return fold_convert_loc (loc, type, tem);
9907 }
9908
9909 if (p2[0] == '\0')
9910 /* strpbrk(x, "") == NULL.
9911 Evaluate and ignore s1 in case it had side-effects. */
9912 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9913
9914 if (p2[1] != '\0')
9915 return NULL_TREE; /* Really call strpbrk. */
9916
9917 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9918 if (!fn)
9919 return NULL_TREE;
9920
9921 /* New argument list transforming strpbrk(s1, s2) to
9922 strchr(s1, s2[0]). */
9923 return build_call_expr_loc (loc, fn, 2, s1,
9924 build_int_cst (integer_type_node, p2[0]));
9925 }
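
/* Illustrative folds performed above:

     strpbrk (s, "")	  -> (char *) 0	     (S is still evaluated)
     strpbrk (s, "c")	  -> strchr (s, 'c')
     strpbrk ("ab", "b")  -> "ab" + 1	     (offset into the constant string)  */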
9926
9927 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9928 to the call.
9929
9930 Return NULL_TREE if no simplification was possible, otherwise return the
9931 simplified form of the call as a tree.
9932
9933 The simplified form may be a constant or other expression which
9934 computes the same value, but in a more efficient manner (including
9935 calls to other builtin functions).
9936
9937 The call may contain arguments which need to be evaluated, but
9938 which are not useful to determine the result of the call. In
9939 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9940 COMPOUND_EXPR will be an argument which must be evaluated.
9941 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9942 COMPOUND_EXPR in the chain will contain the tree for the simplified
9943 form of the builtin function call. */
9944
9945 static tree
9946 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9947 {
9948 if (!validate_arg (s1, POINTER_TYPE)
9949 || !validate_arg (s2, POINTER_TYPE))
9950 return NULL_TREE;
9951
9952 if (!check_nul_terminated_array (expr, s1)
9953 || !check_nul_terminated_array (expr, s2))
9954 return NULL_TREE;
9955
9956 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9957
9958 /* If either argument is "", return NULL_TREE. */
9959 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9960 /* Evaluate and ignore both arguments in case either one has
9961 side-effects. */
9962 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9963 s1, s2);
9964 return NULL_TREE;
9965 }
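
/* The only simplification attempted above: if either string is known to
   be empty, e.g. strspn (s, "") or strspn ("", s), the result folds to
   (size_t) 0 while both arguments are still evaluated for their side
   effects.  */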
9966
9967 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9968 to the call.
9969
9970 Return NULL_TREE if no simplification was possible, otherwise return the
9971 simplified form of the call as a tree.
9972
9973 The simplified form may be a constant or other expression which
9974 computes the same value, but in a more efficient manner (including
9975 calls to other builtin functions).
9976
9977 The call may contain arguments which need to be evaluated, but
9978 which are not useful to determine the result of the call. In
9979 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9980 COMPOUND_EXPR will be an argument which must be evaluated.
9981 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9982 COMPOUND_EXPR in the chain will contain the tree for the simplified
9983 form of the builtin function call. */
9984
9985 static tree
9986 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9987 {
9988 if (!validate_arg (s1, POINTER_TYPE)
9989 || !validate_arg (s2, POINTER_TYPE))
9990 return NULL_TREE;
9991
9992 if (!check_nul_terminated_array (expr, s1)
9993 || !check_nul_terminated_array (expr, s2))
9994 return NULL_TREE;
9995
9996 /* If the first argument is "", return NULL_TREE. */
9997 const char *p1 = c_getstr (s1);
9998 if (p1 && *p1 == '\0')
9999 {
10000 /* Evaluate and ignore argument s2 in case it has
10001 side-effects. */
10002 return omit_one_operand_loc (loc, size_type_node,
10003 size_zero_node, s2);
10004 }
10005
10006 /* If the second argument is "", return __builtin_strlen(s1). */
10007 const char *p2 = c_getstr (s2);
10008 if (p2 && *p2 == '\0')
10009 {
10010 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10011
10012 /* If the replacement _DECL isn't initialized, don't do the
10013 transformation. */
10014 if (!fn)
10015 return NULL_TREE;
10016
10017 return build_call_expr_loc (loc, fn, 1, s1);
10018 }
10019 return NULL_TREE;
10020 }
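
/* Illustrative folds performed above:

     strcspn ("", s)  -> (size_t) 0	(S is still evaluated)
     strcspn (s, "")  -> strlen (s)  */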
10021
10022 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10023 produced, false otherwise. This is done so that we don't output the error
10024 or warning twice or three times. */
10025
10026 bool
10027 fold_builtin_next_arg (tree exp, bool va_start_p)
10028 {
10029 tree fntype = TREE_TYPE (current_function_decl);
10030 int nargs = call_expr_nargs (exp);
10031 tree arg;
10032 /* There is a good chance the current input_location points inside the
10033 definition of the va_start macro (perhaps on the token for
10034 builtin) in a system header, so warnings will not be emitted.
10035 Use the location in real source code. */
10036 location_t current_location =
10037 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10038 NULL);
10039
10040 if (!stdarg_p (fntype))
10041 {
10042 error ("%<va_start%> used in function with fixed arguments");
10043 return true;
10044 }
10045
10046 if (va_start_p)
10047 {
10048 if (va_start_p && (nargs != 2))
10049 {
10050 error ("wrong number of arguments to function %<va_start%>");
10051 return true;
10052 }
10053 arg = CALL_EXPR_ARG (exp, 1);
10054 }
10055 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10056 when we checked the arguments and if needed issued a warning. */
10057 else
10058 {
10059 if (nargs == 0)
10060 {
10061 /* Evidently an out of date version of <stdarg.h>; can't validate
10062 va_start's second argument, but can still work as intended. */
10063 warning_at (current_location,
10064 OPT_Wvarargs,
10065 "%<__builtin_next_arg%> called without an argument");
10066 return true;
10067 }
10068 else if (nargs > 1)
10069 {
10070 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10071 return true;
10072 }
10073 arg = CALL_EXPR_ARG (exp, 0);
10074 }
10075
10076 if (TREE_CODE (arg) == SSA_NAME
10077 && SSA_NAME_VAR (arg))
10078 arg = SSA_NAME_VAR (arg);
10079
10080 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10081 or __builtin_next_arg (0) the first time we see it, after checking
10082 the arguments and if needed issuing a warning. */
10083 if (!integer_zerop (arg))
10084 {
10085 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10086
10087 /* Strip off all nops for the sake of the comparison. This
10088 is not quite the same as STRIP_NOPS. It does more.
10089 We must also strip off INDIRECT_EXPR for C++ reference
10090 parameters. */
10091 while (CONVERT_EXPR_P (arg)
10092 || TREE_CODE (arg) == INDIRECT_REF)
10093 arg = TREE_OPERAND (arg, 0);
10094 if (arg != last_parm)
10095 {
10096 /* FIXME: Sometimes with the tree optimizers we can end up with
10097 something other than the last argument even though the user used
10098 the last argument. We just warn and set the arg to be the last
10099 argument so that we will get wrong-code because of
10100 it. */
10101 warning_at (current_location,
10102 OPT_Wvarargs,
10103 "second parameter of %<va_start%> not last named argument");
10104 }
10105
10106 /* Undefined by C99 7.15.1.4p4 (va_start):
10107 "If the parameter parmN is declared with the register storage
10108 class, with a function or array type, or with a type that is
10109 not compatible with the type that results after application of
10110 the default argument promotions, the behavior is undefined."
10111 */
10112 else if (DECL_REGISTER (arg))
10113 {
10114 warning_at (current_location,
10115 OPT_Wvarargs,
10116 "undefined behavior when second parameter of "
10117 "%<va_start%> is declared with %<register%> storage");
10118 }
10119
10120 /* We want to verify the second parameter just once before the tree
10121 optimizers are run and then avoid keeping it in the tree,
10122 as otherwise we could warn even for correct code like:
10123 void foo (int i, ...)
10124 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10125 if (va_start_p)
10126 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10127 else
10128 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10129 }
10130 return false;
10131 }
10132
10133
10134 /* Expand a call EXP to __builtin_object_size. */
10135
10136 static rtx
10137 expand_builtin_object_size (tree exp)
10138 {
10139 tree ost;
10140 int object_size_type;
10141 tree fndecl = get_callee_fndecl (exp);
10142
10143 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10144 {
10145 error ("first argument of %qD must be a pointer, second integer constant",
10146 fndecl);
10147 expand_builtin_trap ();
10148 return const0_rtx;
10149 }
10150
10151 ost = CALL_EXPR_ARG (exp, 1);
10152 STRIP_NOPS (ost);
10153
10154 if (TREE_CODE (ost) != INTEGER_CST
10155 || tree_int_cst_sgn (ost) < 0
10156 || compare_tree_int (ost, 3) > 0)
10157 {
10158 error ("last argument of %qD is not integer constant between 0 and 3",
10159 fndecl);
10160 expand_builtin_trap ();
10161 return const0_rtx;
10162 }
10163
10164 object_size_type = tree_to_shwi (ost);
10165
10166 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10167 }
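
/* For example, if earlier passes could not fold the call to a constant,
   __builtin_object_size (p, 0) expands here to (size_t) -1 (the
   "unknown, assume maximum" answer) while __builtin_object_size (p, 2)
   expands to (size_t) 0 (the "unknown, assume minimum" answer).  */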
10168
10169 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10170 FCODE is the BUILT_IN_* to use.
10171 Return NULL_RTX if we failed; the caller should emit a normal call,
10172 otherwise try to get the result in TARGET, if convenient (and in
10173 mode MODE if that's convenient). */
10174
10175 static rtx
10176 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10177 enum built_in_function fcode)
10178 {
10179 if (!validate_arglist (exp,
10180 POINTER_TYPE,
10181 fcode == BUILT_IN_MEMSET_CHK
10182 ? INTEGER_TYPE : POINTER_TYPE,
10183 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10184 return NULL_RTX;
10185
10186 tree dest = CALL_EXPR_ARG (exp, 0);
10187 tree src = CALL_EXPR_ARG (exp, 1);
10188 tree len = CALL_EXPR_ARG (exp, 2);
10189 tree size = CALL_EXPR_ARG (exp, 3);
10190
10191 /* FIXME: Set access mode to write only for memset et al. */
10192 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10193 /*srcstr=*/NULL_TREE, size, access_read_write);
10194
10195 if (!tree_fits_uhwi_p (size))
10196 return NULL_RTX;
10197
10198 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10199 {
10200 /* Avoid transforming the checking call to an ordinary one when
10201 an overflow has been detected or when the call couldn't be
10202 validated because the size is not constant. */
10203 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10204 return NULL_RTX;
10205
10206 tree fn = NULL_TREE;
10207 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10208 mem{cpy,pcpy,move,set} is available. */
10209 switch (fcode)
10210 {
10211 case BUILT_IN_MEMCPY_CHK:
10212 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10213 break;
10214 case BUILT_IN_MEMPCPY_CHK:
10215 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10216 break;
10217 case BUILT_IN_MEMMOVE_CHK:
10218 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10219 break;
10220 case BUILT_IN_MEMSET_CHK:
10221 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10222 break;
10223 default:
10224 break;
10225 }
10226
10227 if (! fn)
10228 return NULL_RTX;
10229
10230 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10231 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10232 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10233 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10234 }
10235 else if (fcode == BUILT_IN_MEMSET_CHK)
10236 return NULL_RTX;
10237 else
10238 {
10239 unsigned int dest_align = get_pointer_alignment (dest);
10240
10241 /* If DEST is not a pointer type, call the normal function. */
10242 if (dest_align == 0)
10243 return NULL_RTX;
10244
10245 /* If SRC and DEST are the same (and not volatile), do nothing. */
10246 if (operand_equal_p (src, dest, 0))
10247 {
10248 tree expr;
10249
10250 if (fcode != BUILT_IN_MEMPCPY_CHK)
10251 {
10252 /* Evaluate and ignore LEN in case it has side-effects. */
10253 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10254 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10255 }
10256
10257 expr = fold_build_pointer_plus (dest, len);
10258 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10259 }
10260
10261 /* __memmove_chk special case. */
10262 if (fcode == BUILT_IN_MEMMOVE_CHK)
10263 {
10264 unsigned int src_align = get_pointer_alignment (src);
10265
10266 if (src_align == 0)
10267 return NULL_RTX;
10268
10269 /* If src is categorized for a readonly section we can use
10270 normal __memcpy_chk. */
10271 if (readonly_data_expr (src))
10272 {
10273 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10274 if (!fn)
10275 return NULL_RTX;
10276 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10277 dest, src, len, size);
10278 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10279 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10280 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10281 }
10282 }
10283 return NULL_RTX;
10284 }
10285 }
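
/* As an illustration of the transformation above, a checked call such as

     __builtin___memcpy_chk (d, s, 32, 64);

   where the length and the object size are compile-time constants and
   the length fits in the object, is expanded as a plain
   memcpy (d, s, 32); if instead the copy is known to overflow the
   destination, the call is left alone so the library's checking entry
   point can report it at run time.  */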
10286
10287 /* Emit warning if a buffer overflow is detected at compile time. */
10288
10289 static void
10290 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10291 {
10292 /* The source string. */
10293 tree srcstr = NULL_TREE;
10294 /* The size of the destination object returned by __builtin_object_size. */
10295 tree objsize = NULL_TREE;
10296 /* The string that is being concatenated with (as in __strcat_chk)
10297 or null if it isn't. */
10298 tree catstr = NULL_TREE;
10299 /* The maximum length of the source sequence in a bounded operation
10300 (such as __strncat_chk) or null if the operation isn't bounded
10301 (such as __strcat_chk). */
10302 tree maxread = NULL_TREE;
10303 /* The exact size of the access (such as in __strncpy_chk). */
10304 tree size = NULL_TREE;
10305 /* The access by the function that's checked. Except for snprintf
10306 both writing and reading are checked. */
10307 access_mode mode = access_read_write;
10308
10309 switch (fcode)
10310 {
10311 case BUILT_IN_STRCPY_CHK:
10312 case BUILT_IN_STPCPY_CHK:
10313 srcstr = CALL_EXPR_ARG (exp, 1);
10314 objsize = CALL_EXPR_ARG (exp, 2);
10315 break;
10316
10317 case BUILT_IN_STRCAT_CHK:
10318 /* For __strcat_chk the warning will be emitted only if overflowing
10319 by at least strlen (dest) + 1 bytes. */
10320 catstr = CALL_EXPR_ARG (exp, 0);
10321 srcstr = CALL_EXPR_ARG (exp, 1);
10322 objsize = CALL_EXPR_ARG (exp, 2);
10323 break;
10324
10325 case BUILT_IN_STRNCAT_CHK:
10326 catstr = CALL_EXPR_ARG (exp, 0);
10327 srcstr = CALL_EXPR_ARG (exp, 1);
10328 maxread = CALL_EXPR_ARG (exp, 2);
10329 objsize = CALL_EXPR_ARG (exp, 3);
10330 break;
10331
10332 case BUILT_IN_STRNCPY_CHK:
10333 case BUILT_IN_STPNCPY_CHK:
10334 srcstr = CALL_EXPR_ARG (exp, 1);
10335 size = CALL_EXPR_ARG (exp, 2);
10336 objsize = CALL_EXPR_ARG (exp, 3);
10337 break;
10338
10339 case BUILT_IN_SNPRINTF_CHK:
10340 case BUILT_IN_VSNPRINTF_CHK:
10341 maxread = CALL_EXPR_ARG (exp, 1);
10342 objsize = CALL_EXPR_ARG (exp, 3);
10343 /* The only checked access is the write to the destination. */
10344 mode = access_write_only;
10345 break;
10346 default:
10347 gcc_unreachable ();
10348 }
10349
10350 if (catstr && maxread)
10351 {
10352 /* Check __strncat_chk. There is no way to determine the length
10353 of the string to which the source string is being appended so
10354 just warn when the length of the source string is not known. */
10355 check_strncat_sizes (exp, objsize);
10356 return;
10357 }
10358
10359 check_access (exp, size, maxread, srcstr, objsize, mode);
10360 }
10361
10362 /* Emit warning if a buffer overflow is detected at compile time
10363 in __sprintf_chk/__vsprintf_chk calls. */
10364
10365 static void
10366 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10367 {
10368 tree size, len, fmt;
10369 const char *fmt_str;
10370 int nargs = call_expr_nargs (exp);
10371
10372 /* Verify the required arguments in the original call. */
10373
10374 if (nargs < 4)
10375 return;
10376 size = CALL_EXPR_ARG (exp, 2);
10377 fmt = CALL_EXPR_ARG (exp, 3);
10378
10379 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10380 return;
10381
10382 /* Check whether the format is a literal string constant. */
10383 fmt_str = c_getstr (fmt);
10384 if (fmt_str == NULL)
10385 return;
10386
10387 if (!init_target_chars ())
10388 return;
10389
10390 /* If the format doesn't contain % args or %%, we know its size. */
10391 if (strchr (fmt_str, target_percent) == 0)
10392 len = build_int_cstu (size_type_node, strlen (fmt_str));
10393 /* If the format is "%s" and first ... argument is a string literal,
10394 we know it too. */
10395 else if (fcode == BUILT_IN_SPRINTF_CHK
10396 && strcmp (fmt_str, target_percent_s) == 0)
10397 {
10398 tree arg;
10399
10400 if (nargs < 5)
10401 return;
10402 arg = CALL_EXPR_ARG (exp, 4);
10403 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10404 return;
10405
10406 len = c_strlen (arg, 1);
10407 if (!len || ! tree_fits_uhwi_p (len))
10408 return;
10409 }
10410 else
10411 return;
10412
10413 /* Add one for the terminating nul. */
10414 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10415
10416 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10417 access_write_only);
10418 }
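
/* For instance, given a compile-time constant size and format as in

     __builtin___sprintf_chk (buf, 1, 4, "%s", "abcdef");

   the length of the output (6 characters plus the terminating nul) is
   known to exceed the object size of 4, so check_access reports the
   overflow at compile time.  */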
10419
10420 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10421 if possible. */
10422
10423 static tree
10424 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10425 {
10426 tree bytes;
10427 int object_size_type;
10428
10429 if (!validate_arg (ptr, POINTER_TYPE)
10430 || !validate_arg (ost, INTEGER_TYPE))
10431 return NULL_TREE;
10432
10433 STRIP_NOPS (ost);
10434
10435 if (TREE_CODE (ost) != INTEGER_CST
10436 || tree_int_cst_sgn (ost) < 0
10437 || compare_tree_int (ost, 3) > 0)
10438 return NULL_TREE;
10439
10440 object_size_type = tree_to_shwi (ost);
10441
10442 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10443 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10444 and (size_t) 0 for types 2 and 3. */
10445 if (TREE_SIDE_EFFECTS (ptr))
10446 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10447
10448 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10449 object_size_type |= OST_DYNAMIC;
10450
10451 if (TREE_CODE (ptr) == ADDR_EXPR)
10452 {
10453 compute_builtin_object_size (ptr, object_size_type, &bytes);
10454 if ((object_size_type & OST_DYNAMIC)
10455 || int_fits_type_p (bytes, size_type_node))
10456 return fold_convert (size_type_node, bytes);
10457 }
10458 else if (TREE_CODE (ptr) == SSA_NAME)
10459 {
10460 /* If object size is not known yet, delay folding until
10461 later. Maybe subsequent passes will help determining
10462 it. */
10463 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10464 && ((object_size_type & OST_DYNAMIC)
10465 || int_fits_type_p (bytes, size_type_node)))
10466 return fold_convert (size_type_node, bytes);
10467 }
10468
10469 return NULL_TREE;
10470 }
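
/* For example, because __builtin_object_size does not evaluate side
   effects in its argument, __builtin_object_size (f (), 0) folds
   immediately to (size_t) -1 and __builtin_object_size (f (), 2) to
   (size_t) 0, regardless of what f returns.  */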
10471
10472 /* Builtins with folding operations that operate on "..." arguments
10473 need special handling; we need to store the arguments in a convenient
10474 data structure before attempting any folding. Fortunately there are
10475 only a few builtins that fall into this category. FNDECL is the
10476 function, EXP is the CALL_EXPR for the call. */
10477
10478 static tree
10479 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10480 {
10481 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10482 tree ret = NULL_TREE;
10483
10484 switch (fcode)
10485 {
10486 case BUILT_IN_FPCLASSIFY:
10487 ret = fold_builtin_fpclassify (loc, args, nargs);
10488 break;
10489
10490 default:
10491 break;
10492 }
10493 if (ret)
10494 {
10495 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10496 SET_EXPR_LOCATION (ret, loc);
10497 suppress_warning (ret);
10498 return ret;
10499 }
10500 return NULL_TREE;
10501 }
10502
10503 /* Initialize format string characters in the target charset. */
10504
10505 bool
10506 init_target_chars (void)
10507 {
10508 static bool init;
10509 if (!init)
10510 {
10511 target_newline = lang_hooks.to_target_charset ('\n');
10512 target_percent = lang_hooks.to_target_charset ('%');
10513 target_c = lang_hooks.to_target_charset ('c');
10514 target_s = lang_hooks.to_target_charset ('s');
10515 if (target_newline == 0 || target_percent == 0 || target_c == 0
10516 || target_s == 0)
10517 return false;
10518
10519 target_percent_c[0] = target_percent;
10520 target_percent_c[1] = target_c;
10521 target_percent_c[2] = '\0';
10522
10523 target_percent_s[0] = target_percent;
10524 target_percent_s[1] = target_s;
10525 target_percent_s[2] = '\0';
10526
10527 target_percent_s_newline[0] = target_percent;
10528 target_percent_s_newline[1] = target_s;
10529 target_percent_s_newline[2] = target_newline;
10530 target_percent_s_newline[3] = '\0';
10531
10532 init = true;
10533 }
10534 return true;
10535 }
10536
10537 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10538 and no overflow/underflow occurred. INEXACT is true if M was not
10539 exactly calculated. TYPE is the tree type for the result. This
10540 function assumes that you cleared the MPFR flags and then
10541 calculated M to see if anything subsequently set a flag prior to
10542 entering this function. Return NULL_TREE if any checks fail. */
10543
10544 static tree
10545 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10546 {
10547 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10548 overflow/underflow occurred. If -frounding-math, proceed iff the
10549 result of calling FUNC was exact. */
10550 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10551 && (!flag_rounding_math || !inexact))
10552 {
10553 REAL_VALUE_TYPE rr;
10554
10555 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10556 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10557 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10558 but the mpfr_t is not, then we underflowed in the
10559 conversion. */
10560 if (real_isfinite (&rr)
10561 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10562 {
10563 REAL_VALUE_TYPE rmode;
10564
10565 real_convert (&rmode, TYPE_MODE (type), &rr);
10566 /* Proceed iff the specified mode can hold the value. */
10567 if (real_identical (&rmode, &rr))
10568 return build_real (type, rmode);
10569 }
10570 }
10571 return NULL_TREE;
10572 }
10573
10574 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10575 number and no overflow/underflow occurred. INEXACT is true if M
10576 was not exactly calculated. TYPE is the tree type for the result.
10577 This function assumes that you cleared the MPFR flags and then
10578 calculated M to see if anything subsequently set a flag prior to
10579 entering this function. Return NULL_TREE if any checks fail; if
10580 FORCE_CONVERT is true, then bypass the checks. */
10581
10582 static tree
10583 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10584 {
10585 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10586 overflow/underflow occurred. If -frounding-math, proceed iff the
10587 result of calling FUNC was exact. */
10588 if (force_convert
10589 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10590 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10591 && (!flag_rounding_math || !inexact)))
10592 {
10593 REAL_VALUE_TYPE re, im;
10594
10595 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10596 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10597 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10598 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10599 but the mpfr_t is not, then we underflowed in the
10600 conversion. */
10601 if (force_convert
10602 || (real_isfinite (&re) && real_isfinite (&im)
10603 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10604 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10605 {
10606 REAL_VALUE_TYPE re_mode, im_mode;
10607
10608 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10609 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10610 /* Proceed iff the specified mode can hold the value. */
10611 if (force_convert
10612 || (real_identical (&re_mode, &re)
10613 && real_identical (&im_mode, &im)))
10614 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10615 build_real (TREE_TYPE (type), im_mode));
10616 }
10617 }
10618 return NULL_TREE;
10619 }
10620
10621 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10622 the value pointed to by ARG_QUO and return the result. The type is taken
10623 from the type of ARG0 and is used for setting the precision of the
10624 calculation and results. */
10625
10626 static tree
10627 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10628 {
10629 tree const type = TREE_TYPE (arg0);
10630 tree result = NULL_TREE;
10631
10632 STRIP_NOPS (arg0);
10633 STRIP_NOPS (arg1);
10634
10635 /* To proceed, MPFR must exactly represent the target floating point
10636 format, which only happens when the target base equals two. */
10637 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10638 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10639 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10640 {
10641 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10642 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10643
10644 if (real_isfinite (ra0) && real_isfinite (ra1))
10645 {
10646 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10647 const int prec = fmt->p;
10648 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10649 tree result_rem;
10650 long integer_quo;
10651 mpfr_t m0, m1;
10652
10653 mpfr_inits2 (prec, m0, m1, NULL);
10654 mpfr_from_real (m0, ra0, MPFR_RNDN);
10655 mpfr_from_real (m1, ra1, MPFR_RNDN);
10656 mpfr_clear_flags ();
10657 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10658 /* Remquo is independent of the rounding mode, so pass
10659 inexact=0 to do_mpfr_ckconv(). */
10660 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10661 mpfr_clears (m0, m1, NULL);
10662 if (result_rem)
10663 {
10664 /* MPFR calculates quo in the host's long so it may
10665 return more bits in quo than the target int can hold
10666 if sizeof(host long) > sizeof(target int). This can
10667 happen even for native compilers in LP64 mode. In
10668 these cases, modulo the quo value with the largest
10669 number that the target int can hold while leaving one
10670 bit for the sign. */
10671 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10672 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10673
10674 /* Dereference the quo pointer argument. */
10675 arg_quo = build_fold_indirect_ref (arg_quo);
10676 /* Proceed iff a valid pointer type was passed in. */
10677 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10678 {
10679 /* Set the value. */
10680 tree result_quo
10681 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10682 build_int_cst (TREE_TYPE (arg_quo),
10683 integer_quo));
10684 TREE_SIDE_EFFECTS (result_quo) = 1;
10685 /* Combine the quo assignment with the rem. */
10686 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10687 result_quo, result_rem));
10688 }
10689 }
10690 }
10691 }
10692 return result;
10693 }
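
/* A worked example of the folding above: for remquo (10.0, 3.0, &q) the
   nearest integral quotient is 3, so the call folds to the compound
   expression (*q = 3, 1.0), i.e. *q is set to 3 and the remainder 1.0
   is returned.  */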
10694
10695 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10696 resulting value as a tree with type TYPE. The mpfr precision is
10697 set to the precision of TYPE. We assume that this mpfr function
10698 returns zero if the result could be calculated exactly within the
10699 requested precision. In addition, the integer pointer represented
10700 by ARG_SG will be dereferenced and set to the appropriate signgam
10701 (-1,1) value. */
10702
10703 static tree
10704 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10705 {
10706 tree result = NULL_TREE;
10707
10708 STRIP_NOPS (arg);
10709
10710 /* To proceed, MPFR must exactly represent the target floating point
10711 format, which only happens when the target base equals two. Also
10712 verify ARG is a constant and that ARG_SG is an int pointer. */
10713 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10714 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10715 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10716 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10717 {
10718 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10719
10720 /* In addition to NaN and Inf, the argument cannot be zero or a
10721 negative integer. */
10722 if (real_isfinite (ra)
10723 && ra->cl != rvc_zero
10724 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10725 {
10726 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10727 const int prec = fmt->p;
10728 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10729 int inexact, sg;
10730 mpfr_t m;
10731 tree result_lg;
10732
10733 mpfr_init2 (m, prec);
10734 mpfr_from_real (m, ra, MPFR_RNDN);
10735 mpfr_clear_flags ();
10736 inexact = mpfr_lgamma (m, &sg, m, rnd);
10737 result_lg = do_mpfr_ckconv (m, type, inexact);
10738 mpfr_clear (m);
10739 if (result_lg)
10740 {
10741 tree result_sg;
10742
10743 /* Dereference the arg_sg pointer argument. */
10744 arg_sg = build_fold_indirect_ref (arg_sg);
10745 /* Assign the signgam value into *arg_sg. */
10746 result_sg = fold_build2 (MODIFY_EXPR,
10747 TREE_TYPE (arg_sg), arg_sg,
10748 build_int_cst (TREE_TYPE (arg_sg), sg));
10749 TREE_SIDE_EFFECTS (result_sg) = 1;
10750 /* Combine the signgam assignment with the lgamma result. */
10751 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10752 result_sg, result_lg));
10753 }
10754 }
10755 }
10756
10757 return result;
10758 }
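
/* A worked example of the folding above: lgamma_r (3.0, &sg) folds to
   the compound expression (*sg = 1, 0.6931...), since gamma (3) = 2 and
   log (2) is approximately 0.6931, with the signgam value set to 1.  */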
10759
10760 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10761 mpc function FUNC on it and return the resulting value as a tree
10762 with type TYPE. The mpfr precision is set to the precision of
10763 TYPE. We assume that function FUNC returns zero if the result
10764 could be calculated exactly within the requested precision. If
10765 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10766 in the arguments and/or results. */
10767
10768 tree
10769 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10770 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10771 {
10772 tree result = NULL_TREE;
10773
10774 STRIP_NOPS (arg0);
10775 STRIP_NOPS (arg1);
10776
10777 /* To proceed, MPFR must exactly represent the target floating point
10778 format, which only happens when the target base equals two. */
10779 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10780 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10781 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10783 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10784 {
10785 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10786 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10787 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10788 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10789
10790 if (do_nonfinite
10791 || (real_isfinite (re0) && real_isfinite (im0)
10792 && real_isfinite (re1) && real_isfinite (im1)))
10793 {
10794 const struct real_format *const fmt =
10795 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10796 const int prec = fmt->p;
10797 const mpfr_rnd_t rnd = fmt->round_towards_zero
10798 ? MPFR_RNDZ : MPFR_RNDN;
10799 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10800 int inexact;
10801 mpc_t m0, m1;
10802
10803 mpc_init2 (m0, prec);
10804 mpc_init2 (m1, prec);
10805 mpfr_from_real (mpc_realref (m0), re0, rnd);
10806 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10807 mpfr_from_real (mpc_realref (m1), re1, rnd);
10808 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10809 mpfr_clear_flags ();
10810 inexact = func (m0, m0, m1, crnd);
10811 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10812 mpc_clear (m0);
10813 mpc_clear (m1);
10814 }
10815 }
10816
10817 return result;
10818 }
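
/* This helper backs constant folding of two-argument complex builtins;
   for instance, folding cpow of two COMPLEX_CST operands can go through
   here with FUNC == mpc_pow.  */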
10819
10820 /* A wrapper function for builtin folding that prevents warnings for
10821 "statement without effect" and the like, caused by removing the
10822 call node earlier than the warning is generated. */
10823
10824 tree
10825 fold_call_stmt (gcall *stmt, bool ignore)
10826 {
10827 tree ret = NULL_TREE;
10828 tree fndecl = gimple_call_fndecl (stmt);
10829 location_t loc = gimple_location (stmt);
10830 if (fndecl && fndecl_built_in_p (fndecl)
10831 && !gimple_call_va_arg_pack_p (stmt))
10832 {
10833 int nargs = gimple_call_num_args (stmt);
10834 tree *args = (nargs > 0
10835 ? gimple_call_arg_ptr (stmt, 0)
10836 : &error_mark_node);
10837
10838 if (avoid_folding_inline_builtin (fndecl))
10839 return NULL_TREE;
10840 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10841 {
10842 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10843 }
10844 else
10845 {
10846 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10847 if (ret)
10848 {
10849 /* Propagate location information from original call to
10850 expansion of builtin. Otherwise things like
10851 maybe_emit_chk_warning, that operate on the expansion
10852 of a builtin, will use the wrong location information. */
10853 if (gimple_has_location (stmt))
10854 {
10855 tree realret = ret;
10856 if (TREE_CODE (ret) == NOP_EXPR)
10857 realret = TREE_OPERAND (ret, 0);
10858 if (CAN_HAVE_LOCATION_P (realret)
10859 && !EXPR_HAS_LOCATION (realret))
10860 SET_EXPR_LOCATION (realret, loc);
10861 return realret;
10862 }
10863 return ret;
10864 }
10865 }
10866 }
10867 return NULL_TREE;
10868 }
10869
10870 /* Look up the function in builtin_decl that corresponds to DECL
10871 and set ASMSPEC as its user assembler name. DECL must be a
10872 function decl that declares a builtin. */
10873
10874 void
10875 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10876 {
10877 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10878 && asmspec != 0);
10879
10880 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10881 set_user_assembler_name (builtin, asmspec);
10882
10883 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10884 && INT_TYPE_SIZE < BITS_PER_WORD)
10885 {
10886 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10887 set_user_assembler_libfunc ("ffs", asmspec);
10888 set_optab_libfunc (ffs_optab, mode, "ffs");
10889 }
10890 }
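
/* For example, if a translation unit declares

     extern int ffs (int) __asm__ ("my_ffs");

   the builtin decl for ffs is given the assembler name "my_ffs", and
   because ffs may also be emitted as a libcall when int is narrower
   than a word, the ffs optab libfunc is redirected as well.  */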
10891
10892 /* Return true if DECL is a builtin that expands to a constant or similarly
10893 simple code. */
10894 bool
10895 is_simple_builtin (tree decl)
10896 {
10897 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10898 switch (DECL_FUNCTION_CODE (decl))
10899 {
10900 /* Builtins that expand to constants. */
10901 case BUILT_IN_CONSTANT_P:
10902 case BUILT_IN_EXPECT:
10903 case BUILT_IN_OBJECT_SIZE:
10904 case BUILT_IN_UNREACHABLE:
10905 /* Simple register moves or loads from stack. */
10906 case BUILT_IN_ASSUME_ALIGNED:
10907 case BUILT_IN_RETURN_ADDRESS:
10908 case BUILT_IN_EXTRACT_RETURN_ADDR:
10909 case BUILT_IN_FROB_RETURN_ADDR:
10910 case BUILT_IN_RETURN:
10911 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10912 case BUILT_IN_FRAME_ADDRESS:
10913 case BUILT_IN_VA_END:
10914 case BUILT_IN_STACK_SAVE:
10915 case BUILT_IN_STACK_RESTORE:
10916 case BUILT_IN_DWARF_CFA:
10917 /* Exception state returns or moves registers around. */
10918 case BUILT_IN_EH_FILTER:
10919 case BUILT_IN_EH_POINTER:
10920 case BUILT_IN_EH_COPY_VALUES:
10921 return true;
10922
10923 default:
10924 return false;
10925 }
10926
10927 return false;
10928 }
10929
10930 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10931 most probably expanded inline into reasonably simple code. This is a
10932 superset of is_simple_builtin. */
10933 bool
10934 is_inexpensive_builtin (tree decl)
10935 {
10936 if (!decl)
10937 return false;
10938 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10939 return true;
10940 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10941 switch (DECL_FUNCTION_CODE (decl))
10942 {
10943 case BUILT_IN_ABS:
10944 CASE_BUILT_IN_ALLOCA:
10945 case BUILT_IN_BSWAP16:
10946 case BUILT_IN_BSWAP32:
10947 case BUILT_IN_BSWAP64:
10948 case BUILT_IN_BSWAP128:
10949 case BUILT_IN_CLZ:
10950 case BUILT_IN_CLZIMAX:
10951 case BUILT_IN_CLZL:
10952 case BUILT_IN_CLZLL:
10953 case BUILT_IN_CTZ:
10954 case BUILT_IN_CTZIMAX:
10955 case BUILT_IN_CTZL:
10956 case BUILT_IN_CTZLL:
10957 case BUILT_IN_FFS:
10958 case BUILT_IN_FFSIMAX:
10959 case BUILT_IN_FFSL:
10960 case BUILT_IN_FFSLL:
10961 case BUILT_IN_IMAXABS:
10962 case BUILT_IN_FINITE:
10963 case BUILT_IN_FINITEF:
10964 case BUILT_IN_FINITEL:
10965 case BUILT_IN_FINITED32:
10966 case BUILT_IN_FINITED64:
10967 case BUILT_IN_FINITED128:
10968 case BUILT_IN_FPCLASSIFY:
10969 case BUILT_IN_ISFINITE:
10970 case BUILT_IN_ISINF_SIGN:
10971 case BUILT_IN_ISINF:
10972 case BUILT_IN_ISINFF:
10973 case BUILT_IN_ISINFL:
10974 case BUILT_IN_ISINFD32:
10975 case BUILT_IN_ISINFD64:
10976 case BUILT_IN_ISINFD128:
10977 case BUILT_IN_ISNAN:
10978 case BUILT_IN_ISNANF:
10979 case BUILT_IN_ISNANL:
10980 case BUILT_IN_ISNAND32:
10981 case BUILT_IN_ISNAND64:
10982 case BUILT_IN_ISNAND128:
10983 case BUILT_IN_ISNORMAL:
10984 case BUILT_IN_ISGREATER:
10985 case BUILT_IN_ISGREATEREQUAL:
10986 case BUILT_IN_ISLESS:
10987 case BUILT_IN_ISLESSEQUAL:
10988 case BUILT_IN_ISLESSGREATER:
10989 case BUILT_IN_ISUNORDERED:
10990 case BUILT_IN_VA_ARG_PACK:
10991 case BUILT_IN_VA_ARG_PACK_LEN:
10992 case BUILT_IN_VA_COPY:
10993 case BUILT_IN_TRAP:
10994 case BUILT_IN_SAVEREGS:
10995 case BUILT_IN_POPCOUNTL:
10996 case BUILT_IN_POPCOUNTLL:
10997 case BUILT_IN_POPCOUNTIMAX:
10998 case BUILT_IN_POPCOUNT:
10999 case BUILT_IN_PARITYL:
11000 case BUILT_IN_PARITYLL:
11001 case BUILT_IN_PARITYIMAX:
11002 case BUILT_IN_PARITY:
11003 case BUILT_IN_LABS:
11004 case BUILT_IN_LLABS:
11005 case BUILT_IN_PREFETCH:
11006 case BUILT_IN_ACC_ON_DEVICE:
11007 return true;
11008
11009 default:
11010 return is_simple_builtin (decl);
11011 }
11012
11013 return false;
11014 }
11015
11016 /* Return true if T is a constant and the value cast to a target char
11017 can be represented by a host char.
11018 Store the cast char constant in *P if so. */
11019
11020 bool
11021 target_char_cst_p (tree t, char *p)
11022 {
11023 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11024 return false;
11025
11026 *p = (char)tree_to_uhwi (t);
11027 return true;
11028 }
11029
11030 /* Return true if the builtin DECL is implemented in a standard library.
11031 Otherwise return false, which doesn't guarantee it is not (thus the list
11032 of handled builtins below may be incomplete). */
11033
11034 bool
11035 builtin_with_linkage_p (tree decl)
11036 {
11037 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11038 switch (DECL_FUNCTION_CODE (decl))
11039 {
11040 CASE_FLT_FN (BUILT_IN_ACOS):
11041 CASE_FLT_FN (BUILT_IN_ACOSH):
11042 CASE_FLT_FN (BUILT_IN_ASIN):
11043 CASE_FLT_FN (BUILT_IN_ASINH):
11044 CASE_FLT_FN (BUILT_IN_ATAN):
11045 CASE_FLT_FN (BUILT_IN_ATANH):
11046 CASE_FLT_FN (BUILT_IN_ATAN2):
11047 CASE_FLT_FN (BUILT_IN_CBRT):
11048 CASE_FLT_FN (BUILT_IN_CEIL):
11049 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11050 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11051 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11052 CASE_FLT_FN (BUILT_IN_COS):
11053 CASE_FLT_FN (BUILT_IN_COSH):
11054 CASE_FLT_FN (BUILT_IN_ERF):
11055 CASE_FLT_FN (BUILT_IN_ERFC):
11056 CASE_FLT_FN (BUILT_IN_EXP):
11057 CASE_FLT_FN (BUILT_IN_EXP2):
11058 CASE_FLT_FN (BUILT_IN_EXPM1):
11059 CASE_FLT_FN (BUILT_IN_FABS):
11060 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11061 CASE_FLT_FN (BUILT_IN_FDIM):
11062 CASE_FLT_FN (BUILT_IN_FLOOR):
11063 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11064 CASE_FLT_FN (BUILT_IN_FMA):
11065 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11066 CASE_FLT_FN (BUILT_IN_FMAX):
11067 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11068 CASE_FLT_FN (BUILT_IN_FMIN):
11069 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11070 CASE_FLT_FN (BUILT_IN_FMOD):
11071 CASE_FLT_FN (BUILT_IN_FREXP):
11072 CASE_FLT_FN (BUILT_IN_HYPOT):
11073 CASE_FLT_FN (BUILT_IN_ILOGB):
11074 CASE_FLT_FN (BUILT_IN_LDEXP):
11075 CASE_FLT_FN (BUILT_IN_LGAMMA):
11076 CASE_FLT_FN (BUILT_IN_LLRINT):
11077 CASE_FLT_FN (BUILT_IN_LLROUND):
11078 CASE_FLT_FN (BUILT_IN_LOG):
11079 CASE_FLT_FN (BUILT_IN_LOG10):
11080 CASE_FLT_FN (BUILT_IN_LOG1P):
11081 CASE_FLT_FN (BUILT_IN_LOG2):
11082 CASE_FLT_FN (BUILT_IN_LOGB):
11083 CASE_FLT_FN (BUILT_IN_LRINT):
11084 CASE_FLT_FN (BUILT_IN_LROUND):
11085 CASE_FLT_FN (BUILT_IN_MODF):
11086 CASE_FLT_FN (BUILT_IN_NAN):
11087 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11088 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11089 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11090 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11091 CASE_FLT_FN (BUILT_IN_POW):
11092 CASE_FLT_FN (BUILT_IN_REMAINDER):
11093 CASE_FLT_FN (BUILT_IN_REMQUO):
11094 CASE_FLT_FN (BUILT_IN_RINT):
11095 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11096 CASE_FLT_FN (BUILT_IN_ROUND):
11097 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11098 CASE_FLT_FN (BUILT_IN_SCALBLN):
11099 CASE_FLT_FN (BUILT_IN_SCALBN):
11100 CASE_FLT_FN (BUILT_IN_SIN):
11101 CASE_FLT_FN (BUILT_IN_SINH):
11102 CASE_FLT_FN (BUILT_IN_SINCOS):
11103 CASE_FLT_FN (BUILT_IN_SQRT):
11104 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11105 CASE_FLT_FN (BUILT_IN_TAN):
11106 CASE_FLT_FN (BUILT_IN_TANH):
11107 CASE_FLT_FN (BUILT_IN_TGAMMA):
11108 CASE_FLT_FN (BUILT_IN_TRUNC):
11109 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11110 return true;
11111
11112 case BUILT_IN_STPCPY:
11113 case BUILT_IN_STPNCPY:
11114 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11115 by libiberty's stpcpy.c for MinGW targets so we need to return true
11116 in order to be able to build libiberty in LTO mode for them. */
11117 return true;
11118
11119 default:
11120 break;
11121 }
11122 return false;
11123 }
11124
11125 /* Return true if OFFRNG is bounded to a subrange of offset values
11126 valid for the largest possible object. */
11127
11128 bool
11129 access_ref::offset_bounded () const
11130 {
11131 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11132 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11133 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11134 }
11135
11136 /* If CALLEE has known side effects, fill in INFO and return true.
11137 See tree-ssa-structalias.cc:find_func_aliases
11138 for the list of builtins we might need to handle here. */
11139
11140 attr_fnspec
11141 builtin_fnspec (tree callee)
11142 {
11143 built_in_function code = DECL_FUNCTION_CODE (callee);
11144
11145 switch (code)
11146 {
11147 /* All the following functions read memory pointed to by
11148 their second argument and write memory pointed to by first
11149 argument.
11150 strcat/strncat additionally reads memory pointed to by the first
11151 argument. */
11152 case BUILT_IN_STRCAT:
11153 case BUILT_IN_STRCAT_CHK:
11154 return "1cW 1 ";
11155 case BUILT_IN_STRNCAT:
11156 case BUILT_IN_STRNCAT_CHK:
11157 return "1cW 13";
11158 case BUILT_IN_STRCPY:
11159 case BUILT_IN_STRCPY_CHK:
11160 return "1cO 1 ";
11161 case BUILT_IN_STPCPY:
11162 case BUILT_IN_STPCPY_CHK:
11163 return ".cO 1 ";
11164 case BUILT_IN_STRNCPY:
11165 case BUILT_IN_MEMCPY:
11166 case BUILT_IN_MEMMOVE:
11167 case BUILT_IN_TM_MEMCPY:
11168 case BUILT_IN_TM_MEMMOVE:
11169 case BUILT_IN_STRNCPY_CHK:
11170 case BUILT_IN_MEMCPY_CHK:
11171 case BUILT_IN_MEMMOVE_CHK:
11172 return "1cO313";
11173 case BUILT_IN_MEMPCPY:
11174 case BUILT_IN_MEMPCPY_CHK:
11175 return ".cO313";
11176 case BUILT_IN_STPNCPY:
11177 case BUILT_IN_STPNCPY_CHK:
11178 return ".cO313";
11179 case BUILT_IN_BCOPY:
11180 return ".c23O3";
11181 case BUILT_IN_BZERO:
11182 return ".cO2";
11183 case BUILT_IN_MEMCMP:
11184 case BUILT_IN_MEMCMP_EQ:
11185 case BUILT_IN_BCMP:
11186 case BUILT_IN_STRNCMP:
11187 case BUILT_IN_STRNCMP_EQ:
11188 case BUILT_IN_STRNCASECMP:
11189 return ".cR3R3";
11190
11191 /* The following functions read memory pointed to by their
11192 first argument. */
11193 CASE_BUILT_IN_TM_LOAD (1):
11194 CASE_BUILT_IN_TM_LOAD (2):
11195 CASE_BUILT_IN_TM_LOAD (4):
11196 CASE_BUILT_IN_TM_LOAD (8):
11197 CASE_BUILT_IN_TM_LOAD (FLOAT):
11198 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11199 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11200 CASE_BUILT_IN_TM_LOAD (M64):
11201 CASE_BUILT_IN_TM_LOAD (M128):
11202 CASE_BUILT_IN_TM_LOAD (M256):
11203 case BUILT_IN_TM_LOG:
11204 case BUILT_IN_TM_LOG_1:
11205 case BUILT_IN_TM_LOG_2:
11206 case BUILT_IN_TM_LOG_4:
11207 case BUILT_IN_TM_LOG_8:
11208 case BUILT_IN_TM_LOG_FLOAT:
11209 case BUILT_IN_TM_LOG_DOUBLE:
11210 case BUILT_IN_TM_LOG_LDOUBLE:
11211 case BUILT_IN_TM_LOG_M64:
11212 case BUILT_IN_TM_LOG_M128:
11213 case BUILT_IN_TM_LOG_M256:
11214 return ".cR ";
11215
11216 case BUILT_IN_INDEX:
11217 case BUILT_IN_RINDEX:
11218 case BUILT_IN_STRCHR:
11219 case BUILT_IN_STRLEN:
11220 case BUILT_IN_STRRCHR:
11221 return ".cR ";
11222 case BUILT_IN_STRNLEN:
11223 return ".cR2";
11224
11225 /* These read memory pointed to by the first argument.
11226 Allocating memory does not have any side-effects apart from
11227 being the definition point for the pointer.
11228 Unix98 specifies that errno is set on allocation failure. */
11229 case BUILT_IN_STRDUP:
11230 return "mCR ";
11231 case BUILT_IN_STRNDUP:
11232 return "mCR2";
11233 /* Allocating memory does not have any side-effects apart from
11234 being the definition point for the pointer. */
11235 case BUILT_IN_MALLOC:
11236 case BUILT_IN_ALIGNED_ALLOC:
11237 case BUILT_IN_CALLOC:
11238 case BUILT_IN_GOMP_ALLOC:
11239 return "mC";
11240 CASE_BUILT_IN_ALLOCA:
11241 return "mc";
11242 /* These read memory pointed to by the first argument with size
11243 in the third argument. */
11244 case BUILT_IN_MEMCHR:
11245 return ".cR3";
11246 /* These read memory pointed to by the first and second arguments. */
11247 case BUILT_IN_STRSTR:
11248 case BUILT_IN_STRPBRK:
11249 case BUILT_IN_STRCASECMP:
11250 case BUILT_IN_STRCSPN:
11251 case BUILT_IN_STRSPN:
11252 case BUILT_IN_STRCMP:
11253 case BUILT_IN_STRCMP_EQ:
11254 return ".cR R ";
11255 /* Freeing memory kills the pointed-to memory. More importantly
11256 the call has to serve as a barrier for moving loads and stores
11257 across it. */
11258 case BUILT_IN_STACK_RESTORE:
11259 case BUILT_IN_FREE:
11260 case BUILT_IN_GOMP_FREE:
11261 return ".co ";
11262 case BUILT_IN_VA_END:
11263 return ".cO ";
11264 /* Realloc serves both as allocation point and deallocation point. */
11265 case BUILT_IN_REALLOC:
11266 return ".Cw ";
11267 case BUILT_IN_GAMMA_R:
11268 case BUILT_IN_GAMMAF_R:
11269 case BUILT_IN_GAMMAL_R:
11270 case BUILT_IN_LGAMMA_R:
11271 case BUILT_IN_LGAMMAF_R:
11272 case BUILT_IN_LGAMMAL_R:
11273 return ".C. Ot";
11274 case BUILT_IN_FREXP:
11275 case BUILT_IN_FREXPF:
11276 case BUILT_IN_FREXPL:
11277 case BUILT_IN_MODF:
11278 case BUILT_IN_MODFF:
11279 case BUILT_IN_MODFL:
11280 return ".c. Ot";
11281 case BUILT_IN_REMQUO:
11282 case BUILT_IN_REMQUOF:
11283 case BUILT_IN_REMQUOL:
11284 return ".c. . Ot";
11285 case BUILT_IN_SINCOS:
11286 case BUILT_IN_SINCOSF:
11287 case BUILT_IN_SINCOSL:
11288 return ".c. OtOt";
11289 case BUILT_IN_MEMSET:
11290 case BUILT_IN_MEMSET_CHK:
11291 case BUILT_IN_TM_MEMSET:
11292 return "1cO3";
11293 CASE_BUILT_IN_TM_STORE (1):
11294 CASE_BUILT_IN_TM_STORE (2):
11295 CASE_BUILT_IN_TM_STORE (4):
11296 CASE_BUILT_IN_TM_STORE (8):
11297 CASE_BUILT_IN_TM_STORE (FLOAT):
11298 CASE_BUILT_IN_TM_STORE (DOUBLE):
11299 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11300 CASE_BUILT_IN_TM_STORE (M64):
11301 CASE_BUILT_IN_TM_STORE (M128):
11302 CASE_BUILT_IN_TM_STORE (M256):
11303 return ".cO ";
11304 case BUILT_IN_STACK_SAVE:
11305 case BUILT_IN_RETURN:
11306 case BUILT_IN_EH_POINTER:
11307 case BUILT_IN_EH_FILTER:
11308 case BUILT_IN_UNWIND_RESUME:
11309 case BUILT_IN_CXA_END_CLEANUP:
11310 case BUILT_IN_EH_COPY_VALUES:
11311 case BUILT_IN_FRAME_ADDRESS:
11312 case BUILT_IN_APPLY_ARGS:
11313 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11314 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11315 case BUILT_IN_PREFETCH:
11316 case BUILT_IN_DWARF_CFA:
11317 case BUILT_IN_RETURN_ADDRESS:
11318 return ".c";
11319 case BUILT_IN_ASSUME_ALIGNED:
11320 return "1cX ";
11321 /* But posix_memalign stores a pointer into the memory pointed to
11322 by its first argument. */
11323 case BUILT_IN_POSIX_MEMALIGN:
11324 return ".cOt";
11325
11326 default:
11327 return "";
11328 }
11329 }
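
/* A rough reading of one of the strings above, "1cO313" (used for
   memcpy and friends): the return value is argument 1; the function is
   otherwise "const"; argument 1 is only written, with the access size
   given by argument 3; argument 2 is only read (and copied into
   argument 1), again with the size given by argument 3.  See
   attr-fnspec.h for the authoritative description of the encoding.  */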
11330