/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type and make sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
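
/* For instance, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  */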

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have. */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
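
/* For instance, if get_object_alignment_1 reports align == 32 and
   bitpos == 16, the address is known to sit 16 bits past a 32-bit
   boundary, so the strongest alignment guaranteed for the address
   itself is least_bit_hwi (16) == 16 bits.  */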

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
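
/* As an illustration, for an SSA_NAME pointer whose ptr_info records an
   alignment of 8 bytes and a misalignment of 4 bytes, the code above
   stores 64 in *ALIGNP and 32 in *BITPOSP (both counted in bits).  */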

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
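
/* For example, string_length ("ab\0cd", 1, 5) returns 2; with 2-byte
   elements, string_length ("a\0b\0\0\0", 2, 3) also returns 2 since the
   third element is the first all-zero one.  */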

/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to its being called with an argument that is
   a character array with no terminating NUL, declared at DECL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, return false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull a warning is also
   issued.  */

bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
  tree size;
  bool exact;
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  if (bound)
    {
      wide_int min, max;
      if (TREE_CODE (bound) == INTEGER_CST)
	min = max = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, &min, &max);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (wi::leu_p (min, wi::to_wide (size)))
	return true;
    }

  if (expr && !TREE_NO_WARNING (expr))
    {
      tree fndecl = get_callee_fndecl (expr);
      const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
    }

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
     {
       if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in an SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
       return lendata.decl;
     }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   from what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return if an embedded null character is found, or if none is
	 found at all.  In the latter case, set the DECL/LEN fields in
	 the DATA structure so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
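
/* For instance, for a constant argument such as &"hello"[1], the code
   above finds SRC == "hello", ELTOFF == 1, and returns ssize_int (4).
   For "foo\0bar" with a non-constant offset it returns NULL_TREE, since
   the result depends on where the search starts.  */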

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after a '\0' character and all
   further ones are assumed to be zero; otherwise it reads as many
   characters as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
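
/* For example, c_readstr ("abc", SImode) yields 0x00636261 on a
   little-endian target and 0x61626300 on a big-endian one; once a NUL
   has been seen, CH sticks at zero, so with NULL_TERMINATED_P the bytes
   past the terminator are never read from STR.  */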

/* Cast a target constant CST to target CHAR and if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
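  /* Concretely, the code below lays out the buffer as:
       word 0:   the caller's hard frame pointer
       word 1:   the address of RECEIVER_LABEL
       word 2+:  the machine-dependent stack save area.  */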

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

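/* Return true if ITER has any remaining call-expression arguments to
   visit.  */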
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
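
/* For instance, validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly a pointer followed by an integer, while
   validate_arglist (exp, POINTER_TYPE, 0) accepts a pointer followed
   by any number of further arguments.  */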

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
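
/* For instance, __builtin_prefetch (p, 1, 3) requests a write prefetch
   with maximal temporal locality; on a target without a prefetch
   pattern the expansion only evaluates P for its side effects.  */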
1465 
1466 /* Get a MEM rtx for expression EXP which is the address of an operand
1467    to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
1468    the maximum length of the block of memory that might be accessed or
1469    NULL if unknown.  */
1470 
1471 static rtx
get_memory_rtx(tree exp,tree len)1472 get_memory_rtx (tree exp, tree len)
1473 {
1474   tree orig_exp = exp, base;
1475   rtx addr, mem;
1476 
1477   /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1478      from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1479   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1480     exp = TREE_OPERAND (exp, 0);
1481 
1482   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1483   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1484 
1485   /* Get an expression we can use to find the attributes to assign to MEM.
1486      First remove any nops.  */
1487   while (CONVERT_EXPR_P (exp)
1488 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1489     exp = TREE_OPERAND (exp, 0);
1490 
1491   /* Build a MEM_REF representing the whole accessed area as a byte blob
1492      (builtin stringops may alias with anything).  */
1493   exp = fold_build2 (MEM_REF,
1494 		     build_array_type (char_type_node,
1495 				       build_range_type (sizetype,
1496 							 size_one_node, len)),
1497 		     exp, build_int_cst (ptr_type_node, 0));
1498 
1499   /* If the MEM_REF has no acceptable address, try to get the base object
1500      from the original address we got, and build an all-aliasing
1501      unknown-sized access to that one.  */
1502   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1503     set_mem_attributes (mem, exp, 0);
1504   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1505 	   && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1506 						      0))))
1507     {
1508       unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1509       exp = build_fold_addr_expr (base);
1510       exp = fold_build2 (MEM_REF,
1511 			 build_array_type (char_type_node,
1512 					   build_range_type (sizetype,
1513 							     size_zero_node,
1514 							     NULL)),
1515 			 exp, build_int_cst (ptr_type_node, 0));
1516       set_mem_attributes (mem, exp, 0);
1517       /* Since we stripped parts, make sure the offset is unknown and the
1518 	 alignment is computed from the original address.  */
1519       clear_mem_offset (mem);
1520       set_mem_align (mem, align);
1521     }
1522   set_mem_alias_set (mem, 0);
1523   return mem;
1524 }
1525 
1526 /* Built-in functions to perform an untyped call and return.  */
1527 
1528 #define apply_args_mode \
1529   (this_target_builtins->x_apply_args_mode)
1530 #define apply_result_mode \
1531   (this_target_builtins->x_apply_result_mode)
1532 
1533 /* Return the size required for the block returned by __builtin_apply_args,
1534    and initialize apply_args_mode.  */
1535 
1536 static int
1537 apply_args_size (void)
1538 {
1539   static int size = -1;
1540   int align;
1541   unsigned int regno;
1542 
1543   /* The values computed by this function never change.  */
1544   if (size < 0)
1545     {
1546       /* The first value is the incoming arg-pointer.  */
1547       size = GET_MODE_SIZE (Pmode);
1548 
1549       /* The second value is the structure value address unless this is
1550 	 passed as an "invisible" first argument.  */
1551       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1552 	size += GET_MODE_SIZE (Pmode);
1553 
1554       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1555 	if (FUNCTION_ARG_REGNO_P (regno))
1556 	  {
1557 	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1558 
1559 	    gcc_assert (mode != VOIDmode);
1560 
1561 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1562 	    if (size % align != 0)
1563 	      size = CEIL (size, align) * align;
1564 	    size += GET_MODE_SIZE (mode);
1565 	    apply_args_mode[regno] = mode;
1566 	  }
1567 	else
1568 	  {
1569 	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1570 	  }
1571     }
1572   return size;
1573 }
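/* Editorial note (worked example): the rounding above aligns SIZE up to
   the next multiple of ALIGN before each register slot is assigned.
   E.g. with size = 4 and align = 8, size % align is nonzero, so
   CEIL (4, 8) * 8 == 8 and the register is stored at offset 8 rather
   than the misaligned offset 4.  */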
1574 
1575 /* Return the size required for the block returned by __builtin_apply,
1576    and initialize apply_result_mode.  */
1577 
1578 static int
1579 apply_result_size (void)
1580 {
1581   static int size = -1;
1582   int align, regno;
1583 
1584   /* The values computed by this function never change.  */
1585   if (size < 0)
1586     {
1587       size = 0;
1588 
1589       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590 	if (targetm.calls.function_value_regno_p (regno))
1591 	  {
1592 	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1593 
1594 	    gcc_assert (mode != VOIDmode);
1595 
1596 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1597 	    if (size % align != 0)
1598 	      size = CEIL (size, align) * align;
1599 	    size += GET_MODE_SIZE (mode);
1600 	    apply_result_mode[regno] = mode;
1601 	  }
1602 	else
1603 	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1604 
1605       /* Allow targets that use untyped_call and untyped_return to override
1606 	 the size so that machine-specific information can be stored here.  */
1607 #ifdef APPLY_RESULT_SIZE
1608       size = APPLY_RESULT_SIZE;
1609 #endif
1610     }
1611   return size;
1612 }
1613 
1614 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1615    the result block is used to save the values; otherwise it is used to
1616    restore the values.  */
1617 
1618 static rtx
1619 result_vector (int savep, rtx result)
1620 {
1621   int regno, size, align, nelts;
1622   fixed_size_mode mode;
1623   rtx reg, mem;
1624   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1625 
1626   size = nelts = 0;
1627   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1628     if ((mode = apply_result_mode[regno]) != VOIDmode)
1629       {
1630 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1631 	if (size % align != 0)
1632 	  size = CEIL (size, align) * align;
1633 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1634 	mem = adjust_address (result, mode, size);
1635 	savevec[nelts++] = (savep
1636 			    ? gen_rtx_SET (mem, reg)
1637 			    : gen_rtx_SET (reg, mem));
1638 	size += GET_MODE_SIZE (mode);
1639       }
1640   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1641 }
1642 
1643 /* Save the state required to perform an untyped call with the same
1644    arguments as were passed to the current function.  */
1645 
1646 static rtx
1647 expand_builtin_apply_args_1 (void)
1648 {
1649   rtx registers, tem;
1650   int size, align, regno;
1651   fixed_size_mode mode;
1652   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1653 
1654   /* Create a block where the arg-pointer, structure value address,
1655      and argument registers can be saved.  */
1656   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1657 
1658   /* Walk past the arg-pointer and structure value address.  */
1659   size = GET_MODE_SIZE (Pmode);
1660   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1661     size += GET_MODE_SIZE (Pmode);
1662 
1663   /* Save each register used in calling a function to the block.  */
1664   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1665     if ((mode = apply_args_mode[regno]) != VOIDmode)
1666       {
1667 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1668 	if (size % align != 0)
1669 	  size = CEIL (size, align) * align;
1670 
1671 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1672 
1673 	emit_move_insn (adjust_address (registers, mode, size), tem);
1674 	size += GET_MODE_SIZE (mode);
1675       }
1676 
1677   /* Save the arg pointer to the block.  */
1678   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1679   /* We need the pointer as the caller actually passed the arguments to
1680      us, not as we might have pretended they were passed.  Make sure it's
1681      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1682   if (STACK_GROWS_DOWNWARD)
1683     tem
1684       = force_operand (plus_constant (Pmode, tem,
1685 				      crtl->args.pretend_args_size),
1686 		       NULL_RTX);
1687   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1688 
1689   size = GET_MODE_SIZE (Pmode);
1690 
1691   /* Save the structure value address unless this is passed as an
1692      "invisible" first argument.  */
1693   if (struct_incoming_value)
1694     emit_move_insn (adjust_address (registers, Pmode, size),
1695 		    copy_to_reg (struct_incoming_value));
1696 
1697   /* Return the address of the block.  */
1698   return copy_addr_to_reg (XEXP (registers, 0));
1699 }
1700 
1701 /* __builtin_apply_args returns a block of memory allocated on
1702    the stack into which is stored the arg pointer, structure
1703    value address, static chain, and all the registers that might
1704    possibly be used in performing a function call.  The code is
1705    moved to the start of the function so the incoming values are
1706    saved.  */
1707 
1708 static rtx
1709 expand_builtin_apply_args (void)
1710 {
1711   /* Don't do __builtin_apply_args more than once in a function.
1712      Save the result of the first call and reuse it.  */
1713   if (apply_args_value != 0)
1714     return apply_args_value;
1715   {
1716     /* When this function is called, it means that registers must be
1717        saved on entry to this function.  So we migrate the
1718        call to the first insn of this function.  */
1719     rtx temp;
1720 
1721     start_sequence ();
1722     temp = expand_builtin_apply_args_1 ();
1723     rtx_insn *seq = get_insns ();
1724     end_sequence ();
1725 
1726     apply_args_value = temp;
1727 
1728     /* Put the insns after the NOTE that starts the function.
1729        If this is inside a start_sequence, make the outer-level insn
1730        chain current, so the code is placed at the start of the
1731        function.  If internal_arg_pointer is a non-virtual pseudo,
1732        it needs to be placed after the insn that initializes
1733        that pseudo.  */
1734     push_topmost_sequence ();
1735     if (REG_P (crtl->args.internal_arg_pointer)
1736 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1737       emit_insn_before (seq, parm_birth_insn);
1738     else
1739       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1740     pop_topmost_sequence ();
1741     return temp;
1742   }
1743 }
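/* Editorial example (a sketch): in user code the builtin simply yields
   a pointer to the block laid out by expand_builtin_apply_args_1 above:

     void *args = __builtin_apply_args ();  */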
1744 
1745 /* Perform an untyped call and save the state required to perform an
1746    untyped return of whatever value was returned by the given function.  */
1747 
1748 static rtx
1749 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1750 {
1751   int size, align, regno;
1752   fixed_size_mode mode;
1753   rtx incoming_args, result, reg, dest, src;
1754   rtx_call_insn *call_insn;
1755   rtx old_stack_level = 0;
1756   rtx call_fusage = 0;
1757   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1758 
1759   arguments = convert_memory_address (Pmode, arguments);
1760 
1761   /* Create a block where the return registers can be saved.  */
1762   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1763 
1764   /* Fetch the arg pointer from the ARGUMENTS block.  */
1765   incoming_args = gen_reg_rtx (Pmode);
1766   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1767   if (!STACK_GROWS_DOWNWARD)
1768     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1769 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1770 
1771   /* Push a new argument block and copy the arguments.  Do not allow
1772      the (potential) memcpy call below to interfere with our stack
1773      manipulations.  */
1774   do_pending_stack_adjust ();
1775   NO_DEFER_POP;
1776 
1777   /* Save the stack with nonlocal if available.  */
1778   if (targetm.have_save_stack_nonlocal ())
1779     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1780   else
1781     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1782 
1783   /* Allocate a block of memory onto the stack and copy the memory
1784      arguments to the outgoing arguments address.  We can pass TRUE
1785      as the 4th argument because we just saved the stack pointer
1786      and will restore it right after the call.  */
1787   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1788 
1789   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1790      may have already set current_function_calls_alloca to true.
1791      current_function_calls_alloca won't be set if argsize is zero,
1792      so we have to guarantee need_drap is true here.  */
1793   if (SUPPORTS_STACK_ALIGNMENT)
1794     crtl->need_drap = true;
1795 
1796   dest = virtual_outgoing_args_rtx;
1797   if (!STACK_GROWS_DOWNWARD)
1798     {
1799       if (CONST_INT_P (argsize))
1800 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1801       else
1802 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1803     }
1804   dest = gen_rtx_MEM (BLKmode, dest);
1805   set_mem_align (dest, PARM_BOUNDARY);
1806   src = gen_rtx_MEM (BLKmode, incoming_args);
1807   set_mem_align (src, PARM_BOUNDARY);
1808   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1809 
1810   /* Refer to the argument block.  */
1811   apply_args_size ();
1812   arguments = gen_rtx_MEM (BLKmode, arguments);
1813   set_mem_align (arguments, PARM_BOUNDARY);
1814 
1815   /* Walk past the arg-pointer and structure value address.  */
1816   size = GET_MODE_SIZE (Pmode);
1817   if (struct_value)
1818     size += GET_MODE_SIZE (Pmode);
1819 
1820   /* Restore each of the registers previously saved.  Make USE insns
1821      for each of these registers for use in making the call.  */
1822   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1823     if ((mode = apply_args_mode[regno]) != VOIDmode)
1824       {
1825 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1826 	if (size % align != 0)
1827 	  size = CEIL (size, align) * align;
1828 	reg = gen_rtx_REG (mode, regno);
1829 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1830 	use_reg (&call_fusage, reg);
1831 	size += GET_MODE_SIZE (mode);
1832       }
1833 
1834   /* Restore the structure value address unless this is passed as an
1835      "invisible" first argument.  */
1836   size = GET_MODE_SIZE (Pmode);
1837   if (struct_value)
1838     {
1839       rtx value = gen_reg_rtx (Pmode);
1840       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1841       emit_move_insn (struct_value, value);
1842       if (REG_P (struct_value))
1843 	use_reg (&call_fusage, struct_value);
1844     }
1845 
1846   /* All arguments and registers used for the call are set up by now!  */
1847   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1848 
1849   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
1850      work is needed, and we don't want to load it into a register as an
1851      optimization, because prepare_call_address already did so if needed.  */
1852   if (GET_CODE (function) != SYMBOL_REF)
1853     function = memory_address (FUNCTION_MODE, function);
1854 
1855   /* Generate the actual call instruction and save the return value.  */
1856   if (targetm.have_untyped_call ())
1857     {
1858       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1859       emit_call_insn (targetm.gen_untyped_call (mem, result,
1860 						result_vector (1, result)));
1861     }
1862   else if (targetm.have_call_value ())
1863     {
1864       rtx valreg = 0;
1865 
1866       /* Locate the unique return register.  It is not possible to
1867 	 express a call that sets more than one return register using
1868 	 call_value; use untyped_call for that.  In fact, untyped_call
1869 	 only needs to save the return registers in the given block.  */
1870       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1871 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1872 	  {
1873 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1874 
1875 	    valreg = gen_rtx_REG (mode, regno);
1876 	  }
1877 
1878       emit_insn (targetm.gen_call_value (valreg,
1879 					 gen_rtx_MEM (FUNCTION_MODE, function),
1880 					 const0_rtx, NULL_RTX, const0_rtx));
1881 
1882       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1883     }
1884   else
1885     gcc_unreachable ();
1886 
1887   /* Find the CALL insn we just emitted, and attach the register usage
1888      information.  */
1889   call_insn = last_call_insn ();
1890   add_function_usage_to (call_insn, call_fusage);
1891 
1892   /* Restore the stack.  */
1893   if (targetm.have_save_stack_nonlocal ())
1894     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1895   else
1896     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1897   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1898 
1899   OK_DEFER_POP;
1900 
1901   /* Return the address of the result block.  */
1902   result = copy_addr_to_reg (XEXP (result, 0));
1903   return convert_memory_address (ptr_mode, result);
1904 }
1905 
1906 /* Perform an untyped return.  */
1907 
1908 static void
1909 expand_builtin_return (rtx result)
1910 {
1911   int size, align, regno;
1912   fixed_size_mode mode;
1913   rtx reg;
1914   rtx_insn *call_fusage = 0;
1915 
1916   result = convert_memory_address (Pmode, result);
1917 
1918   apply_result_size ();
1919   result = gen_rtx_MEM (BLKmode, result);
1920 
1921   if (targetm.have_untyped_return ())
1922     {
1923       rtx vector = result_vector (0, result);
1924       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1925       emit_barrier ();
1926       return;
1927     }
1928 
1929   /* Restore the return value and note that each value is used.  */
1930   size = 0;
1931   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1932     if ((mode = apply_result_mode[regno]) != VOIDmode)
1933       {
1934 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1935 	if (size % align != 0)
1936 	  size = CEIL (size, align) * align;
1937 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1938 	emit_move_insn (reg, adjust_address (result, mode, size));
1939 
1940 	push_to_sequence (call_fusage);
1941 	emit_use (reg);
1942 	call_fusage = get_insns ();
1943 	end_sequence ();
1944 	size += GET_MODE_SIZE (mode);
1945       }
1946 
1947   /* Put the USE insns before the return.  */
1948   emit_insn (call_fusage);
1949 
1950   /* Return whatever values were restored by jumping directly to the end
1951      of the function.  */
1952   expand_naked_return ();
1953 }
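/* Editorial example (a sketch of the usual documented usage): the three
   builtins above combine to forward the current function's arguments to
   another function and return its result untyped:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size of the pushed argument block.  */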
1954 
1955 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1956 
1957 static enum type_class
1958 type_to_class (tree type)
1959 {
1960   switch (TREE_CODE (type))
1961     {
1962     case VOID_TYPE:	   return void_type_class;
1963     case INTEGER_TYPE:	   return integer_type_class;
1964     case ENUMERAL_TYPE:	   return enumeral_type_class;
1965     case BOOLEAN_TYPE:	   return boolean_type_class;
1966     case POINTER_TYPE:	   return pointer_type_class;
1967     case REFERENCE_TYPE:   return reference_type_class;
1968     case OFFSET_TYPE:	   return offset_type_class;
1969     case REAL_TYPE:	   return real_type_class;
1970     case COMPLEX_TYPE:	   return complex_type_class;
1971     case FUNCTION_TYPE:	   return function_type_class;
1972     case METHOD_TYPE:	   return method_type_class;
1973     case RECORD_TYPE:	   return record_type_class;
1974     case UNION_TYPE:
1975     case QUAL_UNION_TYPE:  return union_type_class;
1976     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1977 				   ? string_type_class : array_type_class);
1978     case LANG_TYPE:	   return lang_type_class;
1979     default:		   return no_type_class;
1980     }
1981 }
1982 
1983 /* Expand a call EXP to __builtin_classify_type.  */
1984 
1985 static rtx
1986 expand_builtin_classify_type (tree exp)
1987 {
1988   if (call_expr_nargs (exp))
1989     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1990   return GEN_INT (no_type_class);
1991 }
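/* Editorial example (a sketch): the call folds to a constant from the
   type_class enumeration after the usual argument promotions, e.g.

     __builtin_classify_type (42)          -> integer_type_class
     __builtin_classify_type (3.14)        -> real_type_class
     __builtin_classify_type ((int *) 0)   -> pointer_type_class  */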
1992 
1993 /* This helper macro, meant to be used in mathfn_built_in below, determines
1994    which among a set of builtin math functions is appropriate for a given type
1995    mode.  The `F' (float) and `L' (long double) are automatically generated
1996    from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
1997    types, there are additional types that are considered with 'F32', 'F64',
1998    'F128', etc. suffixes.  */
1999 #define CASE_MATHFN(MATHFN) \
2000   CASE_CFN_##MATHFN: \
2001   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2002   fcodel = BUILT_IN_##MATHFN##L ; break;
2003 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2004    types.  */
2005 #define CASE_MATHFN_FLOATN(MATHFN) \
2006   CASE_CFN_##MATHFN: \
2007   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2008   fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2009   fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2010   fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2011   fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2012   break;
2013 /* Similar to above, but appends _R after any F/L suffix.  */
2014 #define CASE_MATHFN_REENT(MATHFN) \
2015   case CFN_BUILT_IN_##MATHFN##_R: \
2016   case CFN_BUILT_IN_##MATHFN##F_R: \
2017   case CFN_BUILT_IN_##MATHFN##L_R: \
2018   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2019   fcodel = BUILT_IN_##MATHFN##L_R ; break;
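/* Editorial note: these macros are plain token pasting; for instance
   CASE_MATHFN (SIN) expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   selecting the double, float and long double variants of sin.  */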
2020 
2021 /* Return a function equivalent to FN but operating on floating-point
2022    values of type TYPE, or END_BUILTINS if no such function exists.
2023    This is purely an operation on function codes; it does not guarantee
2024    that the target actually has an implementation of the function.  */
2025 
2026 static built_in_function
2027 mathfn_built_in_2 (tree type, combined_fn fn)
2028 {
2029   tree mtype;
2030   built_in_function fcode, fcodef, fcodel;
2031   built_in_function fcodef16 = END_BUILTINS;
2032   built_in_function fcodef32 = END_BUILTINS;
2033   built_in_function fcodef64 = END_BUILTINS;
2034   built_in_function fcodef128 = END_BUILTINS;
2035   built_in_function fcodef32x = END_BUILTINS;
2036   built_in_function fcodef64x = END_BUILTINS;
2037   built_in_function fcodef128x = END_BUILTINS;
2038 
2039   switch (fn)
2040     {
2041     CASE_MATHFN (ACOS)
2042     CASE_MATHFN (ACOSH)
2043     CASE_MATHFN (ASIN)
2044     CASE_MATHFN (ASINH)
2045     CASE_MATHFN (ATAN)
2046     CASE_MATHFN (ATAN2)
2047     CASE_MATHFN (ATANH)
2048     CASE_MATHFN (CBRT)
2049     CASE_MATHFN_FLOATN (CEIL)
2050     CASE_MATHFN (CEXPI)
2051     CASE_MATHFN_FLOATN (COPYSIGN)
2052     CASE_MATHFN (COS)
2053     CASE_MATHFN (COSH)
2054     CASE_MATHFN (DREM)
2055     CASE_MATHFN (ERF)
2056     CASE_MATHFN (ERFC)
2057     CASE_MATHFN (EXP)
2058     CASE_MATHFN (EXP10)
2059     CASE_MATHFN (EXP2)
2060     CASE_MATHFN (EXPM1)
2061     CASE_MATHFN (FABS)
2062     CASE_MATHFN (FDIM)
2063     CASE_MATHFN_FLOATN (FLOOR)
2064     CASE_MATHFN_FLOATN (FMA)
2065     CASE_MATHFN_FLOATN (FMAX)
2066     CASE_MATHFN_FLOATN (FMIN)
2067     CASE_MATHFN (FMOD)
2068     CASE_MATHFN (FREXP)
2069     CASE_MATHFN (GAMMA)
2070     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2071     CASE_MATHFN (HUGE_VAL)
2072     CASE_MATHFN (HYPOT)
2073     CASE_MATHFN (ILOGB)
2074     CASE_MATHFN (ICEIL)
2075     CASE_MATHFN (IFLOOR)
2076     CASE_MATHFN (INF)
2077     CASE_MATHFN (IRINT)
2078     CASE_MATHFN (IROUND)
2079     CASE_MATHFN (ISINF)
2080     CASE_MATHFN (J0)
2081     CASE_MATHFN (J1)
2082     CASE_MATHFN (JN)
2083     CASE_MATHFN (LCEIL)
2084     CASE_MATHFN (LDEXP)
2085     CASE_MATHFN (LFLOOR)
2086     CASE_MATHFN (LGAMMA)
2087     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2088     CASE_MATHFN (LLCEIL)
2089     CASE_MATHFN (LLFLOOR)
2090     CASE_MATHFN (LLRINT)
2091     CASE_MATHFN (LLROUND)
2092     CASE_MATHFN (LOG)
2093     CASE_MATHFN (LOG10)
2094     CASE_MATHFN (LOG1P)
2095     CASE_MATHFN (LOG2)
2096     CASE_MATHFN (LOGB)
2097     CASE_MATHFN (LRINT)
2098     CASE_MATHFN (LROUND)
2099     CASE_MATHFN (MODF)
2100     CASE_MATHFN (NAN)
2101     CASE_MATHFN (NANS)
2102     CASE_MATHFN_FLOATN (NEARBYINT)
2103     CASE_MATHFN (NEXTAFTER)
2104     CASE_MATHFN (NEXTTOWARD)
2105     CASE_MATHFN (POW)
2106     CASE_MATHFN (POWI)
2107     CASE_MATHFN (POW10)
2108     CASE_MATHFN (REMAINDER)
2109     CASE_MATHFN (REMQUO)
2110     CASE_MATHFN_FLOATN (RINT)
2111     CASE_MATHFN_FLOATN (ROUND)
2112     CASE_MATHFN_FLOATN (ROUNDEVEN)
2113     CASE_MATHFN (SCALB)
2114     CASE_MATHFN (SCALBLN)
2115     CASE_MATHFN (SCALBN)
2116     CASE_MATHFN (SIGNBIT)
2117     CASE_MATHFN (SIGNIFICAND)
2118     CASE_MATHFN (SIN)
2119     CASE_MATHFN (SINCOS)
2120     CASE_MATHFN (SINH)
2121     CASE_MATHFN_FLOATN (SQRT)
2122     CASE_MATHFN (TAN)
2123     CASE_MATHFN (TANH)
2124     CASE_MATHFN (TGAMMA)
2125     CASE_MATHFN_FLOATN (TRUNC)
2126     CASE_MATHFN (Y0)
2127     CASE_MATHFN (Y1)
2128     CASE_MATHFN (YN)
2129 
2130     default:
2131       return END_BUILTINS;
2132     }
2133 
2134   mtype = TYPE_MAIN_VARIANT (type);
2135   if (mtype == double_type_node)
2136     return fcode;
2137   else if (mtype == float_type_node)
2138     return fcodef;
2139   else if (mtype == long_double_type_node)
2140     return fcodel;
2141   else if (mtype == float16_type_node)
2142     return fcodef16;
2143   else if (mtype == float32_type_node)
2144     return fcodef32;
2145   else if (mtype == float64_type_node)
2146     return fcodef64;
2147   else if (mtype == float128_type_node)
2148     return fcodef128;
2149   else if (mtype == float32x_type_node)
2150     return fcodef32x;
2151   else if (mtype == float64x_type_node)
2152     return fcodef64x;
2153   else if (mtype == float128x_type_node)
2154     return fcodef128x;
2155   else
2156     return END_BUILTINS;
2157 }
2158 
2159 /* Return the math function equivalent to FN but operating directly on TYPE,
2160    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
2161    otherwise use the explicit declaration.  If we can't do the conversion,
2162    return null.  */
2163 
2164 static tree
2165 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2166 {
2167   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2168   if (fcode2 == END_BUILTINS)
2169     return NULL_TREE;
2170 
2171   if (implicit_p && !builtin_decl_implicit_p (fcode2))
2172     return NULL_TREE;
2173 
2174   return builtin_decl_explicit (fcode2);
2175 }
2176 
2177 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2178 
2179 tree
2180 mathfn_built_in (tree type, combined_fn fn)
2181 {
2182   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2183 }
2184 
2185 /* Like mathfn_built_in_1, but take a built_in_function and
2186    always use the implicit builtin declarations.  */
2187 
2188 tree
2189 mathfn_built_in (tree type, enum built_in_function fn)
2190 {
2191   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2192 }
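/* Editorial example (a sketch): mathfn_built_in (float_type_node,
   BUILT_IN_SIN) maps the double function code to its float variant and
   returns the decl for BUILT_IN_SINF, or NULL_TREE when that variant is
   not implicitly available.  */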
2193 
2194 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2195    return its code, otherwise return IFN_LAST.  Note that this function
2196    only tests whether the function is defined in internal-fn.def, not whether
2197    it is actually available on the target.  */
2198 
2199 internal_fn
2200 associated_internal_fn (tree fndecl)
2201 {
2202   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2203   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2204   switch (DECL_FUNCTION_CODE (fndecl))
2205     {
2206 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2209     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2210     CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2211 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2212     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2213 #include "internal-fn.def"
2214 
2215     CASE_FLT_FN (BUILT_IN_POW10):
2216       return IFN_EXP10;
2217 
2218     CASE_FLT_FN (BUILT_IN_DREM):
2219       return IFN_REMAINDER;
2220 
2221     CASE_FLT_FN (BUILT_IN_SCALBN):
2222     CASE_FLT_FN (BUILT_IN_SCALBLN):
2223       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2224 	return IFN_LDEXP;
2225       return IFN_LAST;
2226 
2227     default:
2228       return IFN_LAST;
2229     }
2230 }
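/* Editorial note: the SCALBN/SCALBLN mapping above is radix dependent
   because scalbn (x, n) computes x * FLT_RADIX**n; when the type's
   format has b == 2 that is exactly ldexp (x, n), hence IFN_LDEXP,
   while for decimal floating types (b == 10) there is no equivalent
   and IFN_LAST is returned.  */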
2231 
2232 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2233    on the current target by a call to an internal function, return the
2234    code of that internal function, otherwise return IFN_LAST.  The caller
2235    is responsible for ensuring that any side-effects of the built-in
2236    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2237    must decide that the errno result isn't needed or make it available
2238    in some other way.  */
2239 
2240 internal_fn
2241 replacement_internal_fn (gcall *call)
2242 {
2243   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2244     {
2245       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2246       if (ifn != IFN_LAST)
2247 	{
2248 	  tree_pair types = direct_internal_fn_types (ifn, call);
2249 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2250 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2251 	    return ifn;
2252 	}
2253     }
2254   return IFN_LAST;
2255 }
2256 
2257 /* Expand a call to the builtin ternary math functions (fma).
2258    Return NULL_RTX if a normal call should be emitted rather than expanding the
2259    function in-line.  EXP is the expression that is a call to the builtin
2260    function; if convenient, the result should be placed in TARGET.
2261    SUBTARGET may be used as the target for computing one of EXP's
2262    operands.  */
2263 
2264 static rtx
2265 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2266 {
2267   optab builtin_optab;
2268   rtx op0, op1, op2, result;
2269   rtx_insn *insns;
2270   tree fndecl = get_callee_fndecl (exp);
2271   tree arg0, arg1, arg2;
2272   machine_mode mode;
2273 
2274   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2275     return NULL_RTX;
2276 
2277   arg0 = CALL_EXPR_ARG (exp, 0);
2278   arg1 = CALL_EXPR_ARG (exp, 1);
2279   arg2 = CALL_EXPR_ARG (exp, 2);
2280 
2281   switch (DECL_FUNCTION_CODE (fndecl))
2282     {
2283     CASE_FLT_FN (BUILT_IN_FMA):
2284     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2285       builtin_optab = fma_optab; break;
2286     default:
2287       gcc_unreachable ();
2288     }
2289 
2290   /* Make a suitable register to place result in.  */
2291   mode = TYPE_MODE (TREE_TYPE (exp));
2292 
2293   /* Before working hard, check whether the instruction is available.  */
2294   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2295     return NULL_RTX;
2296 
2297   result = gen_reg_rtx (mode);
2298 
2299   /* Always stabilize the argument list.  */
2300   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2301   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2302   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2303 
2304   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2305   op1 = expand_normal (arg1);
2306   op2 = expand_normal (arg2);
2307 
2308   start_sequence ();
2309 
2310   /* Compute into RESULT.
2311      Set RESULT to wherever the result comes back.  */
2312   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2313 			      result, 0);
2314 
2315   /* If we were unable to expand via the builtin, stop the sequence
2316      (without outputting the insns) and call the library function
2317      with the stabilized argument list.  */
2318   if (result == 0)
2319     {
2320       end_sequence ();
2321       return expand_call (exp, target, target == const0_rtx);
2322     }
2323 
2324   /* Output the entire sequence.  */
2325   insns = get_insns ();
2326   end_sequence ();
2327   emit_insn (insns);
2328 
2329   return result;
2330 }
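/* Editorial note: for __builtin_fma (a, b, c) the expansion above emits
   the target's fused multiply-add insn when fma_optab has a handler for
   the type's mode, computing a * b + c with a single rounding;
   otherwise the stabilized call falls back to the libm fma.  */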
2331 
2332 /* Expand a call to the builtin sin and cos math functions.
2333    Return NULL_RTX if a normal call should be emitted rather than expanding the
2334    function in-line.  EXP is the expression that is a call to the builtin
2335    function; if convenient, the result should be placed in TARGET.
2336    SUBTARGET may be used as the target for computing one of EXP's
2337    operands.  */
2338 
2339 static rtx
2340 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2341 {
2342   optab builtin_optab;
2343   rtx op0;
2344   rtx_insn *insns;
2345   tree fndecl = get_callee_fndecl (exp);
2346   machine_mode mode;
2347   tree arg;
2348 
2349   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2350     return NULL_RTX;
2351 
2352   arg = CALL_EXPR_ARG (exp, 0);
2353 
2354   switch (DECL_FUNCTION_CODE (fndecl))
2355     {
2356     CASE_FLT_FN (BUILT_IN_SIN):
2357     CASE_FLT_FN (BUILT_IN_COS):
2358       builtin_optab = sincos_optab; break;
2359     default:
2360       gcc_unreachable ();
2361     }
2362 
2363   /* Make a suitable register to place result in.  */
2364   mode = TYPE_MODE (TREE_TYPE (exp));
2365 
2366   /* Check if the sincos insn is available; otherwise fall back
2367      to the sin or cos insn.  */
2368   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2369     switch (DECL_FUNCTION_CODE (fndecl))
2370       {
2371       CASE_FLT_FN (BUILT_IN_SIN):
2372 	builtin_optab = sin_optab; break;
2373       CASE_FLT_FN (BUILT_IN_COS):
2374 	builtin_optab = cos_optab; break;
2375       default:
2376 	gcc_unreachable ();
2377       }
2378 
2379   /* Before working hard, check whether the instruction is available.  */
2380   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2381     {
2382       rtx result = gen_reg_rtx (mode);
2383 
2384       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2385 	 need to expand the argument again.  This way, we will not perform
2386 	 side-effects more than once.  */
2387       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2388 
2389       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2390 
2391       start_sequence ();
2392 
2393       /* Compute into RESULT.
2394 	 Set RESULT to wherever the result comes back.  */
2395       if (builtin_optab == sincos_optab)
2396 	{
2397 	  int ok;
2398 
2399 	  switch (DECL_FUNCTION_CODE (fndecl))
2400 	    {
2401 	    CASE_FLT_FN (BUILT_IN_SIN):
2402 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2403 	      break;
2404 	    CASE_FLT_FN (BUILT_IN_COS):
2405 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2406 	      break;
2407 	    default:
2408 	      gcc_unreachable ();
2409 	    }
2410 	  gcc_assert (ok);
2411 	}
2412       else
2413 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2414 
2415       if (result != 0)
2416 	{
2417 	  /* Output the entire sequence.  */
2418 	  insns = get_insns ();
2419 	  end_sequence ();
2420 	  emit_insn (insns);
2421 	  return result;
2422 	}
2423 
2424       /* If we were unable to expand via the builtin, stop the sequence
2425 	 (without outputting the insns) and call the library function
2426 	 with the stabilized argument list.  */
2427       end_sequence ();
2428     }
2429 
2430   return expand_call (exp, target, target == const0_rtx);
2431 }
2432 
2433 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2434    return an RTL instruction code that implements the functionality.
2435    If that isn't possible or available return CODE_FOR_nothing.  */
2436 
2437 static enum insn_code
2438 interclass_mathfn_icode (tree arg, tree fndecl)
2439 {
2440   bool errno_set = false;
2441   optab builtin_optab = unknown_optab;
2442   machine_mode mode;
2443 
2444   switch (DECL_FUNCTION_CODE (fndecl))
2445     {
2446     CASE_FLT_FN (BUILT_IN_ILOGB):
2447       errno_set = true; builtin_optab = ilogb_optab; break;
2448     CASE_FLT_FN (BUILT_IN_ISINF):
2449       builtin_optab = isinf_optab; break;
2450     case BUILT_IN_ISNORMAL:
2451     case BUILT_IN_ISFINITE:
2452     CASE_FLT_FN (BUILT_IN_FINITE):
2453     case BUILT_IN_FINITED32:
2454     case BUILT_IN_FINITED64:
2455     case BUILT_IN_FINITED128:
2456     case BUILT_IN_ISINFD32:
2457     case BUILT_IN_ISINFD64:
2458     case BUILT_IN_ISINFD128:
2459       /* These builtins have no optabs (yet).  */
2460       break;
2461     default:
2462       gcc_unreachable ();
2463     }
2464 
2465   /* There's no easy way to detect the case where we need to set EDOM.  */
2466   if (flag_errno_math && errno_set)
2467     return CODE_FOR_nothing;
2468 
2469   /* Optab mode depends on the mode of the input argument.  */
2470   mode = TYPE_MODE (TREE_TYPE (arg));
2471 
2472   if (builtin_optab)
2473     return optab_handler (builtin_optab, mode);
2474   return CODE_FOR_nothing;
2475 }
2476 
2477 /* Expand a call to one of the builtin math functions that operate on
2478    a floating-point argument and produce an integer result (ilogb, isinf,
2479    isnan, etc).
2480    Return 0 if a normal call should be emitted rather than expanding the
2481    function in-line.  EXP is the expression that is a call to the builtin
2482    function; if convenient, the result should be placed in TARGET.  */
2483 
2484 static rtx
2485 expand_builtin_interclass_mathfn (tree exp, rtx target)
2486 {
2487   enum insn_code icode = CODE_FOR_nothing;
2488   rtx op0;
2489   tree fndecl = get_callee_fndecl (exp);
2490   machine_mode mode;
2491   tree arg;
2492 
2493   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2494     return NULL_RTX;
2495 
2496   arg = CALL_EXPR_ARG (exp, 0);
2497   icode = interclass_mathfn_icode (arg, fndecl);
2498   mode = TYPE_MODE (TREE_TYPE (arg));
2499 
2500   if (icode != CODE_FOR_nothing)
2501     {
2502       class expand_operand ops[1];
2503       rtx_insn *last = get_last_insn ();
2504       tree orig_arg = arg;
2505 
2506       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2507 	 need to expand the argument again.  This way, we will not perform
2508 	 side-effects more than once.  */
2509       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2510 
2511       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2512 
2513       if (mode != GET_MODE (op0))
2514 	op0 = convert_to_mode (mode, op0, 0);
2515 
2516       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2517       if (maybe_legitimize_operands (icode, 0, 1, ops)
2518 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2519 	return ops[0].value;
2520 
2521       delete_insns_since (last);
2522       CALL_EXPR_ARG (exp, 0) = orig_arg;
2523     }
2524 
2525   return NULL_RTX;
2526 }
2527 
2528 /* Expand a call to the builtin sincos math function.
2529    Return NULL_RTX if a normal call should be emitted rather than expanding the
2530    function in-line.  EXP is the expression that is a call to the builtin
2531    function.  */
2532 
2533 static rtx
2534 expand_builtin_sincos (tree exp)
2535 {
2536   rtx op0, op1, op2, target1, target2;
2537   machine_mode mode;
2538   tree arg, sinp, cosp;
2539   int result;
2540   location_t loc = EXPR_LOCATION (exp);
2541   tree alias_type, alias_off;
2542 
2543   if (!validate_arglist (exp, REAL_TYPE,
2544  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2545     return NULL_RTX;
2546 
2547   arg = CALL_EXPR_ARG (exp, 0);
2548   sinp = CALL_EXPR_ARG (exp, 1);
2549   cosp = CALL_EXPR_ARG (exp, 2);
2550 
2551   /* Make a suitable register to place result in.  */
2552   mode = TYPE_MODE (TREE_TYPE (arg));
2553 
2554   /* Check if the sincos insn is available; otherwise emit the call.  */
2555   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2556     return NULL_RTX;
2557 
2558   target1 = gen_reg_rtx (mode);
2559   target2 = gen_reg_rtx (mode);
2560 
2561   op0 = expand_normal (arg);
2562   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2563   alias_off = build_int_cst (alias_type, 0);
2564   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2565 					sinp, alias_off));
2566   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2567 					cosp, alias_off));
2568 
2569   /* Compute the two results into target1 and target2 via the
2570      sincos insn.  */
2571   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2572   gcc_assert (result);
2573 
2574   /* Move target1 and target2 to the memory locations indicated
2575      by op1 and op2.  */
2576   emit_move_insn (op1, target1);
2577   emit_move_insn (op2, target2);
2578 
2579   return const0_rtx;
2580 }
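/* Editorial example (a sketch): a call such as

     double s, c;
     sincos (x, &s, &c);

   expands here to a single two-output sincos insn when the target
   provides sincos_optab for DFmode; otherwise NULL_RTX is returned and
   the library call is kept.  */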
2581 
2582 /* Expand a call to the internal cexpi builtin to the sincos math function.
2583    EXP is the expression that is a call to the builtin function; if convenient,
2584    the result should be placed in TARGET.  */
2585 
2586 static rtx
2587 expand_builtin_cexpi (tree exp, rtx target)
2588 {
2589   tree fndecl = get_callee_fndecl (exp);
2590   tree arg, type;
2591   machine_mode mode;
2592   rtx op0, op1, op2;
2593   location_t loc = EXPR_LOCATION (exp);
2594 
2595   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2596     return NULL_RTX;
2597 
2598   arg = CALL_EXPR_ARG (exp, 0);
2599   type = TREE_TYPE (arg);
2600   mode = TYPE_MODE (TREE_TYPE (arg));
2601 
2602   /* Try expanding via a sincos optab, fall back to emitting a libcall
2603      to sincos or cexp.  We are sure sincos or cexp exists because cexpi
2604      is only generated from sincos or cexp, or when either is available.  */
2605   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2606     {
2607       op1 = gen_reg_rtx (mode);
2608       op2 = gen_reg_rtx (mode);
2609 
2610       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2611 
2612       /* Compute into op1 and op2.  */
2613       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2614     }
2615   else if (targetm.libc_has_function (function_sincos))
2616     {
2617       tree call, fn = NULL_TREE;
2618       tree top1, top2;
2619       rtx op1a, op2a;
2620 
2621       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2622 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2623       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2624 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2625       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2626 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2627       else
2628 	gcc_unreachable ();
2629 
2630       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2631       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2632       op1a = copy_addr_to_reg (XEXP (op1, 0));
2633       op2a = copy_addr_to_reg (XEXP (op2, 0));
2634       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2635       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2636 
2637       /* Make sure not to fold the sincos call again.  */
2638       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2639       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2640 				      call, 3, arg, top1, top2));
2641     }
2642   else
2643     {
2644       tree call, fn = NULL_TREE, narg;
2645       tree ctype = build_complex_type (type);
2646 
2647       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2648 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2649       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2650 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2651       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2652 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2653       else
2654 	gcc_unreachable ();
2655 
2656       /* If we don't have a decl for cexp create one.  This is the
2657 	 friendliest fallback if the user calls __builtin_cexpi
2658 	 on a target without full C99 function support.  */
2659       if (fn == NULL_TREE)
2660 	{
2661 	  tree fntype;
2662 	  const char *name = NULL;
2663 
2664 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2665 	    name = "cexpf";
2666 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2667 	    name = "cexp";
2668 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2669 	    name = "cexpl";
2670 
2671 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2672 	  fn = build_fn_decl (name, fntype);
2673 	}
2674 
2675       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2676 			  build_real (type, dconst0), arg);
2677 
2678       /* Make sure not to fold the cexp call again.  */
2679       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2680       return expand_expr (build_call_nary (ctype, call, 1, narg),
2681 			  target, VOIDmode, EXPAND_NORMAL);
2682     }
2683 
2684   /* Now build the proper return type.  */
2685   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2686 			      make_tree (TREE_TYPE (arg), op2),
2687 			      make_tree (TREE_TYPE (arg), op1)),
2688 		      target, VOIDmode, EXPAND_NORMAL);
2689 }
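/* Editorial note: __builtin_cexpi (x) computes cexp (I*x), that is
   cos (x) + I*sin (x).  The three strategies above either produce both
   parts directly with a sincos insn, store them through temporaries
   passed to a sincos libcall, or build the complex argument 0 + x*I and
   call cexp.  */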
2690 
2691 /* Conveniently construct a function call expression.  FNDECL names the
2692    function to be called, N is the number of arguments, and the "..."
2693    parameters are the argument expressions.  Unlike build_call_expr,
2694    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2695 
2696 static tree
2697 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2698 {
2699   va_list ap;
2700   tree fntype = TREE_TYPE (fndecl);
2701   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2702 
2703   va_start (ap, n);
2704   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2705   va_end (ap);
2706   SET_EXPR_LOCATION (fn, loc);
2707   return fn;
2708 }
2709 
2710 /* Expand a call to one of the builtin rounding functions gcc defines
2711    as an extension (lfloor and lceil).  As these are gcc extensions we
2712    do not need to worry about setting errno to EDOM.
2713    If expanding via optab fails, lower expression to (int)(floor(x)).
2714    EXP is the expression that is a call to the builtin function;
2715    if convenient, the result should be placed in TARGET.  */
2716 
2717 static rtx
2718 expand_builtin_int_roundingfn (tree exp, rtx target)
2719 {
2720   convert_optab builtin_optab;
2721   rtx op0, tmp;
2722   rtx_insn *insns;
2723   tree fndecl = get_callee_fndecl (exp);
2724   enum built_in_function fallback_fn;
2725   tree fallback_fndecl;
2726   machine_mode mode;
2727   tree arg;
2728 
2729   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2730     return NULL_RTX;
2731 
2732   arg = CALL_EXPR_ARG (exp, 0);
2733 
2734   switch (DECL_FUNCTION_CODE (fndecl))
2735     {
2736     CASE_FLT_FN (BUILT_IN_ICEIL):
2737     CASE_FLT_FN (BUILT_IN_LCEIL):
2738     CASE_FLT_FN (BUILT_IN_LLCEIL):
2739       builtin_optab = lceil_optab;
2740       fallback_fn = BUILT_IN_CEIL;
2741       break;
2742 
2743     CASE_FLT_FN (BUILT_IN_IFLOOR):
2744     CASE_FLT_FN (BUILT_IN_LFLOOR):
2745     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2746       builtin_optab = lfloor_optab;
2747       fallback_fn = BUILT_IN_FLOOR;
2748       break;
2749 
2750     default:
2751       gcc_unreachable ();
2752     }
2753 
2754   /* Make a suitable register to place result in.  */
2755   mode = TYPE_MODE (TREE_TYPE (exp));
2756 
2757   target = gen_reg_rtx (mode);
2758 
2759   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2760      need to expand the argument again.  This way, we will not perform
2761      side-effects more than once.  */
2762   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2763 
2764   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2765 
2766   start_sequence ();
2767 
2768   /* Compute into TARGET.  */
2769   if (expand_sfix_optab (target, op0, builtin_optab))
2770     {
2771       /* Output the entire sequence.  */
2772       insns = get_insns ();
2773       end_sequence ();
2774       emit_insn (insns);
2775       return target;
2776     }
2777 
2778   /* If we were unable to expand via the builtin, stop the sequence
2779      (without outputting the insns).  */
2780   end_sequence ();
2781 
2782   /* Fall back to floating point rounding optab.  */
2783   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2784 
2785   /* For non-C99 targets we may end up without a fallback fndecl here
2786      if the user called __builtin_lfloor directly.  In this case emit
2787      a call to the floor/ceil variants nevertheless.  This should result
2788    in the best user experience for targets without full C99 support.  */
2789   if (fallback_fndecl == NULL_TREE)
2790     {
2791       tree fntype;
2792       const char *name = NULL;
2793 
2794       switch (DECL_FUNCTION_CODE (fndecl))
2795 	{
2796 	case BUILT_IN_ICEIL:
2797 	case BUILT_IN_LCEIL:
2798 	case BUILT_IN_LLCEIL:
2799 	  name = "ceil";
2800 	  break;
2801 	case BUILT_IN_ICEILF:
2802 	case BUILT_IN_LCEILF:
2803 	case BUILT_IN_LLCEILF:
2804 	  name = "ceilf";
2805 	  break;
2806 	case BUILT_IN_ICEILL:
2807 	case BUILT_IN_LCEILL:
2808 	case BUILT_IN_LLCEILL:
2809 	  name = "ceill";
2810 	  break;
2811 	case BUILT_IN_IFLOOR:
2812 	case BUILT_IN_LFLOOR:
2813 	case BUILT_IN_LLFLOOR:
2814 	  name = "floor";
2815 	  break;
2816 	case BUILT_IN_IFLOORF:
2817 	case BUILT_IN_LFLOORF:
2818 	case BUILT_IN_LLFLOORF:
2819 	  name = "floorf";
2820 	  break;
2821 	case BUILT_IN_IFLOORL:
2822 	case BUILT_IN_LFLOORL:
2823 	case BUILT_IN_LLFLOORL:
2824 	  name = "floorl";
2825 	  break;
2826 	default:
2827 	  gcc_unreachable ();
2828 	}
2829 
2830       fntype = build_function_type_list (TREE_TYPE (arg),
2831 					 TREE_TYPE (arg), NULL_TREE);
2832       fallback_fndecl = build_fn_decl (name, fntype);
2833     }
2834 
2835   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2836 
2837   tmp = expand_normal (exp);
2838   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2839 
2840   /* Truncate the result of the floating point optab to integer
2841      via expand_fix ().  */
2842   target = gen_reg_rtx (mode);
2843   expand_fix (target, tmp, 0);
2844 
2845   return target;
2846 }
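/* Editorial example (a sketch): on a target without an lfloor insn,
   a call such as

     long l = __builtin_lfloor (x);

   is lowered as described above to the equivalent of

     long l = (long) floor (x);

   using the floor/floorf/floorl variant that matches the argument
   type.  */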
2847 
2848 /* Expand a call to one of the builtin math functions doing integer
2849    conversion (lrint).
2850    Return 0 if a normal call should be emitted rather than expanding the
2851    function in-line.  EXP is the expression that is a call to the builtin
2852    function; if convenient, the result should be placed in TARGET.  */
2853 
2854 static rtx
2855 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2856 {
2857   convert_optab builtin_optab;
2858   rtx op0;
2859   rtx_insn *insns;
2860   tree fndecl = get_callee_fndecl (exp);
2861   tree arg;
2862   machine_mode mode;
2863   enum built_in_function fallback_fn = BUILT_IN_NONE;
2864 
2865   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2866     return NULL_RTX;
2867 
2868   arg = CALL_EXPR_ARG (exp, 0);
2869 
2870   switch (DECL_FUNCTION_CODE (fndecl))
2871     {
2872     CASE_FLT_FN (BUILT_IN_IRINT):
2873       fallback_fn = BUILT_IN_LRINT;
2874       gcc_fallthrough ();
2875     CASE_FLT_FN (BUILT_IN_LRINT):
2876     CASE_FLT_FN (BUILT_IN_LLRINT):
2877       builtin_optab = lrint_optab;
2878       break;
2879 
2880     CASE_FLT_FN (BUILT_IN_IROUND):
2881       fallback_fn = BUILT_IN_LROUND;
2882       gcc_fallthrough ();
2883     CASE_FLT_FN (BUILT_IN_LROUND):
2884     CASE_FLT_FN (BUILT_IN_LLROUND):
2885       builtin_optab = lround_optab;
2886       break;
2887 
2888     default:
2889       gcc_unreachable ();
2890     }
2891 
2892   /* There's no easy way to detect the case where we need to set EDOM.  */
2893   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2894     return NULL_RTX;
2895 
2896   /* Make a suitable register to place result in.  */
2897   mode = TYPE_MODE (TREE_TYPE (exp));
2898 
2899   /* Expand inline only when errno setting is known not to be needed.  */
2900   if (!flag_errno_math)
2901     {
2902       rtx result = gen_reg_rtx (mode);
2903 
2904       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2905 	 need to expand the argument again.  This way, we will not perform
2906 	 side-effects more than once.  */
2907       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2908 
2909       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2910 
2911       start_sequence ();
2912 
2913       if (expand_sfix_optab (result, op0, builtin_optab))
2914 	{
2915 	  /* Output the entire sequence.  */
2916 	  insns = get_insns ();
2917 	  end_sequence ();
2918 	  emit_insn (insns);
2919 	  return result;
2920 	}
2921 
2922       /* If we were unable to expand via the builtin, stop the sequence
2923 	 (without outputting the insns) and call the library function
2924 	 with the stabilized argument list.  */
2925       end_sequence ();
2926     }
2927 
2928   if (fallback_fn != BUILT_IN_NONE)
2929     {
2930       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2931 	 targets, (int) round (x) should never be transformed into
2932 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2933 	 a call to lround in the hope that the target provides at least some
2934 	 C99 functions.  This should result in the best user experience for
2935 	 targets without full C99 support.
2936 	 As scalar float conversions with the same mode are useless in GIMPLE,
2937 	 we can end up e.g. with a _Float32 argument passed to a float builtin,
2938 	 so try to get the type from the builtin prototype first.  */
2939       tree fallback_fndecl = NULL_TREE;
2940       if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
2941         fallback_fndecl
2942           = mathfn_built_in_1 (TREE_VALUE (argtypes),
2943 			       as_combined_fn (fallback_fn), 0);
2944       if (fallback_fndecl == NULL_TREE)
2945 	fallback_fndecl
2946 	  = mathfn_built_in_1 (TREE_TYPE (arg),
2947 			       as_combined_fn (fallback_fn), 0);
2948       if (fallback_fndecl)
2949 	{
2950 	  exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2951 				       fallback_fndecl, 1, arg);
2952 
2953 	  target = expand_call (exp, NULL_RTX, target == const0_rtx);
2954 	  target = maybe_emit_group_store (target, TREE_TYPE (exp));
2955 	  return convert_to_mode (mode, target, 0);
2956 	}
2957     }
2958 
2959   return expand_call (exp, target, target == const0_rtx);
2960 }
2961 
2962 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2963    a normal call should be emitted rather than expanding the function
2964    in-line.  EXP is the expression that is a call to the builtin
2965    function; if convenient, the result should be placed in TARGET.  */
2966 
2967 static rtx
2968 expand_builtin_powi (tree exp, rtx target)
2969 {
2970   tree arg0, arg1;
2971   rtx op0, op1;
2972   machine_mode mode;
2973   machine_mode mode2;
2974 
2975   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2976     return NULL_RTX;
2977 
2978   arg0 = CALL_EXPR_ARG (exp, 0);
2979   arg1 = CALL_EXPR_ARG (exp, 1);
2980   mode = TYPE_MODE (TREE_TYPE (exp));
2981 
2982   /* Emit a libcall to libgcc.  */
2983 
2984   /* Mode of the 2nd argument must match that of an int.  */
2985   mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2986 
2987   if (target == NULL_RTX)
2988     target = gen_reg_rtx (mode);
2989 
2990   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2991   if (GET_MODE (op0) != mode)
2992     op0 = convert_to_mode (mode, op0, 0);
2993   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2994   if (GET_MODE (op1) != mode2)
2995     op1 = convert_to_mode (mode2, op1, 0);
2996 
2997   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2998 				    target, LCT_CONST, mode,
2999 				    op0, mode, op1, mode2);
3000 
3001   return target;
3002 }
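
/* Illustrative note, not part of the original source: for a double
   argument the libcall emitted above resolves to libgcc's __powidf2,
   so a call such as

     double cube (double x) { return __builtin_powi (x, 3); }

   is expanded roughly as if it had been written as

     double cube (double x) { return __powidf2 (x, 3); }

   assuming the target uses the default libgcc powi routines.  */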
3003 
3004 /* Expand expression EXP which is a call to the strlen builtin.  Return
3005    NULL_RTX if we failed and the caller should emit a normal call, otherwise
3006    try to get the result in TARGET, if convenient.  */
3007 
3008 static rtx
3009 expand_builtin_strlen (tree exp, rtx target,
3010 		       machine_mode target_mode)
3011 {
3012   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3013     return NULL_RTX;
3014 
3015   class expand_operand ops[4];
3016   rtx pat;
3017   tree len;
3018   tree src = CALL_EXPR_ARG (exp, 0);
3019   rtx src_reg;
3020   rtx_insn *before_strlen;
3021   machine_mode insn_mode;
3022   enum insn_code icode = CODE_FOR_nothing;
3023   unsigned int align;
3024 
3025   /* If the length can be computed at compile-time, return it.  */
3026   len = c_strlen (src, 0);
3027   if (len)
3028     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3029 
3030   /* If the length can be computed at compile-time and is a constant
3031      integer, but there are side-effects in src, evaluate
3032      src for side-effects, then return len.
3033      E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3034      can be optimized into: i++; x = 3;  */
3035   len = c_strlen (src, 1);
3036   if (len && TREE_CODE (len) == INTEGER_CST)
3037     {
3038       expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3039       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3040     }
3041 
3042   align = get_pointer_alignment (src) / BITS_PER_UNIT;
3043 
3044   /* If SRC is not a pointer type, don't do this operation inline.  */
3045   if (align == 0)
3046     return NULL_RTX;
3047 
3048   /* Bail out if we can't compute strlen in the right mode.  */
3049   FOR_EACH_MODE_FROM (insn_mode, target_mode)
3050     {
3051       icode = optab_handler (strlen_optab, insn_mode);
3052       if (icode != CODE_FOR_nothing)
3053 	break;
3054     }
3055   if (insn_mode == VOIDmode)
3056     return NULL_RTX;
3057 
3058   /* Make a place to hold the source address.  We will not expand
3059      the actual source until we are sure that the expansion will
3060      not fail -- there are trees that cannot be expanded twice.  */
3061   src_reg = gen_reg_rtx (Pmode);
3062 
3063   /* Mark the beginning of the strlen sequence so we can emit the
3064      source operand later.  */
3065   before_strlen = get_last_insn ();
3066 
3067   create_output_operand (&ops[0], target, insn_mode);
3068   create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3069   create_integer_operand (&ops[2], 0);
3070   create_integer_operand (&ops[3], align);
3071   if (!maybe_expand_insn (icode, 4, ops))
3072     return NULL_RTX;
3073 
3074   /* Check to see if the argument was declared attribute nonstring
3075      and if so, issue a warning since at this point it's not known
3076      to be nul-terminated.  */
3077   maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3078 
3079   /* Now that we are assured of success, expand the source.  */
3080   start_sequence ();
3081   pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3082   if (pat != src_reg)
3083     {
3084 #ifdef POINTERS_EXTEND_UNSIGNED
3085       if (GET_MODE (pat) != Pmode)
3086 	pat = convert_to_mode (Pmode, pat,
3087 			       POINTERS_EXTEND_UNSIGNED);
3088 #endif
3089       emit_move_insn (src_reg, pat);
3090     }
3091   pat = get_insns ();
3092   end_sequence ();
3093 
3094   if (before_strlen)
3095     emit_insn_after (pat, before_strlen);
3096   else
3097     emit_insn_before (pat, get_insns ());
3098 
3099   /* Return the value in the proper mode for this function.  */
3100   if (GET_MODE (ops[0].value) == target_mode)
3101     target = ops[0].value;
3102   else if (target != 0)
3103     convert_move (target, ops[0].value, 0);
3104   else
3105     target = convert_to_mode (target_mode, ops[0].value, 0);
3106 
3107   return target;
3108 }
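
/* Illustrative example, not part of the original source: the
   compile-time path above folds

     size_t n = strlen ("hello");

   to the constant 5 via c_strlen without emitting any call; only when
   no constant length is known does expansion fall through to the
   target's strlenM pattern or, failing that, to a library call.  */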
3109 
3110 /* Expand call EXP to the strnlen built-in, returning the result
3111    and setting it in TARGET; return NULL_RTX on failure.  */
3112 
3113 static rtx
3114 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3115 {
3116   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3117     return NULL_RTX;
3118 
3119   tree src = CALL_EXPR_ARG (exp, 0);
3120   tree bound = CALL_EXPR_ARG (exp, 1);
3121 
3122   if (!bound)
3123     return NULL_RTX;
3124 
3125   location_t loc = UNKNOWN_LOCATION;
3126   if (EXPR_HAS_LOCATION (exp))
3127     loc = EXPR_LOCATION (exp);
3128 
3129   tree maxobjsize = max_object_size ();
3130   tree func = get_callee_fndecl (exp);
3131 
3132   /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3133      so these conversions aren't necessary.  */
3134   c_strlen_data lendata = { };
3135   tree len = c_strlen (src, 0, &lendata, 1);
3136   if (len)
3137     len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3138 
3139   if (TREE_CODE (bound) == INTEGER_CST)
3140     {
3141       if (!TREE_NO_WARNING (exp)
3142 	  && tree_int_cst_lt (maxobjsize, bound)
3143 	  && warning_at (loc, OPT_Wstringop_overflow_,
3144 			 "%K%qD specified bound %E "
3145 			 "exceeds maximum object size %E",
3146 			 exp, func, bound, maxobjsize))
3147 	TREE_NO_WARNING (exp) = true;
3148 
3149       bool exact = true;
3150       if (!len || TREE_CODE (len) != INTEGER_CST)
3151 	{
3152 	  /* Clear EXACT if LEN may be less than SRC suggests,
3153 	     such as in
3154 	       strnlen (&a[i], sizeof a)
3155 	     where the value of i is unknown.  Unless i's value is
3156 	     zero, the call is unsafe as the bound exceeds the rest of A. */
3157 	  lendata.decl = unterminated_array (src, &len, &exact);
3158 	  if (!lendata.decl)
3159 	    return NULL_RTX;
3160 	}
3161 
3162       if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3163 	{
3164 	  location_t warnloc
3165 	    = expansion_point_location_if_in_system_header (loc);
3166 
3167 	  if (!TREE_NO_WARNING (exp)
3168 	      && warning_at (warnloc, OPT_Wstringop_overflow_,
3169 			     exact
3170 			     ? G_("%K%qD specified bound %E exceeds the size "
3171 				  "%E of unterminated array")
3172 			     : G_("%K%qD specified bound %E may exceed the "
3173 				  "size of at most %E of unterminated array"),
3174 			     exp, func, bound, len))
3175 	    {
3176 	      inform (DECL_SOURCE_LOCATION (lendata.decl),
3177 		      "referenced argument declared here");
3178 	      TREE_NO_WARNING (exp) = true;
3179 	    }
3180 	  return NULL_RTX;
3181 	}
3182 
3183       if (!len)
3184 	return NULL_RTX;
3185 
3186       len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3187       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3188     }
3189 
3190   if (TREE_CODE (bound) != SSA_NAME)
3191     return NULL_RTX;
3192 
3193   wide_int min, max;
3194   enum value_range_kind rng = get_range_info (bound, &min, &max);
3195   if (rng != VR_RANGE)
3196     return NULL_RTX;
3197 
3198   if (!TREE_NO_WARNING (exp)
3199       && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3200       && warning_at (loc, OPT_Wstringop_overflow_,
3201 		     "%K%qD specified bound [%wu, %wu] "
3202 		     "exceeds maximum object size %E",
3203 		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3204     TREE_NO_WARNING (exp) = true;
3205 
3206   bool exact = true;
3207   if (!len || TREE_CODE (len) != INTEGER_CST)
3208     {
3209       lendata.decl = unterminated_array (src, &len, &exact);
3210       if (!lendata.decl)
3211 	return NULL_RTX;
3212     }
3213 
3214   if (lendata.decl
3215       && !TREE_NO_WARNING (exp)
3216       && (wi::ltu_p (wi::to_wide (len), min)
3217 	  || !exact))
3218     {
3219       location_t warnloc
3220 	= expansion_point_location_if_in_system_header (loc);
3221 
3222       if (warning_at (warnloc, OPT_Wstringop_overflow_,
3223 		      exact
3224 		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3225 			   "the size %E of unterminated array")
3226 		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
3227 			   "the size of at most %E of unterminated array"),
3228 		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
3229 	{
3230 	  inform (DECL_SOURCE_LOCATION (lendata.decl),
3231 		  "referenced argument declared here");
3232 	  TREE_NO_WARNING (exp) = true;
3233 	}
3234     }
3235 
3236   if (lendata.decl)
3237     return NULL_RTX;
3238 
3239   if (wi::gtu_p (min, wi::to_wide (len)))
3240     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3241 
3242   len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3243   return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3244 }
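
/* Hedged example of the diagnostic path above, not from the original
   source:

     const char a[4] = "abcd";
     size_t n = strnlen (a, 8);

   A lacks a terminating nul, so the constant bound 8 exceeding its
   size 4 triggers -Wstringop-overflow and NULL_RTX is returned; with
   a known length LEN and an in-range constant bound B the call folds
   to MIN (LEN, B) instead.  */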
3245 
3246 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3247    bytes starting at DATA + OFFSET and return them reinterpreted as
3248    a target constant.  */
3249 
3250 static rtx
3251 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3252 			 scalar_int_mode mode)
3253 {
3254   /* The REPresentation pointed to by DATA need not be a nul-terminated
3255      string but the caller guarantees it's large enough for MODE.  */
3256   const char *rep = (const char *) data;
3257 
3258   return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3259 }
3260 
3261 /* LEN specifies the length of the block of a memcpy/memset operation.
3262    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3263    In some cases we can make a very likely guess at the maximum size,
3264    which we then store in PROBABLE_MAX_SIZE.  */
3265 
3266 static void
3267 determine_block_size (tree len, rtx len_rtx,
3268 		      unsigned HOST_WIDE_INT *min_size,
3269 		      unsigned HOST_WIDE_INT *max_size,
3270 		      unsigned HOST_WIDE_INT *probable_max_size)
3271 {
3272   if (CONST_INT_P (len_rtx))
3273     {
3274       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3275       return;
3276     }
3277   else
3278     {
3279       wide_int min, max;
3280       enum value_range_kind range_type = VR_UNDEFINED;
3281 
3282       /* Determine bounds from the type.  */
3283       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3284 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3285       else
3286 	*min_size = 0;
3287       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3288 	*probable_max_size = *max_size
3289 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3290       else
3291 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3292 
3293       if (TREE_CODE (len) == SSA_NAME)
3294 	range_type = get_range_info (len, &min, &max);
3295       if (range_type == VR_RANGE)
3296 	{
3297 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3298 	    *min_size = min.to_uhwi ();
3299 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3300 	    *probable_max_size = *max_size = max.to_uhwi ();
3301 	}
3302       else if (range_type == VR_ANTI_RANGE)
3303 	{
3304 	  /* An anti range 0...N lets us determine the minimal size as N+1.  */
3305 	  if (min == 0)
3306 	    {
3307 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3308 		*min_size = max.to_uhwi () + 1;
3309 	    }
3310 	  /* Code like
3311 
3312 	     int n;
3313 	     if (n < 100)
3314 	       memcpy (a, b, n)
3315 
3316 	     produces an anti range allowing negative values of N.  We can
3317 	     still use this information and guess that N is not negative.
3318 	     */
3319 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3320 	    *probable_max_size = min.to_uhwi () - 1;
3321 	}
3322     }
3323   gcc_checking_assert (*max_size <=
3324 		       (unsigned HOST_WIDE_INT)
3325 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3326 }
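
/* Worked example, illustrative only: given

     if (n < 100)
       memcpy (a, b, n);

   where value range propagation has recorded the range [0, 99] for an
   unsigned N, the code above yields *min_size = 0 and
   *max_size = *probable_max_size = 99; the anti-range case ~[0, 0]
   would instead raise *min_size to 1.  */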
3327 
3328 /* Try to verify that the sizes and lengths of the arguments to a string
3329    manipulation function given by EXP are within valid bounds and that
3330    the operation does not lead to buffer overflow or read past the end.
3331    Arguments other than EXP may be null.  When non-null, the arguments
3332    have the following meaning:
3333    DST is the destination of a copy call or NULL otherwise.
3334    SRC is the source of a copy call or NULL otherwise.
3335    DSTWRITE is the number of bytes written into the destination obtained
3336    from the user-supplied size argument to the function (such as in
3337    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3338    MAXREAD is the user-supplied bound on the length of the source sequence
3339    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3340    of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
3341    SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3342    expression EXP is a string function call (as opposed to a memory call
3343    like memcpy).  As an exception, SRCSTR can also be an integer denoting
3344    the precomputed size of the source string or object (for functions like
3345    memcpy).
3346    DSTSIZE is the size of the destination object specified by the last
3347    argument to the _chk builtins, typically resulting from the expansion
3348    of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3349    DSTSIZE).
3350 
3351    When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3352    SIZE_MAX.
3353 
3354    If the call is successfully verified as safe return true, otherwise
3355    return false.  */
3356 
3357 bool
3358 check_access (tree exp, tree, tree, tree dstwrite,
3359 	      tree maxread, tree srcstr, tree dstsize)
3360 {
3361   int opt = OPT_Wstringop_overflow_;
3362 
3363   /* The size of the largest object is half the address space, or
3364      PTRDIFF_MAX.  (This is way too permissive.)  */
3365   tree maxobjsize = max_object_size ();
3366 
3367   /* Either the length of the source string for string functions or
3368      the size of the source object for raw memory functions.  */
3369   tree slen = NULL_TREE;
3370 
3371   tree range[2] = { NULL_TREE, NULL_TREE };
3372 
3373   /* Set to true when the exact number of bytes written by a string
3374      function like strcpy is not known and the only thing that is
3375      known is that it must be at least one (for the terminating nul).  */
3376   bool at_least_one = false;
3377   if (srcstr)
3378     {
3379       /* SRCSTR is normally a pointer to string but as a special case
3380 	 it can be an integer denoting the length of a string.  */
3381       if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3382 	{
3383 	  /* Try to determine the range of lengths the source string
3384 	     refers to.  If it can be determined and is less than
3385 	     the upper bound given by MAXREAD add one to it for
3386 	     the terminating nul.  Otherwise, set it to one for
3387 	     the same reason, or to MAXREAD as appropriate.  */
3388 	  c_strlen_data lendata = { };
3389 	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3390 	  range[0] = lendata.minlen;
3391 	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3392 	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3393 	    {
3394 	      if (maxread && tree_int_cst_le (maxread, range[0]))
3395 		range[0] = range[1] = maxread;
3396 	      else
3397 		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3398 					range[0], size_one_node);
3399 
3400 	      if (maxread && tree_int_cst_le (maxread, range[1]))
3401 		range[1] = maxread;
3402 	      else if (!integer_all_onesp (range[1]))
3403 		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3404 					range[1], size_one_node);
3405 
3406 	      slen = range[0];
3407 	    }
3408 	  else
3409 	    {
3410 	      at_least_one = true;
3411 	      slen = size_one_node;
3412 	    }
3413 	}
3414       else
3415 	slen = srcstr;
3416     }
3417 
3418   if (!dstwrite && !maxread)
3419     {
3420       /* When the only available piece of data is the object size
3421 	 there is nothing to do.  */
3422       if (!slen)
3423 	return true;
3424 
3425       /* Otherwise, when the length of the source sequence is known
3426 	 (as with strlen), set DSTWRITE to it.  */
3427       if (!range[0])
3428 	dstwrite = slen;
3429     }
3430 
3431   if (!dstsize)
3432     dstsize = maxobjsize;
3433 
3434   if (dstwrite)
3435     get_size_range (dstwrite, range);
3436 
3437   tree func = get_callee_fndecl (exp);
3438 
3439   /* First check the number of bytes to be written against the maximum
3440      object size.  */
3441   if (range[0]
3442       && TREE_CODE (range[0]) == INTEGER_CST
3443       && tree_int_cst_lt (maxobjsize, range[0]))
3444     {
3445       if (TREE_NO_WARNING (exp))
3446 	return false;
3447 
3448       location_t loc = tree_nonartificial_location (exp);
3449       loc = expansion_point_location_if_in_system_header (loc);
3450 
3451       bool warned;
3452       if (range[0] == range[1])
3453 	warned = (func
3454 		  ? warning_at (loc, opt,
3455 				"%K%qD specified size %E "
3456 				"exceeds maximum object size %E",
3457 				exp, func, range[0], maxobjsize)
3458 		  : warning_at (loc, opt,
3459 				"%Kspecified size %E "
3460 				"exceeds maximum object size %E",
3461 				exp, range[0], maxobjsize));
3462       else
3463 	warned = (func
3464 		  ? warning_at (loc, opt,
3465 				"%K%qD specified size between %E and %E "
3466 				"exceeds maximum object size %E",
3467 				exp, func,
3468 				range[0], range[1], maxobjsize)
3469 		  : warning_at (loc, opt,
3470 				"%Kspecified size between %E and %E "
3471 				"exceeds maximum object size %E",
3472 				exp, range[0], range[1], maxobjsize));
3473       if (warned)
3474 	TREE_NO_WARNING (exp) = true;
3475 
3476       return false;
3477     }
3478 
3479   /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3480      constant, and in range of unsigned HOST_WIDE_INT.  */
3481   bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3482 
3483   /* Next check the number of bytes to be written against the destination
3484      object size.  */
3485   if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3486     {
3487       if (range[0]
3488 	  && TREE_CODE (range[0]) == INTEGER_CST
3489 	  && ((tree_fits_uhwi_p (dstsize)
3490 	       && tree_int_cst_lt (dstsize, range[0]))
3491 	      || (dstwrite
3492 		  && tree_fits_uhwi_p (dstwrite)
3493 		  && tree_int_cst_lt (dstwrite, range[0]))))
3494 	{
3495 	  if (TREE_NO_WARNING (exp))
3496 	    return false;
3497 
3498 	  location_t loc = tree_nonartificial_location (exp);
3499 	  loc = expansion_point_location_if_in_system_header (loc);
3500 
3501 	  bool warned = false;
3502 	  if (dstwrite == slen && at_least_one)
3503 	    {
3504 	      /* This is a call to strcpy with a destination of 0 size
3505 		 and a source of unknown length.  The call will write
3506 		 at least one byte past the end of the destination.  */
3507 	      warned = (func
3508 			? warning_at (loc, opt,
3509 				      "%K%qD writing %E or more bytes into "
3510 				      "a region of size %E overflows "
3511 				      "the destination",
3512 				      exp, func, range[0], dstsize)
3513 			: warning_at (loc, opt,
3514 				      "%Kwriting %E or more bytes into "
3515 				      "a region of size %E overflows "
3516 				      "the destination",
3517 				      exp, range[0], dstsize));
3518 	    }
3519 	  else if (tree_int_cst_equal (range[0], range[1]))
3520 	    warned = (func
3521 		      ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3522 				   "%K%qD writing %E byte into a region "
3523 				   "of size %E overflows the destination",
3524 				   "%K%qD writing %E bytes into a region "
3525 				   "of size %E overflows the destination",
3526 				   exp, func, range[0], dstsize)
3527 		      : warning_n (loc, opt, tree_to_uhwi (range[0]),
3528 				   "%Kwriting %E byte into a region "
3529 				   "of size %E overflows the destination",
3530 				   "%Kwriting %E bytes into a region "
3531 				   "of size %E overflows the destination",
3532 				   exp, range[0], dstsize));
3533 	  else if (tree_int_cst_sign_bit (range[1]))
3534 	    {
3535 	      /* Avoid printing the upper bound if it's invalid.  */
3536 	      warned = (func
3537 			? warning_at (loc, opt,
3538 				      "%K%qD writing %E or more bytes into "
3539 				      "a region of size %E overflows "
3540 				      "the destination",
3541 				      exp, func, range[0], dstsize)
3542 			: warning_at (loc, opt,
3543 				      "%Kwriting %E or more bytes into "
3544 				      "a region of size %E overflows "
3545 				      "the destination",
3546 				      exp, range[0], dstsize));
3547 	    }
3548 	  else
3549 	    warned = (func
3550 		      ? warning_at (loc, opt,
3551 				    "%K%qD writing between %E and %E bytes "
3552 				    "into a region of size %E overflows "
3553 				    "the destination",
3554 				    exp, func, range[0], range[1],
3555 				    dstsize)
3556 		      : warning_at (loc, opt,
3557 				    "%Kwriting between %E and %E bytes "
3558 				    "into a region of size %E overflows "
3559 				    "the destination",
3560 				    exp, range[0], range[1],
3561 				    dstsize));
3562 	  if (warned)
3563 	    TREE_NO_WARNING (exp) = true;
3564 
3565 	  /* Return error when an overflow has been detected.  */
3566 	  return false;
3567 	}
3568     }
3569 
3570   /* Check the maximum length of the source sequence against the size
3571      of the destination object if known, or against the maximum size
3572      of an object.  */
3573   if (maxread)
3574     {
3575       get_size_range (maxread, range);
3576       if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3577 	{
3578 	  location_t loc = tree_nonartificial_location (exp);
3579 	  loc = expansion_point_location_if_in_system_header (loc);
3580 
3581 	  if (tree_int_cst_lt (maxobjsize, range[0]))
3582 	    {
3583 	      if (TREE_NO_WARNING (exp))
3584 		return false;
3585 
3586 	      bool warned = false;
3587 
3588 	      /* Warn about crazy big sizes first since that's more
3589 		 likely to be meaningful than saying that the bound
3590 		 is greater than the object size if both are big.  */
3591 	      if (range[0] == range[1])
3592 		warned = (func
3593 			  ? warning_at (loc, opt,
3594 					"%K%qD specified bound %E "
3595 					"exceeds maximum object size %E",
3596 					exp, func, range[0], maxobjsize)
3597 			  : warning_at (loc, opt,
3598 					"%Kspecified bound %E "
3599 					"exceeds maximum object size %E",
3600 					exp, range[0], maxobjsize));
3601 	      else
3602 		warned = (func
3603 			  ? warning_at (loc, opt,
3604 					"%K%qD specified bound between "
3605 					"%E and %E exceeds maximum object "
3606 					"size %E",
3607 					exp, func,
3608 					range[0], range[1], maxobjsize)
3609 			  : warning_at (loc, opt,
3610 					"%Kspecified bound between "
3611 					"%E and %E exceeds maximum object "
3612 					"size %E",
3613 					exp, range[0], range[1], maxobjsize));
3614 	      if (warned)
3615 		TREE_NO_WARNING (exp) = true;
3616 
3617 	      return false;
3618 	    }
3619 
3620 	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3621 	    {
3622 	      if (TREE_NO_WARNING (exp))
3623 		return false;
3624 
3625 	      bool warned = false;
3626 
3627 	      if (tree_int_cst_equal (range[0], range[1]))
3628 		warned = (func
3629 			  ? warning_at (loc, opt,
3630 					"%K%qD specified bound %E "
3631 					"exceeds destination size %E",
3632 					exp, func,
3633 					range[0], dstsize)
3634 			  : warning_at (loc, opt,
3635 					"%Kspecified bound %E "
3636 					"exceeds destination size %E",
3637 					exp, range[0], dstsize));
3638 	      else
3639 		warned = (func
3640 			  ? warning_at (loc, opt,
3641 					"%K%qD specified bound between %E "
3642 					"and %E exceeds destination size %E",
3643 					exp, func,
3644 					range[0], range[1], dstsize)
3645 			  : warning_at (loc, opt,
3646 					"%Kspecified bound between %E "
3647 					"and %E exceeds destination size %E",
3648 					exp,
3649 					range[0], range[1], dstsize));
3650 	      if (warned)
3651 		TREE_NO_WARNING (exp) = true;
3652 
3653 	      return false;
3654 	    }
3655 	}
3656     }
3657 
3658   /* Check for reading past the end of SRC.  */
3659   if (slen
3660       && slen == srcstr
3661       && dstwrite && range[0]
3662       && tree_int_cst_lt (slen, range[0]))
3663     {
3664       if (TREE_NO_WARNING (exp))
3665 	return false;
3666 
3667       bool warned = false;
3668       location_t loc = tree_nonartificial_location (exp);
3669       loc = expansion_point_location_if_in_system_header (loc);
3670 
3671       if (tree_int_cst_equal (range[0], range[1]))
3672 	warned = (func
3673 		  ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3674 			       "%K%qD reading %E byte from a region of size %E",
3675 			       "%K%qD reading %E bytes from a region of size %E",
3676 			       exp, func, range[0], slen)
3677 		  : warning_n (loc, opt, tree_to_uhwi (range[0]),
3678 			       "%Kreading %E byte from a region of size %E",
3679 			       "%Kreading %E bytes from a region of size %E",
3680 			       exp, range[0], slen));
3681       else if (tree_int_cst_sign_bit (range[1]))
3682 	{
3683 	  /* Avoid printing the upper bound if it's invalid.  */
3684 	  warned = (func
3685 		    ? warning_at (loc, opt,
3686 				  "%K%qD reading %E or more bytes from a region "
3687 				  "of size %E",
3688 				  exp, func, range[0], slen)
3689 		    : warning_at (loc, opt,
3690 				  "%Kreading %E or more bytes from a region "
3691 				  "of size %E",
3692 				  exp, range[0], slen));
3693 	}
3694       else
3695 	warned = (func
3696 		  ? warning_at (loc, opt,
3697 				"%K%qD reading between %E and %E bytes from "
3698 				"a region of size %E",
3699 				exp, func, range[0], range[1], slen)
3700 		  : warning_at (loc, opt,
3701 				"%Kreading between %E and %E bytes from "
3702 				"a region of size %E",
3703 				exp, range[0], range[1], slen));
3704       if (warned)
3705 	TREE_NO_WARNING (exp) = true;
3706 
3707       return false;
3708     }
3709 
3710   return true;
3711 }
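
/* Hedged example of a rejected call, not part of the original source:

     char d[3];
     memcpy (d, "abcde", 5);

   Here DSTWRITE is 5 and DSTSIZE is 3, so the checks above emit
   "writing 5 bytes into a region of size 3 overflows the destination"
   and the function returns false.  */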
3712 
3713 /* If STMT is a call to an allocation function, returns the constant
3714    size of the object allocated by the call represented as sizetype.
3715    If nonnull, sets RNG1[] to the range of the size.  */
3716 
3717 tree
3718 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3719 			const vr_values *rvals /* = NULL */)
3720 {
3721   if (!stmt)
3722     return NULL_TREE;
3723 
3724   tree allocfntype;
3725   if (tree fndecl = gimple_call_fndecl (stmt))
3726     allocfntype = TREE_TYPE (fndecl);
3727   else
3728     allocfntype = gimple_call_fntype (stmt);
3729 
3730   if (!allocfntype)
3731     return NULL_TREE;
3732 
3733   unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3734   tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3735   if (!at)
3736     {
3737       if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3738 	return NULL_TREE;
3739 
3740       argidx1 = 0;
3741     }
3742 
3743   unsigned nargs = gimple_call_num_args (stmt);
3744 
3745   if (argidx1 == UINT_MAX)
3746     {
3747       tree atval = TREE_VALUE (at);
3748       if (!atval)
3749 	return NULL_TREE;
3750 
3751       argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3752       if (nargs <= argidx1)
3753 	return NULL_TREE;
3754 
3755       atval = TREE_CHAIN (atval);
3756       if (atval)
3757 	{
3758 	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3759 	  if (nargs <= argidx2)
3760 	    return NULL_TREE;
3761 	}
3762     }
3763 
3764   tree size = gimple_call_arg (stmt, argidx1);
3765 
3766   wide_int rng1_buf[2];
3767   /* If RNG1 is not set, use the buffer.  */
3768   if (!rng1)
3769     rng1 = rng1_buf;
3770 
3771   if (!get_range (size, rng1, rvals))
3772     return NULL_TREE;
3773 
3774   if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3775     return fold_convert (sizetype, size);
3776 
3777   /* To handle ranges do the math in wide_int and return the product
3778      of the upper bounds as a constant.  Ignore anti-ranges.  */
3779   tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3780   wide_int rng2[2];
3781   if (!get_range (n, rng2, rvals))
3782     return NULL_TREE;
3783 
3784   /* Extend to the maximum precision to avoid overflow.  */
3785   const int prec = ADDR_MAX_PRECISION;
3786   rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3787   rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3788   rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3789   rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3790 
3791   /* Compute products of both bounds for the caller but return the lesser
3792      of SIZE_MAX and the product of the upper bounds as a constant.  */
3793   rng1[0] = rng1[0] * rng2[0];
3794   rng1[1] = rng1[1] * rng2[1];
3795   tree size_max = TYPE_MAX_VALUE (sizetype);
3796   if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3797     {
3798       rng1[1] = wi::to_wide (size_max);
3799       return size_max;
3800     }
3801 
3802   return wide_int_to_tree (sizetype, rng1[1]);
3803 }
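
/* Illustrative use of the attribute this function interprets;
   MY_CALLOC is a hypothetical allocator, not from the source:

     __attribute__ ((alloc_size (1, 2)))
     void *my_calloc (size_t n, size_t sz);

     void *p = my_calloc (4, 8);

   For the call above the function returns the sizetype constant 32,
   the product of the upper bounds of the two argument ranges.  */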
3804 
3805 /* Helper for compute_objsize.  Returns the constant size of the DEST
3806    if it refers to a variable or field and sets *PDECL to the DECL and
3807    *POFF to zero.  Otherwise returns null for other nodes.  */
3808 
3809 static tree
3810 addr_decl_size (tree dest, tree *pdecl, tree *poff)
3811 {
3812   if (TREE_CODE (dest) == ADDR_EXPR)
3813     dest = TREE_OPERAND (dest, 0);
3814 
3815   if (DECL_P (dest))
3816     {
3817       *pdecl = dest;
3818       *poff = integer_zero_node;
3819       if (tree size = DECL_SIZE_UNIT (dest))
3820 	return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3821     }
3822 
3823   if (TREE_CODE (dest) == COMPONENT_REF)
3824     {
3825       *pdecl = TREE_OPERAND (dest, 1);
3826       *poff = integer_zero_node;
3827       /* Only return constant sizes for now while callers depend on it.  */
3828       if (tree size = component_ref_size (dest))
3829 	return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3830     }
3831 
3832   return NULL_TREE;
3833 }
3834 
3835 /* Helper to compute the size of the object referenced by the DEST
3836    expression which must have pointer type, using Object Size type
3837    OSTYPE (only the least significant 2 bits are used).
3838    Returns an estimate of the size of the object represented as
3839    a sizetype constant if successful or NULL when the size cannot
3840    be determined.
3841    When the referenced object involves a non-constant offset in some
3842    range the returned value represents the largest size given the
3843    smallest non-negative offset in the range.
3844    If nonnull, sets *PDECL to the decl of the referenced subobject
3845    if it can be determined, or to null otherwise.  Likewise, when
3846    POFF is nonnull *POFF is set to the offset into *PDECL.
3847 
3848    The function is intended for diagnostics and should not be used
3849    to influence code generation or optimization.  */
3850 
3851 tree
3852 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
3853 		 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
3854 {
3855   tree dummy_decl = NULL_TREE;
3856   if (!pdecl)
3857     pdecl = &dummy_decl;
3858 
3859   tree dummy_off = NULL_TREE;
3860   if (!poff)
3861     poff = &dummy_off;
3862 
3863   /* Only the two least significant bits are meaningful.  */
3864   ostype &= 3;
3865 
3866   if (ostype)
3867     /* Except for overly permissive calls to memcpy and other raw
3868        memory functions with zero OSTYPE, detect the size from simple
3869        DECLs first, to set *PDECL and *POFF more reliably than
3870        compute_builtin_object_size would.  */
3871     if (tree size = addr_decl_size (dest, pdecl, poff))
3872       return size;
3873 
3874   unsigned HOST_WIDE_INT size;
3875   if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
3876     return build_int_cst (sizetype, size);
3877 
3878   if (TREE_CODE (dest) == SSA_NAME)
3879     {
3880       gimple *stmt = SSA_NAME_DEF_STMT (dest);
3881       if (is_gimple_call (stmt))
3882 	{
3883 	  /* If STMT is a call to an allocation function get the size
3884 	     from its argument(s).  If successful, also set *PDECL to
3885 	     DEST for the caller to include in diagnostics.  */
3886 	  if (tree size = gimple_call_alloc_size (stmt))
3887 	    {
3888 	      *pdecl = dest;
3889 	      *poff = integer_zero_node;
3890 	      return size;
3891 	    }
3892 	  return NULL_TREE;
3893 	}
3894 
3895       if (!is_gimple_assign (stmt))
3896 	return NULL_TREE;
3897 
3898       dest = gimple_assign_rhs1 (stmt);
3899 
3900       tree_code code = gimple_assign_rhs_code (stmt);
3901       if (code == POINTER_PLUS_EXPR)
3902 	{
3903 	  /* compute_builtin_object_size fails for addresses with
3904 	     non-constant offsets.  Try to determine the range of
3905 	     such an offset here and use it to adjust the constant
3906 	     size.  */
3907 	  tree off = gimple_assign_rhs2 (stmt);
3908 	  if (TREE_CODE (off) == INTEGER_CST)
3909 	    {
3910 	      if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3911 		{
3912 		  wide_int wioff = wi::to_wide (off);
3913 		  wide_int wisiz = wi::to_wide (size);
3914 
3915 		  /* Ignore negative offsets for now.  For others,
3916 		     use the lower bound as the most optimistic
3917 		     estimate of the (remaining) size.  */
3918 		  if (wi::neg_p (wioff))
3919 		    ;
3920 		  else
3921 		    {
3922 		      if (*poff)
3923 			{
3924 			  *poff = fold_convert (ptrdiff_type_node, *poff);
3925 			  off = fold_convert (ptrdiff_type_node, *poff);
3926 			  *poff = size_binop (PLUS_EXPR, *poff, off);
3927 			}
3928 		      else
3929 			*poff = off;
3930 		      if (wi::ltu_p (wioff, wisiz))
3931 			return wide_int_to_tree (TREE_TYPE (size),
3932 						 wi::sub (wisiz, wioff));
3933 		      return size_zero_node;
3934 		    }
3935 		}
3936 	    }
3937 	  else if (TREE_CODE (off) == SSA_NAME
3938 		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3939 	    {
3940 	      wide_int min, max;
3941 	      enum value_range_kind rng = get_range_info (off, &min, &max);
3942 
3943 	      if (rng == VR_RANGE)
3944 		if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3945 		  {
3946 		    wide_int wisiz = wi::to_wide (size);
3947 
3948 		    /* Ignore negative offsets for now.  For others,
3949 		       use the lower bound as the most optimistic
3950 		       estimate of the (remaining) size.  */
3951 		    if (wi::neg_p (min) || wi::neg_p (max))
3952 		      ;
3953 		    else
3954 		      {
3955 			/* FIXME: For now, since the offset is non-constant,
3956 			   clear *POFF to keep it from being "misused."
3957 			   Eventually *POFF will need to become a range that
3958 			   can be properly added to the outer offset if it
3959 			   too is one.  */
3960 			*poff = NULL_TREE;
3961 			if (wi::ltu_p (min, wisiz))
3962 			  return wide_int_to_tree (TREE_TYPE (size),
3963 						   wi::sub (wisiz, min));
3964 			return size_zero_node;
3965 		      }
3966 		  }
3967 	    }
3968 	}
3969       else if (code != ADDR_EXPR)
3970 	return NULL_TREE;
3971     }
3972 
3973   /* Unless computing the largest size (for memcpy and other raw memory
3974      functions), try to determine the size of the object from its type.  */
3975   if (!ostype)
3976     return NULL_TREE;
3977 
3978   if (TREE_CODE (dest) == ARRAY_REF
3979       || TREE_CODE (dest) == MEM_REF)
3980     {
3981       tree ref = TREE_OPERAND (dest, 0);
3982       tree reftype = TREE_TYPE (ref);
3983       if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
3984 	{
3985 	  /* Give up for MEM_REFs of vector types; those may be synthesized
3986 	     from multiple assignments to consecutive data members.  See PR
3987 	     93200.
3988 	     FIXME: Deal with this more generally, e.g., by marking up such
3989 	     MEM_REFs at the time they're created.  */
3990 	  reftype = TREE_TYPE (reftype);
3991 	  if (TREE_CODE (reftype) == VECTOR_TYPE)
3992 	    return NULL_TREE;
3993 	}
3994       tree off = TREE_OPERAND (dest, 1);
3995       if (tree size = compute_objsize (ref, ostype, pdecl, poff))
3996 	{
3997 	  /* If the declaration of the destination object is known
3998 	     to have zero size, return zero.  */
3999 	  if (integer_zerop (size)
4000 	      && *pdecl && DECL_P (*pdecl)
4001 	      && *poff && integer_zerop (*poff))
4002 	    return size_zero_node;
4003 
4004 	  /* A valid offset into a declared object cannot be negative.
4005 	     A zero size with a zero "inner" offset is still zero size
4006 	     regardless of the "other" offset OFF.  */
4007 	  if (*poff
4008 	      && ((integer_zerop (*poff) && integer_zerop (size))
4009 		  || (TREE_CODE (*poff) == INTEGER_CST
4010 		      && tree_int_cst_sgn (*poff) < 0)))
4011 	    return size_zero_node;
4012 
4013 	  wide_int offrng[2];
4014 	  if (!get_range (off, offrng, rvals))
4015 	    return NULL_TREE;
4016 
4017 	  /* Convert to the same precision to keep wide_int from "helpfully"
4018 	     crashing whenever it sees other arguments.  */
4019 	  const unsigned sizprec = TYPE_PRECISION (sizetype);
4020 	  offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4021 	  offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4022 
4023 	  /* Adjust SIZE either up or down by the sum of *POFF and OFF
4024 	     above.  */
4025 	  if (TREE_CODE (dest) == ARRAY_REF)
4026 	    {
4027 	      tree lowbnd = array_ref_low_bound (dest);
4028 	      if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4029 		{
4030 		  /* Adjust the offset by the low bound of the array
4031 		     domain (normally zero but 1 in Fortran).  */
4032 		  unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4033 		  offrng[0] -= lb;
4034 		  offrng[1] -= lb;
4035 		}
4036 
4037 	      /* Convert the array index into a byte offset.  */
4038 	      tree eltype = TREE_TYPE (dest);
4039 	      tree tpsize = TYPE_SIZE_UNIT (eltype);
4040 	      if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
4041 		{
4042 		  wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4043 		  offrng[0] *= wsz;
4044 		  offrng[1] *= wsz;
4045 		}
4046 	      else
4047 		return NULL_TREE;
4048 	    }
4049 
4050 	  wide_int wisize = wi::to_wide (size);
4051 
4052 	  if (!*poff)
4053 	    {
4054 	      /* If the "inner" offset is unknown and the "outer" offset
4055 		 is either negative or less than SIZE, return the size
4056 		 minus the offset.  This may be overly optimistic in
4057 		 the first case if the inner offset happens to be less
4058 		 than the absolute value of the outer offset.  */
4059 	      if (wi::neg_p (offrng[0]))
4060 		return size;
4061 	      if (wi::ltu_p (offrng[0], wisize))
4062 		return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4063 	      return size_zero_node;
4064 	    }
4065 
4066 	  /* Convert to the same precision to keep wide_int from "helpfully"
4067 	     crashing whenever it sees other arguments.  */
4068 	  offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4069 	  offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4070 
4071 	  tree dstoff = *poff;
4072 	  if (integer_zerop (*poff))
4073 	    *poff = off;
4074 	  else if (!integer_zerop (off))
4075 	    {
4076 	      *poff = fold_convert (ptrdiff_type_node, *poff);
4077 	      off = fold_convert (ptrdiff_type_node, off);
4078 	      *poff = size_binop (PLUS_EXPR, *poff, off);
4079 	    }
4080 
4081 	  if (!wi::neg_p (offrng[0]))
4082 	    {
4083 	      if (TREE_CODE (size) != INTEGER_CST)
4084 		return NULL_TREE;
4085 
4086 	      /* Return the difference between the size and the offset
4087 		 or zero if the offset is greater.  */
4088 	      wide_int wisize = wi::to_wide (size, sizprec);
4089 	      if (wi::ltu_p (wisize, offrng[0]))
4090 		return size_zero_node;
4091 
4092 	      return wide_int_to_tree (sizetype, wisize - offrng[0]);
4093 	    }
4094 
4095 	  wide_int dstoffrng[2];
4096 	  if (TREE_CODE (dstoff) == INTEGER_CST)
4097 	    dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4098 	  else if (TREE_CODE (dstoff) == SSA_NAME)
4099 	    {
4100 	      enum value_range_kind rng
4101 		= get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4102 	      if (rng != VR_RANGE)
4103 		return NULL_TREE;
4104 	    }
4105 	  else
4106 	    return NULL_TREE;
4107 
4108 	  dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4109 	  dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
4110 
4111 	  if (!wi::neg_p (dstoffrng[0]))
4112 	    wisize += dstoffrng[0];
4113 
4114 	  offrng[1] += dstoffrng[1];
4115 	  if (wi::neg_p (offrng[1]))
4116 	    return size_zero_node;
4117 
4118 	  return wide_int_to_tree (sizetype, wisize);
4119 	}
4120 
4121       return NULL_TREE;
4122     }
4123 
4124   /* Try simple DECLs not handled above.  */
4125   if (tree size = addr_decl_size (dest, pdecl, poff))
4126     return size;
4127 
4128   tree type = TREE_TYPE (dest);
4129   if (TREE_CODE (type) == POINTER_TYPE)
4130     type = TREE_TYPE (type);
4131 
4132   type = TYPE_MAIN_VARIANT (type);
4133   if (TREE_CODE (dest) == ADDR_EXPR)
4134     dest = TREE_OPERAND (dest, 0);
4135 
4136   if (TREE_CODE (type) == ARRAY_TYPE
4137       && !array_at_struct_end_p (dest))
4138     {
4139       if (tree size = TYPE_SIZE_UNIT (type))
4140 	return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
4141     }
4142 
4143   return NULL_TREE;
4144 }
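
/* Hedged example, not part of the original source: for

     char a[10];
     char *p = &a[4];

   compute_objsize (P, 1) evaluates to 6, the space remaining past the
   constant offset, with *PDECL set to A and *POFF to the byte offset
   4.  Negative or unknown offsets fall back to the more conservative
   cases handled above.  */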
4145 
4146 /* Helper to determine and check the sizes of the source and the destination
4147    of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
4148    call expression, DEST is the destination argument, SRC is the source
4149    argument or null, and LEN is the number of bytes.  Use Object Size type-0
4150    regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
4151    (no overflow or invalid sizes), false otherwise.  */
4152 
4153 static bool
4154 check_memop_access (tree exp, tree dest, tree src, tree size)
4155 {
4156   /* For functions like memset and memcpy that operate on raw memory
4157      try to determine the size of the largest source and destination
4158      object using type-0 Object Size regardless of the object size
4159      type specified by the option.  */
4160   tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4161   tree dstsize = compute_objsize (dest, 0);
4162 
4163   return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4164 		       srcsize, dstsize);
4165 }
4166 
4167 /* Validate memchr arguments without performing any expansion.
4168    Return NULL_RTX.  */
4169 
4170 static rtx
4171 expand_builtin_memchr (tree exp, rtx)
4172 {
4173   if (!validate_arglist (exp,
4174  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4175     return NULL_RTX;
4176 
4177   tree arg1 = CALL_EXPR_ARG (exp, 0);
4178   tree len = CALL_EXPR_ARG (exp, 2);
4179 
4180   /* Diagnose calls where the specified length exceeds the size
4181      of the object.  */
4182   if (warn_stringop_overflow)
4183     {
4184       tree size = compute_objsize (arg1, 0);
4185       check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4186 		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4187     }
4188 
4189   return NULL_RTX;
4190 }
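
/* Illustrative diagnostic, not from the original source:

     char buf[4];
     memchr (buf, 'x', 16);

   The length 16 exceeds the size of BUF, so check_access above
   reports reading 16 bytes from a region of size 4; NULL_RTX is
   returned in every case since this function never expands the
   call.  */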
4191 
4192 /* Expand a call EXP to the memcpy builtin.
4193    Return NULL_RTX if we failed; the caller should emit a normal call.
4194    Otherwise try to get the result in TARGET, if convenient (and in
4195    mode MODE if that's convenient).  */
4196 
4197 static rtx
4198 expand_builtin_memcpy (tree exp, rtx target)
4199 {
4200   if (!validate_arglist (exp,
4201  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4202     return NULL_RTX;
4203 
4204   tree dest = CALL_EXPR_ARG (exp, 0);
4205   tree src = CALL_EXPR_ARG (exp, 1);
4206   tree len = CALL_EXPR_ARG (exp, 2);
4207 
4208   check_memop_access (exp, dest, src, len);
4209 
4210   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4211 					  /*retmode=*/ RETURN_BEGIN, false);
4212 }
4213 
4214 /* Check a call EXP to the memmove built-in for validity.
4215    Return NULL_RTX on both success and failure.  */
4216 
4217 static rtx
4218 expand_builtin_memmove (tree exp, rtx target)
4219 {
4220   if (!validate_arglist (exp,
4221  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4222     return NULL_RTX;
4223 
4224   tree dest = CALL_EXPR_ARG (exp, 0);
4225   tree src = CALL_EXPR_ARG (exp, 1);
4226   tree len = CALL_EXPR_ARG (exp, 2);
4227 
4228   check_memop_access (exp, dest, src, len);
4229 
4230   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4231 					  /*retmode=*/ RETURN_BEGIN, true);
4232 }
4233 
4234 /* Expand a call EXP to the mempcpy builtin.
4235    Return NULL_RTX if we failed; the caller should emit a normal call,
4236    otherwise try to get the result in TARGET, if convenient (and in
4237    mode MODE if that's convenient).  */
4238 
4239 static rtx
4240 expand_builtin_mempcpy (tree exp, rtx target)
4241 {
4242   if (!validate_arglist (exp,
4243  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4244     return NULL_RTX;
4245 
4246   tree dest = CALL_EXPR_ARG (exp, 0);
4247   tree src = CALL_EXPR_ARG (exp, 1);
4248   tree len = CALL_EXPR_ARG (exp, 2);
4249 
4250   /* Policy does not generally allow using compute_objsize (which
4251      is used internally by check_memop_access) to change code generation
4252      or drive optimization decisions.
4253 
4254      In this instance it is safe because the code we generate has
4255      the same semantics regardless of the return value of
4256      check_memop_access.  Exactly the same amount of data is copied
4257      and the return value is exactly the same in both cases.
4258 
4259      Furthermore, check_memop_access always uses mode 0 for the call to
4260      compute_objsize, so the imprecise nature of compute_objsize is
4261      avoided.  */
4262 
4263   /* Avoid expanding mempcpy into memcpy when the call is determined
4264      to overflow the buffer.  This also prevents the same overflow
4265      from being diagnosed again when expanding memcpy.  */
4266   if (!check_memop_access (exp, dest, src, len))
4267     return NULL_RTX;
4268 
4269   return expand_builtin_mempcpy_args (dest, src, len,
4270 				      target, exp, /*retmode=*/ RETURN_END);
4271 }
4272 
4273 /* Helper function to do the actual work for expand of memory copy family
4274    functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
4275    of memory from SRC to DEST and assign to TARGET if convenient.  Return
4276    value is based on RETMODE argument.  */
4277 
4278 static rtx
4279 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4280 				 rtx target, tree exp, memop_ret retmode,
4281 				 bool might_overlap)
4282 {
4283   unsigned int src_align = get_pointer_alignment (src);
4284   unsigned int dest_align = get_pointer_alignment (dest);
4285   rtx dest_mem, src_mem, dest_addr, len_rtx;
4286   HOST_WIDE_INT expected_size = -1;
4287   unsigned int expected_align = 0;
4288   unsigned HOST_WIDE_INT min_size;
4289   unsigned HOST_WIDE_INT max_size;
4290   unsigned HOST_WIDE_INT probable_max_size;
4291 
4292   bool is_move_done;
4293 
4294   /* If DEST is not a pointer type, call the normal function.  */
4295   if (dest_align == 0)
4296     return NULL_RTX;
4297 
4298   /* If SRC is not a pointer type, don't do this
4299      operation in-line.  */
4300   if (src_align == 0)
4301     return NULL_RTX;
4302 
4303   if (currently_expanding_gimple_stmt)
4304     stringop_block_profile (currently_expanding_gimple_stmt,
4305 			    &expected_align, &expected_size);
4306 
4307   if (expected_align < dest_align)
4308     expected_align = dest_align;
4309   dest_mem = get_memory_rtx (dest, len);
4310   set_mem_align (dest_mem, dest_align);
4311   len_rtx = expand_normal (len);
4312   determine_block_size (len, len_rtx, &min_size, &max_size,
4313 			&probable_max_size);
4314 
4315   /* Try to get the byte representation of the constant SRC points to,
4316      with its byte size in NBYTES.  */
4317   unsigned HOST_WIDE_INT nbytes;
4318   const char *rep = c_getstr (src, &nbytes);
4319 
4320   /* If the function's constant bound LEN_RTX is less than or equal
4321      to the byte size of the representation of the constant argument,
4322      and if block move would be done by pieces, we can avoid loading
4323      the bytes from memory and only store the computed constant.
4324      This works in the overlap (memmove) case as well because
4325      store_by_pieces just generates a series of stores of constants
4326      from the representation returned by c_getstr().  */
4327   if (rep
4328       && CONST_INT_P (len_rtx)
4329       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
4330       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4331 			      CONST_CAST (char *, rep),
4332 			      dest_align, false))
4333     {
4334       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4335 				  builtin_memcpy_read_str,
4336 				  CONST_CAST (char *, rep),
4337 				  dest_align, false, retmode);
4338       dest_mem = force_operand (XEXP (dest_mem, 0), target);
4339       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4340       return dest_mem;
4341     }
4342 
4343   src_mem = get_memory_rtx (src, len);
4344   set_mem_align (src_mem, src_align);
4345 
4346   /* Copy word part most expediently.  */
4347   enum block_op_methods method = BLOCK_OP_NORMAL;
4348   if (CALL_EXPR_TAILCALL (exp)
4349       && (retmode == RETURN_BEGIN || target == const0_rtx))
4350     method = BLOCK_OP_TAILCALL;
4351   bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4352 			   && retmode == RETURN_END
4353 			   && !might_overlap
4354 			   && target != const0_rtx);
4355   if (use_mempcpy_call)
4356     method = BLOCK_OP_NO_LIBCALL_RET;
4357   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4358 				     expected_align, expected_size,
4359 				     min_size, max_size, probable_max_size,
4360 				     use_mempcpy_call, &is_move_done,
4361 				     might_overlap);
4362 
4363   /* Bail out when a mempcpy call would be expanded as a libcall and
4364      the target provides a fast implementation of the mempcpy
4365      routine.  */
4366   if (!is_move_done)
4367     return NULL_RTX;
4368 
4369   if (dest_addr == pc_rtx)
4370     return NULL_RTX;
4371 
4372   if (dest_addr == 0)
4373     {
4374       dest_addr = force_operand (XEXP (dest_mem, 0), target);
4375       dest_addr = convert_memory_address (ptr_mode, dest_addr);
4376     }
4377 
4378   if (retmode != RETURN_BEGIN && target != const0_rtx)
4379     {
4380       dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4381       /* stpcpy returns a pointer to the last byte.  */
4382       if (retmode == RETURN_END_MINUS_ONE)
4383 	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4384     }
4385 
4386   return dest_addr;
4387 }
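
/* Illustrative effect of the store_by_pieces path above, not from
   the original source: a call such as

     memcpy (d, "ab", 2);

   with suitably aligned D avoids loading from the string constant
   and is emitted as a single two-byte immediate store, roughly

     *(short *) d = 0x6261;

   on a little-endian target.  */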
4388 
4389 static rtx
4390 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4391 			     rtx target, tree orig_exp, memop_ret retmode)
4392 {
4393   return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4394 					  retmode, false);
4395 }
4396 
4397 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX
4398    if we failed; the caller should emit a normal call.  Otherwise try to
4399    get the result in TARGET, if convenient.
4400    Return value is based on RETMODE argument.  */
4401 
4402 static rtx
4403 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4404 {
4405   class expand_operand ops[3];
4406   rtx dest_mem;
4407   rtx src_mem;
4408 
4409   if (!targetm.have_movstr ())
4410     return NULL_RTX;
4411 
4412   dest_mem = get_memory_rtx (dest, NULL);
4413   src_mem = get_memory_rtx (src, NULL);
4414   if (retmode == RETURN_BEGIN)
4415     {
4416       target = force_reg (Pmode, XEXP (dest_mem, 0));
4417       dest_mem = replace_equiv_address (dest_mem, target);
4418     }
4419 
4420   create_output_operand (&ops[0],
4421 			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4422   create_fixed_operand (&ops[1], dest_mem);
4423   create_fixed_operand (&ops[2], src_mem);
4424   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4425     return NULL_RTX;
4426 
4427   if (retmode != RETURN_BEGIN && target != const0_rtx)
4428     {
4429       target = ops[0].value;
4430       /* movstr is supposed to set end to the address of the NUL
4431 	 terminator.  If the caller requested a mempcpy-like return value,
4432 	 adjust it.  */
4433       if (retmode == RETURN_END)
4434 	{
4435 	  rtx tem = plus_constant (GET_MODE (target),
4436 				   gen_lowpart (GET_MODE (target), target), 1);
4437 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
4438 	}
4439     }
4440   return target;
4441 }
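
/* Hedged sketch of the RETURN_END adjustment above: movstr leaves its
   output operand pointing at the copied nul terminator, so a
   mempcpy-style result is obtained by adding one, conceptually

     target = target + 1;

   which is what the plus_constant/force_operand pair emits.  */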
4442 
4443 /* Do some very basic size validation of a call to the strcat builtin
4444    given by EXP.  Return NULL_RTX to have the built-in expand to a call
4445    to the library function.  */
4446 
4447 static rtx
4448 expand_builtin_strcat (tree exp)
4449 {
4450   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4451       || !warn_stringop_overflow)
4452     return NULL_RTX;
4453 
4454   tree dest = CALL_EXPR_ARG (exp, 0);
4455   tree src = CALL_EXPR_ARG (exp, 1);
4456 
4457   /* Detect unterminated source (only).  */
4458   if (!check_nul_terminated_array (exp, src))
4459     return NULL_RTX;
4460 
4461   /* There is no way here to determine the length of the string in
4462      the destination to which the SRC string is being appended, so
4463      just diagnose cases when the source string is longer than
4464      the destination object.  */
4465 
4466   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4467 
4468   check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4469 		destsize);
4470 
4471   return NULL_RTX;
4472 }
4473 
4474 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
4475    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4476    try to get the result in TARGET, if convenient (and in mode MODE if that's
4477    convenient).  */
4478 
4479 static rtx
4480 expand_builtin_strcpy (tree exp, rtx target)
4481 {
4482   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4483     return NULL_RTX;
4484 
4485   tree dest = CALL_EXPR_ARG (exp, 0);
4486   tree src = CALL_EXPR_ARG (exp, 1);
4487 
4488   if (warn_stringop_overflow)
4489     {
4490       tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4491       check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4492 		    src, destsize);
4493     }
4494 
4495   if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4496     {
4497       /* Check to see if the argument was declared attribute nonstring
4498 	 and if so, issue a warning since at this point it's not known
4499 	 to be nul-terminated.  */
4500       tree fndecl = get_callee_fndecl (exp);
4501       maybe_warn_nonstring_arg (fndecl, exp);
4502       return ret;
4503     }
4504 
4505   return NULL_RTX;
4506 }
4507 
4508 /* Helper function to do the actual work for expand_builtin_strcpy.  The
4509    arguments to the builtin_strcpy call DEST and SRC are broken out
4510    so that this can also be called without constructing an actual CALL_EXPR.
4511    The other arguments and return value are the same as for
4512    expand_builtin_strcpy.  */
4513 
4514 static rtx
4515 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4516 {
4517   /* Detect strcpy calls with unterminated arrays.  */
4518   if (tree nonstr = unterminated_array (src))
4519     {
4520       /* NONSTR refers to the non-nul terminated constant array.  */
4521       if (!TREE_NO_WARNING (exp))
4522 	warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4523       return NULL_RTX;
4524     }
4525 
4526   return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4527 }
4528 
4529 /* Expand a call EXP to the stpcpy builtin.
4530    Return NULL_RTX if we failed; the caller should emit a normal call.
4531    Otherwise try to get the result in TARGET, if convenient (and in
4532    mode MODE if that's convenient).  */
4533 
4534 static rtx
4535 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4536 {
4537   tree dst, src;
4538   location_t loc = EXPR_LOCATION (exp);
4539 
4540   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4541     return NULL_RTX;
4542 
4543   dst = CALL_EXPR_ARG (exp, 0);
4544   src = CALL_EXPR_ARG (exp, 1);
4545 
4546   if (warn_stringop_overflow)
4547     {
4548       tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4549       check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4550 		    src, destsize);
4551     }
4552 
4553   /* If return value is ignored, transform stpcpy into strcpy.  */
4554   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4555     {
4556       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4557       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4558       return expand_expr (result, target, mode, EXPAND_NORMAL);
4559     }
4560   else
4561     {
4562       tree len, lenp1;
4563       rtx ret;
4564 
4565       /* Ensure we get an actual string whose length can be evaluated at
4566 	 compile-time, not an expression containing a string.  This is
4567 	 because the latter will potentially produce pessimized code
4568 	 when used to produce the return value.  */
4569       c_strlen_data lendata = { };
4570       if (!c_getstr (src, NULL)
4571 	  || !(len = c_strlen (src, 0, &lendata, 1)))
4572 	return expand_movstr (dst, src, target,
4573 			      /*retmode=*/ RETURN_END_MINUS_ONE);
4574 
4575       if (lendata.decl && !TREE_NO_WARNING (exp))
4576 	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4577 
4578       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4579       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4580 					 target, exp,
4581 					 /*retmode=*/ RETURN_END_MINUS_ONE);
4582 
4583       if (ret)
4584 	return ret;
4585 
4586       if (TREE_CODE (len) == INTEGER_CST)
4587 	{
4588 	  rtx len_rtx = expand_normal (len);
4589 
4590 	  if (CONST_INT_P (len_rtx))
4591 	    {
4592 	      ret = expand_builtin_strcpy_args (exp, dst, src, target);
4593 
4594 	      if (ret)
4595 		{
4596 		  if (! target)
4597 		    {
4598 		      if (mode != VOIDmode)
4599 			target = gen_reg_rtx (mode);
4600 		      else
4601 			target = gen_reg_rtx (GET_MODE (ret));
4602 		    }
4603 		  if (GET_MODE (target) != GET_MODE (ret))
4604 		    ret = gen_lowpart (GET_MODE (target), ret);
4605 
4606 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4607 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4608 		  gcc_assert (ret);
4609 
4610 		  return target;
4611 		}
4612 	    }
4613 	}
4614 
4615       return expand_movstr (dst, src, target,
4616 			    /*retmode=*/ RETURN_END_MINUS_ONE);
4617     }
4618 }
4619 
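/* Illustrative sketch (not part of this file) of the constant-length case
   handled above: when strlen (SRC) is known at compile time, stpcpy can be
   expanded as a plain strcpy followed by a constant pointer adjustment,
   because

     stpcpy (dst, src) == strcpy (dst, src) + strlen (src)

   e.g. for a 3-character literal the returned value is simply dst + 3.  */
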
4620 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4621    arguments while being careful to avoid duplicate warnings (which could
4622    be issued if the expander were to expand the call, resulting in it
4623    being emitted in expand_call()).  */
4624 
4625 static rtx
4626 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4627 {
4628   if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4629     {
4630       /* The call has been successfully expanded.  Check for nonstring
4631 	 arguments and issue warnings as appropriate.  */
4632       maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4633       return ret;
4634     }
4635 
4636   return NULL_RTX;
4637 }
4638 
4639 /* Check a call EXP to the stpncpy built-in for validity.
4640    Return NULL_RTX on both success and failure.  */
4641 
4642 static rtx
4643 expand_builtin_stpncpy (tree exp, rtx)
4644 {
4645   if (!validate_arglist (exp,
4646 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4647       || !warn_stringop_overflow)
4648     return NULL_RTX;
4649 
4650   /* The source and destination of the call.  */
4651   tree dest = CALL_EXPR_ARG (exp, 0);
4652   tree src = CALL_EXPR_ARG (exp, 1);
4653 
4654   /* The exact number of bytes to write (not the maximum).  */
4655   tree len = CALL_EXPR_ARG (exp, 2);
4656   if (!check_nul_terminated_array (exp, src, len))
4657     return NULL_RTX;
4658 
4659   /* The size of the destination object.  */
4660   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4661 
4662   check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4663 
4664   return NULL_RTX;
4665 }
4666 
4667 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
4668    bytes from constant string DATA + OFFSET and return it as target
4669    constant.  */
4670 
4671 rtx
4672 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4673 			  scalar_int_mode mode)
4674 {
4675   const char *str = (const char *) data;
4676 
4677   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4678     return const0_rtx;
4679 
4680   return c_readstr (str + offset, mode);
4681 }
4682 
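/* Illustrative sketch (not part of this file) of the callback contract
   above: offsets past the end of the source string read as zero, which is
   what implements strncpy's mandated NUL padding.  A byte-sized analogue:

     #include <stddef.h>
     #include <string.h>

     static unsigned char
     read_src_byte (const char *str, size_t offset)
     {
       // offset == strlen (str) still reads the NUL itself.
       return offset > strlen (str) ? 0 : (unsigned char) str[offset];
     }

   The real callback works on whole machine modes via c_readstr rather
   than single bytes, but the zero-fill behavior is the same.  */
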
4683 /* Helper to check the sizes of sequences and the destination of calls
4684    to __builtin_strncat and __builtin___strncat_chk.  Returns true on
4685    success (no overflow or invalid sizes), false otherwise.  */
4686 
4687 static bool
4688 check_strncat_sizes (tree exp, tree objsize)
4689 {
4690   tree dest = CALL_EXPR_ARG (exp, 0);
4691   tree src = CALL_EXPR_ARG (exp, 1);
4692   tree maxread = CALL_EXPR_ARG (exp, 2);
4693 
4694   /* Try to determine the range of lengths that the source expression
4695      refers to.  */
4696   c_strlen_data lendata = { };
4697   get_range_strlen (src, &lendata, /* eltsize = */ 1);
4698 
4699   /* Try to verify that the destination is big enough for the shortest
4700      string.  */
4701 
4702   if (!objsize && warn_stringop_overflow)
4703     {
4704       /* If it hasn't been provided by __strncat_chk, try to determine
4705 	 the size of the destination object into which the source is
4706 	 being copied.  */
4707       objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4708     }
4709 
4710   /* Add one for the terminating nul.  */
4711   tree srclen = (lendata.minlen
4712 		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4713 				size_one_node)
4714 		 : NULL_TREE);
4715 
4716   /* The strncat function copies at most MAXREAD bytes and always appends
4717      the terminating nul so the specified upper bound should never be equal
4718      to (or greater than) the size of the destination.  */
4719   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4720       && tree_int_cst_equal (objsize, maxread))
4721     {
4722       location_t loc = tree_nonartificial_location (exp);
4723       loc = expansion_point_location_if_in_system_header (loc);
4724 
4725       warning_at (loc, OPT_Wstringop_overflow_,
4726 		  "%K%qD specified bound %E equals destination size",
4727 		  exp, get_callee_fndecl (exp), maxread);
4728 
4729       return false;
4730     }
4731 
4732   if (!srclen
4733       || (maxread && tree_fits_uhwi_p (maxread)
4734 	  && tree_fits_uhwi_p (srclen)
4735 	  && tree_int_cst_lt (maxread, srclen)))
4736     srclen = maxread;
4737 
4738   /* The number of bytes to write is LEN but check_access will also
4739      check SRCLEN if LEN's value isn't known.  */
4740   return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4741 		       objsize);
4742 }
4743 
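/* Illustrative sketch (not part of this file) of the misuse diagnosed
   above: strncat appends a NUL after at most MAXREAD copied bytes, so a
   bound equal to the destination size can write one byte past the end:

     char buf[8];
     strncat (buf, src, sizeof buf);                      // diagnosed
     strncat (buf, src, sizeof buf - strlen (buf) - 1);   // correct idiom
*/
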
4744 /* Similar to expand_builtin_strcat, do some very basic size validation
4745    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
4746    the built-in expand to a call to the library function.  */
4747 
4748 static rtx
4749 expand_builtin_strncat (tree exp, rtx)
4750 {
4751   if (!validate_arglist (exp,
4752 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4753       || !warn_stringop_overflow)
4754     return NULL_RTX;
4755 
4756   tree dest = CALL_EXPR_ARG (exp, 0);
4757   tree src = CALL_EXPR_ARG (exp, 1);
4758   /* The upper bound on the number of bytes to write.  */
4759   tree maxread = CALL_EXPR_ARG (exp, 2);
4760 
4761   /* Detect unterminated source (only).  */
4762   if (!check_nul_terminated_array (exp, src, maxread))
4763     return NULL_RTX;
4764 
4765   /* The length of the source sequence.  */
4766   tree slen = c_strlen (src, 1);
4767 
4768   /* Try to determine the range of lengths that the source expression
4769      refers to.  Since the lengths are only used for warning and not
4770      for code generation disable strict mode below.  */
4771   tree maxlen = slen;
4772   if (!maxlen)
4773     {
4774       c_strlen_data lendata = { };
4775       get_range_strlen (src, &lendata, /* eltsize = */ 1);
4776       maxlen = lendata.maxbound;
4777     }
4778 
4779   /* Try to verify that the destination is big enough for the shortest
4780      string.  First try to determine the size of the destination object
4781      into which the source is being copied.  */
4782   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4783 
4784   /* Add one for the terminating nul.  */
4785   tree srclen = (maxlen
4786 		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4787 				size_one_node)
4788 		 : NULL_TREE);
4789 
4790   /* The strncat function copies at most MAXREAD bytes and always appends
4791      the terminating nul so the specified upper bound should never be equal
4792      to (or greater than) the size of the destination.  */
4793   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4794       && tree_int_cst_equal (destsize, maxread))
4795     {
4796       location_t loc = tree_nonartificial_location (exp);
4797       loc = expansion_point_location_if_in_system_header (loc);
4798 
4799       warning_at (loc, OPT_Wstringop_overflow_,
4800 		  "%K%qD specified bound %E equals destination size",
4801 		  exp, get_callee_fndecl (exp), maxread);
4802 
4803       return NULL_RTX;
4804     }
4805 
4806   if (!srclen
4807       || (maxread && tree_fits_uhwi_p (maxread)
4808 	  && tree_fits_uhwi_p (srclen)
4809 	  && tree_int_cst_lt (maxread, srclen)))
4810     srclen = maxread;
4811 
4812   /* The number of bytes to write is SRCLEN.  */
4813   check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4814 
4815   return NULL_RTX;
4816 }
4817 
4818 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
4819    NULL_RTX if we failed; the caller should emit a normal call.  */
4820 
4821 static rtx
4822 expand_builtin_strncpy (tree exp, rtx target)
4823 {
4824   location_t loc = EXPR_LOCATION (exp);
4825 
4826   if (!validate_arglist (exp,
4827 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4828     return NULL_RTX;
4829   tree dest = CALL_EXPR_ARG (exp, 0);
4830   tree src = CALL_EXPR_ARG (exp, 1);
4831   /* The number of bytes to write (not the maximum).  */
4832   tree len = CALL_EXPR_ARG (exp, 2);
4833 
4834   if (!check_nul_terminated_array (exp, src, len))
4835     return NULL_RTX;
4836 
4837   /* The length of the source sequence.  */
4838   tree slen = c_strlen (src, 1);
4839 
4840   if (warn_stringop_overflow)
4841     {
4842       tree destsize = compute_objsize (dest,
4843 				       warn_stringop_overflow - 1);
4844 
4845       /* The number of bytes to write is LEN but check_access will also
4846 	 check SLEN if LEN's value isn't known.  */
4847       check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4848 		    destsize);
4849     }
4850 
4851   /* We must be passed constant len and src parameters.  */
4852   if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4853     return NULL_RTX;
4854 
4855   slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4856 
4857   /* We're required to pad with trailing zeros if the requested
4858      len is greater than strlen(s2)+1.  In that case try to
4859      use store_by_pieces, if it fails, punt.  */
4860   if (tree_int_cst_lt (slen, len))
4861     {
4862       unsigned int dest_align = get_pointer_alignment (dest);
4863       const char *p = c_getstr (src);
4864       rtx dest_mem;
4865 
4866       if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4867 	  || !can_store_by_pieces (tree_to_uhwi (len),
4868 				   builtin_strncpy_read_str,
4869 				   CONST_CAST (char *, p),
4870 				   dest_align, false))
4871 	return NULL_RTX;
4872 
4873       dest_mem = get_memory_rtx (dest, len);
4874       store_by_pieces (dest_mem, tree_to_uhwi (len),
4875 		       builtin_strncpy_read_str,
4876 		       CONST_CAST (char *, p), dest_align, false,
4877 		       RETURN_BEGIN);
4878       dest_mem = force_operand (XEXP (dest_mem, 0), target);
4879       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4880       return dest_mem;
4881     }
4882 
4883   return NULL_RTX;
4884 }
4885 
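/* Illustrative sketch (not part of this file) of the padding rule the
   store_by_pieces path above depends on: when LEN exceeds strlen (SRC) + 1
   the trailing bytes are zero, so with a constant source the entire store
   is a compile-time constant:

     char buf[8];
     strncpy (buf, "ab", sizeof buf);
     // buf now holds { 'a', 'b', 0, 0, 0, 0, 0, 0 }
*/
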
4886 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
4887    bytes from constant string DATA + OFFSET and return it as target
4888    constant.  */
4889 
4890 rtx
4891 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4892 			 scalar_int_mode mode)
4893 {
4894   const char *c = (const char *) data;
4895   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4896 
4897   memset (p, *c, GET_MODE_SIZE (mode));
4898 
4899   return c_readstr (p, mode);
4900 }
4901 
4902 /* Callback routine for store_by_pieces.  Return the RTL of a register
4903    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4904    char value given in the RTL register data.  For example, if mode is
4905    4 bytes wide, return the RTL for 0x01010101*data.  */
4906 
4907 static rtx
4908 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4909 			scalar_int_mode mode)
4910 {
4911   rtx target, coeff;
4912   size_t size;
4913   char *p;
4914 
4915   size = GET_MODE_SIZE (mode);
4916   if (size == 1)
4917     return (rtx) data;
4918 
4919   p = XALLOCAVEC (char, size);
4920   memset (p, 1, size);
4921   coeff = c_readstr (p, mode);
4922 
4923   target = convert_to_mode (mode, (rtx) data, 1);
4924   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4925   return force_reg (mode, target);
4926 }
4927 
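/* Illustrative sketch (not part of this file) of the multiplication trick
   above: replicating an unsigned char across a word is a multiply by the
   constant whose bytes are all 1.  For a 4-byte mode:

     #include <stdint.h>

     static uint32_t
     splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // e.g. 0xab -> 0xabababab
     }
*/
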
4928 /* Expand expression EXP, which is a call to the memset builtin.  Return
4929    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4930    try to get the result in TARGET, if convenient (and in mode MODE if that's
4931    convenient).  */
4932 
4933 static rtx
4934 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4935 {
4936   if (!validate_arglist (exp,
4937  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4938     return NULL_RTX;
4939 
4940   tree dest = CALL_EXPR_ARG (exp, 0);
4941   tree val = CALL_EXPR_ARG (exp, 1);
4942   tree len = CALL_EXPR_ARG (exp, 2);
4943 
4944   check_memop_access (exp, dest, NULL_TREE, len);
4945 
4946   return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4947 }
4948 
4949 /* Helper function to do the actual work for expand_builtin_memset.  The
4950    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4951    so that this can also be called without constructing an actual CALL_EXPR.
4952    The other arguments and return value are the same as for
4953    expand_builtin_memset.  */
4954 
4955 static rtx
4956 expand_builtin_memset_args (tree dest, tree val, tree len,
4957 			    rtx target, machine_mode mode, tree orig_exp)
4958 {
4959   tree fndecl, fn;
4960   enum built_in_function fcode;
4961   machine_mode val_mode;
4962   char c;
4963   unsigned int dest_align;
4964   rtx dest_mem, dest_addr, len_rtx;
4965   HOST_WIDE_INT expected_size = -1;
4966   unsigned int expected_align = 0;
4967   unsigned HOST_WIDE_INT min_size;
4968   unsigned HOST_WIDE_INT max_size;
4969   unsigned HOST_WIDE_INT probable_max_size;
4970 
4971   dest_align = get_pointer_alignment (dest);
4972 
4973   /* If DEST is not a pointer type, don't do this operation in-line.  */
4974   if (dest_align == 0)
4975     return NULL_RTX;
4976 
4977   if (currently_expanding_gimple_stmt)
4978     stringop_block_profile (currently_expanding_gimple_stmt,
4979 			    &expected_align, &expected_size);
4980 
4981   if (expected_align < dest_align)
4982     expected_align = dest_align;
4983 
4984   /* If the LEN parameter is zero, return DEST.  */
4985   if (integer_zerop (len))
4986     {
4987       /* Evaluate and ignore VAL in case it has side-effects.  */
4988       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4989       return expand_expr (dest, target, mode, EXPAND_NORMAL);
4990     }
4991 
4992   /* Stabilize the arguments in case we fail.  */
4993   dest = builtin_save_expr (dest);
4994   val = builtin_save_expr (val);
4995   len = builtin_save_expr (len);
4996 
4997   len_rtx = expand_normal (len);
4998   determine_block_size (len, len_rtx, &min_size, &max_size,
4999 			&probable_max_size);
5000   dest_mem = get_memory_rtx (dest, len);
5001   val_mode = TYPE_MODE (unsigned_char_type_node);
5002 
5003   if (TREE_CODE (val) != INTEGER_CST)
5004     {
5005       rtx val_rtx;
5006 
5007       val_rtx = expand_normal (val);
5008       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5009 
5010       /* Assume that we can memset by pieces if we can store
5011 	 the coefficients by pieces (in the required modes).
5012 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
5013       c = 1;
5014       if (tree_fits_uhwi_p (len)
5015 	  && can_store_by_pieces (tree_to_uhwi (len),
5016 				  builtin_memset_read_str, &c, dest_align,
5017 				  true))
5018 	{
5019 	  val_rtx = force_reg (val_mode, val_rtx);
5020 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
5021 			   builtin_memset_gen_str, val_rtx, dest_align,
5022 			   true, RETURN_BEGIN);
5023 	}
5024       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5025 					dest_align, expected_align,
5026 					expected_size, min_size, max_size,
5027 					probable_max_size))
5028 	goto do_libcall;
5029 
5030       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5031       dest_mem = convert_memory_address (ptr_mode, dest_mem);
5032       return dest_mem;
5033     }
5034 
5035   if (target_char_cast (val, &c))
5036     goto do_libcall;
5037 
5038   if (c)
5039     {
5040       if (tree_fits_uhwi_p (len)
5041 	  && can_store_by_pieces (tree_to_uhwi (len),
5042 				  builtin_memset_read_str, &c, dest_align,
5043 				  true))
5044 	store_by_pieces (dest_mem, tree_to_uhwi (len),
5045 			 builtin_memset_read_str, &c, dest_align, true,
5046 			 RETURN_BEGIN);
5047       else if (!set_storage_via_setmem (dest_mem, len_rtx,
5048 					gen_int_mode (c, val_mode),
5049 					dest_align, expected_align,
5050 					expected_size, min_size, max_size,
5051 					probable_max_size))
5052 	goto do_libcall;
5053 
5054       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5055       dest_mem = convert_memory_address (ptr_mode, dest_mem);
5056       return dest_mem;
5057     }
5058 
5059   set_mem_align (dest_mem, dest_align);
5060   dest_addr = clear_storage_hints (dest_mem, len_rtx,
5061 				   CALL_EXPR_TAILCALL (orig_exp)
5062 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5063 				   expected_align, expected_size,
5064 				   min_size, max_size,
5065 				   probable_max_size);
5066 
5067   if (dest_addr == 0)
5068     {
5069       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5070       dest_addr = convert_memory_address (ptr_mode, dest_addr);
5071     }
5072 
5073   return dest_addr;
5074 
5075  do_libcall:
5076   fndecl = get_callee_fndecl (orig_exp);
5077   fcode = DECL_FUNCTION_CODE (fndecl);
5078   if (fcode == BUILT_IN_MEMSET)
5079     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5080 				dest, val, len);
5081   else if (fcode == BUILT_IN_BZERO)
5082     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5083 				dest, len);
5084   else
5085     gcc_unreachable ();
5086   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5087   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5088   return expand_call (fn, target, target == const0_rtx);
5089 }
5090 
5091 /* Expand expression EXP, which is a call to the bzero builtin.  Return
5092    NULL_RTX if we failed; the caller should emit a normal call.  */
5093 
5094 static rtx
5095 expand_builtin_bzero (tree exp)
5096 {
5097   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5098     return NULL_RTX;
5099 
5100   tree dest = CALL_EXPR_ARG (exp, 0);
5101   tree size = CALL_EXPR_ARG (exp, 1);
5102 
5103   check_memop_access (exp, dest, NULL_TREE, size);
5104 
5105   /* New argument list transforming bzero(ptr x, int y) to
5106      memset(ptr x, int 0, size_t y).  This is done this way
5107      so that if it isn't expanded inline, we fall back to
5108      calling bzero instead of memset.  */
5109 
5110   location_t loc = EXPR_LOCATION (exp);
5111 
5112   return expand_builtin_memset_args (dest, integer_zero_node,
5113 				     fold_convert_loc (loc,
5114 						       size_type_node, size),
5115 				     const0_rtx, VOIDmode, exp);
5116 }
5117 
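/* Illustrative sketch (not part of this file) of the transformation above:

     bzero (p, n);

   is expanded as if it were

     memset (p, 0, (size_t) n);

   but the original bzero CALL_EXPR is passed through as ORIG_EXP, so a
   fallback library call still targets bzero rather than memset.  */
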
5118 /* Try to expand cmpstr operation ICODE with the given operands.
5119    Return the result rtx on success, otherwise return null.  */
5120 
5121 static rtx
5122 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5123 	       HOST_WIDE_INT align)
5124 {
5125   machine_mode insn_mode = insn_data[icode].operand[0].mode;
5126 
5127   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5128     target = NULL_RTX;
5129 
5130   class expand_operand ops[4];
5131   create_output_operand (&ops[0], target, insn_mode);
5132   create_fixed_operand (&ops[1], arg1_rtx);
5133   create_fixed_operand (&ops[2], arg2_rtx);
5134   create_integer_operand (&ops[3], align);
5135   if (maybe_expand_insn (icode, 4, ops))
5136     return ops[0].value;
5137   return NULL_RTX;
5138 }
5139 
5140 /* Expand expression EXP, which is a call to the memcmp built-in function.
5141    Return NULL_RTX if we failed and the caller should emit a normal call,
5142    otherwise try to get the result in TARGET, if convenient.
5143    RESULT_EQ is true if we can relax the returned value to be either zero
5144    or nonzero, without caring about the sign.  */
5145 
5146 static rtx
5147 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5148 {
5149   if (!validate_arglist (exp,
5150  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5151     return NULL_RTX;
5152 
5153   tree arg1 = CALL_EXPR_ARG (exp, 0);
5154   tree arg2 = CALL_EXPR_ARG (exp, 1);
5155   tree len = CALL_EXPR_ARG (exp, 2);
5156   enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5157   bool no_overflow = true;
5158 
5159   /* Diagnose calls where the specified length exceeds the size of either
5160      object.  */
5161   tree size = compute_objsize (arg1, 0);
5162   no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5163 			      len, /*maxread=*/NULL_TREE, size,
5164 			      /*objsize=*/NULL_TREE);
5165   if (no_overflow)
5166     {
5167       size = compute_objsize (arg2, 0);
5168       no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5169 				  len,  /*maxread=*/NULL_TREE, size,
5170 				  /*objsize=*/NULL_TREE);
5171     }
5172 
5173   /* If the specified length exceeds the size of either object,
5174      call the function.  */
5175   if (!no_overflow)
5176     return NULL_RTX;
5177 
5178   /* Due to the performance benefit, always inline the calls first
5179      when result_eq is false.  */
5180   rtx result = NULL_RTX;
5181 
5182   if (!result_eq && fcode != BUILT_IN_BCMP)
5183     {
5184       result = inline_expand_builtin_bytecmp (exp, target);
5185       if (result)
5186 	return result;
5187     }
5188 
5189   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5190   location_t loc = EXPR_LOCATION (exp);
5191 
5192   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5193   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5194 
5195   /* If we don't have POINTER_TYPE, call the function.  */
5196   if (arg1_align == 0 || arg2_align == 0)
5197     return NULL_RTX;
5198 
5199   rtx arg1_rtx = get_memory_rtx (arg1, len);
5200   rtx arg2_rtx = get_memory_rtx (arg2, len);
5201   rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5202 
5203   /* Set MEM_SIZE as appropriate.  */
5204   if (CONST_INT_P (len_rtx))
5205     {
5206       set_mem_size (arg1_rtx, INTVAL (len_rtx));
5207       set_mem_size (arg2_rtx, INTVAL (len_rtx));
5208     }
5209 
5210   by_pieces_constfn constfn = NULL;
5211 
5212   /* Try to get the byte representation of the constant ARG2 (or, only
5213      when the function's result is used for equality to zero, ARG1)
5214      points to, with its byte size in NBYTES.  */
5215   unsigned HOST_WIDE_INT nbytes;
5216   const char *rep = c_getstr (arg2, &nbytes);
5217   if (result_eq && rep == NULL)
5218     {
5219       /* For equality to zero the arguments are interchangeable.  */
5220       rep = c_getstr (arg1, &nbytes);
5221       if (rep != NULL)
5222 	std::swap (arg1_rtx, arg2_rtx);
5223     }
5224 
5225   /* If the function's constant bound LEN_RTX is less than or equal
5226      to the byte size of the representation of the constant argument,
5227      and if block move would be done by pieces, we can avoid loading
5228      the bytes from memory and only store the computed constant result.  */
5229   if (rep
5230       && CONST_INT_P (len_rtx)
5231       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
5232     constfn = builtin_memcpy_read_str;
5233 
5234   result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5235 				 TREE_TYPE (len), target,
5236 				 result_eq, constfn,
5237 				 CONST_CAST (char *, rep));
5238 
5239   if (result)
5240     {
5241       /* Return the value in the proper mode for this function.  */
5242       if (GET_MODE (result) == mode)
5243 	return result;
5244 
5245       if (target != 0)
5246 	{
5247 	  convert_move (target, result, 0);
5248 	  return target;
5249 	}
5250 
5251       return convert_to_mode (mode, result, 0);
5252     }
5253 
5254   return NULL_RTX;
5255 }
5256 
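/* Illustrative sketch (not part of this file) of the constfn path above:
   with a known constant argument and a constant bound no larger than its
   representation, e.g.

     memcmp (p, "abc", 3)

   the literal's bytes need not be loaded from memory at run time; the
   constant side is synthesized directly through builtin_memcpy_read_str
   during the by-pieces comparison.  */
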
5257 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
5258    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
5259    try to get the result in TARGET, if convenient.  */
5260 
5261 static rtx
5262 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5263 {
5264   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5265     return NULL_RTX;
5266 
5267   tree arg1 = CALL_EXPR_ARG (exp, 0);
5268   tree arg2 = CALL_EXPR_ARG (exp, 1);
5269 
5270   if (!check_nul_terminated_array (exp, arg1)
5271       || !check_nul_terminated_array (exp, arg2))
5272     return NULL_RTX;
5273 
5274   /* Due to the performance benefit, always inline the calls first.  */
5275   rtx result = NULL_RTX;
5276   result = inline_expand_builtin_bytecmp (exp, target);
5277   if (result)
5278     return result;
5279 
5280   insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5281   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5282   if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5283     return NULL_RTX;
5284 
5285   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5286   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5287 
5288   /* If we don't have POINTER_TYPE, call the function.  */
5289   if (arg1_align == 0 || arg2_align == 0)
5290     return NULL_RTX;
5291 
5292   /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
5293   arg1 = builtin_save_expr (arg1);
5294   arg2 = builtin_save_expr (arg2);
5295 
5296   rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5297   rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5298 
5299   /* Try to call cmpstrsi.  */
5300   if (cmpstr_icode != CODE_FOR_nothing)
5301     result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5302 			    MIN (arg1_align, arg2_align));
5303 
5304   /* Try to determine at least one length and call cmpstrnsi.  */
5305   if (!result && cmpstrn_icode != CODE_FOR_nothing)
5306     {
5307       tree len;
5308       rtx arg3_rtx;
5309 
5310       tree len1 = c_strlen (arg1, 1);
5311       tree len2 = c_strlen (arg2, 1);
5312 
5313       if (len1)
5314 	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5315       if (len2)
5316 	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5317 
5318       /* If we don't have a constant length for the first, use the length
5319 	 of the second, if we know it.  We don't require a constant for
5320 	 this case; some cost analysis could be done if both are available
5321 	 but neither is constant.  For now, assume they're equally cheap,
5322 	 unless one has side effects.  If both strings have constant lengths,
5323 	 use the smaller.  */
5324 
5325       if (!len1)
5326 	len = len2;
5327       else if (!len2)
5328 	len = len1;
5329       else if (TREE_SIDE_EFFECTS (len1))
5330 	len = len2;
5331       else if (TREE_SIDE_EFFECTS (len2))
5332 	len = len1;
5333       else if (TREE_CODE (len1) != INTEGER_CST)
5334 	len = len2;
5335       else if (TREE_CODE (len2) != INTEGER_CST)
5336 	len = len1;
5337       else if (tree_int_cst_lt (len1, len2))
5338 	len = len1;
5339       else
5340 	len = len2;
5341 
5342       /* If both arguments have side effects, we cannot optimize.  */
5343       if (len && !TREE_SIDE_EFFECTS (len))
5344 	{
5345 	  arg3_rtx = expand_normal (len);
5346 	  result = expand_cmpstrn_or_cmpmem
5347 	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5348 	     arg3_rtx, MIN (arg1_align, arg2_align));
5349 	}
5350     }
5351 
5352   tree fndecl = get_callee_fndecl (exp);
5353   if (result)
5354     {
5355       /* Check to see if the argument was declared attribute nonstring
5356 	 and if so, issue a warning since at this point it's not known
5357 	 to be nul-terminated.  */
5358       maybe_warn_nonstring_arg (fndecl, exp);
5359 
5360       /* Return the value in the proper mode for this function.  */
5361       machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5362       if (GET_MODE (result) == mode)
5363 	return result;
5364       if (target == 0)
5365 	return convert_to_mode (mode, result, 0);
5366       convert_move (target, result, 0);
5367       return target;
5368     }
5369 
5370   /* Expand the library call ourselves using a stabilized argument
5371      list to avoid re-evaluating the function's arguments twice.  */
5372   tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5373   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5374   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5375   return expand_call (fn, target, target == const0_rtx);
5376 }
5377 
5378 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
5379    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
5380    try to get the result in TARGET, if convenient.  */
5381 
5382 static rtx
5383 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5384 			ATTRIBUTE_UNUSED machine_mode mode)
5385 {
5386   if (!validate_arglist (exp,
5387  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5388     return NULL_RTX;
5389 
5390   tree arg1 = CALL_EXPR_ARG (exp, 0);
5391   tree arg2 = CALL_EXPR_ARG (exp, 1);
5392   tree arg3 = CALL_EXPR_ARG (exp, 2);
5393 
5394   if (!check_nul_terminated_array (exp, arg1, arg3)
5395       || !check_nul_terminated_array (exp, arg2, arg3))
5396     return NULL_RTX;
5397 
5398   /* Due to the performance benefit, always inline the calls first.  */
5399   rtx result = NULL_RTX;
5400   result = inline_expand_builtin_bytecmp (exp, target);
5401   if (result)
5402     return result;
5403 
5404   /* If c_strlen can determine an expression for one of the string
5405      lengths, and it doesn't have side effects, then emit cmpstrnsi
5406      using length MIN(strlen(string)+1, arg3).  */
5407   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5408   if (cmpstrn_icode == CODE_FOR_nothing)
5409     return NULL_RTX;
5410 
5411   tree len;
5412 
5413   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5414   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5415 
5416   tree len1 = c_strlen (arg1, 1);
5417   tree len2 = c_strlen (arg2, 1);
5418 
5419   location_t loc = EXPR_LOCATION (exp);
5420 
5421   if (len1)
5422     len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5423   if (len2)
5424     len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5425 
5426   tree len3 = fold_convert_loc (loc, sizetype, arg3);
5427 
5428   /* If we don't have a constant length for the first, use the length
5429      of the second, if we know it.  If neither string is constant length,
5430      use the given length argument.  We don't require a constant for
5431      this case; some cost analysis could be done if both are available
5432      but neither is constant.  For now, assume they're equally cheap,
5433      unless one has side effects.  If both strings have constant lengths,
5434      use the smaller.  */
5435 
5436   if (!len1 && !len2)
5437     len = len3;
5438   else if (!len1)
5439     len = len2;
5440   else if (!len2)
5441     len = len1;
5442   else if (TREE_SIDE_EFFECTS (len1))
5443     len = len2;
5444   else if (TREE_SIDE_EFFECTS (len2))
5445     len = len1;
5446   else if (TREE_CODE (len1) != INTEGER_CST)
5447     len = len2;
5448   else if (TREE_CODE (len2) != INTEGER_CST)
5449     len = len1;
5450   else if (tree_int_cst_lt (len1, len2))
5451     len = len1;
5452   else
5453     len = len2;
5454 
5455   /* If we are not using the given length, we must incorporate it here.
5456      The actual new length parameter will be MIN(len,arg3) in this case.  */
5457   if (len != len3)
5458     {
5459       len = fold_convert_loc (loc, sizetype, len);
5460       len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5461     }
5462   rtx arg1_rtx = get_memory_rtx (arg1, len);
5463   rtx arg2_rtx = get_memory_rtx (arg2, len);
5464   rtx arg3_rtx = expand_normal (len);
5465   result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5466 				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
5467 				     MIN (arg1_align, arg2_align));
5468 
5469   tree fndecl = get_callee_fndecl (exp);
5470   if (result)
5471     {
5472       /* Check to see if the argument was declared attribute nonstring
5473 	 and if so, issue a warning since at this point it's not known
5474 	 to be nul-terminated.  */
5475       maybe_warn_nonstring_arg (fndecl, exp);
5476 
5477       /* Return the value in the proper mode for this function.  */
5478       mode = TYPE_MODE (TREE_TYPE (exp));
5479       if (GET_MODE (result) == mode)
5480 	return result;
5481       if (target == 0)
5482 	return convert_to_mode (mode, result, 0);
5483       convert_move (target, result, 0);
5484       return target;
5485     }
5486 
5487   /* Expand the library call ourselves using a stabilized argument
5488      list to avoid re-evaluating the function's arguments twice.  */
5489   tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5490   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5491   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5492   return expand_call (fn, target, target == const0_rtx);
5493 }
5494 
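/* Illustrative sketch (not part of this file) of the length clamping
   above: strncmp cannot examine bytes past the first NUL, so with one
   known string the comparison length becomes MIN (strlen (known) + 1, n):

     strncmp (s, "ab", 100)   // compares at most 3 bytes: 'a', 'b', '\0'
*/
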
5495 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5496    if that's convenient.  */
5497 
5498 rtx
5499 expand_builtin_saveregs (void)
5500 {
5501   rtx val;
5502   rtx_insn *seq;
5503 
5504   /* Don't do __builtin_saveregs more than once in a function.
5505      Save the result of the first call and reuse it.  */
5506   if (saveregs_value != 0)
5507     return saveregs_value;
5508 
5509   /* When this function is called, it means that registers must be
5510      saved on entry to this function.  So we migrate the call to the
5511      first insn of this function.  */
5512 
5513   start_sequence ();
5514 
5515   /* Do whatever the machine needs done in this case.  */
5516   val = targetm.calls.expand_builtin_saveregs ();
5517 
5518   seq = get_insns ();
5519   end_sequence ();
5520 
5521   saveregs_value = val;
5522 
5523   /* Put the insns after the NOTE that starts the function.  If this
5524      is inside a start_sequence, make the outer-level insn chain current, so
5525      the code is placed at the start of the function.  */
5526   push_topmost_sequence ();
5527   emit_insn_after (seq, entry_of_function ());
5528   pop_topmost_sequence ();
5529 
5530   return val;
5531 }
5532 
5533 /* Expand a call to __builtin_next_arg.  */
5534 
5535 static rtx
5536 expand_builtin_next_arg (void)
5537 {
5538   /* Checking arguments is already done in fold_builtin_next_arg
5539      that must be called before this function.  */
5540   return expand_binop (ptr_mode, add_optab,
5541 		       crtl->args.internal_arg_pointer,
5542 		       crtl->args.arg_offset_rtx,
5543 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
5544 }
5545 
5546 /* Make it easier for the backends by protecting the valist argument
5547    from multiple evaluations.  */
5548 
5549 static tree
5550 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5551 {
5552   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5553 
5554   /* The current way of determining the type of valist is completely
5555      bogus.  We should have the information on the va builtin instead.  */
5556   if (!vatype)
5557     vatype = targetm.fn_abi_va_list (cfun->decl);
5558 
5559   if (TREE_CODE (vatype) == ARRAY_TYPE)
5560     {
5561       if (TREE_SIDE_EFFECTS (valist))
5562 	valist = save_expr (valist);
5563 
5564       /* For this case, the backends will be expecting a pointer to
5565 	 vatype, but it's possible we've actually been given an array
5566 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5567 	 So fix it.  */
5568       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5569 	{
5570 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
5571 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5572 	}
5573     }
5574   else
5575     {
5576       tree pt = build_pointer_type (vatype);
5577 
5578       if (! needs_lvalue)
5579 	{
5580 	  if (! TREE_SIDE_EFFECTS (valist))
5581 	    return valist;
5582 
5583 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5584 	  TREE_SIDE_EFFECTS (valist) = 1;
5585 	}
5586 
5587       if (TREE_SIDE_EFFECTS (valist))
5588 	valist = save_expr (valist);
5589       valist = fold_build2_loc (loc, MEM_REF,
5590 				vatype, valist, build_int_cst (pt, 0));
5591     }
5592 
5593   return valist;
5594 }
5595 
5596 /* The "standard" definition of va_list is void*.  */
5597 
5598 tree
5599 std_build_builtin_va_list (void)
5600 {
5601   return ptr_type_node;
5602 }
5603 
5604 /* The "standard" abi va_list is va_list_type_node.  */
5605 
5606 tree
5607 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5608 {
5609   return va_list_type_node;
5610 }
5611 
5612 /* The "standard" type of va_list is va_list_type_node.  */
5613 
5614 tree
5615 std_canonical_va_list_type (tree type)
5616 {
5617   tree wtype, htype;
5618 
5619   wtype = va_list_type_node;
5620   htype = type;
5621 
5622   if (TREE_CODE (wtype) == ARRAY_TYPE)
5623     {
5624       /* If va_list is an array type, the argument may have decayed
5625 	 to a pointer type, e.g. by being passed to another function.
5626 	 In that case, unwrap both types so that we can compare the
5627 	 underlying records.  */
5628       if (TREE_CODE (htype) == ARRAY_TYPE
5629 	  || POINTER_TYPE_P (htype))
5630 	{
5631 	  wtype = TREE_TYPE (wtype);
5632 	  htype = TREE_TYPE (htype);
5633 	}
5634     }
5635   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5636     return va_list_type_node;
5637 
5638   return NULL_TREE;
5639 }
5640 
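/* Illustrative sketch (not part of this file): the array/pointer
   unwrapping above exists for ABIs whose va_list is a one-element array
   type, as on x86-64:

     typedef struct __va_list_tag __builtin_va_list[1];

   A va_list passed to another function decays to
   'struct __va_list_tag *', so both the array type and the decayed
   pointer type must match the canonical va_list.  */
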
5641 /* The "standard" implementation of va_start: just assign `nextarg' to
5642    the variable.  */
5643 
5644 void
5645 std_expand_builtin_va_start (tree valist, rtx nextarg)
5646 {
5647   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5648   convert_move (va_r, nextarg, 0);
5649 }
5650 
5651 /* Expand EXP, a call to __builtin_va_start.  */
5652 
5653 static rtx
5654 expand_builtin_va_start (tree exp)
5655 {
5656   rtx nextarg;
5657   tree valist;
5658   location_t loc = EXPR_LOCATION (exp);
5659 
5660   if (call_expr_nargs (exp) < 2)
5661     {
5662       error_at (loc, "too few arguments to function %<va_start%>");
5663       return const0_rtx;
5664     }
5665 
5666   if (fold_builtin_next_arg (exp, true))
5667     return const0_rtx;
5668 
5669   nextarg = expand_builtin_next_arg ();
5670   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5671 
5672   if (targetm.expand_builtin_va_start)
5673     targetm.expand_builtin_va_start (valist, nextarg);
5674   else
5675     std_expand_builtin_va_start (valist, nextarg);
5676 
5677   return const0_rtx;
5678 }
5679 
5680 /* Expand EXP, a call to __builtin_va_end.  */
5681 
5682 static rtx
5683 expand_builtin_va_end (tree exp)
5684 {
5685   tree valist = CALL_EXPR_ARG (exp, 0);
5686 
5687   /* Evaluate for side effects, if needed.  I hate macros that don't
5688      do that.  */
5689   if (TREE_SIDE_EFFECTS (valist))
5690     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5691 
5692   return const0_rtx;
5693 }
5694 
5695 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
5696    builtin rather than just as an assignment in stdarg.h because of the
5697    nastiness of array-type va_list types.  */
5698 
5699 static rtx
5700 expand_builtin_va_copy (tree exp)
5701 {
5702   tree dst, src, t;
5703   location_t loc = EXPR_LOCATION (exp);
5704 
5705   dst = CALL_EXPR_ARG (exp, 0);
5706   src = CALL_EXPR_ARG (exp, 1);
5707 
5708   dst = stabilize_va_list_loc (loc, dst, 1);
5709   src = stabilize_va_list_loc (loc, src, 0);
5710 
5711   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5712 
5713   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5714     {
5715       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5716       TREE_SIDE_EFFECTS (t) = 1;
5717       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5718     }
5719   else
5720     {
5721       rtx dstb, srcb, size;
5722 
5723       /* Evaluate to pointers.  */
5724       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5725       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5726       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5727       		  NULL_RTX, VOIDmode, EXPAND_NORMAL);
5728 
5729       dstb = convert_memory_address (Pmode, dstb);
5730       srcb = convert_memory_address (Pmode, srcb);
5731 
5732       /* "Dereference" to BLKmode memories.  */
5733       dstb = gen_rtx_MEM (BLKmode, dstb);
5734       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5735       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5736       srcb = gen_rtx_MEM (BLKmode, srcb);
5737       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5738       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5739 
5740       /* Copy.  */
5741       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5742     }
5743 
5744   return const0_rtx;
5745 }
5746 
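/* Illustrative sketch (not part of this file): for the array-type case
   above,

     va_copy (dst, src);

   must copy the whole underlying record, much like
   memcpy (&dst, &src, sizeof (va_list)); a simple pointer assignment
   would leave both lists advancing a single shared cursor.  */
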
5747 /* Expand a call to one of the builtin functions __builtin_frame_address or
5748    __builtin_return_address.  */
5749 
5750 static rtx
5751 expand_builtin_frame_address (tree fndecl, tree exp)
5752 {
5753   /* The argument must be a nonnegative integer constant.
5754      It counts the number of frames to scan up the stack.
5755      The value is either the frame pointer value or the return
5756      address saved in that frame.  */
5757   if (call_expr_nargs (exp) == 0)
5758     /* Warning about missing arg was already issued.  */
5759     return const0_rtx;
5760   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5761     {
5762       error ("invalid argument to %qD", fndecl);
5763       return const0_rtx;
5764     }
5765   else
5766     {
5767       /* Number of frames to scan up the stack.  */
5768       unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5769 
5770       rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5771 
5772       /* Some ports cannot access arbitrary stack frames.  */
5773       if (tem == NULL)
5774 	{
5775 	  warning (0, "unsupported argument to %qD", fndecl);
5776 	  return const0_rtx;
5777 	}
5778 
5779       if (count)
5780 	{
5781 	  /* Warn since no effort is made to ensure that any frame
5782 	     beyond the current one exists or can be safely reached.  */
5783 	  warning (OPT_Wframe_address, "calling %qD with "
5784 		   "a nonzero argument is unsafe", fndecl);
5785 	}
5786 
5787       /* For __builtin_frame_address, return what we've got.  */
5788       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5789 	return tem;
5790 
5791       if (!REG_P (tem)
5792 	  && ! CONSTANT_P (tem))
5793 	tem = copy_addr_to_reg (tem);
5794       return tem;
5795     }
5796 }
5797 
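/* Illustrative usage (not part of this file) of the builtins handled
   above; the argument must be a nonnegative integer literal:

     void *fp = __builtin_frame_address (0);    // current frame
     void *ra = __builtin_return_address (0);   // this function's caller
     void *up = __builtin_return_address (1);   // nonzero count: triggers
                                                //   -Wframe-address
*/
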
5798 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
5799    failed and the caller should emit a normal call.  */
5800 
5801 static rtx
5802 expand_builtin_alloca (tree exp)
5803 {
5804   rtx op0;
5805   rtx result;
5806   unsigned int align;
5807   tree fndecl = get_callee_fndecl (exp);
5808   HOST_WIDE_INT max_size;
5809   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5810   bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5811   bool valid_arglist
5812     = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5813        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5814 			   VOID_TYPE)
5815        : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5816 	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5817 	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5818 
5819   if (!valid_arglist)
5820     return NULL_RTX;
5821 
5822   if ((alloca_for_var
5823        && warn_vla_limit >= HOST_WIDE_INT_MAX
5824        && warn_alloc_size_limit < warn_vla_limit)
5825       || (!alloca_for_var
5826 	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
5827 	  && warn_alloc_size_limit < warn_alloca_limit
5828 	  ))
5829     {
5830       /* -Walloca-larger-than and -Wvla-larger-than settings of
5831 	 less than HOST_WIDE_INT_MAX override the more general
5832 	 -Walloc-size-larger-than so unless either of the former
5833 	 options is smaller than the last one (which would imply
5834 	 that the call was already checked), check the alloca
5835 	 arguments for overflow.  */
5836       tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5837       int idx[] = { 0, -1 };
5838       maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5839     }
5840 
5841   /* Compute the argument.  */
5842   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5843 
5844   /* Compute the alignment.  */
5845   align = (fcode == BUILT_IN_ALLOCA
5846 	   ? BIGGEST_ALIGNMENT
5847 	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5848 
5849   /* Compute the maximum size.  */
5850   max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5851               ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5852               : -1);
5853 
5854   /* Allocate the desired space.  If the allocation stems from the declaration
5855      of a variable-sized object, it cannot accumulate.  */
5856   result
5857     = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5858   result = convert_memory_address (ptr_mode, result);
5859 
5860   /* Dynamic allocations for variables are recorded during gimplification.  */
5861   if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5862     record_dynamic_alloc (exp);
5863 
5864   return result;
5865 }
5866 
5867 /* Emit the __asan_allocas_unpoison call in EXP.  Add to the second argument
5868    of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5869    STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
5870    handle_builtin_stack_restore function.  */
5871 
5872 static rtx
5873 expand_asan_emit_allocas_unpoison (tree exp)
5874 {
5875   tree arg0 = CALL_EXPR_ARG (exp, 0);
5876   tree arg1 = CALL_EXPR_ARG (exp, 1);
5877   rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5878   rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5879   rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5880 				 stack_pointer_rtx, NULL_RTX, 0,
5881 				 OPTAB_LIB_WIDEN);
5882   off = convert_modes (ptr_mode, Pmode, off, 0);
5883   bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5884 			     OPTAB_LIB_WIDEN);
5885   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5886   ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5887 				 top, ptr_mode, bot, ptr_mode);
5888   return ret;
5889 }
5890 
5891 /* Expand a call to bswap builtin in EXP.
5892    Return NULL_RTX if a normal call should be emitted rather than expanding the
5893    function in-line.  If convenient, the result should be placed in TARGET.
5894    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5895 
5896 static rtx
5897 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5898 		      rtx subtarget)
5899 {
5900   tree arg;
5901   rtx op0;
5902 
5903   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5904     return NULL_RTX;
5905 
5906   arg = CALL_EXPR_ARG (exp, 0);
5907   op0 = expand_expr (arg,
5908 		     subtarget && GET_MODE (subtarget) == target_mode
5909 		     ? subtarget : NULL_RTX,
5910 		     target_mode, EXPAND_NORMAL);
5911   if (GET_MODE (op0) != target_mode)
5912     op0 = convert_to_mode (target_mode, op0, 1);
5913 
5914   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5915 
5916   gcc_assert (target);
5917 
5918   return convert_to_mode (target_mode, target, 1);
5919 }
5920 
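/* Illustrative sketch (not part of this file) of the semantics expanded
   above via bswap_optab:

     __builtin_bswap16 (0x1122)     == 0x2211
     __builtin_bswap32 (0x11223344) == 0x44332211
*/
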
5921 /* Expand a call to a unary builtin in EXP.
5922    Return NULL_RTX if a normal call should be emitted rather than expanding the
5923    function in-line.  If convenient, the result should be placed in TARGET.
5924    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5925 
5926 static rtx
5927 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5928 		     rtx subtarget, optab op_optab)
5929 {
5930   rtx op0;
5931 
5932   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5933     return NULL_RTX;
5934 
5935   /* Compute the argument.  */
5936   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5937 		     (subtarget
5938 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5939 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5940 		     VOIDmode, EXPAND_NORMAL);
5941   /* Compute op, into TARGET if possible.
5942      Set TARGET to wherever the result comes back.  */
5943   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5944 			op_optab, op0, target, op_optab != clrsb_optab);
5945   gcc_assert (target);
5946 
5947   return convert_to_mode (target_mode, target, 0);
5948 }
5949 
5950 /* Expand a call to __builtin_expect.  We just return our argument
5951    as the builtin_expect semantics should have already been executed by
5952    the tree branch prediction pass.  */
5953 
5954 static rtx
5955 expand_builtin_expect (tree exp, rtx target)
5956 {
5957   tree arg;
5958 
5959   if (call_expr_nargs (exp) < 2)
5960     return const0_rtx;
5961   arg = CALL_EXPR_ARG (exp, 0);
5962 
5963   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5964   /* When guessing was done, the hints should be already stripped away.  */
5965   gcc_assert (!flag_guess_branch_prob
5966 	      || optimize == 0 || seen_error ());
5967   return target;
5968 }
5969 
5970 /* Expand a call to __builtin_expect_with_probability.  We just return our
5971    argument as the builtin_expect semantics should have already been
5972    executed by the tree branch prediction pass.  */
5973 
5974 static rtx
5975 expand_builtin_expect_with_probability (tree exp, rtx target)
5976 {
5977   tree arg;
5978 
5979   if (call_expr_nargs (exp) < 3)
5980     return const0_rtx;
5981   arg = CALL_EXPR_ARG (exp, 0);
5982 
5983   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5984   /* When guessing was done, the hints should be already stripped away.  */
5985   gcc_assert (!flag_guess_branch_prob
5986 	      || optimize == 0 || seen_error ());
5987   return target;
5988 }
5989 
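/* Illustrative usage (not part of this file) of the two expanders above;
   handle_error and fast_path are placeholders.  By expansion time the
   hint has been consumed by branch prediction, so only the first argument
   survives:

     if (__builtin_expect (err != 0, 0))                  // err expected 0
       handle_error (err);
     if (__builtin_expect_with_probability (x, 1, 0.9))   // ~90% likely
       fast_path ();
*/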
5990 
5991 /* Expand a call to __builtin_assume_aligned.  We just return our first
5992    argument as the builtin_assume_aligned semantic should've been already
5993    executed by CCP.  */
5994 
5995 static rtx
5996 expand_builtin_assume_aligned (tree exp, rtx target)
5997 {
5998   if (call_expr_nargs (exp) < 2)
5999     return const0_rtx;
6000   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6001 			EXPAND_NORMAL);
6002   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6003 	      && (call_expr_nargs (exp) < 3
6004 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6005   return target;
6006 }
6007 
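/* Illustrative usage (not part of this file): by this point CCP has
   already recorded the alignment fact, so expansion just returns the
   pointer unchanged:

     p = __builtin_assume_aligned (p, 64);   // p promised 64-byte aligned
*/
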
6008 void
6009 expand_builtin_trap (void)
6010 {
6011   if (targetm.have_trap ())
6012     {
6013       rtx_insn *insn = emit_insn (targetm.gen_trap ());
6014       /* For trap insns when not accumulating outgoing args force
6015 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
6016 	 different args sizes.  */
6017       if (!ACCUMULATE_OUTGOING_ARGS)
6018 	add_args_size_note (insn, stack_pointer_delta);
6019     }
6020   else
6021     {
6022       tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6023       tree call_expr = build_call_expr (fn, 0);
6024       expand_call (call_expr, NULL_RTX, false);
6025     }
6026 
6027   emit_barrier ();
6028 }
6029 
6030 /* Expand a call to __builtin_unreachable.  We do nothing except emit
6031    a barrier saying that control flow will not pass here.
6032 
6033    It is the responsibility of the program being compiled to ensure
6034    that control flow never reaches __builtin_unreachable.  */
6035 static void
6036 expand_builtin_unreachable (void)
6037 {
6038   emit_barrier ();
6039 }
6040 
6041 /* Expand EXP, a call to fabs, fabsf or fabsl.
6042    Return NULL_RTX if a normal call should be emitted rather than expanding
6043    the function inline.  If convenient, the result should be placed
6044    in TARGET.  SUBTARGET may be used as the target for computing
6045    the operand.  */
6046 
6047 static rtx
6048 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6049 {
6050   machine_mode mode;
6051   tree arg;
6052   rtx op0;
6053 
6054   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6055     return NULL_RTX;
6056 
6057   arg = CALL_EXPR_ARG (exp, 0);
6058   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6059   mode = TYPE_MODE (TREE_TYPE (arg));
6060   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6061   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6062 }
6063 
6064 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6065    Return NULL if a normal call should be emitted rather than expanding the
6066    function inline.  If convenient, the result should be placed in TARGET.
6067    SUBTARGET may be used as the target for computing the operand.  */
6068 
6069 static rtx
6070 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6071 {
6072   rtx op0, op1;
6073   tree arg;
6074 
6075   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6076     return NULL_RTX;
6077 
6078   arg = CALL_EXPR_ARG (exp, 0);
6079   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6080 
6081   arg = CALL_EXPR_ARG (exp, 1);
6082   op1 = expand_normal (arg);
6083 
6084   return expand_copysign (op0, op1, target);
6085 }
6086 
6087 /* Expand a call to __builtin___clear_cache.  */
6088 
6089 static rtx
6090 expand_builtin___clear_cache (tree exp)
6091 {
6092   if (!targetm.code_for_clear_cache)
6093     {
6094 #ifdef CLEAR_INSN_CACHE
6095       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6096 	 does something.  Just do the default expansion to a call to
6097 	 __clear_cache().  */
6098       return NULL_RTX;
6099 #else
6100       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6101 	 does nothing.  There is no need to call it.  Do nothing.  */
6102       return const0_rtx;
6103 #endif /* CLEAR_INSN_CACHE */
6104     }
6105 
6106   /* We have a "clear_cache" insn, and it will handle everything.  */
6107   tree begin, end;
6108   rtx begin_rtx, end_rtx;
6109 
6110   /* We must not expand to a library call.  If we did, any
6111      fallback library function in libgcc that might contain a call to
6112      __builtin___clear_cache() would recurse infinitely.  */
6113   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6114     {
6115       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6116       return const0_rtx;
6117     }
6118 
6119   if (targetm.have_clear_cache ())
6120     {
6121       class expand_operand ops[2];
6122 
6123       begin = CALL_EXPR_ARG (exp, 0);
6124       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6125 
6126       end = CALL_EXPR_ARG (exp, 1);
6127       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6128 
6129       create_address_operand (&ops[0], begin_rtx);
6130       create_address_operand (&ops[1], end_rtx);
6131       if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6132 	return const0_rtx;
6133     }
6134   return const0_rtx;
6135 }
6136 
6137 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
6138 
6139 static rtx
6140 round_trampoline_addr (rtx tramp)
6141 {
6142   rtx temp, addend, mask;
6143 
6144   /* If we don't need too much alignment, we'll have been guaranteed
6145      proper alignment by get_trampoline_type.  */
6146   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6147     return tramp;
6148 
6149   /* Round address up to desired boundary.  */
6150   temp = gen_reg_rtx (Pmode);
6151   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6152   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6153 
6154   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
6155 			       temp, 0, OPTAB_LIB_WIDEN);
6156   tramp = expand_simple_binop (Pmode, AND, temp, mask,
6157 			       temp, 0, OPTAB_LIB_WIDEN);
6158 
6159   return tramp;
6160 }
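
/* Illustrative only: for a TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes),
   the two binops above amount to the usual round-up idiom

     tramp = (tramp + 8 - 1) & -8;

   i.e. add the alignment in bytes minus one, then mask off the low bits.  */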
6161 
6162 static rtx
6163 expand_builtin_init_trampoline (tree exp, bool onstack)
6164 {
6165   tree t_tramp, t_func, t_chain;
6166   rtx m_tramp, r_tramp, r_chain, tmp;
6167 
6168   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6169 			 POINTER_TYPE, VOID_TYPE))
6170     return NULL_RTX;
6171 
6172   t_tramp = CALL_EXPR_ARG (exp, 0);
6173   t_func = CALL_EXPR_ARG (exp, 1);
6174   t_chain = CALL_EXPR_ARG (exp, 2);
6175 
6176   r_tramp = expand_normal (t_tramp);
6177   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6178   MEM_NOTRAP_P (m_tramp) = 1;
6179 
6180   /* If ONSTACK, the TRAMP argument should be the address of a field
6181      within the local function's FRAME decl.  Either way, let's see if
6182      we can fill in the MEM_ATTRs for this memory.  */
6183   if (TREE_CODE (t_tramp) == ADDR_EXPR)
6184     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6185 
6186   /* The creator of a heap trampoline is responsible for making sure the
6187      address is aligned to at least STACK_BOUNDARY.  Normally malloc
6188      will ensure this anyhow.  */
6189   tmp = round_trampoline_addr (r_tramp);
6190   if (tmp != r_tramp)
6191     {
6192       m_tramp = change_address (m_tramp, BLKmode, tmp);
6193       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6194       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6195     }
6196 
6197   /* The FUNC argument should be the address of the nested function.
6198      Extract the actual function decl to pass to the hook.  */
6199   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6200   t_func = TREE_OPERAND (t_func, 0);
6201   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6202 
6203   r_chain = expand_normal (t_chain);
6204 
6205   /* Generate insns to initialize the trampoline.  */
6206   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6207 
6208   if (onstack)
6209     {
6210       trampolines_created = 1;
6211 
6212       if (targetm.calls.custom_function_descriptors != 0)
6213 	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6214 		    "trampoline generated for nested function %qD", t_func);
6215     }
6216 
6217   return const0_rtx;
6218 }
6219 
6220 static rtx
6221 expand_builtin_adjust_trampoline (tree exp)
6222 {
6223   rtx tramp;
6224 
6225   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6226     return NULL_RTX;
6227 
6228   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6229   tramp = round_trampoline_addr (tramp);
6230   if (targetm.calls.trampoline_adjust_address)
6231     tramp = targetm.calls.trampoline_adjust_address (tramp);
6232 
6233   return tramp;
6234 }
6235 
6236 /* Expand a call to the builtin descriptor initialization routine.
6237    A descriptor is made up of a pair of pointers: the static
6238    chain and the code entry, in that order.  */
6239 
6240 static rtx
6241 expand_builtin_init_descriptor (tree exp)
6242 {
6243   tree t_descr, t_func, t_chain;
6244   rtx m_descr, r_descr, r_func, r_chain;
6245 
6246   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6247 			 VOID_TYPE))
6248     return NULL_RTX;
6249 
6250   t_descr = CALL_EXPR_ARG (exp, 0);
6251   t_func = CALL_EXPR_ARG (exp, 1);
6252   t_chain = CALL_EXPR_ARG (exp, 2);
6253 
6254   r_descr = expand_normal (t_descr);
6255   m_descr = gen_rtx_MEM (BLKmode, r_descr);
6256   MEM_NOTRAP_P (m_descr) = 1;
6257   set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6258 
6259   r_func = expand_normal (t_func);
6260   r_chain = expand_normal (t_chain);
6261 
6262   /* Generate insns to initialize the descriptor.  */
6263   emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6264   emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6265 				     POINTER_SIZE / BITS_PER_UNIT), r_func);
6266 
6267   return const0_rtx;
6268 }
6269 
6270 /* Expand a call to the builtin descriptor adjustment routine.  */
6271 
6272 static rtx
6273 expand_builtin_adjust_descriptor (tree exp)
6274 {
6275   rtx tramp;
6276 
6277   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6278     return NULL_RTX;
6279 
6280   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6281 
6282   /* Unalign the descriptor to allow runtime identification.  */
6283   tramp = plus_constant (ptr_mode, tramp,
6284 			 targetm.calls.custom_function_descriptors);
6285 
6286   return force_operand (tramp, NULL_RTX);
6287 }
6288 
6289 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6290    function.  The function first checks whether the back end provides
6291    an insn to implement signbit for the respective mode.  If not, it
6292    checks whether the floating point format of the value is such that
6293    the sign bit can be extracted.  If that is not the case, error out.
6294    EXP is the expression that is a call to the builtin function; if
6295    convenient, the result should be placed in TARGET.  */
6296 static rtx
6297 expand_builtin_signbit (tree exp, rtx target)
6298 {
6299   const struct real_format *fmt;
6300   scalar_float_mode fmode;
6301   scalar_int_mode rmode, imode;
6302   tree arg;
6303   int word, bitpos;
6304   enum insn_code icode;
6305   rtx temp;
6306   location_t loc = EXPR_LOCATION (exp);
6307 
6308   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6309     return NULL_RTX;
6310 
6311   arg = CALL_EXPR_ARG (exp, 0);
6312   fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6313   rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6314   fmt = REAL_MODE_FORMAT (fmode);
6315 
6316   arg = builtin_save_expr (arg);
6317 
6318   /* Expand the argument, yielding an RTX expression.  */
6319   temp = expand_normal (arg);
6320 
6321   /* Check if the back end provides an insn that handles signbit for the
6322      argument's mode. */
6323   icode = optab_handler (signbit_optab, fmode);
6324   if (icode != CODE_FOR_nothing)
6325     {
6326       rtx_insn *last = get_last_insn ();
6327       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6328       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6329 	return target;
6330       delete_insns_since (last);
6331     }
6332 
6333   /* For floating point formats without a sign bit, implement signbit
6334      as "ARG < 0.0".  */
6335   bitpos = fmt->signbit_ro;
6336   if (bitpos < 0)
6337   {
6338     /* But we can't do this if the format supports signed zero.  */
6339     gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6340 
6341     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6342 		       build_real (TREE_TYPE (arg), dconst0));
6343     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6344   }
6345 
6346   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6347     {
6348       imode = int_mode_for_mode (fmode).require ();
6349       temp = gen_lowpart (imode, temp);
6350     }
6351   else
6352     {
6353       imode = word_mode;
6354       /* Handle targets with different FP word orders.  */
6355       if (FLOAT_WORDS_BIG_ENDIAN)
6356 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6357       else
6358 	word = bitpos / BITS_PER_WORD;
6359       temp = operand_subword_force (temp, word, fmode);
6360       bitpos = bitpos % BITS_PER_WORD;
6361     }
6362 
6363   /* Force the intermediate word_mode (or narrower) result into a
6364      register.  This avoids attempting to create paradoxical SUBREGs
6365      of floating point modes below.  */
6366   temp = force_reg (imode, temp);
6367 
6368   /* If the bitpos is within the "result mode" lowpart, the operation
6369      can be implemented with a single bitwise AND.  Otherwise, we need
6370      a right shift and an AND.  */
6371 
6372   if (bitpos < GET_MODE_BITSIZE (rmode))
6373     {
6374       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6375 
6376       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6377 	temp = gen_lowpart (rmode, temp);
6378       temp = expand_binop (rmode, and_optab, temp,
6379 			   immed_wide_int_const (mask, rmode),
6380 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
6381     }
6382   else
6383     {
6384       /* Perform a logical right shift to place the signbit in the least
6385 	 significant bit, then truncate the result to the desired mode
6386 	 and mask just this bit.  */
6387       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6388       temp = gen_lowpart (rmode, temp);
6389       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6390 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
6391     }
6392 
6393   return temp;
6394 }
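
/* Illustrative only, with modes that are typical but target-dependent:
   for IEEE single (bitpos 31) and a 32-bit result mode, the first branch
   above masks directly with 1 << 31; for IEEE double (bitpos 63) and the
   same result mode, the second branch computes the equivalent of

     signbit = (int) ((bits >> 63) & 1);  */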
6395 
6396 /* Expand fork or exec calls.  TARGET is the desired target of the
6397    call.  EXP is the call.  FN is the
6398    declaration of the actual function.  IGNORE is nonzero if the
6399    value is to be ignored.  */
6400 
6401 static rtx
6402 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6403 {
6404   tree id, decl;
6405   tree call;
6406 
6407   if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6408     {
6409       /* Detect unterminated path.  */
6410       if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6411 	return NULL_RTX;
6412 
6413       /* Also detect unterminated first argument.  */
6414       switch (DECL_FUNCTION_CODE (fn))
6415 	{
6416 	case BUILT_IN_EXECL:
6417 	case BUILT_IN_EXECLE:
6418 	case BUILT_IN_EXECLP:
6419 	  if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6420 	    return NULL_RTX;
6421 	default:
6422 	  break;
6423 	}
6424     }
6425 
6426 
6427   /* If we are not profiling, just call the function.  */
6428   if (!profile_arc_flag)
6429     return NULL_RTX;
6430 
6431   /* Otherwise call the wrapper.  This should be equivalent for the rest of
6432      the compiler, so the code does not diverge, and the wrapper may run the
6433      code necessary for keeping the profiling sane.  */
6434 
6435   switch (DECL_FUNCTION_CODE (fn))
6436     {
6437     case BUILT_IN_FORK:
6438       id = get_identifier ("__gcov_fork");
6439       break;
6440 
6441     case BUILT_IN_EXECL:
6442       id = get_identifier ("__gcov_execl");
6443       break;
6444 
6445     case BUILT_IN_EXECV:
6446       id = get_identifier ("__gcov_execv");
6447       break;
6448 
6449     case BUILT_IN_EXECLP:
6450       id = get_identifier ("__gcov_execlp");
6451       break;
6452 
6453     case BUILT_IN_EXECLE:
6454       id = get_identifier ("__gcov_execle");
6455       break;
6456 
6457     case BUILT_IN_EXECVP:
6458       id = get_identifier ("__gcov_execvp");
6459       break;
6460 
6461     case BUILT_IN_EXECVE:
6462       id = get_identifier ("__gcov_execve");
6463       break;
6464 
6465     default:
6466       gcc_unreachable ();
6467     }
6468 
6469   decl = build_decl (DECL_SOURCE_LOCATION (fn),
6470 		     FUNCTION_DECL, id, TREE_TYPE (fn));
6471   DECL_EXTERNAL (decl) = 1;
6472   TREE_PUBLIC (decl) = 1;
6473   DECL_ARTIFICIAL (decl) = 1;
6474   TREE_NOTHROW (decl) = 1;
6475   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6476   DECL_VISIBILITY_SPECIFIED (decl) = 1;
6477   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6478   return expand_call (call, target, ignore);
6479 }
6480 
6481 
6482 
6483 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
6484    the pointer in these functions is void*, the tree optimizers may remove
6485    casts.  The mode computed in expand_builtin isn't reliable either, due
6486    to __sync_bool_compare_and_swap.
6487 
6488    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6489    group of builtins.  This gives us log2 of the mode size.  */
6490 
6491 static inline machine_mode
6492 get_builtin_sync_mode (int fcode_diff)
6493 {
6494   /* The size is not negotiable, so ask not to get BLKmode in return
6495      if the target indicates that a smaller size would be better.  */
6496   return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6497 }
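
/* Illustrative only: BUILT_IN_SYNC_FETCH_AND_ADD_4 is two enum values
   past BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and the mode
   requested above is 8 << 2 = 32 bits wide (SImode on most targets).  */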
6498 
6499 /* Expand the memory expression LOC and return the appropriate memory operand
6500    for the builtin_sync operations.  */
6501 
6502 static rtx
6503 get_builtin_sync_mem (tree loc, machine_mode mode)
6504 {
6505   rtx addr, mem;
6506   int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6507 				    ? TREE_TYPE (TREE_TYPE (loc))
6508 				    : TREE_TYPE (loc));
6509   scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6510 
6511   addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6512   addr = convert_memory_address (addr_mode, addr);
6513 
6514   /* Note that we explicitly do not want any alias information for this
6515      memory, so that we kill all other live memories.  Otherwise we don't
6516      satisfy the full barrier semantics of the intrinsic.  */
6517   mem = gen_rtx_MEM (mode, addr);
6518 
6519   set_mem_addr_space (mem, addr_space);
6520 
6521   mem = validize_mem (mem);
6522 
6523   /* The alignment needs to be at least that of the mode.  */
6524   set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6525 			   get_pointer_alignment (loc)));
6526   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6527   MEM_VOLATILE_P (mem) = 1;
6528 
6529   return mem;
6530 }
6531 
6532 /* Make sure an argument is in the right mode.
6533    EXP is the tree argument.
6534    MODE is the mode it should be in.  */
6535 
6536 static rtx
6537 expand_expr_force_mode (tree exp, machine_mode mode)
6538 {
6539   rtx val;
6540   machine_mode old_mode;
6541 
6542   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6543   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
6544      of CONST_INTs, where we know the old_mode only from the call argument.  */
6545 
6546   old_mode = GET_MODE (val);
6547   if (old_mode == VOIDmode)
6548     old_mode = TYPE_MODE (TREE_TYPE (exp));
6549   val = convert_modes (mode, old_mode, val, 1);
6550   return val;
6551 }
6552 
6553 
6554 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6555    EXP is the CALL_EXPR.  CODE is the rtx code
6556    that corresponds to the arithmetic or logical operation from the name;
6557    an exception here is that NOT actually means NAND.  TARGET is an optional
6558    place for us to store the results; AFTER is true if this is the
6559    fetch_and_xxx form.  */
6560 
6561 static rtx
6562 expand_builtin_sync_operation (machine_mode mode, tree exp,
6563 			       enum rtx_code code, bool after,
6564 			       rtx target)
6565 {
6566   rtx val, mem;
6567   location_t loc = EXPR_LOCATION (exp);
6568 
6569   if (code == NOT && warn_sync_nand)
6570     {
6571       tree fndecl = get_callee_fndecl (exp);
6572       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6573 
6574       static bool warned_f_a_n, warned_n_a_f;
6575 
6576       switch (fcode)
6577 	{
6578 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6579 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6580 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6581 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6582 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6583 	  if (warned_f_a_n)
6584 	    break;
6585 
6586 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6587 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6588 	  warned_f_a_n = true;
6589 	  break;
6590 
6591 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6592 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6593 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6594 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6595 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6596 	  if (warned_n_a_f)
6597 	    break;
6598 
6599 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6600 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6601 	  warned_n_a_f = true;
6602 	  break;
6603 
6604 	default:
6605 	  gcc_unreachable ();
6606 	}
6607     }
6608 
6609   /* Expand the operands.  */
6610   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6611   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6612 
6613   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6614 				 after);
6615 }
6616 
6617 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6618    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
6619    true if this is the boolean form.  TARGET is a place for us to store the
6620    results; this is NOT optional if IS_BOOL is true.  */
6621 
6622 static rtx
6623 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6624 				 bool is_bool, rtx target)
6625 {
6626   rtx old_val, new_val, mem;
6627   rtx *pbool, *poval;
6628 
6629   /* Expand the operands.  */
6630   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6631   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6632   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6633 
6634   pbool = poval = NULL;
6635   if (target != const0_rtx)
6636     {
6637       if (is_bool)
6638 	pbool = &target;
6639       else
6640 	poval = &target;
6641     }
6642   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6643 				       false, MEMMODEL_SYNC_SEQ_CST,
6644 				       MEMMODEL_SYNC_SEQ_CST))
6645     return NULL_RTX;
6646 
6647   return target;
6648 }
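
/* Illustrative only: the two source-level forms handled above are

     old = __sync_val_compare_and_swap (&v, expected, desired);
     ok  = __sync_bool_compare_and_swap (&v, expected, desired);

   both store DESIRED into the object only if it still equals EXPECTED;
   the val form returns the prior value, the bool form returns whether
   the swap happened.  */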
6649 
6650 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
6651    general form is actually an atomic exchange, and some targets only
6652    support a reduced form with the second argument being a constant 1.
6653    EXP is the CALL_EXPR; TARGET is an optional place for us to store
6654    the results.  */
6655 
6656 static rtx
6657 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6658 				       rtx target)
6659 {
6660   rtx val, mem;
6661 
6662   /* Expand the operands.  */
6663   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6664   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6665 
6666   return expand_sync_lock_test_and_set (target, mem, val);
6667 }
6668 
6669 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
6670 
6671 static void
6672 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6673 {
6674   rtx mem;
6675 
6676   /* Expand the operands.  */
6677   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6678 
6679   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6680 }
6681 
6682 /* Given an integer representing an ``enum memmodel'', verify its
6683    correctness and return the memory model enum.  */
6684 
6685 static enum memmodel
6686 get_memmodel (tree exp)
6687 {
6688   rtx op;
6689   unsigned HOST_WIDE_INT val;
6690   location_t loc
6691     = expansion_point_location_if_in_system_header (input_location);
6692 
6693   /* If the parameter is not a constant, it's a run time value so we'll just
6694      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
6695   if (TREE_CODE (exp) != INTEGER_CST)
6696     return MEMMODEL_SEQ_CST;
6697 
6698   op = expand_normal (exp);
6699 
6700   val = INTVAL (op);
6701   if (targetm.memmodel_check)
6702     val = targetm.memmodel_check (val);
6703   else if (val & ~MEMMODEL_MASK)
6704     {
6705       warning_at (loc, OPT_Winvalid_memory_model,
6706 		  "unknown architecture specifier in memory model to builtin");
6707       return MEMMODEL_SEQ_CST;
6708     }
6709 
6710   /* Should never see a user-explicit SYNC memory model, so >= LAST works.  */
6711   if (memmodel_base (val) >= MEMMODEL_LAST)
6712     {
6713       warning_at (loc, OPT_Winvalid_memory_model,
6714 		  "invalid memory model argument to builtin");
6715       return MEMMODEL_SEQ_CST;
6716     }
6717 
6718   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6719      be conservative and promote consume to acquire.  */
6720   if (val == MEMMODEL_CONSUME)
6721     val = MEMMODEL_ACQUIRE;
6722 
6723   return (enum memmodel) val;
6724 }
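
/* Illustrative only: a call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   reaches get_memmodel with EXP being the INTEGER_CST for
   __ATOMIC_CONSUME, which is conservatively promoted to
   MEMMODEL_ACQUIRE as noted above.  */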
6725 
6726 /* Expand the __atomic_exchange intrinsic:
6727    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6728    EXP is the CALL_EXPR.
6729    TARGET is an optional place for us to store the results.  */
6730 
6731 static rtx
6732 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6733 {
6734   rtx val, mem;
6735   enum memmodel model;
6736 
6737   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6738 
6739   if (!flag_inline_atomics)
6740     return NULL_RTX;
6741 
6742   /* Expand the operands.  */
6743   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6744   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6745 
6746   return expand_atomic_exchange (target, mem, val, model);
6747 }
6748 
6749 /* Expand the __atomic_compare_exchange intrinsic:
6750    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6751 					TYPE desired, BOOL weak,
6752 					enum memmodel success,
6753 					enum memmodel failure)
6754    EXP is the CALL_EXPR.
6755    TARGET is an optional place for us to store the results.  */
6756 
6757 static rtx
6758 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6759 					rtx target)
6760 {
6761   rtx expect, desired, mem, oldval;
6762   rtx_code_label *label;
6763   enum memmodel success, failure;
6764   tree weak;
6765   bool is_weak;
6766   location_t loc
6767     = expansion_point_location_if_in_system_header (input_location);
6768 
6769   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6770   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6771 
6772   if (failure > success)
6773     {
6774       warning_at (loc, OPT_Winvalid_memory_model,
6775 		  "failure memory model cannot be stronger than success "
6776 		  "memory model for %<__atomic_compare_exchange%>");
6777       success = MEMMODEL_SEQ_CST;
6778     }
6779 
6780   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6781     {
6782       warning_at (loc, OPT_Winvalid_memory_model,
6783 		  "invalid failure memory model for "
6784 		  "%<__atomic_compare_exchange%>");
6785       failure = MEMMODEL_SEQ_CST;
6786       success = MEMMODEL_SEQ_CST;
6787     }
6788 
6789 
6790   if (!flag_inline_atomics)
6791     return NULL_RTX;
6792 
6793   /* Expand the operands.  */
6794   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6795 
6796   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6797   expect = convert_memory_address (Pmode, expect);
6798   expect = gen_rtx_MEM (mode, expect);
6799   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6800 
6801   weak = CALL_EXPR_ARG (exp, 3);
6802   is_weak = false;
6803   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6804     is_weak = true;
6805 
6806   if (target == const0_rtx)
6807     target = NULL;
6808 
6809   /* Lest the rtl backend create a race condition with an improper store
6810      to memory, always create a new pseudo for OLDVAL.  */
6811   oldval = NULL;
6812 
6813   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6814 				       is_weak, success, failure))
6815     return NULL_RTX;
6816 
6817   /* Conditionally store back to EXPECT, lest we create a race condition
6818      with an improper store to memory.  */
6819   /* ??? With a rearrangement of atomics at the gimple level, we can handle
6820      the normal case where EXPECT is totally private, i.e. a register.  At
6821      which point the store can be unconditional.  */
6822   label = gen_label_rtx ();
6823   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6824 			   GET_MODE (target), 1, label);
6825   emit_move_insn (expect, oldval);
6826   emit_label (label);
6827 
6828   return target;
6829 }
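
/* Illustrative only: the source-level form expanded above is

     bool ok = __atomic_compare_exchange_n (&v, &expected, desired,
					    weak, success_model,
					    failure_model);

   On failure the current value of the object is written back through
   the second argument, which is why the conditional store to EXPECT
   is emitted above.  */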
6830 
6831 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6832    internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6833    call.  The weak parameter must be dropped to match the expected parameter
6834    list and the expected argument changed from value to pointer to memory
6835    slot.  */
6836 
6837 static void
6838 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6839 {
6840   unsigned int z;
6841   vec<tree, va_gc> *vec;
6842 
6843   vec_alloc (vec, 5);
6844   vec->quick_push (gimple_call_arg (call, 0));
6845   tree expected = gimple_call_arg (call, 1);
6846   rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6847 				      TREE_TYPE (expected));
6848   rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6849   if (expd != x)
6850     emit_move_insn (x, expd);
6851   tree v = make_tree (TREE_TYPE (expected), x);
6852   vec->quick_push (build1 (ADDR_EXPR,
6853 			   build_pointer_type (TREE_TYPE (expected)), v));
6854   vec->quick_push (gimple_call_arg (call, 2));
6855   /* Skip the boolean weak parameter.  */
6856   for (z = 4; z < 6; z++)
6857     vec->quick_push (gimple_call_arg (call, z));
6858   /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
6859   unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6860   gcc_assert (bytes_log2 < 5);
6861   built_in_function fncode
6862     = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6863 			   + bytes_log2);
6864   tree fndecl = builtin_decl_explicit (fncode);
6865   tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6866 		    fndecl);
6867   tree exp = build_call_vec (boolean_type_node, fn, vec);
6868   tree lhs = gimple_call_lhs (call);
6869   rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6870   if (lhs)
6871     {
6872       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6873       if (GET_MODE (boolret) != mode)
6874 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6875       x = force_reg (mode, x);
6876       write_complex_part (target, boolret, true);
6877       write_complex_part (target, x, false);
6878     }
6879 }
6880 
6881 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */
6882 
6883 void
6884 expand_ifn_atomic_compare_exchange (gcall *call)
6885 {
6886   int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6887   gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6888   machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6889   rtx expect, desired, mem, oldval, boolret;
6890   enum memmodel success, failure;
6891   tree lhs;
6892   bool is_weak;
6893   location_t loc
6894     = expansion_point_location_if_in_system_header (gimple_location (call));
6895 
6896   success = get_memmodel (gimple_call_arg (call, 4));
6897   failure = get_memmodel (gimple_call_arg (call, 5));
6898 
6899   if (failure > success)
6900     {
6901       warning_at (loc, OPT_Winvalid_memory_model,
6902 		  "failure memory model cannot be stronger than success "
6903 		  "memory model for %<__atomic_compare_exchange%>");
6904       success = MEMMODEL_SEQ_CST;
6905     }
6906 
6907   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6908     {
6909       warning_at (loc, OPT_Winvalid_memory_model,
6910 		  "invalid failure memory model for "
6911 		  "%<__atomic_compare_exchange%>");
6912       failure = MEMMODEL_SEQ_CST;
6913       success = MEMMODEL_SEQ_CST;
6914     }
6915 
6916   if (!flag_inline_atomics)
6917     {
6918       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6919       return;
6920     }
6921 
6922   /* Expand the operands.  */
6923   mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6924 
6925   expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6926   desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6927 
6928   is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6929 
6930   boolret = NULL;
6931   oldval = NULL;
6932 
6933   if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6934 				       is_weak, success, failure))
6935     {
6936       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6937       return;
6938     }
6939 
6940   lhs = gimple_call_lhs (call);
6941   if (lhs)
6942     {
6943       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6944       if (GET_MODE (boolret) != mode)
6945 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6946       write_complex_part (target, boolret, true);
6947       write_complex_part (target, oldval, false);
6948     }
6949 }
6950 
6951 /* Expand the __atomic_load intrinsic:
6952    	TYPE __atomic_load (TYPE *object, enum memmodel)
6953    EXP is the CALL_EXPR.
6954    TARGET is an optional place for us to store the results.  */
6955 
6956 static rtx
6957 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6958 {
6959   rtx mem;
6960   enum memmodel model;
6961 
6962   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6963   if (is_mm_release (model) || is_mm_acq_rel (model))
6964     {
6965       location_t loc
6966 	= expansion_point_location_if_in_system_header (input_location);
6967       warning_at (loc, OPT_Winvalid_memory_model,
6968 		  "invalid memory model for %<__atomic_load%>");
6969       model = MEMMODEL_SEQ_CST;
6970     }
6971 
6972   if (!flag_inline_atomics)
6973     return NULL_RTX;
6974 
6975   /* Expand the operand.  */
6976   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6977 
6978   return expand_atomic_load (target, mem, model);
6979 }
6980 
6981 
6982 /* Expand the __atomic_store intrinsic:
6983    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6984    EXP is the CALL_EXPR.
6985    TARGET is an optional place for us to store the results.  */
6986 
6987 static rtx
6988 expand_builtin_atomic_store (machine_mode mode, tree exp)
6989 {
6990   rtx mem, val;
6991   enum memmodel model;
6992 
6993   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6994   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6995 	|| is_mm_release (model)))
6996     {
6997       location_t loc
6998 	= expansion_point_location_if_in_system_header (input_location);
6999       warning_at (loc, OPT_Winvalid_memory_model,
7000 		  "invalid memory model for %<__atomic_store%>");
7001       model = MEMMODEL_SEQ_CST;
7002     }
7003 
7004   if (!flag_inline_atomics)
7005     return NULL_RTX;
7006 
7007   /* Expand the operands.  */
7008   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7009   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7010 
7011   return expand_atomic_store (mem, val, model, false);
7012 }
7013 
7014 /* Expand the __atomic_fetch_XXX intrinsic:
7015    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7016    EXP is the CALL_EXPR.
7017    TARGET is an optional place for us to store the results.
7018    CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
7019    FETCH_AFTER is true if returning the result of the operation.
7020    FETCH_AFTER is false if returning the value before the operation.
7021    IGNORE is true if the result is not used.
7022    EXT_CALL is the correct builtin for an external call if this cannot be
7023    resolved to an instruction sequence.  */
7024 
7025 static rtx
7026 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7027 				enum rtx_code code, bool fetch_after,
7028 				bool ignore, enum built_in_function ext_call)
7029 {
7030   rtx val, mem, ret;
7031   enum memmodel model;
7032   tree fndecl;
7033   tree addr;
7034 
7035   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7036 
7037   /* Expand the operands.  */
7038   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7039   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7040 
7041   /* Only try generating instructions if inlining is turned on.  */
7042   if (flag_inline_atomics)
7043     {
7044       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7045       if (ret)
7046 	return ret;
7047     }
7048 
7049   /* Return if a different routine isn't needed for the library call.  */
7050   if (ext_call == BUILT_IN_NONE)
7051     return NULL_RTX;
7052 
7053   /* Change the call to the specified function.  */
7054   fndecl = get_callee_fndecl (exp);
7055   addr = CALL_EXPR_FN (exp);
7056   STRIP_NOPS (addr);
7057 
7058   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7059   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7060 
7061   /* If we will emit code after the call, the call cannot be a tail call.
7062      If it is emitted as a tail call, a barrier is emitted after it, and
7063      then all trailing code is removed.  */
7064   if (!ignore)
7065     CALL_EXPR_TAILCALL (exp) = 0;
7066 
7067   /* Expand the call here so we can emit trailing code.  */
7068   ret = expand_call (exp, target, ignore);
7069 
7070   /* Replace the original function just in case it matters.  */
7071   TREE_OPERAND (addr, 0) = fndecl;
7072 
7073   /* Then issue the arithmetic correction to return the right result.  */
7074   if (!ignore)
7075     {
7076       if (code == NOT)
7077 	{
7078 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7079 				     OPTAB_LIB_WIDEN);
7080 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
7081 	}
7082       else
7083 	ret = expand_simple_binop (mode, code, ret, val, target, true,
7084 				   OPTAB_LIB_WIDEN);
7085     }
7086   return ret;
7087 }
7088 
7089 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */
7090 
7091 void
7092 expand_ifn_atomic_bit_test_and (gcall *call)
7093 {
7094   tree ptr = gimple_call_arg (call, 0);
7095   tree bit = gimple_call_arg (call, 1);
7096   tree flag = gimple_call_arg (call, 2);
7097   tree lhs = gimple_call_lhs (call);
7098   enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7099   machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7100   enum rtx_code code;
7101   optab optab;
7102   class expand_operand ops[5];
7103 
7104   gcc_assert (flag_inline_atomics);
7105 
7106   if (gimple_call_num_args (call) == 4)
7107     model = get_memmodel (gimple_call_arg (call, 3));
7108 
7109   rtx mem = get_builtin_sync_mem (ptr, mode);
7110   rtx val = expand_expr_force_mode (bit, mode);
7111 
7112   switch (gimple_call_internal_fn (call))
7113     {
7114     case IFN_ATOMIC_BIT_TEST_AND_SET:
7115       code = IOR;
7116       optab = atomic_bit_test_and_set_optab;
7117       break;
7118     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7119       code = XOR;
7120       optab = atomic_bit_test_and_complement_optab;
7121       break;
7122     case IFN_ATOMIC_BIT_TEST_AND_RESET:
7123       code = AND;
7124       optab = atomic_bit_test_and_reset_optab;
7125       break;
7126     default:
7127       gcc_unreachable ();
7128     }
7129 
7130   if (lhs == NULL_TREE)
7131     {
7132       val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7133 				 val, NULL_RTX, true, OPTAB_DIRECT);
7134       if (code == AND)
7135 	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7136       expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7137       return;
7138     }
7139 
7140   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7141   enum insn_code icode = direct_optab_handler (optab, mode);
7142   gcc_assert (icode != CODE_FOR_nothing);
7143   create_output_operand (&ops[0], target, mode);
7144   create_fixed_operand (&ops[1], mem);
7145   create_convert_operand_to (&ops[2], val, mode, true);
7146   create_integer_operand (&ops[3], model);
7147   create_integer_operand (&ops[4], integer_onep (flag));
7148   if (maybe_expand_insn (icode, 5, ops))
7149     return;
7150 
7151   rtx bitval = val;
7152   val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7153 			     val, NULL_RTX, true, OPTAB_DIRECT);
7154   rtx maskval = val;
7155   if (code == AND)
7156     val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7157   rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7158 				       code, model, false);
7159   if (integer_onep (flag))
7160     {
7161       result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7162 				    NULL_RTX, true, OPTAB_DIRECT);
7163       result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7164 				    true, OPTAB_DIRECT);
7165     }
7166   else
7167     result = expand_simple_binop (mode, AND, result, maskval, target, true,
7168 				  OPTAB_DIRECT);
7169   if (result != target)
7170     emit_move_insn (target, result);
7171 }
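
/* Illustrative only: when no direct optab pattern matches, the fallback
   above computes the equivalent of (for the test-and-set case with a
   nonzero FLAG)

     old = __atomic_fetch_or (ptr, (TYPE) 1 << bit, model);
     lhs = (old >> bit) & 1;

   or masks the fetched value with (1 << bit) directly when FLAG is
   clear.  */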
7172 
7173 /* Expand an atomic clear operation.
7174 	void _atomic_clear (BOOL *obj, enum memmodel)
7175    EXP is the call expression.  */
7176 
7177 static rtx
7178 expand_builtin_atomic_clear (tree exp)
7179 {
7180   machine_mode mode;
7181   rtx mem, ret;
7182   enum memmodel model;
7183 
7184   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7185   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7186   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7187 
7188   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7189     {
7190       location_t loc
7191 	= expansion_point_location_if_in_system_header (input_location);
7192       warning_at (loc, OPT_Winvalid_memory_model,
7193 		  "invalid memory model for %<__atomic_store%>");
7194       model = MEMMODEL_SEQ_CST;
7195     }
7196 
7197   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7198      Failing that, issue a plain store.  The only way this can
7199      fail is if the bool type is larger than a word size.  Unlikely, but
7200      handle it anyway for completeness.  Assume a single threaded model since
7201      there is no atomic support in this case, and no barriers are required.  */
7202   ret = expand_atomic_store (mem, const0_rtx, model, true);
7203   if (!ret)
7204     emit_move_insn (mem, const0_rtx);
7205   return const0_rtx;
7206 }
7207 
7208 /* Expand an atomic test_and_set operation.
7209 	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7210    EXP is the call expression.  */
7211 
7212 static rtx
7213 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7214 {
7215   rtx mem;
7216   enum memmodel model;
7217   machine_mode mode;
7218 
7219   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7220   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7221   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7222 
7223   return expand_atomic_test_and_set (target, mem, model);
7224 }
7225 
7226 
7227 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7228    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
7229 
7230 static tree
7231 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7232 {
7233   int size;
7234   machine_mode mode;
7235   unsigned int mode_align, type_align;
7236 
7237   if (TREE_CODE (arg0) != INTEGER_CST)
7238     return NULL_TREE;
7239 
7240   /* We need a corresponding integer mode for the access to be lock-free.  */
7241   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7242   if (!int_mode_for_size (size, 0).exists (&mode))
7243     return boolean_false_node;
7244 
7245   mode_align = GET_MODE_ALIGNMENT (mode);
7246 
7247   if (TREE_CODE (arg1) == INTEGER_CST)
7248     {
7249       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7250 
7251       /* Either this argument is null, or it's a fake pointer encoding
7252          the alignment of the object.  */
7253       val = least_bit_hwi (val);
7254       val *= BITS_PER_UNIT;
7255 
7256       if (val == 0 || mode_align < val)
7257         type_align = mode_align;
7258       else
7259         type_align = val;
7260     }
7261   else
7262     {
7263       tree ttype = TREE_TYPE (arg1);
7264 
7265       /* This function is usually invoked and folded immediately by the front
7266 	 end before anything else has a chance to look at it.  The pointer
7267 	 parameter at this point is usually cast to a void *, so check for that
7268 	 and look past the cast.  */
7269       if (CONVERT_EXPR_P (arg1)
7270 	  && POINTER_TYPE_P (ttype)
7271 	  && VOID_TYPE_P (TREE_TYPE (ttype))
7272 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7273 	arg1 = TREE_OPERAND (arg1, 0);
7274 
7275       ttype = TREE_TYPE (arg1);
7276       gcc_assert (POINTER_TYPE_P (ttype));
7277 
7278       /* Get the underlying type of the object.  */
7279       ttype = TREE_TYPE (ttype);
7280       type_align = TYPE_ALIGN (ttype);
7281     }
7282 
7283   /* If the object has smaller alignment, the lock free routines cannot
7284      be used.  */
7285   if (type_align < mode_align)
7286     return boolean_false_node;
7287 
7288   /* Check if a compare_and_swap pattern exists for the mode which represents
7289      the required size.  The pattern is not allowed to fail, so the existence
7290      of the pattern indicates support is present.  Also require that an
7291      atomic load exists for the required size.  */
7292   if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7293     return boolean_true_node;
7294   else
7295     return boolean_false_node;
7296 }
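
/* Illustrative only: on a typical 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here: a 32-bit integer mode exists, the null object
   pointer means the mode's own alignment is assumed, and both a
   compare-and-swap pattern and an atomic load exist for that mode.  */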
7297 
7298 /* Return true if the parameters to call EXP represent an object which will
7299    always generate lock free instructions.  The first argument represents the
7300    size of the object, and the second parameter is a pointer to the object
7301    itself.  If NULL is passed for the object, then the result is based on
7302    typical alignment for an object of the specified size.  Otherwise return
7303    false.  */
7304 
7305 static rtx
7306 expand_builtin_atomic_always_lock_free (tree exp)
7307 {
7308   tree size;
7309   tree arg0 = CALL_EXPR_ARG (exp, 0);
7310   tree arg1 = CALL_EXPR_ARG (exp, 1);
7311 
7312   if (TREE_CODE (arg0) != INTEGER_CST)
7313     {
7314       error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7315       return const0_rtx;
7316     }
7317 
7318   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7319   if (size == boolean_true_node)
7320     return const1_rtx;
7321   return const0_rtx;
7322 }
7323 
7324 /* Return one or zero if it can be determined that object ARG1 of size ARG0
7325    is lock free on this architecture.  */
7326 
7327 static tree
7328 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7329 {
7330   if (!flag_inline_atomics)
7331     return NULL_TREE;
7332 
7333   /* If it isn't always lock free, don't generate a result.  */
7334   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7335     return boolean_true_node;
7336 
7337   return NULL_TREE;
7338 }
7339 
7340 /* Return true if the parameters to call EXP represent an object which will
7341    always generate lock free instructions.  The first argument represents the
7342    size of the object, and the second parameter is a pointer to the object
7343    itself.  If NULL is passed for the object, then the result is based on
7344    typical alignment for an object of the specified size.  Otherwise return
7345    NULL.  */
7346 
7347 static rtx
7348 expand_builtin_atomic_is_lock_free (tree exp)
7349 {
7350   tree size;
7351   tree arg0 = CALL_EXPR_ARG (exp, 0);
7352   tree arg1 = CALL_EXPR_ARG (exp, 1);
7353 
7354   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7355     {
7356       error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7357       return NULL_RTX;
7358     }
7359 
7360   if (!flag_inline_atomics)
7361     return NULL_RTX;
7362 
7363   /* If the value is known at compile time, return the RTX for it.  */
7364   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7365   if (size == boolean_true_node)
7366     return const1_rtx;
7367 
7368   return NULL_RTX;
7369 }
7370 
7371 /* Expand the __atomic_thread_fence intrinsic:
7372    	void __atomic_thread_fence (enum memmodel)
7373    EXP is the CALL_EXPR.  */
7374 
7375 static void
7376 expand_builtin_atomic_thread_fence (tree exp)
7377 {
7378   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7379   expand_mem_thread_fence (model);
7380 }
7381 
7382 /* Expand the __atomic_signal_fence intrinsic:
7383    	void __atomic_signal_fence (enum memmodel)
7384    EXP is the CALL_EXPR.  */
7385 
7386 static void
7387 expand_builtin_atomic_signal_fence (tree exp)
7388 {
7389   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7390   expand_mem_signal_fence (model);
7391 }
7392 
7393 /* Expand the __sync_synchronize intrinsic.  */
7394 
7395 static void
7396 expand_builtin_sync_synchronize (void)
7397 {
7398   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7399 }
7400 
7401 static rtx
7402 expand_builtin_thread_pointer (tree exp, rtx target)
7403 {
7404   enum insn_code icode;
7405   if (!validate_arglist (exp, VOID_TYPE))
7406     return const0_rtx;
7407   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7408   if (icode != CODE_FOR_nothing)
7409     {
7410       class expand_operand op;
7411       /* If the target is not suitable then create a new target.  */
7412       if (target == NULL_RTX
7413 	  || !REG_P (target)
7414 	  || GET_MODE (target) != Pmode)
7415 	target = gen_reg_rtx (Pmode);
7416       create_output_operand (&op, target, Pmode);
7417       expand_insn (icode, 1, &op);
7418       return target;
7419     }
7420   error ("%<__builtin_thread_pointer%> is not supported on this target");
7421   return const0_rtx;
7422 }
7423 
7424 static void
7425 expand_builtin_set_thread_pointer (tree exp)
7426 {
7427   enum insn_code icode;
7428   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7429     return;
7430   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7431   if (icode != CODE_FOR_nothing)
7432     {
7433       class expand_operand op;
7434       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7435 			     Pmode, EXPAND_NORMAL);
7436       create_input_operand (&op, val, Pmode);
7437       expand_insn (icode, 1, &op);
7438       return;
7439     }
7440   error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7441 }
7442 
7443 
7444 /* Emit code to restore the current value of stack.  */
7445 
7446 static void
7447 expand_stack_restore (tree var)
7448 {
7449   rtx_insn *prev;
7450   rtx sa = expand_normal (var);
7451 
7452   sa = convert_memory_address (Pmode, sa);
7453 
7454   prev = get_last_insn ();
7455   emit_stack_restore (SAVE_BLOCK, sa);
7456 
7457   record_new_stack_level ();
7458 
7459   fixup_args_size_notes (prev, get_last_insn (), 0);
7460 }
7461 
7462 /* Emit code to save the current value of stack.  */
7463 
7464 static rtx
7465 expand_stack_save (void)
7466 {
7467   rtx ret = NULL_RTX;
7468 
7469   emit_stack_save (SAVE_BLOCK, &ret);
7470   return ret;
7471 }
7472 
7473 /* Emit code to get the openacc gang, worker or vector id or size.  */
7474 
7475 static rtx
7476 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7477 {
7478   const char *name;
7479   rtx fallback_retval;
7480   rtx_insn *(*gen_fn) (rtx, rtx);
7481   switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7482     {
7483     case BUILT_IN_GOACC_PARLEVEL_ID:
7484       name = "__builtin_goacc_parlevel_id";
7485       fallback_retval = const0_rtx;
7486       gen_fn = targetm.gen_oacc_dim_pos;
7487       break;
7488     case BUILT_IN_GOACC_PARLEVEL_SIZE:
7489       name = "__builtin_goacc_parlevel_size";
7490       fallback_retval = const1_rtx;
7491       gen_fn = targetm.gen_oacc_dim_size;
7492       break;
7493     default:
7494       gcc_unreachable ();
7495     }
7496 
7497   if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7498     {
7499       error ("%qs only supported in OpenACC code", name);
7500       return const0_rtx;
7501     }
7502 
7503   tree arg = CALL_EXPR_ARG (exp, 0);
7504   if (TREE_CODE (arg) != INTEGER_CST)
7505     {
7506       error ("non-constant argument 0 to %qs", name);
7507       return const0_rtx;
7508     }
7509 
7510   int dim = TREE_INT_CST_LOW (arg);
7511   switch (dim)
7512     {
7513     case GOMP_DIM_GANG:
7514     case GOMP_DIM_WORKER:
7515     case GOMP_DIM_VECTOR:
7516       break;
7517     default:
7518       error ("illegal argument 0 to %qs", name);
7519       return const0_rtx;
7520     }
7521 
7522   if (ignore)
7523     return target;
7524 
7525   if (target == NULL_RTX)
7526     target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7527 
7528   if (!targetm.have_oacc_dim_size ())
7529     {
7530       emit_move_insn (target, fallback_retval);
7531       return target;
7532     }
7533 
7534   rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7535   emit_insn (gen_fn (reg, GEN_INT (dim)));
7536   if (reg != target)
7537     emit_move_insn (target, reg);
7538 
7539   return target;
7540 }
7541 
7542 /* Expand a string compare operation using a sequence of char comparisons
7543    to avoid the calling overhead, with the result going to TARGET if
7544    that's convenient.
7545 
7546    VAR_STR is the variable string source;
7547    CONST_STR is the constant string source;
7548    LENGTH is the number of chars to compare;
7549    CONST_STR_N indicates which source string is the constant string;
7550    IS_MEMCMP indicates whether it's a memcmp or strcmp.
7551 
7552    to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7553 
7554    target = (int) (unsigned char) var_str[0]
7555 	    - (int) (unsigned char) const_str[0];
7556    if (target != 0)
7557      goto ne_label;
7558      ...
7559    target = (int) (unsigned char) var_str[length - 2]
7560 	    - (int) (unsigned char) const_str[length - 2];
7561    if (target != 0)
7562      goto ne_label;
7563    target = (int) (unsigned char) var_str[length - 1]
7564 	    - (int) (unsigned char) const_str[length - 1];
7565    ne_label:
7566   */
7567 
7568 static rtx
7569 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7570 		   unsigned HOST_WIDE_INT length,
7571 		   int const_str_n, machine_mode mode)
7572 {
7573   HOST_WIDE_INT offset = 0;
7574   rtx var_rtx_array
7575     = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7576   rtx var_rtx = NULL_RTX;
7577   rtx const_rtx = NULL_RTX;
7578   rtx result = target ? target : gen_reg_rtx (mode);
7579   rtx_code_label *ne_label = gen_label_rtx ();
7580   tree unit_type_node = unsigned_char_type_node;
7581   scalar_int_mode unit_mode
7582     = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7583 
7584   start_sequence ();
7585 
7586   for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7587     {
7588       var_rtx
7589 	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7590       const_rtx = c_readstr (const_str + offset, unit_mode);
7591       rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7592       rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7593 
7594       op0 = convert_modes (mode, unit_mode, op0, 1);
7595       op1 = convert_modes (mode, unit_mode, op1, 1);
7596       result = expand_simple_binop (mode, MINUS, op0, op1,
7597 				    result, 1, OPTAB_WIDEN);
7598       if (i < length - 1)
7599 	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7600 	    			 mode, true, ne_label);
7601       offset += GET_MODE_SIZE (unit_mode);
7602     }
7603 
7604   emit_label (ne_label);
7605   rtx_insn *insns = get_insns ();
7606   end_sequence ();
7607   emit_insn (insns);
7608 
7609   return result;
7610 }
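/* A concrete instance of the expansion above (hypothetical source, assuming
   const_str_n == 2 and length == 2, e.g. for memcmp (p, "ab", 2)):

     result = (int) (unsigned char) p[0] - (int) (unsigned char) 'a';
     if (result != 0)
       goto ne_label;
     result = (int) (unsigned char) p[1] - (int) (unsigned char) 'b';
   ne_label:

   with RESULT left holding the difference of the first unequal bytes.  */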
7611 
7612 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
7613    to TARGET if that's convenient.
7614    If the call is not inlined, return NULL_RTX.  */
7615 
7616 static rtx
7617 inline_expand_builtin_bytecmp (tree exp, rtx target)
7618 {
7619   tree fndecl = get_callee_fndecl (exp);
7620   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7621   bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7622 
7623   /* Do NOT apply this inline expansion when optimizing for size or
7624      when the optimization level is below 2.  */
7625   if (optimize < 2 || optimize_insn_for_size_p ())
7626     return NULL_RTX;
7627 
7628   gcc_checking_assert (fcode == BUILT_IN_STRCMP
7629 		       || fcode == BUILT_IN_STRNCMP
7630 		       || fcode == BUILT_IN_MEMCMP);
7631 
7632   /* On a target where the type of the call (int) has the same or narrower
7633      precision than unsigned char, give up on the inline expansion.  */
7634   if (TYPE_PRECISION (unsigned_char_type_node)
7635       >= TYPE_PRECISION (TREE_TYPE (exp)))
7636     return NULL_RTX;
7637 
7638   tree arg1 = CALL_EXPR_ARG (exp, 0);
7639   tree arg2 = CALL_EXPR_ARG (exp, 1);
7640   tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7641 
7642   unsigned HOST_WIDE_INT len1 = 0;
7643   unsigned HOST_WIDE_INT len2 = 0;
7644   unsigned HOST_WIDE_INT len3 = 0;
7645 
7646   /* Get the object representation of the initializers of ARG1 and ARG2
7647      as strings, provided they refer to constant objects, with their byte
7648      sizes in LEN1 and LEN2, respectively.  */
7649   const char *bytes1 = c_getstr (arg1, &len1);
7650   const char *bytes2 = c_getstr (arg2, &len2);
7651 
7652   /* Fail if neither argument refers to an initialized constant.  */
7653   if (!bytes1 && !bytes2)
7654     return NULL_RTX;
7655 
7656   if (is_ncmp)
7657     {
7658       /* Fail if the memcmp/strncmp bound is not a constant.  */
7659       if (!tree_fits_uhwi_p (len3_tree))
7660 	return NULL_RTX;
7661 
7662       len3 = tree_to_uhwi (len3_tree);
7663 
7664       if (fcode == BUILT_IN_MEMCMP)
7665 	{
7666 	  /* Fail if the memcmp bound is greater than the size of either
7667 	     of the two constant objects.  */
7668 	  if ((bytes1 && len1 < len3)
7669 	      || (bytes2 && len2 < len3))
7670 	    return NULL_RTX;
7671 	}
7672     }
7673 
7674   if (fcode != BUILT_IN_MEMCMP)
7675     {
7676       /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7677 	 and LEN2 to the length of the nul-terminated string stored
7678 	 in each.  */
7679       if (bytes1 != NULL)
7680 	len1 = strnlen (bytes1, len1) + 1;
7681       if (bytes2 != NULL)
7682 	len2 = strnlen (bytes2, len2) + 1;
7683     }
7684 
7685   /* See inline_string_cmp.  */
7686   int const_str_n;
7687   if (!len1)
7688     const_str_n = 2;
7689   else if (!len2)
7690     const_str_n = 1;
7691   else if (len2 > len1)
7692     const_str_n = 1;
7693   else
7694     const_str_n = 2;
7695 
7696   /* For strncmp only, compute the new bound as the smallest of
7697      the lengths of the two strings (plus 1) and the bound provided
7698      to the function.  */
7699   unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7700   if (is_ncmp && len3 < bound)
7701     bound = len3;
7702 
7703   /* If the bound of the comparison is larger than the threshold,
7704      do nothing.  */
7705   if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7706     return NULL_RTX;
7707 
7708   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7709 
7710   /* Now expand the call inline.  */
7711   return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7712 			    (const_str_n == 1) ? bytes1 : bytes2, bound,
7713 			    const_str_n, mode);
7714 }
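/* Worked example (hypothetical): for strncmp (s, "abc", 5) with S
   non-constant, bytes2 is "abc" and len2 becomes 4 (nul included),
   len3 is 5, so const_str_n == 2 and bound = MIN (len2, len3) = 4;
   the inline expansion is attempted iff bound does not exceed
   param_builtin_string_cmp_inline_length and we are at -O2 or above
   and not optimizing for size.  */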
7715 
7716 /* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
7717    represents the size of the first argument to that call, or VOIDmode
7718    if the argument is a pointer.  IGNORE will be true if the result
7719    isn't used.  */
7720 static rtx
7721 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7722 			       bool ignore)
7723 {
7724   rtx val, failsafe;
7725   unsigned nargs = call_expr_nargs (exp);
7726 
7727   tree arg0 = CALL_EXPR_ARG (exp, 0);
7728 
7729   if (mode == VOIDmode)
7730     {
7731       mode = TYPE_MODE (TREE_TYPE (arg0));
7732       gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7733     }
7734 
7735   val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7736 
7737   /* An optional second argument can be used as a failsafe value on
7738      some machines.  If it isn't present, then the failsafe value is
7739      assumed to be 0.  */
7740   if (nargs > 1)
7741     {
7742       tree arg1 = CALL_EXPR_ARG (exp, 1);
7743       failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7744     }
7745   else
7746     failsafe = const0_rtx;
7747 
7748   /* If the result isn't used, the behavior is undefined.  It would be
7749      nice to emit a warning here, but path splitting means this might
7750      happen with legitimate code.  So simply drop the builtin
7751      expansion in that case; we've handled any side-effects above.  */
7752   if (ignore)
7753     return const0_rtx;
7754 
7755   /* If we don't have a suitable target, create one to hold the result.  */
7756   if (target == NULL || GET_MODE (target) != mode)
7757     target = gen_reg_rtx (mode);
7758 
7759   if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7760     val = convert_modes (mode, VOIDmode, val, false);
7761 
7762   return targetm.speculation_safe_value (mode, target, val, failsafe);
7763 }
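/* Typical use (hypothetical user code): after a bounds check that might be
   bypassed under speculative execution,

     if (i < len)
       x = __builtin_speculation_safe_value (array[i], 0);

   the target hook arranges for X to be forced to the failsafe value (the
   optional second argument, 0 by default) on mis-speculated executions,
   where the target supports that.  */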
7764 
7765 /* Expand an expression EXP that calls a built-in function,
7766    with result going to TARGET if that's convenient
7767    (and in mode MODE if that's convenient).
7768    SUBTARGET may be used as the target for computing one of EXP's operands.
7769    IGNORE is nonzero if the value is to be ignored.  */
7770 
7771 rtx
7772 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7773 		int ignore)
7774 {
7775   tree fndecl = get_callee_fndecl (exp);
7776   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7777   int flags;
7778 
7779   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7780     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7781 
7782   /* When ASan is enabled, we don't want to expand some memory/string
7783      builtins; we rely on libsanitizer's hooks instead.  This allows us to
7784      avoid redundant checks and be sure that a possible overflow will be
7785      detected by ASan.  */
7786 
7787   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7788   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7789     return expand_call (exp, target, ignore);
7790 
7791   /* When not optimizing, generate calls to library functions for a certain
7792      set of builtins.  */
7793   if (!optimize
7794       && !called_as_built_in (fndecl)
7795       && fcode != BUILT_IN_FORK
7796       && fcode != BUILT_IN_EXECL
7797       && fcode != BUILT_IN_EXECV
7798       && fcode != BUILT_IN_EXECLP
7799       && fcode != BUILT_IN_EXECLE
7800       && fcode != BUILT_IN_EXECVP
7801       && fcode != BUILT_IN_EXECVE
7802       && !ALLOCA_FUNCTION_CODE_P (fcode)
7803       && fcode != BUILT_IN_FREE)
7804     return expand_call (exp, target, ignore);
7805 
7806   /* The built-in function expanders test for target == const0_rtx
7807      to determine whether the function's result will be ignored.  */
7808   if (ignore)
7809     target = const0_rtx;
7810 
7811   /* If the result of a pure or const built-in function is ignored, and
7812      none of its arguments are volatile, we can avoid expanding the
7813      built-in call and just evaluate the arguments for side-effects.  */
7814   if (target == const0_rtx
7815       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7816       && !(flags & ECF_LOOPING_CONST_OR_PURE))
7817     {
7818       bool volatilep = false;
7819       tree arg;
7820       call_expr_arg_iterator iter;
7821 
7822       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7823 	if (TREE_THIS_VOLATILE (arg))
7824 	  {
7825 	    volatilep = true;
7826 	    break;
7827 	  }
7828 
7829       if (! volatilep)
7830 	{
7831 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7832 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7833 	  return const0_rtx;
7834 	}
7835     }
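  /* E.g. (hypothetical) for a statement "(void) __builtin_labs (f ());"
     it suffices to expand F () for its side effects; no code for labs
     itself is emitted.  */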
7836 
7837   switch (fcode)
7838     {
7839     CASE_FLT_FN (BUILT_IN_FABS):
7840     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7841     case BUILT_IN_FABSD32:
7842     case BUILT_IN_FABSD64:
7843     case BUILT_IN_FABSD128:
7844       target = expand_builtin_fabs (exp, target, subtarget);
7845       if (target)
7846 	return target;
7847       break;
7848 
7849     CASE_FLT_FN (BUILT_IN_COPYSIGN):
7850     CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7851       target = expand_builtin_copysign (exp, target, subtarget);
7852       if (target)
7853 	return target;
7854       break;
7855 
7856       /* Just do a normal library call if we were unable to fold
7857 	 the values.  */
7858     CASE_FLT_FN (BUILT_IN_CABS):
7859       break;
7860 
7861     CASE_FLT_FN (BUILT_IN_FMA):
7862     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7863       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7864       if (target)
7865 	return target;
7866       break;
7867 
7868     CASE_FLT_FN (BUILT_IN_ILOGB):
7869       if (! flag_unsafe_math_optimizations)
7870 	break;
7871       gcc_fallthrough ();
7872     CASE_FLT_FN (BUILT_IN_ISINF):
7873     CASE_FLT_FN (BUILT_IN_FINITE):
7874     case BUILT_IN_ISFINITE:
7875     case BUILT_IN_ISNORMAL:
7876       target = expand_builtin_interclass_mathfn (exp, target);
7877       if (target)
7878 	return target;
7879       break;
7880 
7881     CASE_FLT_FN (BUILT_IN_ICEIL):
7882     CASE_FLT_FN (BUILT_IN_LCEIL):
7883     CASE_FLT_FN (BUILT_IN_LLCEIL):
7884     CASE_FLT_FN (BUILT_IN_LFLOOR):
7885     CASE_FLT_FN (BUILT_IN_IFLOOR):
7886     CASE_FLT_FN (BUILT_IN_LLFLOOR):
7887       target = expand_builtin_int_roundingfn (exp, target);
7888       if (target)
7889 	return target;
7890       break;
7891 
7892     CASE_FLT_FN (BUILT_IN_IRINT):
7893     CASE_FLT_FN (BUILT_IN_LRINT):
7894     CASE_FLT_FN (BUILT_IN_LLRINT):
7895     CASE_FLT_FN (BUILT_IN_IROUND):
7896     CASE_FLT_FN (BUILT_IN_LROUND):
7897     CASE_FLT_FN (BUILT_IN_LLROUND):
7898       target = expand_builtin_int_roundingfn_2 (exp, target);
7899       if (target)
7900 	return target;
7901       break;
7902 
7903     CASE_FLT_FN (BUILT_IN_POWI):
7904       target = expand_builtin_powi (exp, target);
7905       if (target)
7906 	return target;
7907       break;
7908 
7909     CASE_FLT_FN (BUILT_IN_CEXPI):
7910       target = expand_builtin_cexpi (exp, target);
7911       gcc_assert (target);
7912       return target;
7913 
7914     CASE_FLT_FN (BUILT_IN_SIN):
7915     CASE_FLT_FN (BUILT_IN_COS):
7916       if (! flag_unsafe_math_optimizations)
7917 	break;
7918       target = expand_builtin_mathfn_3 (exp, target, subtarget);
7919       if (target)
7920 	return target;
7921       break;
7922 
7923     CASE_FLT_FN (BUILT_IN_SINCOS):
7924       if (! flag_unsafe_math_optimizations)
7925 	break;
7926       target = expand_builtin_sincos (exp);
7927       if (target)
7928 	return target;
7929       break;
7930 
7931     case BUILT_IN_APPLY_ARGS:
7932       return expand_builtin_apply_args ();
7933 
7934       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7935 	 FUNCTION with a copy of the parameters described by
7936 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
7937 	 allocated on the stack into which is stored all the registers
7938 	 that might possibly be used for returning the result of a
7939 	 function.  ARGUMENTS is the value returned by
7940 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
7941 	 arguments that must be copied.  ??? How should this value be
7942 	 computed?  We'll also need a safe worst case value for varargs
7943 	 functions.  */
7944     case BUILT_IN_APPLY:
7945       if (!validate_arglist (exp, POINTER_TYPE,
7946 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7947 	  && !validate_arglist (exp, REFERENCE_TYPE,
7948 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7949 	return const0_rtx;
7950       else
7951 	{
7952 	  rtx ops[3];
7953 
7954 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7955 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7956 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7957 
7958 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
7959 	}
7960 
7961       /* __builtin_return (RESULT) causes the function to return the
7962 	 value described by RESULT.  RESULT is address of the block of
7963 	 memory returned by __builtin_apply.  */
7964     case BUILT_IN_RETURN:
7965       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7966 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7967       return const0_rtx;
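      /* Illustrative use of the trio above (hypothetical user code),
         forwarding the current function's arguments to another function
         with a compatible signature and returning its result:

           void *args = __builtin_apply_args ();
           void *res = __builtin_apply ((void (*) ()) other_fn, args, 64);
           __builtin_return (res);

         where 64 is a caller-chosen worst-case argument-block size.  */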
7968 
7969     case BUILT_IN_SAVEREGS:
7970       return expand_builtin_saveregs ();
7971 
7972     case BUILT_IN_VA_ARG_PACK:
7973       /* All valid uses of __builtin_va_arg_pack () are removed during
7974 	 inlining.  */
7975       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7976       return const0_rtx;
7977 
7978     case BUILT_IN_VA_ARG_PACK_LEN:
7979       /* All valid uses of __builtin_va_arg_pack_len () are removed during
7980 	 inlining.  */
7981       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7982       return const0_rtx;
7983 
7984       /* Return the address of the first anonymous stack arg.  */
7985     case BUILT_IN_NEXT_ARG:
7986       if (fold_builtin_next_arg (exp, false))
7987 	return const0_rtx;
7988       return expand_builtin_next_arg ();
7989 
7990     case BUILT_IN_CLEAR_CACHE:
7991       target = expand_builtin___clear_cache (exp);
7992       if (target)
7993         return target;
7994       break;
7995 
7996     case BUILT_IN_CLASSIFY_TYPE:
7997       return expand_builtin_classify_type (exp);
7998 
7999     case BUILT_IN_CONSTANT_P:
8000       return const0_rtx;
8001 
8002     case BUILT_IN_FRAME_ADDRESS:
8003     case BUILT_IN_RETURN_ADDRESS:
8004       return expand_builtin_frame_address (fndecl, exp);
8005 
8006     /* Returns the address of the area where the structure is returned.
8007        0 otherwise.  */
8008     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8009       if (call_expr_nargs (exp) != 0
8010 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8011 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8012 	return const0_rtx;
8013       else
8014 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8015 
8016     CASE_BUILT_IN_ALLOCA:
8017       target = expand_builtin_alloca (exp);
8018       if (target)
8019 	return target;
8020       break;
8021 
8022     case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8023       return expand_asan_emit_allocas_unpoison (exp);
8024 
8025     case BUILT_IN_STACK_SAVE:
8026       return expand_stack_save ();
8027 
8028     case BUILT_IN_STACK_RESTORE:
8029       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8030       return const0_rtx;
8031 
8032     case BUILT_IN_BSWAP16:
8033     case BUILT_IN_BSWAP32:
8034     case BUILT_IN_BSWAP64:
8035       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8036       if (target)
8037 	return target;
8038       break;
8039 
8040     CASE_INT_FN (BUILT_IN_FFS):
8041       target = expand_builtin_unop (target_mode, exp, target,
8042 				    subtarget, ffs_optab);
8043       if (target)
8044 	return target;
8045       break;
8046 
8047     CASE_INT_FN (BUILT_IN_CLZ):
8048       target = expand_builtin_unop (target_mode, exp, target,
8049 				    subtarget, clz_optab);
8050       if (target)
8051 	return target;
8052       break;
8053 
8054     CASE_INT_FN (BUILT_IN_CTZ):
8055       target = expand_builtin_unop (target_mode, exp, target,
8056 				    subtarget, ctz_optab);
8057       if (target)
8058 	return target;
8059       break;
8060 
8061     CASE_INT_FN (BUILT_IN_CLRSB):
8062       target = expand_builtin_unop (target_mode, exp, target,
8063 				    subtarget, clrsb_optab);
8064       if (target)
8065 	return target;
8066       break;
8067 
8068     CASE_INT_FN (BUILT_IN_POPCOUNT):
8069       target = expand_builtin_unop (target_mode, exp, target,
8070 				    subtarget, popcount_optab);
8071       if (target)
8072 	return target;
8073       break;
8074 
8075     CASE_INT_FN (BUILT_IN_PARITY):
8076       target = expand_builtin_unop (target_mode, exp, target,
8077 				    subtarget, parity_optab);
8078       if (target)
8079 	return target;
8080       break;
8081 
8082     case BUILT_IN_STRLEN:
8083       target = expand_builtin_strlen (exp, target, target_mode);
8084       if (target)
8085 	return target;
8086       break;
8087 
8088     case BUILT_IN_STRNLEN:
8089       target = expand_builtin_strnlen (exp, target, target_mode);
8090       if (target)
8091 	return target;
8092       break;
8093 
8094     case BUILT_IN_STRCAT:
8095       target = expand_builtin_strcat (exp);
8096       if (target)
8097 	return target;
8098       break;
8099 
8100     case BUILT_IN_GETTEXT:
8101     case BUILT_IN_PUTS:
8102     case BUILT_IN_PUTS_UNLOCKED:
8103     case BUILT_IN_STRDUP:
8104       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8105 	check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8106       break;
8107 
8108     case BUILT_IN_INDEX:
8109     case BUILT_IN_RINDEX:
8110     case BUILT_IN_STRCHR:
8111     case BUILT_IN_STRRCHR:
8112       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8113 	check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8114       break;
8115 
8116     case BUILT_IN_FPUTS:
8117     case BUILT_IN_FPUTS_UNLOCKED:
8118       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8119 	check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8120       break;
8121 
8122     case BUILT_IN_STRNDUP:
8123       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8124 	check_nul_terminated_array (exp,
8125 				    CALL_EXPR_ARG (exp, 0),
8126 				    CALL_EXPR_ARG (exp, 1));
8127       break;
8128 
8129     case BUILT_IN_STRCASECMP:
8130     case BUILT_IN_STRSTR:
8131       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8132 	{
8133 	  check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8134 	  check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8135 	}
8136       break;
8137 
8138     case BUILT_IN_STRCPY:
8139       target = expand_builtin_strcpy (exp, target);
8140       if (target)
8141 	return target;
8142       break;
8143 
8144     case BUILT_IN_STRNCAT:
8145       target = expand_builtin_strncat (exp, target);
8146       if (target)
8147 	return target;
8148       break;
8149 
8150     case BUILT_IN_STRNCPY:
8151       target = expand_builtin_strncpy (exp, target);
8152       if (target)
8153 	return target;
8154       break;
8155 
8156     case BUILT_IN_STPCPY:
8157       target = expand_builtin_stpcpy (exp, target, mode);
8158       if (target)
8159 	return target;
8160       break;
8161 
8162     case BUILT_IN_STPNCPY:
8163       target = expand_builtin_stpncpy (exp, target);
8164       if (target)
8165 	return target;
8166       break;
8167 
8168     case BUILT_IN_MEMCHR:
8169       target = expand_builtin_memchr (exp, target);
8170       if (target)
8171 	return target;
8172       break;
8173 
8174     case BUILT_IN_MEMCPY:
8175       target = expand_builtin_memcpy (exp, target);
8176       if (target)
8177 	return target;
8178       break;
8179 
8180     case BUILT_IN_MEMMOVE:
8181       target = expand_builtin_memmove (exp, target);
8182       if (target)
8183 	return target;
8184       break;
8185 
8186     case BUILT_IN_MEMPCPY:
8187       target = expand_builtin_mempcpy (exp, target);
8188       if (target)
8189 	return target;
8190       break;
8191 
8192     case BUILT_IN_MEMSET:
8193       target = expand_builtin_memset (exp, target, mode);
8194       if (target)
8195 	return target;
8196       break;
8197 
8198     case BUILT_IN_BZERO:
8199       target = expand_builtin_bzero (exp);
8200       if (target)
8201 	return target;
8202       break;
8203 
8204     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8205        back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
8206        when changing it to a strcmp call.  */
8207     case BUILT_IN_STRCMP_EQ:
8208       target = expand_builtin_memcmp (exp, target, true);
8209       if (target)
8210 	return target;
8211 
8212       /* Change this call back to a BUILT_IN_STRCMP.  */
8213       TREE_OPERAND (exp, 1)
8214 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8215 
8216       /* Delete the last parameter.  */
8217       unsigned int i;
8218       vec<tree, va_gc> *arg_vec;
8219       vec_alloc (arg_vec, 2);
8220       for (i = 0; i < 2; i++)
8221 	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8222       exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8223       /* FALLTHROUGH */
8224 
8225     case BUILT_IN_STRCMP:
8226       target = expand_builtin_strcmp (exp, target);
8227       if (target)
8228 	return target;
8229       break;
8230 
8231     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8232        back to a BUILT_IN_STRNCMP.  */
8233     case BUILT_IN_STRNCMP_EQ:
8234       target = expand_builtin_memcmp (exp, target, true);
8235       if (target)
8236 	return target;
8237 
8238       /* Change it back to a BUILT_IN_STRNCMP.  */
8239       TREE_OPERAND (exp, 1)
8240 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8241       /* FALLTHROUGH */
8242 
8243     case BUILT_IN_STRNCMP:
8244       target = expand_builtin_strncmp (exp, target, mode);
8245       if (target)
8246 	return target;
8247       break;
8248 
8249     case BUILT_IN_BCMP:
8250     case BUILT_IN_MEMCMP:
8251     case BUILT_IN_MEMCMP_EQ:
8252       target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8253       if (target)
8254 	return target;
8255       if (fcode == BUILT_IN_MEMCMP_EQ)
8256 	{
8257 	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8258 	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8259 	}
8260       break;
8261 
8262     case BUILT_IN_SETJMP:
8263       /* This should have been lowered to the builtins below.  */
8264       gcc_unreachable ();
8265 
8266     case BUILT_IN_SETJMP_SETUP:
8267       /* __builtin_setjmp_setup is passed a pointer to an array of five words
8268           and the receiver label.  */
8269       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8270 	{
8271 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8272 				      VOIDmode, EXPAND_NORMAL);
8273 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8274 	  rtx_insn *label_r = label_rtx (label);
8275 
8276 	  /* This is copied from the handling of non-local gotos.  */
8277 	  expand_builtin_setjmp_setup (buf_addr, label_r);
8278 	  nonlocal_goto_handler_labels
8279 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
8280 				 nonlocal_goto_handler_labels);
8281 	  /* ??? Do not let expand_label treat us as such since we would
8282 	     not want to be both on the list of non-local labels and on
8283 	     the list of forced labels.  */
8284 	  FORCED_LABEL (label) = 0;
8285 	  return const0_rtx;
8286 	}
8287       break;
8288 
8289     case BUILT_IN_SETJMP_RECEIVER:
8290        /* __builtin_setjmp_receiver is passed the receiver label.  */
8291       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8292 	{
8293 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8294 	  rtx_insn *label_r = label_rtx (label);
8295 
8296 	  expand_builtin_setjmp_receiver (label_r);
8297 	  return const0_rtx;
8298 	}
8299       break;
8300 
8301       /* __builtin_longjmp is passed a pointer to an array of five words.
8302 	 It's similar to the C library longjmp function but works with
8303 	 __builtin_setjmp above.  */
8304     case BUILT_IN_LONGJMP:
8305       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8306 	{
8307 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8308 				      VOIDmode, EXPAND_NORMAL);
8309 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8310 
8311 	  if (value != const1_rtx)
8312 	    {
8313 	      error ("%<__builtin_longjmp%> second argument must be 1");
8314 	      return const0_rtx;
8315 	    }
8316 
8317 	  expand_builtin_longjmp (buf_addr, value);
8318 	  return const0_rtx;
8319 	}
8320       break;
8321 
8322     case BUILT_IN_NONLOCAL_GOTO:
8323       target = expand_builtin_nonlocal_goto (exp);
8324       if (target)
8325 	return target;
8326       break;
8327 
8328       /* This updates the setjmp buffer that is its argument with the value
8329 	 of the current stack pointer.  */
8330     case BUILT_IN_UPDATE_SETJMP_BUF:
8331       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8332 	{
8333 	  rtx buf_addr
8334 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
8335 
8336 	  expand_builtin_update_setjmp_buf (buf_addr);
8337 	  return const0_rtx;
8338 	}
8339       break;
8340 
8341     case BUILT_IN_TRAP:
8342       expand_builtin_trap ();
8343       return const0_rtx;
8344 
8345     case BUILT_IN_UNREACHABLE:
8346       expand_builtin_unreachable ();
8347       return const0_rtx;
8348 
8349     CASE_FLT_FN (BUILT_IN_SIGNBIT):
8350     case BUILT_IN_SIGNBITD32:
8351     case BUILT_IN_SIGNBITD64:
8352     case BUILT_IN_SIGNBITD128:
8353       target = expand_builtin_signbit (exp, target);
8354       if (target)
8355 	return target;
8356       break;
8357 
8358       /* Various hooks for the DWARF 2 __throw routine.  */
8359     case BUILT_IN_UNWIND_INIT:
8360       expand_builtin_unwind_init ();
8361       return const0_rtx;
8362     case BUILT_IN_DWARF_CFA:
8363       return virtual_cfa_rtx;
8364 #ifdef DWARF2_UNWIND_INFO
8365     case BUILT_IN_DWARF_SP_COLUMN:
8366       return expand_builtin_dwarf_sp_column ();
8367     case BUILT_IN_INIT_DWARF_REG_SIZES:
8368       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8369       return const0_rtx;
8370 #endif
8371     case BUILT_IN_FROB_RETURN_ADDR:
8372       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8373     case BUILT_IN_EXTRACT_RETURN_ADDR:
8374       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8375     case BUILT_IN_EH_RETURN:
8376       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8377 				CALL_EXPR_ARG (exp, 1));
8378       return const0_rtx;
8379     case BUILT_IN_EH_RETURN_DATA_REGNO:
8380       return expand_builtin_eh_return_data_regno (exp);
8381     case BUILT_IN_EXTEND_POINTER:
8382       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8383     case BUILT_IN_EH_POINTER:
8384       return expand_builtin_eh_pointer (exp);
8385     case BUILT_IN_EH_FILTER:
8386       return expand_builtin_eh_filter (exp);
8387     case BUILT_IN_EH_COPY_VALUES:
8388       return expand_builtin_eh_copy_values (exp);
8389 
8390     case BUILT_IN_VA_START:
8391       return expand_builtin_va_start (exp);
8392     case BUILT_IN_VA_END:
8393       return expand_builtin_va_end (exp);
8394     case BUILT_IN_VA_COPY:
8395       return expand_builtin_va_copy (exp);
8396     case BUILT_IN_EXPECT:
8397       return expand_builtin_expect (exp, target);
8398     case BUILT_IN_EXPECT_WITH_PROBABILITY:
8399       return expand_builtin_expect_with_probability (exp, target);
8400     case BUILT_IN_ASSUME_ALIGNED:
8401       return expand_builtin_assume_aligned (exp, target);
8402     case BUILT_IN_PREFETCH:
8403       expand_builtin_prefetch (exp);
8404       return const0_rtx;
8405 
8406     case BUILT_IN_INIT_TRAMPOLINE:
8407       return expand_builtin_init_trampoline (exp, true);
8408     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8409       return expand_builtin_init_trampoline (exp, false);
8410     case BUILT_IN_ADJUST_TRAMPOLINE:
8411       return expand_builtin_adjust_trampoline (exp);
8412 
8413     case BUILT_IN_INIT_DESCRIPTOR:
8414       return expand_builtin_init_descriptor (exp);
8415     case BUILT_IN_ADJUST_DESCRIPTOR:
8416       return expand_builtin_adjust_descriptor (exp);
8417 
8418     case BUILT_IN_FORK:
8419     case BUILT_IN_EXECL:
8420     case BUILT_IN_EXECV:
8421     case BUILT_IN_EXECLP:
8422     case BUILT_IN_EXECLE:
8423     case BUILT_IN_EXECVP:
8424     case BUILT_IN_EXECVE:
8425       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8426       if (target)
8427 	return target;
8428       break;
8429 
8430     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8431     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8432     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8433     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8434     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8435       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8436       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8437       if (target)
8438 	return target;
8439       break;
8440 
8441     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8442     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8443     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8444     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8445     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8446       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8447       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8448       if (target)
8449 	return target;
8450       break;
8451 
8452     case BUILT_IN_SYNC_FETCH_AND_OR_1:
8453     case BUILT_IN_SYNC_FETCH_AND_OR_2:
8454     case BUILT_IN_SYNC_FETCH_AND_OR_4:
8455     case BUILT_IN_SYNC_FETCH_AND_OR_8:
8456     case BUILT_IN_SYNC_FETCH_AND_OR_16:
8457       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8458       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8459       if (target)
8460 	return target;
8461       break;
8462 
8463     case BUILT_IN_SYNC_FETCH_AND_AND_1:
8464     case BUILT_IN_SYNC_FETCH_AND_AND_2:
8465     case BUILT_IN_SYNC_FETCH_AND_AND_4:
8466     case BUILT_IN_SYNC_FETCH_AND_AND_8:
8467     case BUILT_IN_SYNC_FETCH_AND_AND_16:
8468       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8469       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8470       if (target)
8471 	return target;
8472       break;
8473 
8474     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8475     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8476     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8477     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8478     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8479       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8480       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8481       if (target)
8482 	return target;
8483       break;
8484 
8485     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8486     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8487     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8488     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8489     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8490       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8491       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8492       if (target)
8493 	return target;
8494       break;
8495 
8496     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8497     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8498     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8499     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8500     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8501       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8502       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8503       if (target)
8504 	return target;
8505       break;
8506 
8507     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8508     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8509     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8510     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8511     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8512       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8513       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8514       if (target)
8515 	return target;
8516       break;
8517 
8518     case BUILT_IN_SYNC_OR_AND_FETCH_1:
8519     case BUILT_IN_SYNC_OR_AND_FETCH_2:
8520     case BUILT_IN_SYNC_OR_AND_FETCH_4:
8521     case BUILT_IN_SYNC_OR_AND_FETCH_8:
8522     case BUILT_IN_SYNC_OR_AND_FETCH_16:
8523       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8524       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8525       if (target)
8526 	return target;
8527       break;
8528 
8529     case BUILT_IN_SYNC_AND_AND_FETCH_1:
8530     case BUILT_IN_SYNC_AND_AND_FETCH_2:
8531     case BUILT_IN_SYNC_AND_AND_FETCH_4:
8532     case BUILT_IN_SYNC_AND_AND_FETCH_8:
8533     case BUILT_IN_SYNC_AND_AND_FETCH_16:
8534       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8535       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8536       if (target)
8537 	return target;
8538       break;
8539 
8540     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8541     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8542     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8543     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8544     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8545       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8546       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8547       if (target)
8548 	return target;
8549       break;
8550 
8551     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8552     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8553     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8554     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8555     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8556       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8557       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8558       if (target)
8559 	return target;
8560       break;
8561 
8562     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8563     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8564     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8565     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8566     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8567       if (mode == VOIDmode)
8568 	mode = TYPE_MODE (boolean_type_node);
8569       if (!target || !register_operand (target, mode))
8570 	target = gen_reg_rtx (mode);
8571 
8572       mode = get_builtin_sync_mode
8573 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8574       target = expand_builtin_compare_and_swap (mode, exp, true, target);
8575       if (target)
8576 	return target;
8577       break;
8578 
8579     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8580     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8581     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8582     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8583     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8584       mode = get_builtin_sync_mode
8585 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8586       target = expand_builtin_compare_and_swap (mode, exp, false, target);
8587       if (target)
8588 	return target;
8589       break;
8590 
8591     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8592     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8593     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8594     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8595     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8596       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8597       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8598       if (target)
8599 	return target;
8600       break;
8601 
8602     case BUILT_IN_SYNC_LOCK_RELEASE_1:
8603     case BUILT_IN_SYNC_LOCK_RELEASE_2:
8604     case BUILT_IN_SYNC_LOCK_RELEASE_4:
8605     case BUILT_IN_SYNC_LOCK_RELEASE_8:
8606     case BUILT_IN_SYNC_LOCK_RELEASE_16:
8607       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8608       expand_builtin_sync_lock_release (mode, exp);
8609       return const0_rtx;
8610 
8611     case BUILT_IN_SYNC_SYNCHRONIZE:
8612       expand_builtin_sync_synchronize ();
8613       return const0_rtx;
8614 
8615     case BUILT_IN_ATOMIC_EXCHANGE_1:
8616     case BUILT_IN_ATOMIC_EXCHANGE_2:
8617     case BUILT_IN_ATOMIC_EXCHANGE_4:
8618     case BUILT_IN_ATOMIC_EXCHANGE_8:
8619     case BUILT_IN_ATOMIC_EXCHANGE_16:
8620       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8621       target = expand_builtin_atomic_exchange (mode, exp, target);
8622       if (target)
8623 	return target;
8624       break;
8625 
8626     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8627     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8628     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8629     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8630     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8631       {
8632 	unsigned int nargs, z;
8633 	vec<tree, va_gc> *vec;
8634 
8635 	mode =
8636 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8637 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8638 	if (target)
8639 	  return target;
8640 
8641 	/* If this is turned into an external library call, the weak parameter
8642 	   must be dropped to match the expected parameter list.  */
8643 	nargs = call_expr_nargs (exp);
8644 	vec_alloc (vec, nargs - 1);
8645 	for (z = 0; z < 3; z++)
8646 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8647 	/* Skip the boolean weak parameter.  */
8648 	for (z = 4; z < 6; z++)
8649 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8650 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8651 	break;
8652       }
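    /* E.g. (hypothetical) __atomic_compare_exchange_n (p, &e, d, weak, s, f)
       carries argument indices 0-5; the rebuilt call keeps indices 0-2 and
       4-5, matching the external __atomic_compare_exchange_N (p, &e, d, s, f)
       library signature, which has no weak argument.  */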
8653 
8654     case BUILT_IN_ATOMIC_LOAD_1:
8655     case BUILT_IN_ATOMIC_LOAD_2:
8656     case BUILT_IN_ATOMIC_LOAD_4:
8657     case BUILT_IN_ATOMIC_LOAD_8:
8658     case BUILT_IN_ATOMIC_LOAD_16:
8659       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8660       target = expand_builtin_atomic_load (mode, exp, target);
8661       if (target)
8662 	return target;
8663       break;
8664 
8665     case BUILT_IN_ATOMIC_STORE_1:
8666     case BUILT_IN_ATOMIC_STORE_2:
8667     case BUILT_IN_ATOMIC_STORE_4:
8668     case BUILT_IN_ATOMIC_STORE_8:
8669     case BUILT_IN_ATOMIC_STORE_16:
8670       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8671       target = expand_builtin_atomic_store (mode, exp);
8672       if (target)
8673 	return const0_rtx;
8674       break;
8675 
8676     case BUILT_IN_ATOMIC_ADD_FETCH_1:
8677     case BUILT_IN_ATOMIC_ADD_FETCH_2:
8678     case BUILT_IN_ATOMIC_ADD_FETCH_4:
8679     case BUILT_IN_ATOMIC_ADD_FETCH_8:
8680     case BUILT_IN_ATOMIC_ADD_FETCH_16:
8681       {
8682 	enum built_in_function lib;
8683 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8684 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8685 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8686 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8687 						 ignore, lib);
8688 	if (target)
8689 	  return target;
8690 	break;
8691       }
8692     case BUILT_IN_ATOMIC_SUB_FETCH_1:
8693     case BUILT_IN_ATOMIC_SUB_FETCH_2:
8694     case BUILT_IN_ATOMIC_SUB_FETCH_4:
8695     case BUILT_IN_ATOMIC_SUB_FETCH_8:
8696     case BUILT_IN_ATOMIC_SUB_FETCH_16:
8697       {
8698 	enum built_in_function lib;
8699 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8700 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8701 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8702 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8703 						 ignore, lib);
8704 	if (target)
8705 	  return target;
8706 	break;
8707       }
8708     case BUILT_IN_ATOMIC_AND_FETCH_1:
8709     case BUILT_IN_ATOMIC_AND_FETCH_2:
8710     case BUILT_IN_ATOMIC_AND_FETCH_4:
8711     case BUILT_IN_ATOMIC_AND_FETCH_8:
8712     case BUILT_IN_ATOMIC_AND_FETCH_16:
8713       {
8714 	enum built_in_function lib;
8715 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8716 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8717 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8718 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8719 						 ignore, lib);
8720 	if (target)
8721 	  return target;
8722 	break;
8723       }
8724     case BUILT_IN_ATOMIC_NAND_FETCH_1:
8725     case BUILT_IN_ATOMIC_NAND_FETCH_2:
8726     case BUILT_IN_ATOMIC_NAND_FETCH_4:
8727     case BUILT_IN_ATOMIC_NAND_FETCH_8:
8728     case BUILT_IN_ATOMIC_NAND_FETCH_16:
8729       {
8730 	enum built_in_function lib;
8731 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8732 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8733 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8734 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8735 						 ignore, lib);
8736 	if (target)
8737 	  return target;
8738 	break;
8739       }
8740     case BUILT_IN_ATOMIC_XOR_FETCH_1:
8741     case BUILT_IN_ATOMIC_XOR_FETCH_2:
8742     case BUILT_IN_ATOMIC_XOR_FETCH_4:
8743     case BUILT_IN_ATOMIC_XOR_FETCH_8:
8744     case BUILT_IN_ATOMIC_XOR_FETCH_16:
8745       {
8746 	enum built_in_function lib;
8747 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8748 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8749 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8750 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8751 						 ignore, lib);
8752 	if (target)
8753 	  return target;
8754 	break;
8755       }
8756     case BUILT_IN_ATOMIC_OR_FETCH_1:
8757     case BUILT_IN_ATOMIC_OR_FETCH_2:
8758     case BUILT_IN_ATOMIC_OR_FETCH_4:
8759     case BUILT_IN_ATOMIC_OR_FETCH_8:
8760     case BUILT_IN_ATOMIC_OR_FETCH_16:
8761       {
8762 	enum built_in_function lib;
8763 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8764 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8765 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8766 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8767 						 ignore, lib);
8768 	if (target)
8769 	  return target;
8770 	break;
8771       }
8772     case BUILT_IN_ATOMIC_FETCH_ADD_1:
8773     case BUILT_IN_ATOMIC_FETCH_ADD_2:
8774     case BUILT_IN_ATOMIC_FETCH_ADD_4:
8775     case BUILT_IN_ATOMIC_FETCH_ADD_8:
8776     case BUILT_IN_ATOMIC_FETCH_ADD_16:
8777       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8778       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8779 					       ignore, BUILT_IN_NONE);
8780       if (target)
8781 	return target;
8782       break;
8783 
8784     case BUILT_IN_ATOMIC_FETCH_SUB_1:
8785     case BUILT_IN_ATOMIC_FETCH_SUB_2:
8786     case BUILT_IN_ATOMIC_FETCH_SUB_4:
8787     case BUILT_IN_ATOMIC_FETCH_SUB_8:
8788     case BUILT_IN_ATOMIC_FETCH_SUB_16:
8789       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8790       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8791 					       ignore, BUILT_IN_NONE);
8792       if (target)
8793 	return target;
8794       break;
8795 
8796     case BUILT_IN_ATOMIC_FETCH_AND_1:
8797     case BUILT_IN_ATOMIC_FETCH_AND_2:
8798     case BUILT_IN_ATOMIC_FETCH_AND_4:
8799     case BUILT_IN_ATOMIC_FETCH_AND_8:
8800     case BUILT_IN_ATOMIC_FETCH_AND_16:
8801       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8802       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8803 					       ignore, BUILT_IN_NONE);
8804       if (target)
8805 	return target;
8806       break;
8807 
8808     case BUILT_IN_ATOMIC_FETCH_NAND_1:
8809     case BUILT_IN_ATOMIC_FETCH_NAND_2:
8810     case BUILT_IN_ATOMIC_FETCH_NAND_4:
8811     case BUILT_IN_ATOMIC_FETCH_NAND_8:
8812     case BUILT_IN_ATOMIC_FETCH_NAND_16:
8813       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8814       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8815 					       ignore, BUILT_IN_NONE);
8816       if (target)
8817 	return target;
8818       break;
8819 
8820     case BUILT_IN_ATOMIC_FETCH_XOR_1:
8821     case BUILT_IN_ATOMIC_FETCH_XOR_2:
8822     case BUILT_IN_ATOMIC_FETCH_XOR_4:
8823     case BUILT_IN_ATOMIC_FETCH_XOR_8:
8824     case BUILT_IN_ATOMIC_FETCH_XOR_16:
8825       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8826       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8827 					       ignore, BUILT_IN_NONE);
8828       if (target)
8829 	return target;
8830       break;
8831 
8832     case BUILT_IN_ATOMIC_FETCH_OR_1:
8833     case BUILT_IN_ATOMIC_FETCH_OR_2:
8834     case BUILT_IN_ATOMIC_FETCH_OR_4:
8835     case BUILT_IN_ATOMIC_FETCH_OR_8:
8836     case BUILT_IN_ATOMIC_FETCH_OR_16:
8837       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8838       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8839 					       ignore, BUILT_IN_NONE);
8840       if (target)
8841 	return target;
8842       break;
8843 
8844     case BUILT_IN_ATOMIC_TEST_AND_SET:
8845       return expand_builtin_atomic_test_and_set (exp, target);
8846 
8847     case BUILT_IN_ATOMIC_CLEAR:
8848       return expand_builtin_atomic_clear (exp);
8849 
8850     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8851       return expand_builtin_atomic_always_lock_free (exp);
8852 
8853     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8854       target = expand_builtin_atomic_is_lock_free (exp);
8855       if (target)
8856         return target;
8857       break;
8858 
8859     case BUILT_IN_ATOMIC_THREAD_FENCE:
8860       expand_builtin_atomic_thread_fence (exp);
8861       return const0_rtx;
8862 
8863     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8864       expand_builtin_atomic_signal_fence (exp);
8865       return const0_rtx;
8866 
8867     case BUILT_IN_OBJECT_SIZE:
8868       return expand_builtin_object_size (exp);
8869 
8870     case BUILT_IN_MEMCPY_CHK:
8871     case BUILT_IN_MEMPCPY_CHK:
8872     case BUILT_IN_MEMMOVE_CHK:
8873     case BUILT_IN_MEMSET_CHK:
8874       target = expand_builtin_memory_chk (exp, target, mode, fcode);
8875       if (target)
8876 	return target;
8877       break;
8878 
8879     case BUILT_IN_STRCPY_CHK:
8880     case BUILT_IN_STPCPY_CHK:
8881     case BUILT_IN_STRNCPY_CHK:
8882     case BUILT_IN_STPNCPY_CHK:
8883     case BUILT_IN_STRCAT_CHK:
8884     case BUILT_IN_STRNCAT_CHK:
8885     case BUILT_IN_SNPRINTF_CHK:
8886     case BUILT_IN_VSNPRINTF_CHK:
8887       maybe_emit_chk_warning (exp, fcode);
8888       break;
8889 
8890     case BUILT_IN_SPRINTF_CHK:
8891     case BUILT_IN_VSPRINTF_CHK:
8892       maybe_emit_sprintf_chk_warning (exp, fcode);
8893       break;
8894 
8895     case BUILT_IN_FREE:
8896       if (warn_free_nonheap_object)
8897 	maybe_emit_free_warning (exp);
8898       break;
8899 
8900     case BUILT_IN_THREAD_POINTER:
8901       return expand_builtin_thread_pointer (exp, target);
8902 
8903     case BUILT_IN_SET_THREAD_POINTER:
8904       expand_builtin_set_thread_pointer (exp);
8905       return const0_rtx;
8906 
8907     case BUILT_IN_ACC_ON_DEVICE:
8908       /* Do a library call if we failed to expand the builtin when
8909 	 folding.  */
8910       break;
8911 
8912     case BUILT_IN_GOACC_PARLEVEL_ID:
8913     case BUILT_IN_GOACC_PARLEVEL_SIZE:
8914       return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8915 
8916     case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8917       return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8918 
8919     case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8920     case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8921     case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8922     case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8923     case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8924       mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8925       return expand_speculation_safe_value (mode, exp, target, ignore);
8926 
8927     default:	/* Just do a library call if this is an unknown builtin.  */
8928       break;
8929     }
8930 
8931   /* The switch statement above can drop through to cause the function
8932      to be called normally.  */
8933   return expand_call (exp, target, ignore);
8934 }
8935 
8936 /* Determine whether a tree node represents a call to a built-in
8937    function.  If the tree T is a call to a built-in function with
8938    the right number of arguments of the appropriate types, return
8939    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8940    Otherwise the return value is END_BUILTINS.  */
8941 
8942 enum built_in_function
8943 builtin_mathfn_code (const_tree t)
8944 {
8945   const_tree fndecl, arg, parmlist;
8946   const_tree argtype, parmtype;
8947   const_call_expr_arg_iterator iter;
8948 
8949   if (TREE_CODE (t) != CALL_EXPR)
8950     return END_BUILTINS;
8951 
8952   fndecl = get_callee_fndecl (t);
8953   if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8954       return END_BUILTINS;
8955 
8956   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8957   init_const_call_expr_arg_iterator (t, &iter);
8958   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8959     {
8960       /* If a function doesn't take a variable number of arguments,
8961 	 the last element in the list will have type `void'.  */
8962       parmtype = TREE_VALUE (parmlist);
8963       if (VOID_TYPE_P (parmtype))
8964 	{
8965 	  if (more_const_call_expr_args_p (&iter))
8966 	    return END_BUILTINS;
8967 	  return DECL_FUNCTION_CODE (fndecl);
8968 	}
8969 
8970       if (! more_const_call_expr_args_p (&iter))
8971 	return END_BUILTINS;
8972 
8973       arg = next_const_call_expr_arg (&iter);
8974       argtype = TREE_TYPE (arg);
8975 
8976       if (SCALAR_FLOAT_TYPE_P (parmtype))
8977 	{
8978 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
8979 	    return END_BUILTINS;
8980 	}
8981       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8982 	{
8983 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
8984 	    return END_BUILTINS;
8985 	}
8986       else if (POINTER_TYPE_P (parmtype))
8987 	{
8988 	  if (! POINTER_TYPE_P (argtype))
8989 	    return END_BUILTINS;
8990 	}
8991       else if (INTEGRAL_TYPE_P (parmtype))
8992 	{
8993 	  if (! INTEGRAL_TYPE_P (argtype))
8994 	    return END_BUILTINS;
8995 	}
8996       else
8997 	return END_BUILTINS;
8998     }
8999 
9000   /* Variable-length argument list.  */
9001   return DECL_FUNCTION_CODE (fndecl);
9002 }
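/* Caller sketch (hypothetical): fold routines can dispatch on the result,
   e.g.

     if (builtin_mathfn_code (t) == BUILT_IN_SQRT)
       ... T is known to be a well-formed call to sqrt ...

   Any mismatch in argument count or in the type class of an argument
   against the builtin's prototype yields END_BUILTINS.  */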
9003 
9004 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9005    evaluate to a constant.  */
9006 
9007 static tree
9008 fold_builtin_constant_p (tree arg)
9009 {
9010   /* We return 1 for a numeric type that's known to be a constant
9011      value at compile-time or for an aggregate type that's a
9012      literal constant.  */
9013   STRIP_NOPS (arg);
9014 
9015   /* If we know this is a constant, return the constant one.  */
9016   if (CONSTANT_CLASS_P (arg)
9017       || (TREE_CODE (arg) == CONSTRUCTOR
9018 	  && TREE_CONSTANT (arg)))
9019     return integer_one_node;
9020   if (TREE_CODE (arg) == ADDR_EXPR)
9021     {
9022        tree op = TREE_OPERAND (arg, 0);
9023        if (TREE_CODE (op) == STRING_CST
9024 	   || (TREE_CODE (op) == ARRAY_REF
9025 	       && integer_zerop (TREE_OPERAND (op, 1))
9026 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9027 	 return integer_one_node;
9028     }
9029 
9030   /* If this expression has side effects, show we don't know it to be a
9031      constant.  Likewise if it's a pointer or aggregate type, since in
9032      those cases we only want literals, as those are only optimized
9033      when generating RTL, not later.
9034      And finally, if we are compiling an initializer, not code, we
9035      need to return a definite result now; there's not going to be any
9036      more optimization done.  */
9037   if (TREE_SIDE_EFFECTS (arg)
9038       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9039       || POINTER_TYPE_P (TREE_TYPE (arg))
9040       || cfun == 0
9041       || folding_initializer
9042       || force_folding_builtin_constant_p)
9043     return integer_zero_node;
9044 
9045   return NULL_TREE;
9046 }
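/* Examples (hypothetical): __builtin_constant_p (3) and
   __builtin_constant_p ("x") fold to 1 here; a pointer-typed argument that
   is not the address of a string literal folds to 0 immediately; anything
   else returns NULL_TREE so the decision is deferred to later folding.  */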
9047 
9048 /* Create builtin_expect or builtin_expect_with_probability
9049    with PRED and EXPECTED as its arguments and return it as a truthvalue.
9050    Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
9051    builtin_expect_with_probability instead uses third argument as PROBABILITY
9052    value.  */
9053 
9054 static tree
9055 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9056 				tree predictor, tree probability)
9057 {
9058   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9059 
9060   fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9061 			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
9062   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9063   ret_type = TREE_TYPE (TREE_TYPE (fn));
9064   pred_type = TREE_VALUE (arg_types);
9065   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9066 
9067   pred = fold_convert_loc (loc, pred_type, pred);
9068   expected = fold_convert_loc (loc, expected_type, expected);
9069 
9070   if (probability)
9071     call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9072   else
9073     call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9074 				     predictor);
9075 
9076   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9077 		 build_int_cst (ret_type, 0));
9078 }
9079 
9080 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
9081    NULL_TREE if no simplification is possible.  */
9082 
9083 tree
9084 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9085 		     tree arg3)
9086 {
9087   tree inner, fndecl, inner_arg0;
9088   enum tree_code code;
9089 
9090   /* Distribute the expected value over short-circuiting operators.
9091      See through the cast from truthvalue_type_node to long.  */
9092   inner_arg0 = arg0;
9093   while (CONVERT_EXPR_P (inner_arg0)
9094 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9095 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9096     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9097 
9098   /* If this is a builtin_expect within a builtin_expect keep the
9099      inner one.  See through a comparison against a constant.  It
9100      might have been added to create a thruthvalue.  */
9101   inner = inner_arg0;
9102 
9103   if (COMPARISON_CLASS_P (inner)
9104       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9105     inner = TREE_OPERAND (inner, 0);
9106 
9107   if (TREE_CODE (inner) == CALL_EXPR
9108       && (fndecl = get_callee_fndecl (inner))
9109       && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9110 	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9111     return arg0;
9112 
9113   inner = inner_arg0;
9114   code = TREE_CODE (inner);
9115   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9116     {
9117       tree op0 = TREE_OPERAND (inner, 0);
9118       tree op1 = TREE_OPERAND (inner, 1);
9119       arg1 = save_expr (arg1);
9120 
9121       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9122       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9123       inner = build2 (code, TREE_TYPE (inner), op0, op1);
9124 
9125       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9126     }
9127 
9128   /* If the argument isn't invariant then there's nothing else we can do.  */
9129   if (!TREE_CONSTANT (inner_arg0))
9130     return NULL_TREE;
9131 
9132   /* If we expect that a comparison against the argument will fold to
9133      a constant return the constant.  In practice, this means a true
9134      constant or the address of a non-weak symbol.  */
9135   inner = inner_arg0;
9136   STRIP_NOPS (inner);
9137   if (TREE_CODE (inner) == ADDR_EXPR)
9138     {
9139       do
9140 	{
9141 	  inner = TREE_OPERAND (inner, 0);
9142 	}
9143       while (TREE_CODE (inner) == COMPONENT_REF
9144 	     || TREE_CODE (inner) == ARRAY_REF);
9145       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9146 	return NULL_TREE;
9147     }
9148 
9149   /* Otherwise, ARG0 already has the proper type for the return value.  */
9150   return arg0;
9151 }
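
/* Illustration (a sketch of the short-circuit distribution above):

     __builtin_expect (a && b, 1)

   is rewritten into the equivalent of

     (__builtin_expect ((long) a, 1L) != 0)
     && (__builtin_expect ((long) b, 1L) != 0)

   so that each subcondition carries its own expectation; the expected
   value is wrapped in save_expr so it is evaluated only once.  */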
9152 
9153 /* Fold a call to __builtin_classify_type with argument ARG.  */
9154 
9155 static tree
9156 fold_builtin_classify_type (tree arg)
9157 {
9158   if (arg == 0)
9159     return build_int_cst (integer_type_node, no_type_class);
9160 
9161   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9162 }
9163 
9164 /* Fold a call to __builtin_strlen with argument ARG.  */
9165 
9166 static tree
9167 fold_builtin_strlen (location_t loc, tree type, tree arg)
9168 {
9169   if (!validate_arg (arg, POINTER_TYPE))
9170     return NULL_TREE;
9171   else
9172     {
9173       c_strlen_data lendata = { };
9174       tree len = c_strlen (arg, 0, &lendata);
9175 
9176       if (len)
9177 	return fold_convert_loc (loc, type, len);
9178 
9179       if (!lendata.decl)
9180 	c_strlen (arg, 1, &lendata);
9181 
9182       if (lendata.decl)
9183 	{
9184 	  if (EXPR_HAS_LOCATION (arg))
9185 	    loc = EXPR_LOCATION (arg);
9186 	  else if (loc == UNKNOWN_LOCATION)
9187 	    loc = input_location;
9188 	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9189 	}
9190 
9191       return NULL_TREE;
9192     }
9193 }
9194 
9195 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
9196 
9197 static tree
9198 fold_builtin_inf (location_t loc, tree type, int warn)
9199 {
9200   REAL_VALUE_TYPE real;
9201 
9202   /* __builtin_inff is intended to be usable to define INFINITY on all
9203      targets.  If an infinity is not available, INFINITY expands "to a
9204      positive constant of type float that overflows at translation
9205      time", footnote "In this case, using INFINITY will violate the
9206      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9207      Thus we pedwarn to ensure this constraint violation is
9208      diagnosed.  */
9209   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9210     pedwarn (loc, 0, "target format does not support infinity");
9211 
9212   real_inf (&real);
9213   return build_real (type, real);
9214 }
9215 
9216 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
9217    NULL_TREE if no simplification can be made.  */
9218 
9219 static tree
9220 fold_builtin_sincos (location_t loc,
9221 		     tree arg0, tree arg1, tree arg2)
9222 {
9223   tree type;
9224   tree fndecl, call = NULL_TREE;
9225 
9226   if (!validate_arg (arg0, REAL_TYPE)
9227       || !validate_arg (arg1, POINTER_TYPE)
9228       || !validate_arg (arg2, POINTER_TYPE))
9229     return NULL_TREE;
9230 
9231   type = TREE_TYPE (arg0);
9232 
9233   /* Canonicalize sincos to cexpi.  */
9234   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9235   if (fn == END_BUILTINS)
9236     return NULL_TREE;
9237 
9238   /* Calculate the result when the argument is a constant.  */
9239   if (TREE_CODE (arg0) == REAL_CST)
9240     {
9241       tree complex_type = build_complex_type (type);
9242       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9243     }
9244   if (!call)
9245     {
9246       if (!targetm.libc_has_function (function_c99_math_complex)
9247 	  || !builtin_decl_implicit_p (fn))
9248 	return NULL_TREE;
9249       fndecl = builtin_decl_explicit (fn);
9250       call = build_call_expr_loc (loc, fndecl, 1, arg0);
9251       call = builtin_save_expr (call);
9252     }
9253 
9254   tree ptype = build_pointer_type (type);
9255   arg1 = fold_convert (ptype, arg1);
9256   arg2 = fold_convert (ptype, arg2);
9257   return build2 (COMPOUND_EXPR, void_type_node,
9258 		 build2 (MODIFY_EXPR, void_type_node,
9259 			 build_fold_indirect_ref_loc (loc, arg1),
9260 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9261 		 build2 (MODIFY_EXPR, void_type_node,
9262 			 build_fold_indirect_ref_loc (loc, arg2),
9263 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9264 }
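
/* Illustration (a sketch of the canonicalization above, using GNU
   complex syntax; __t is a hypothetical temporary standing for the
   builtin_save_expr'd call):

     sincos (x, &s, &c);

   becomes the equivalent of

     __complex__ double __t = __builtin_cexpi (x);
     s = __imag__ __t, c = __real__ __t;

   and for constant X the cexpi call itself folds to a constant.  */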
9265 
9266 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9267    Return NULL_TREE if no simplification can be made.  */
9268 
9269 static tree
9270 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9271 {
9272   if (!validate_arg (arg1, POINTER_TYPE)
9273       || !validate_arg (arg2, POINTER_TYPE)
9274       || !validate_arg (len, INTEGER_TYPE))
9275     return NULL_TREE;
9276 
9277   /* If the LEN parameter is zero, return zero.  */
9278   if (integer_zerop (len))
9279     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9280 			      arg1, arg2);
9281 
9282   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
9283   if (operand_equal_p (arg1, arg2, 0))
9284     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9285 
9286   /* If the LEN parameter is one, return an expression corresponding to
9287      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9288   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9289     {
9290       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9291       tree cst_uchar_ptr_node
9292 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9293 
9294       tree ind1
9295 	= fold_convert_loc (loc, integer_type_node,
9296 			    build1 (INDIRECT_REF, cst_uchar_node,
9297 				    fold_convert_loc (loc,
9298 						      cst_uchar_ptr_node,
9299 						      arg1)));
9300       tree ind2
9301 	= fold_convert_loc (loc, integer_type_node,
9302 			    build1 (INDIRECT_REF, cst_uchar_node,
9303 				    fold_convert_loc (loc,
9304 						      cst_uchar_ptr_node,
9305 						      arg2)));
9306       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9307     }
9308 
9309   return NULL_TREE;
9310 }
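
/* Illustration (a sketch of the LEN == 1 case above):

     memcmp (p, q, 1)

   folds to the equivalent of

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   avoiding the library call for a single-byte comparison.  */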
9311 
9312 /* Fold a call to builtin isascii with argument ARG.  */
9313 
9314 static tree
9315 fold_builtin_isascii (location_t loc, tree arg)
9316 {
9317   if (!validate_arg (arg, INTEGER_TYPE))
9318     return NULL_TREE;
9319   else
9320     {
9321       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
9322       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9323 			 build_int_cst (integer_type_node,
9324 					~ (unsigned HOST_WIDE_INT) 0x7f));
9325       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9326 			      arg, integer_zero_node);
9327     }
9328 }
9329 
9330 /* Fold a call to builtin toascii with argument ARG.  */
9331 
9332 static tree
9333 fold_builtin_toascii (location_t loc, tree arg)
9334 {
9335   if (!validate_arg (arg, INTEGER_TYPE))
9336     return NULL_TREE;
9337 
9338   /* Transform toascii(c) -> (c & 0x7f).  */
9339   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9340 			  build_int_cst (integer_type_node, 0x7f));
9341 }
9342 
9343 /* Fold a call to builtin isdigit with argument ARG.  */
9344 
9345 static tree
9346 fold_builtin_isdigit (location_t loc, tree arg)
9347 {
9348   if (!validate_arg (arg, INTEGER_TYPE))
9349     return NULL_TREE;
9350   else
9351     {
9352       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
9353       /* According to the C standard, isdigit is unaffected by locale.
9354 	 However, it definitely is affected by the target character set.  */
9355       unsigned HOST_WIDE_INT target_digit0
9356 	= lang_hooks.to_target_charset ('0');
9357 
9358       if (target_digit0 == 0)
9359 	return NULL_TREE;
9360 
9361       arg = fold_convert_loc (loc, unsigned_type_node, arg);
9362       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9363 			 build_int_cst (unsigned_type_node, target_digit0));
9364       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9365 			  build_int_cst (unsigned_type_node, 9));
9366     }
9367 }
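
/* Illustration (a sketch, assuming a target charset where '0' is 0x30):

     isdigit (c)

   folds to the equivalent of the single unsigned range check

     (unsigned) c - 0x30 <= 9

   which is locale-independent, matching the C standard's definition.  */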
9368 
9369 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
9370 
9371 static tree
9372 fold_builtin_fabs (location_t loc, tree arg, tree type)
9373 {
9374   if (!validate_arg (arg, REAL_TYPE))
9375     return NULL_TREE;
9376 
9377   arg = fold_convert_loc (loc, type, arg);
9378   return fold_build1_loc (loc, ABS_EXPR, type, arg);
9379 }
9380 
9381 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
9382 
9383 static tree
9384 fold_builtin_abs (location_t loc, tree arg, tree type)
9385 {
9386   if (!validate_arg (arg, INTEGER_TYPE))
9387     return NULL_TREE;
9388 
9389   arg = fold_convert_loc (loc, type, arg);
9390   return fold_build1_loc (loc, ABS_EXPR, type, arg);
9391 }
9392 
9393 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
9394 
9395 static tree
9396 fold_builtin_carg (location_t loc, tree arg, tree type)
9397 {
9398   if (validate_arg (arg, COMPLEX_TYPE)
9399       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9400     {
9401       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9402 
9403       if (atan2_fn)
9404         {
9405 	  tree new_arg = builtin_save_expr (arg);
9406 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9407 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9408 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9409 	}
9410     }
9411 
9412   return NULL_TREE;
9413 }
9414 
9415 /* Fold a call to builtin frexp; we can assume the base is 2.  */
9416 
9417 static tree
9418 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9419 {
9420   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9421     return NULL_TREE;
9422 
9423   STRIP_NOPS (arg0);
9424 
9425   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9426     return NULL_TREE;
9427 
9428   arg1 = build_fold_indirect_ref_loc (loc, arg1);
9429 
9430   /* Proceed if a valid pointer type was passed in.  */
9431   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9432     {
9433       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9434       tree frac, exp;
9435 
9436       switch (value->cl)
9437       {
9438       case rvc_zero:
9439 	/* For +-0, return (*exp = 0, +-0).  */
9440 	exp = integer_zero_node;
9441 	frac = arg0;
9442 	break;
9443       case rvc_nan:
9444       case rvc_inf:
9445 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
9446 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
9447       case rvc_normal:
9448 	{
9449 	  /* Since the frexp function always expects base 2, and in
9450 	     GCC normalized significands are already in the range
9451 	     [0.5, 1.0), we have exactly what frexp wants.  */
9452 	  REAL_VALUE_TYPE frac_rvt = *value;
9453 	  SET_REAL_EXP (&frac_rvt, 0);
9454 	  frac = build_real (rettype, frac_rvt);
9455 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
9456 	}
9457 	break;
9458       default:
9459 	gcc_unreachable ();
9460       }
9461 
9462       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
9463       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9464       TREE_SIDE_EFFECTS (arg1) = 1;
9465       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9466     }
9467 
9468   return NULL_TREE;
9469 }
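
/* Illustration (a worked constant example for the folding above):
   6.0 == 0.75 * 2**3 with the significand normalized to [0.5, 1.0), so

     frexp (6.0, &e)

   folds to the equivalent of the COMPOUND_EXPR (e = 3, 0.75).  */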
9470 
9471 /* Fold a call to builtin modf.  */
9472 
9473 static tree
9474 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9475 {
9476   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9477     return NULL_TREE;
9478 
9479   STRIP_NOPS (arg0);
9480 
9481   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9482     return NULL_TREE;
9483 
9484   arg1 = build_fold_indirect_ref_loc (loc, arg1);
9485 
9486   /* Proceed if a valid pointer type was passed in.  */
9487   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9488     {
9489       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9490       REAL_VALUE_TYPE trunc, frac;
9491 
9492       switch (value->cl)
9493       {
9494       case rvc_nan:
9495       case rvc_zero:
9496 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
9497 	trunc = frac = *value;
9498 	break;
9499       case rvc_inf:
9500 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
9501 	frac = dconst0;
9502 	frac.sign = value->sign;
9503 	trunc = *value;
9504 	break;
9505       case rvc_normal:
9506 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
9507 	real_trunc (&trunc, VOIDmode, value);
9508 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9509 	/* If the original number was negative and already
9510 	   integral, then the fractional part is -0.0.  */
9511 	if (value->sign && frac.cl == rvc_zero)
9512 	  frac.sign = value->sign;
9513 	break;
9514       }
9515 
9516       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9517       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9518 			  build_real (rettype, trunc));
9519       TREE_SIDE_EFFECTS (arg1) = 1;
9520       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9521 			  build_real (rettype, frac));
9522     }
9523 
9524   return NULL_TREE;
9525 }
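
/* Illustration (worked constant examples for the folding above):

     modf (-2.5, &ip)  ->  (ip = -2.0, -0.5)
     modf (-3.0, &ip)  ->  (ip = -3.0, -0.0)

   the second case showing the -0.0 fraction for a negative integral
   argument.  */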
9526 
9527 /* Given a location LOC, an interclass builtin function decl FNDECL
9528    and its single argument ARG, return a folded expression computing
9529    the same value, or NULL_TREE if we either couldn't or didn't want to fold
9530    (the latter happens if there's an RTL instruction available).  */
9531 
9532 static tree
9533 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9534 {
9535   machine_mode mode;
9536 
9537   if (!validate_arg (arg, REAL_TYPE))
9538     return NULL_TREE;
9539 
9540   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9541     return NULL_TREE;
9542 
9543   mode = TYPE_MODE (TREE_TYPE (arg));
9544 
9545   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9546 
9547   /* If there is no optab, try generic code.  */
9548   switch (DECL_FUNCTION_CODE (fndecl))
9549     {
9550       tree result;
9551 
9552     CASE_FLT_FN (BUILT_IN_ISINF):
9553       {
9554 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
9555 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9556 	tree type = TREE_TYPE (arg);
9557 	REAL_VALUE_TYPE r;
9558 	char buf[128];
9559 
9560 	if (is_ibm_extended)
9561 	  {
9562 	    /* NaN and Inf are encoded in the high-order double value
9563 	       only.  The low-order value is not significant.  */
9564 	    type = double_type_node;
9565 	    mode = DFmode;
9566 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9567 	  }
9568 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9569 	real_from_string (&r, buf);
9570 	result = build_call_expr (isgr_fn, 2,
9571 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9572 				  build_real (type, r));
9573 	return result;
9574       }
9575     CASE_FLT_FN (BUILT_IN_FINITE):
9576     case BUILT_IN_ISFINITE:
9577       {
9578 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
9579 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9580 	tree type = TREE_TYPE (arg);
9581 	REAL_VALUE_TYPE r;
9582 	char buf[128];
9583 
9584 	if (is_ibm_extended)
9585 	  {
9586 	    /* NaN and Inf are encoded in the high-order double value
9587 	       only.  The low-order value is not significant.  */
9588 	    type = double_type_node;
9589 	    mode = DFmode;
9590 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9591 	  }
9592 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9593 	real_from_string (&r, buf);
9594 	result = build_call_expr (isle_fn, 2,
9595 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9596 				  build_real (type, r));
9597 	/*result = fold_build2_loc (loc, UNGT_EXPR,
9598 				  TREE_TYPE (TREE_TYPE (fndecl)),
9599 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9600 				  build_real (type, r));
9601 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9602 				  TREE_TYPE (TREE_TYPE (fndecl)),
9603 				  result);*/
9604 	return result;
9605       }
9606     case BUILT_IN_ISNORMAL:
9607       {
9608 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9609 	   islessequal(fabs(x),DBL_MAX).  */
9610 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9611 	tree type = TREE_TYPE (arg);
9612 	tree orig_arg, max_exp, min_exp;
9613 	machine_mode orig_mode = mode;
9614 	REAL_VALUE_TYPE rmax, rmin;
9615 	char buf[128];
9616 
9617 	orig_arg = arg = builtin_save_expr (arg);
9618 	if (is_ibm_extended)
9619 	  {
9620 	    /* Use double to test the normal range of IBM extended
9621 	       precision.  Emin for IBM extended precision is
9622 	       different to emin for IEEE double, being 53 higher
9623 	       since the low double exponent is at least 53 lower
9624 	       than the high double exponent.  */
9625 	    type = double_type_node;
9626 	    mode = DFmode;
9627 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9628 	  }
9629 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9630 
9631 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9632 	real_from_string (&rmax, buf);
9633 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9634 	real_from_string (&rmin, buf);
9635 	max_exp = build_real (type, rmax);
9636 	min_exp = build_real (type, rmin);
9637 
9638 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9639 	if (is_ibm_extended)
9640 	  {
9641 	    /* Testing the high end of the range is done just using
9642 	       the high double, using the same test as isfinite().
9643 	       For the subnormal end of the range we first test the
9644 	       high double, then if its magnitude is equal to the
9645 	       limit of 0x1p-969, we test whether the low double is
9646 	       non-zero and opposite sign to the high double.  */
9647 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9648 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9649 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9650 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9651 				       arg, min_exp);
9652 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
9653 				      complex_double_type_node, orig_arg);
9654 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9655 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9656 	    tree zero = build_real (type, dconst0);
9657 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9658 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9659 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9660 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9661 				      fold_build3 (COND_EXPR,
9662 						   integer_type_node,
9663 						   hilt, logt, lolt));
9664 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9665 				  eq_min, ok_lo);
9666 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9667 				   gt_min, eq_min);
9668 	  }
9669 	else
9670 	  {
9671 	    tree const isge_fn
9672 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9673 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9674 	  }
9675 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9676 			      max_exp, min_exp);
9677 	return result;
9678       }
9679     default:
9680       break;
9681     }
9682 
9683   return NULL_TREE;
9684 }
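
/* Illustration (a sketch of the generic expansions above for plain IEEE
   double, ignoring the IBM extended special-casing; DBL_MAX is the value
   built from get_max_float and 0x1p-1022 is the smallest normal):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), 0x1p-1022)  */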
9685 
9686 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9687    ARG is the argument for the call.  */
9688 
9689 static tree
9690 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9691 {
9692   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9693 
9694   if (!validate_arg (arg, REAL_TYPE))
9695     return NULL_TREE;
9696 
9697   switch (builtin_index)
9698     {
9699     case BUILT_IN_ISINF:
9700       if (!HONOR_INFINITIES (arg))
9701 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9702 
9703       return NULL_TREE;
9704 
9705     case BUILT_IN_ISINF_SIGN:
9706       {
9707 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9708 	/* In a boolean context, GCC will fold the inner COND_EXPR to
9709 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
9710 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9711 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9712 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9713 	tree tmp = NULL_TREE;
9714 
9715 	arg = builtin_save_expr (arg);
9716 
9717 	if (signbit_fn && isinf_fn)
9718 	  {
9719 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9720 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9721 
9722 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9723 					signbit_call, integer_zero_node);
9724 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9725 				      isinf_call, integer_zero_node);
9726 
9727 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9728 			       integer_minus_one_node, integer_one_node);
9729 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9730 			       isinf_call, tmp,
9731 			       integer_zero_node);
9732 	  }
9733 
9734 	return tmp;
9735       }
9736 
9737     case BUILT_IN_ISFINITE:
9738       if (!HONOR_NANS (arg)
9739 	  && !HONOR_INFINITIES (arg))
9740 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
9741 
9742       return NULL_TREE;
9743 
9744     case BUILT_IN_ISNAN:
9745       if (!HONOR_NANS (arg))
9746 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9747 
9748       {
9749 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9750 	if (is_ibm_extended)
9751 	  {
9752 	    /* NaN and Inf are encoded in the high-order double value
9753 	       only.  The low-order value is not significant.  */
9754 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9755 	  }
9756       }
9757       arg = builtin_save_expr (arg);
9758       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9759 
9760     default:
9761       gcc_unreachable ();
9762     }
9763 }
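
/* Illustration (a sketch of the ISNAN case above): when NaNs are
   honored,

     isnan (x)

   folds to the self-comparison UNORDERED_EXPR (x, x), i.e. the
   equivalent of __builtin_isunordered (x, x), which is true exactly
   when x is a NaN.  */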
9764 
9765 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9766    This builtin will generate code to return the appropriate floating
9767    point classification depending on the value of the floating point
9768    number passed in.  The possible return values must be supplied as
9769    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9770    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
9771    one floating point argument which is "type generic".  */
9772 
9773 static tree
9774 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9775 {
9776   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9777     arg, type, res, tmp;
9778   machine_mode mode;
9779   REAL_VALUE_TYPE r;
9780   char buf[128];
9781 
9782   /* Verify the required arguments in the original call.  */
9783   if (nargs != 6
9784       || !validate_arg (args[0], INTEGER_TYPE)
9785       || !validate_arg (args[1], INTEGER_TYPE)
9786       || !validate_arg (args[2], INTEGER_TYPE)
9787       || !validate_arg (args[3], INTEGER_TYPE)
9788       || !validate_arg (args[4], INTEGER_TYPE)
9789       || !validate_arg (args[5], REAL_TYPE))
9790     return NULL_TREE;
9791 
9792   fp_nan = args[0];
9793   fp_infinite = args[1];
9794   fp_normal = args[2];
9795   fp_subnormal = args[3];
9796   fp_zero = args[4];
9797   arg = args[5];
9798   type = TREE_TYPE (arg);
9799   mode = TYPE_MODE (type);
9800   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9801 
9802   /* fpclassify(x) ->
9803        isnan(x) ? FP_NAN :
9804          (fabs(x) == Inf ? FP_INFINITE :
9805 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
9806 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
9807 
9808   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9809 		     build_real (type, dconst0));
9810   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9811 		     tmp, fp_zero, fp_subnormal);
9812 
9813   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9814   real_from_string (&r, buf);
9815   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9816 		     arg, build_real (type, r));
9817   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9818 
9819   if (HONOR_INFINITIES (mode))
9820     {
9821       real_inf (&r);
9822       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9823 			 build_real (type, r));
9824       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9825 			 fp_infinite, res);
9826     }
9827 
9828   if (HONOR_NANS (mode))
9829     {
9830       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9831       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9832     }
9833 
9834   return res;
9835 }
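
/* Illustration (a usage sketch; the five class values are whatever the
   caller passes, shown here with their usual <math.h> names):

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   folds into the nested conditional shown above, built from the
   innermost zero/subnormal split outward to the final NaN check.  */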
9836 
9837 /* Fold a call to an unordered comparison function such as
9838    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9839    being called and ARG0 and ARG1 are the arguments for the call.
9840    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9841    the opposite of the desired result.  UNORDERED_CODE is used
9842    for modes that can hold NaNs and ORDERED_CODE is used for
9843    the rest.  */
9844 
9845 static tree
9846 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9847 			    enum tree_code unordered_code,
9848 			    enum tree_code ordered_code)
9849 {
9850   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9851   enum tree_code code;
9852   tree type0, type1;
9853   enum tree_code code0, code1;
9854   tree cmp_type = NULL_TREE;
9855 
9856   type0 = TREE_TYPE (arg0);
9857   type1 = TREE_TYPE (arg1);
9858 
9859   code0 = TREE_CODE (type0);
9860   code1 = TREE_CODE (type1);
9861 
9862   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9863     /* Choose the wider of two real types.  */
9864     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9865       ? type0 : type1;
9866   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9867     cmp_type = type0;
9868   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9869     cmp_type = type1;
9870 
9871   arg0 = fold_convert_loc (loc, cmp_type, arg0);
9872   arg1 = fold_convert_loc (loc, cmp_type, arg1);
9873 
9874   if (unordered_code == UNORDERED_EXPR)
9875     {
9876       if (!HONOR_NANS (arg0))
9877 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9878       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9879     }
9880 
9881   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9882   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9883 		      fold_build2_loc (loc, code, type, arg0, arg1));
9884 }
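
/* Illustration (a sketch of the inversion above): when NaNs are honored,

     isgreater (x, y)

   folds to the negation of the unordered comparison UNLE_EXPR, roughly

     !(x UNLE y)    i.e. not (unordered (x, y) || x <= y)

   which is true exactly when both operands are ordered and x > y, and,
   unlike a plain `x > y', raises no invalid exception on quiet NaNs.  */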
9885 
9886 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9887    arithmetic if it can never overflow, or into internal functions that
9888    return both the result of the arithmetic and an overflow boolean flag in
9889    a complex integer result, or some other check for overflow.
9890    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9891    checking part of that.  */
9892 
9893 static tree
9894 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9895 			     tree arg0, tree arg1, tree arg2)
9896 {
9897   enum internal_fn ifn = IFN_LAST;
9898   /* The code of the expression corresponding to the built-in.  */
9899   enum tree_code opcode = ERROR_MARK;
9900   bool ovf_only = false;
9901 
9902   switch (fcode)
9903     {
9904     case BUILT_IN_ADD_OVERFLOW_P:
9905       ovf_only = true;
9906       /* FALLTHRU */
9907     case BUILT_IN_ADD_OVERFLOW:
9908     case BUILT_IN_SADD_OVERFLOW:
9909     case BUILT_IN_SADDL_OVERFLOW:
9910     case BUILT_IN_SADDLL_OVERFLOW:
9911     case BUILT_IN_UADD_OVERFLOW:
9912     case BUILT_IN_UADDL_OVERFLOW:
9913     case BUILT_IN_UADDLL_OVERFLOW:
9914       opcode = PLUS_EXPR;
9915       ifn = IFN_ADD_OVERFLOW;
9916       break;
9917     case BUILT_IN_SUB_OVERFLOW_P:
9918       ovf_only = true;
9919       /* FALLTHRU */
9920     case BUILT_IN_SUB_OVERFLOW:
9921     case BUILT_IN_SSUB_OVERFLOW:
9922     case BUILT_IN_SSUBL_OVERFLOW:
9923     case BUILT_IN_SSUBLL_OVERFLOW:
9924     case BUILT_IN_USUB_OVERFLOW:
9925     case BUILT_IN_USUBL_OVERFLOW:
9926     case BUILT_IN_USUBLL_OVERFLOW:
9927       opcode = MINUS_EXPR;
9928       ifn = IFN_SUB_OVERFLOW;
9929       break;
9930     case BUILT_IN_MUL_OVERFLOW_P:
9931       ovf_only = true;
9932       /* FALLTHRU */
9933     case BUILT_IN_MUL_OVERFLOW:
9934     case BUILT_IN_SMUL_OVERFLOW:
9935     case BUILT_IN_SMULL_OVERFLOW:
9936     case BUILT_IN_SMULLL_OVERFLOW:
9937     case BUILT_IN_UMUL_OVERFLOW:
9938     case BUILT_IN_UMULL_OVERFLOW:
9939     case BUILT_IN_UMULLL_OVERFLOW:
9940       opcode = MULT_EXPR;
9941       ifn = IFN_MUL_OVERFLOW;
9942       break;
9943     default:
9944       gcc_unreachable ();
9945     }
9946 
9947   /* For the "generic" overloads, the first two arguments can have different
9948      types and the last argument determines the target type to use to check
9949      for overflow.  The arguments of the other overloads all have the same
9950      type.  */
9951   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9952 
9953   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9954      arguments are constant, attempt to fold the built-in call into a constant
9955      expression indicating whether or not it detected an overflow.  */
9956   if (ovf_only
9957       && TREE_CODE (arg0) == INTEGER_CST
9958       && TREE_CODE (arg1) == INTEGER_CST)
9959     /* Perform the computation in the target type and check for overflow.  */
9960     return omit_one_operand_loc (loc, boolean_type_node,
9961 				 arith_overflowed_p (opcode, type, arg0, arg1)
9962 				 ? boolean_true_node : boolean_false_node,
9963 				 arg2);
9964 
9965   tree intres, ovfres;
9966   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9967     {
9968       intres = fold_binary_loc (loc, opcode, type,
9969 				fold_convert_loc (loc, type, arg0),
9970 				fold_convert_loc (loc, type, arg1));
9971       if (TREE_OVERFLOW (intres))
9972 	intres = drop_tree_overflow (intres);
9973       ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9974 		? boolean_true_node : boolean_false_node);
9975     }
9976   else
9977     {
9978       tree ctype = build_complex_type (type);
9979       tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9980 						arg0, arg1);
9981       tree tgt = save_expr (call);
9982       intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9983       ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9984       ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9985     }
9986 
9987   if (ovf_only)
9988     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9989 
9990   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9991   tree store
9992     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9993   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9994 }
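
/* Illustration (sketches of the two paths above; the GIMPLE-style
   .ADD_OVERFLOW spelling and the temporary _c are illustrative only):

     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)  -> folds to true

     __builtin_add_overflow (a, b, &r)               -> roughly
         _c = .ADD_OVERFLOW (a, b);    complex-int internal call
         r = __real__ _c;              wrapped arithmetic result
         (bool) __imag__ _c            overflow flag  */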
9995 
9996 /* Fold a call to __builtin_FILE to a constant string.  */
9997 
9998 static inline tree
9999 fold_builtin_FILE (location_t loc)
10000 {
10001   if (const char *fname = LOCATION_FILE (loc))
10002     {
10003       /* The documentation says this builtin is equivalent to the preprocessor
10004 	 __FILE__ macro so it appears appropriate to use the same file prefix
10005 	 mappings.  */
10006       fname = remap_macro_filename (fname);
10007     return build_string_literal (strlen (fname) + 1, fname);
10008     }
10009 
10010   return build_string_literal (1, "");
10011 }
10012 
10013 /* Fold a call to __builtin_FUNCTION to a constant string.  */
10014 
10015 static inline tree
10016 fold_builtin_FUNCTION ()
10017 {
10018   const char *name = "";
10019 
10020   if (current_function_decl)
10021     name = lang_hooks.decl_printable_name (current_function_decl, 0);
10022 
10023   return build_string_literal (strlen (name) + 1, name);
10024 }
10025 
10026 /* Fold a call to __builtin_LINE to an integer constant.  */
10027 
10028 static inline tree
10029 fold_builtin_LINE (location_t loc, tree type)
10030 {
10031   return build_int_cst (type, LOCATION_LINE (loc));
10032 }
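
/* Illustration (a usage sketch of the three foldings above): in a file
   "t.c", inside a function `f', on line 42,

     __builtin_FILE ()      -> "t.c"  (after file-prefix remapping)
     __builtin_FUNCTION ()  -> "f"
     __builtin_LINE ()      -> 42

   each folded to a constant at this point.  */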
10033 
10034 /* Fold a call to built-in function FNDECL with 0 arguments.
10035    This function returns NULL_TREE if no simplification was possible.  */
10036 
10037 static tree
10038 fold_builtin_0 (location_t loc, tree fndecl)
10039 {
10040   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10041   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10042   switch (fcode)
10043     {
10044     case BUILT_IN_FILE:
10045       return fold_builtin_FILE (loc);
10046 
10047     case BUILT_IN_FUNCTION:
10048       return fold_builtin_FUNCTION ();
10049 
10050     case BUILT_IN_LINE:
10051       return fold_builtin_LINE (loc, type);
10052 
10053     CASE_FLT_FN (BUILT_IN_INF):
10054     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10055     case BUILT_IN_INFD32:
10056     case BUILT_IN_INFD64:
10057     case BUILT_IN_INFD128:
10058       return fold_builtin_inf (loc, type, true);
10059 
10060     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10061     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10062       return fold_builtin_inf (loc, type, false);
10063 
10064     case BUILT_IN_CLASSIFY_TYPE:
10065       return fold_builtin_classify_type (NULL_TREE);
10066 
10067     default:
10068       break;
10069     }
10070   return NULL_TREE;
10071 }
10072 
10073 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10074    This function returns NULL_TREE if no simplification was possible.  */
10075 
10076 static tree
10077 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10078 {
10079   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10080   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10081 
10082   if (TREE_CODE (arg0) == ERROR_MARK)
10083     return NULL_TREE;
10084 
10085   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10086     return ret;
10087 
10088   switch (fcode)
10089     {
10090     case BUILT_IN_CONSTANT_P:
10091       {
10092 	tree val = fold_builtin_constant_p (arg0);
10093 
10094 	/* Gimplification will pull the CALL_EXPR for the builtin out of
10095 	   an if condition.  When not optimizing, we'll not CSE it back.
10096 	   To avoid regressions such as link errors, return false now.  */
10097 	if (!val && !optimize)
10098 	  val = integer_zero_node;
10099 
10100 	return val;
10101       }
10102 
10103     case BUILT_IN_CLASSIFY_TYPE:
10104       return fold_builtin_classify_type (arg0);
10105 
10106     case BUILT_IN_STRLEN:
10107       return fold_builtin_strlen (loc, type, arg0);
10108 
10109     CASE_FLT_FN (BUILT_IN_FABS):
10110     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10111     case BUILT_IN_FABSD32:
10112     case BUILT_IN_FABSD64:
10113     case BUILT_IN_FABSD128:
10114       return fold_builtin_fabs (loc, arg0, type);
10115 
10116     case BUILT_IN_ABS:
10117     case BUILT_IN_LABS:
10118     case BUILT_IN_LLABS:
10119     case BUILT_IN_IMAXABS:
10120       return fold_builtin_abs (loc, arg0, type);
10121 
10122     CASE_FLT_FN (BUILT_IN_CONJ):
10123       if (validate_arg (arg0, COMPLEX_TYPE)
10124 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10125 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10126     break;
10127 
10128     CASE_FLT_FN (BUILT_IN_CREAL):
10129       if (validate_arg (arg0, COMPLEX_TYPE)
10130 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10131 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10132     break;
10133 
10134     CASE_FLT_FN (BUILT_IN_CIMAG):
10135       if (validate_arg (arg0, COMPLEX_TYPE)
10136 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10137 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10138     break;
10139 
10140     CASE_FLT_FN (BUILT_IN_CARG):
10141       return fold_builtin_carg (loc, arg0, type);
10142 
10143     case BUILT_IN_ISASCII:
10144       return fold_builtin_isascii (loc, arg0);
10145 
10146     case BUILT_IN_TOASCII:
10147       return fold_builtin_toascii (loc, arg0);
10148 
10149     case BUILT_IN_ISDIGIT:
10150       return fold_builtin_isdigit (loc, arg0);
10151 
10152     CASE_FLT_FN (BUILT_IN_FINITE):
10153     case BUILT_IN_FINITED32:
10154     case BUILT_IN_FINITED64:
10155     case BUILT_IN_FINITED128:
10156     case BUILT_IN_ISFINITE:
10157       {
10158 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10159 	if (ret)
10160 	  return ret;
10161 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10162       }
10163 
10164     CASE_FLT_FN (BUILT_IN_ISINF):
10165     case BUILT_IN_ISINFD32:
10166     case BUILT_IN_ISINFD64:
10167     case BUILT_IN_ISINFD128:
10168       {
10169 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10170 	if (ret)
10171 	  return ret;
10172 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10173       }
10174 
10175     case BUILT_IN_ISNORMAL:
10176       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10177 
10178     case BUILT_IN_ISINF_SIGN:
10179       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10180 
10181     CASE_FLT_FN (BUILT_IN_ISNAN):
10182     case BUILT_IN_ISNAND32:
10183     case BUILT_IN_ISNAND64:
10184     case BUILT_IN_ISNAND128:
10185       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10186 
10187     case BUILT_IN_FREE:
10188       if (integer_zerop (arg0))
10189 	return build_empty_stmt (loc);
10190       break;
10191 
10192     default:
10193       break;
10194     }
10195 
10196   return NULL_TREE;
10197 
10198 }
10199 
10200 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10201    with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
10202    if no simplification was possible.  */
10203 
10204 static tree
10205 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10206 {
10207   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10208   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10209 
10210   if (TREE_CODE (arg0) == ERROR_MARK
10211       || TREE_CODE (arg1) == ERROR_MARK)
10212     return NULL_TREE;
10213 
10214   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10215     return ret;
10216 
10217   switch (fcode)
10218     {
10219     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10220     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10221       if (validate_arg (arg0, REAL_TYPE)
10222 	  && validate_arg (arg1, POINTER_TYPE))
10223 	return do_mpfr_lgamma_r (arg0, arg1, type);
10224     break;
10225 
10226     CASE_FLT_FN (BUILT_IN_FREXP):
10227       return fold_builtin_frexp (loc, arg0, arg1, type);
10228 
10229     CASE_FLT_FN (BUILT_IN_MODF):
10230       return fold_builtin_modf (loc, arg0, arg1, type);
10231 
10232     case BUILT_IN_STRSPN:
10233       return fold_builtin_strspn (loc, expr, arg0, arg1);
10234 
10235     case BUILT_IN_STRCSPN:
10236       return fold_builtin_strcspn (loc, expr, arg0, arg1);
10237 
10238     case BUILT_IN_STRPBRK:
10239       return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10240 
10241     case BUILT_IN_EXPECT:
10242       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10243 
10244     case BUILT_IN_ISGREATER:
10245       return fold_builtin_unordered_cmp (loc, fndecl,
10246 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
10247     case BUILT_IN_ISGREATEREQUAL:
10248       return fold_builtin_unordered_cmp (loc, fndecl,
10249 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
10250     case BUILT_IN_ISLESS:
10251       return fold_builtin_unordered_cmp (loc, fndecl,
10252 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
10253     case BUILT_IN_ISLESSEQUAL:
10254       return fold_builtin_unordered_cmp (loc, fndecl,
10255 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
10256     case BUILT_IN_ISLESSGREATER:
10257       return fold_builtin_unordered_cmp (loc, fndecl,
10258 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10259     case BUILT_IN_ISUNORDERED:
10260       return fold_builtin_unordered_cmp (loc, fndecl,
10261 					 arg0, arg1, UNORDERED_EXPR,
10262 					 NOP_EXPR);
10263 
10264       /* We do the folding for va_start in the expander.  */
10265     case BUILT_IN_VA_START:
10266       break;
10267 
10268     case BUILT_IN_OBJECT_SIZE:
10269       return fold_builtin_object_size (arg0, arg1);
10270 
10271     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10272       return fold_builtin_atomic_always_lock_free (arg0, arg1);
10273 
10274     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10275       return fold_builtin_atomic_is_lock_free (arg0, arg1);
10276 
10277     default:
10278       break;
10279     }
10280   return NULL_TREE;
10281 }
10282 
10283 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10284    and ARG2.
10285    This function returns NULL_TREE if no simplification was possible.  */
10286 
10287 static tree
10288 fold_builtin_3 (location_t loc, tree fndecl,
10289 		tree arg0, tree arg1, tree arg2)
10290 {
10291   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10292   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10293 
10294   if (TREE_CODE (arg0) == ERROR_MARK
10295       || TREE_CODE (arg1) == ERROR_MARK
10296       || TREE_CODE (arg2) == ERROR_MARK)
10297     return NULL_TREE;
10298 
10299   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10300 				  arg0, arg1, arg2))
10301     return ret;
10302 
10303   switch (fcode)
10304     {
10305 
10306     CASE_FLT_FN (BUILT_IN_SINCOS):
10307       return fold_builtin_sincos (loc, arg0, arg1, arg2);
10308 
10309     CASE_FLT_FN (BUILT_IN_REMQUO):
10310       if (validate_arg (arg0, REAL_TYPE)
10311 	  && validate_arg (arg1, REAL_TYPE)
10312 	  && validate_arg (arg2, POINTER_TYPE))
10313 	return do_mpfr_remquo (arg0, arg1, arg2);
10314     break;
10315 
10316     case BUILT_IN_MEMCMP:
10317       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10318 
10319     case BUILT_IN_EXPECT:
10320       return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10321 
10322     case BUILT_IN_EXPECT_WITH_PROBABILITY:
10323       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10324 
10325     case BUILT_IN_ADD_OVERFLOW:
10326     case BUILT_IN_SUB_OVERFLOW:
10327     case BUILT_IN_MUL_OVERFLOW:
10328     case BUILT_IN_ADD_OVERFLOW_P:
10329     case BUILT_IN_SUB_OVERFLOW_P:
10330     case BUILT_IN_MUL_OVERFLOW_P:
10331     case BUILT_IN_SADD_OVERFLOW:
10332     case BUILT_IN_SADDL_OVERFLOW:
10333     case BUILT_IN_SADDLL_OVERFLOW:
10334     case BUILT_IN_SSUB_OVERFLOW:
10335     case BUILT_IN_SSUBL_OVERFLOW:
10336     case BUILT_IN_SSUBLL_OVERFLOW:
10337     case BUILT_IN_SMUL_OVERFLOW:
10338     case BUILT_IN_SMULL_OVERFLOW:
10339     case BUILT_IN_SMULLL_OVERFLOW:
10340     case BUILT_IN_UADD_OVERFLOW:
10341     case BUILT_IN_UADDL_OVERFLOW:
10342     case BUILT_IN_UADDLL_OVERFLOW:
10343     case BUILT_IN_USUB_OVERFLOW:
10344     case BUILT_IN_USUBL_OVERFLOW:
10345     case BUILT_IN_USUBLL_OVERFLOW:
10346     case BUILT_IN_UMUL_OVERFLOW:
10347     case BUILT_IN_UMULL_OVERFLOW:
10348     case BUILT_IN_UMULLL_OVERFLOW:
10349       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10350 
10351     default:
10352       break;
10353     }
10354   return NULL_TREE;
10355 }
10356 
10357 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10358    ARGS is an array of NARGS arguments.  IGNORE is true if the result
10359    of the function call is ignored.  This function returns NULL_TREE
10360    if no simplification was possible.  */
10361 
10362 static tree
10363 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10364 		int nargs, bool)
10365 {
10366   tree ret = NULL_TREE;
10367 
10368   switch (nargs)
10369     {
10370     case 0:
10371       ret = fold_builtin_0 (loc, fndecl);
10372       break;
10373     case 1:
10374       ret = fold_builtin_1 (loc, fndecl, args[0]);
10375       break;
10376     case 2:
10377       ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10378       break;
10379     case 3:
10380       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10381       break;
10382     default:
10383       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10384       break;
10385     }
10386   if (ret)
10387     {
10388       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10389       SET_EXPR_LOCATION (ret, loc);
10390       return ret;
10391     }
10392   return NULL_TREE;
10393 }
10394 
10395 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10396    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
10397    of arguments in ARGS to be omitted.  OLDNARGS is the number of
10398    elements in ARGS.  */
10399 
10400 static tree
10401 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10402 			  int skip, tree fndecl, int n, va_list newargs)
10403 {
10404   int nargs = oldnargs - skip + n;
10405   tree *buffer;
10406 
10407   if (n > 0)
10408     {
10409       int i, j;
10410 
10411       buffer = XALLOCAVEC (tree, nargs);
10412       for (i = 0; i < n; i++)
10413 	buffer[i] = va_arg (newargs, tree);
10414       for (j = skip; j < oldnargs; j++, i++)
10415 	buffer[i] = args[j];
10416     }
10417   else
10418     buffer = args + skip;
10419 
10420   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10421 }
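
/* Illustration (a worked sketch of the splicing above): with
   OLDNARGS == 4, SKIP == 2 and N == 1 the rebuilt call has
   4 - 2 + 1 == 3 arguments:

     { newargs[0], args[2], args[3] }

   i.e. the N fresh arguments first, then the retained tail of ARGS.  */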
10422 
10423 /* Return true if FNDECL shouldn't be folded right now.
10424    If a built-in function has an inline attribute always_inline
10425    wrapper, defer folding it until after always_inline functions have
10426    been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10427    might not be performed.  */
10428 
10429 bool
10430 avoid_folding_inline_builtin (tree fndecl)
10431 {
10432   return (DECL_DECLARED_INLINE_P (fndecl)
10433 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10434 	  && cfun
10435 	  && !cfun->always_inline_functions_inlined
10436 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10437 }
10438 
10439 /* A wrapper function for builtin folding that prevents warnings for
10440    "statement without effect" and the like, caused by removing the
10441    call node earlier than the warning is generated.  */
10442 
10443 tree
10444 fold_call_expr (location_t loc, tree exp, bool ignore)
10445 {
10446   tree ret = NULL_TREE;
10447   tree fndecl = get_callee_fndecl (exp);
10448   if (fndecl && fndecl_built_in_p (fndecl)
10449       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10450 	 yet.  Defer folding until we see all the arguments
10451 	 (after inlining).  */
10452       && !CALL_EXPR_VA_ARG_PACK (exp))
10453     {
10454       int nargs = call_expr_nargs (exp);
10455 
10456       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10457 	 instead last argument is __builtin_va_arg_pack ().  Defer folding
10458 	 even in that case, until arguments are finalized.  */
10459       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10460 	{
10461 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10462 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10463 	    return NULL_TREE;
10464 	}
10465 
10466       if (avoid_folding_inline_builtin (fndecl))
10467 	return NULL_TREE;
10468 
10469       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10470         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10471 				     CALL_EXPR_ARGP (exp), ignore);
10472       else
10473 	{
10474 	  tree *args = CALL_EXPR_ARGP (exp);
10475 	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10476 	  if (ret)
10477 	    return ret;
10478 	}
10479     }
10480   return NULL_TREE;
10481 }
10482 
10483 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10484    N arguments are passed in the array ARGARRAY.  Return a folded
10485    expression or NULL_TREE if no simplification was possible.  */
10486 
10487 tree
10488 fold_builtin_call_array (location_t loc, tree,
10489 			 tree fn,
10490 			 int n,
10491 			 tree *argarray)
10492 {
10493   if (TREE_CODE (fn) != ADDR_EXPR)
10494     return NULL_TREE;
10495 
10496   tree fndecl = TREE_OPERAND (fn, 0);
10497   if (TREE_CODE (fndecl) == FUNCTION_DECL
10498       && fndecl_built_in_p (fndecl))
10499     {
10500       /* If last argument is __builtin_va_arg_pack (), arguments to this
10501 	 function are not finalized yet.  Defer folding until they are.  */
10502       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10503 	{
10504 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10505 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10506 	    return NULL_TREE;
10507 	}
10508       if (avoid_folding_inline_builtin (fndecl))
10509 	return NULL_TREE;
10510       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10511 	return targetm.fold_builtin (fndecl, n, argarray, false);
10512       else
10513 	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10514     }
10515 
10516   return NULL_TREE;
10517 }
10518 
10519 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10520    along with N new arguments specified as the "..." parameters.  SKIP
10521    is the number of arguments in EXP to be omitted.  This function is used
10522    to do varargs-to-varargs transformations.  */
10523 
10524 static tree
10525 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10526 {
10527   va_list ap;
10528   tree t;
10529 
10530   va_start (ap, n);
10531   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10532 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10533   va_end (ap);
10534 
10535   return t;
10536 }
10537 
10538 /* Validate a single argument ARG against a tree code CODE representing
10539    a type.  Return true when argument is valid.  */
10540 
10541 static bool
10542 validate_arg (const_tree arg, enum tree_code code)
10543 {
10544   if (!arg)
10545     return false;
10546   else if (code == POINTER_TYPE)
10547     return POINTER_TYPE_P (TREE_TYPE (arg));
10548   else if (code == INTEGER_TYPE)
10549     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10550   return code == TREE_CODE (TREE_TYPE (arg));
10551 }
10552 
10553 /* This function validates the types of a function call argument list
10554    against a specified list of tree_codes.  If the last specifier is a 0,
10555    that represents an ellipses, otherwise the last specifier must be a
10556    VOID_TYPE.
10557 
10558    This is the GIMPLE version of validate_arglist.  Eventually we want to
10559    completely convert builtins.c to work from GIMPLEs and the tree based
10560    validate_arglist will then be removed.  */
10561 
10562 bool
10563 validate_gimple_arglist (const gcall *call, ...)
10564 {
10565   enum tree_code code;
10566   bool res = false;
10567   va_list ap;
10568   const_tree arg;
10569   size_t i;
10570 
10571   va_start (ap, call);
10572   i = 0;
10573 
10574   do
10575     {
10576       code = (enum tree_code) va_arg (ap, int);
10577       switch (code)
10578 	{
10579 	case 0:
10580 	  /* This signifies an ellipsis; any further arguments are all ok.  */
10581 	  res = true;
10582 	  goto end;
10583 	case VOID_TYPE:
10584 	  /* This signifies an endlink, if no arguments remain, return
10585 	     true, otherwise return false.  */
10586 	  res = (i == gimple_call_num_args (call));
10587 	  goto end;
10588 	default:
10589 	  /* If no parameters remain or the parameter's code does not
10590 	     match the specified code, return false.  Otherwise continue
10591 	     checking any remaining arguments.  */
10592 	  arg = gimple_call_arg (call, i++);
10593 	  if (!validate_arg (arg, code))
10594 	    goto end;
10595 	  break;
10596 	}
10597     }
10598   while (1);
10599 
10600   /* We need gotos here since we can only have one VA_CLOSE in a
10601      function.  */
10602  end: ;
10603   va_end (ap);
10604 
10605   return res;
10606 }
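
/* Illustration (a usage sketch; CALL stands for some gcall statement):

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two arguments of real and pointer type, while

     validate_gimple_arglist (call, INTEGER_TYPE, 0)

   accepts an integral first argument followed by anything at all.  */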
10607 
10608 /* Default target-specific builtin expander that does nothing.  */
10609 
10610 rtx
10611 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10612 			rtx target ATTRIBUTE_UNUSED,
10613 			rtx subtarget ATTRIBUTE_UNUSED,
10614 			machine_mode mode ATTRIBUTE_UNUSED,
10615 			int ignore ATTRIBUTE_UNUSED)
10616 {
10617   return NULL_RTX;
10618 }
10619 
10620 /* Returns true if EXP represents data that would potentially reside
10621    in a readonly section.  */
10622 
10623 bool
10624 readonly_data_expr (tree exp)
10625 {
10626   STRIP_NOPS (exp);
10627 
10628   if (TREE_CODE (exp) != ADDR_EXPR)
10629     return false;
10630 
10631   exp = get_base_address (TREE_OPERAND (exp, 0));
10632   if (!exp)
10633     return false;
10634 
10635   /* Make sure we call decl_readonly_section only for trees it
10636      can handle (since it returns true for everything it doesn't
10637      understand).  */
10638   if (TREE_CODE (exp) == STRING_CST
10639       || TREE_CODE (exp) == CONSTRUCTOR
10640       || (VAR_P (exp) && TREE_STATIC (exp)))
10641     return decl_readonly_section (exp, 0);
10642   else
10643     return false;
10644 }
10645 
10646 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
10647    to the call, and TYPE is its return type.
10648 
10649    Return NULL_TREE if no simplification was possible, otherwise return the
10650    simplified form of the call as a tree.
10651 
10652    The simplified form may be a constant or other expression which
10653    computes the same value, but in a more efficient manner (including
10654    calls to other builtin functions).
10655 
10656    The call may contain arguments which need to be evaluated, but
10657    which are not useful to determine the result of the call.  In
10658    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10659    COMPOUND_EXPR will be an argument which must be evaluated.
10660    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10661    COMPOUND_EXPR in the chain will contain the tree for the simplified
10662    form of the builtin function call.  */
10663 
10664 static tree
10665 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10666 {
10667   if (!validate_arg (s1, POINTER_TYPE)
10668       || !validate_arg (s2, POINTER_TYPE))
10669     return NULL_TREE;
10670 
10671   if (!check_nul_terminated_array (expr, s1)
10672       || !check_nul_terminated_array (expr, s2))
10673     return NULL_TREE;
10674 
10675   tree fn;
10676   const char *p1, *p2;
10677 
10678   p2 = c_getstr (s2);
10679   if (p2 == NULL)
10680     return NULL_TREE;
10681 
10682   p1 = c_getstr (s1);
10683   if (p1 != NULL)
10684     {
10685       const char *r = strpbrk (p1, p2);
10686       tree tem;
10687 
10688       if (r == NULL)
10689 	return build_int_cst (TREE_TYPE (s1), 0);
10690 
10691       /* Return an offset into the constant string argument.  */
10692       tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10693       return fold_convert_loc (loc, type, tem);
10694     }
10695 
10696   if (p2[0] == '\0')
10697     /* strpbrk(x, "") == NULL.
10698        Evaluate and ignore s1 in case it had side-effects.  */
10699     return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10700 
10701   if (p2[1] != '\0')
10702     return NULL_TREE;  /* Really call strpbrk.  */
10703 
10704   fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10705   if (!fn)
10706     return NULL_TREE;
10707 
10708   /* New argument list transforming strpbrk(s1, s2) to
10709      strchr(s1, s2[0]).  */
10710   return build_call_expr_loc (loc, fn, 2, s1,
10711 			      build_int_cst (integer_type_node, p2[0]));
10712 }
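
/* Editorial examples of the folds above (not from the original source):

     strpbrk ("hello", "lo")  ->  "hello" + 2       constant offset
     strpbrk (s, "")          ->  (char *) 0        s still evaluated
     strpbrk (s, "x")         ->  strchr (s, 'x')

   Any other combination is left as a real strpbrk call.  */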
10713 
10714 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
10715    to the call.
10716 
10717    Return NULL_TREE if no simplification was possible, otherwise return the
10718    simplified form of the call as a tree.
10719 
10720    The simplified form may be a constant or other expression which
10721    computes the same value, but in a more efficient manner (including
10722    calls to other builtin functions).
10723 
10724    The call may contain arguments which need to be evaluated, but
10725    which are not useful to determine the result of the call.  In
10726    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10727    COMPOUND_EXPR will be an argument which must be evaluated.
10728    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10729    COMPOUND_EXPR in the chain will contain the tree for the simplified
10730    form of the builtin function call.  */
10731 
10732 static tree
10733 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10734 {
10735   if (!validate_arg (s1, POINTER_TYPE)
10736       || !validate_arg (s2, POINTER_TYPE))
10737     return NULL_TREE;
10738 
10739   if (!check_nul_terminated_array (expr, s1)
10740       || !check_nul_terminated_array (expr, s2))
10741     return NULL_TREE;
10742 
10743   const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10744 
10745   /* If either argument is "", return NULL_TREE.  */
10746   if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10747     /* Evaluate and ignore both arguments in case either one has
10748        side-effects.  */
10749     return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10750 				  s1, s2);
10751   return NULL_TREE;
10752 }
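
/* Editorial examples (not from the original source): only the trivial
   empty-string cases are folded here,

     strspn ("", accept)  ->  (size_t) 0
     strspn (s, "")       ->  (size_t) 0

   with both arguments retained for their side-effects.  */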
10753 
10754 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
10755    to the call.
10756 
10757    Return NULL_TREE if no simplification was possible, otherwise return the
10758    simplified form of the call as a tree.
10759 
10760    The simplified form may be a constant or other expression which
10761    computes the same value, but in a more efficient manner (including
10762    calls to other builtin functions).
10763 
10764    The call may contain arguments which need to be evaluated, but
10765    which are not useful to determine the result of the call.  In
10766    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10767    COMPOUND_EXPR will be an argument which must be evaluated.
10768    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10769    COMPOUND_EXPR in the chain will contain the tree for the simplified
10770    form of the builtin function call.  */
10771 
10772 static tree
10773 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10774 {
10775   if (!validate_arg (s1, POINTER_TYPE)
10776       || !validate_arg (s2, POINTER_TYPE))
10777     return NULL_TREE;
10778 
10779   if (!check_nul_terminated_array (expr, s1)
10780       || !check_nul_terminated_array (expr, s2))
10781     return NULL_TREE;
10782 
10783   /* If the first argument is "", return NULL_TREE.  */
10784   const char *p1 = c_getstr (s1);
10785   if (p1 && *p1 == '\0')
10786     {
10787       /* Evaluate and ignore argument s2 in case it has
10788 	 side-effects.  */
10789       return omit_one_operand_loc (loc, size_type_node,
10790 				   size_zero_node, s2);
10791     }
10792 
10793   /* If the second argument is "", return __builtin_strlen(s1).  */
10794   const char *p2 = c_getstr (s2);
10795   if (p2 && *p2 == '\0')
10796     {
10797       tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10798 
10799       /* If the replacement _DECL isn't initialized, don't do the
10800 	 transformation.  */
10801       if (!fn)
10802 	return NULL_TREE;
10803 
10804       return build_call_expr_loc (loc, fn, 1, s1);
10805     }
10806   return NULL_TREE;
10807 }
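
/* Editorial examples (not from the original source):

     strcspn ("", reject)  ->  (size_t) 0      reject still evaluated
     strcspn (s, "")       ->  strlen (s)

   Other argument combinations are not simplified here.  */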
10808 
10809 /* Fold the next_arg or va_start call EXP.  Return true if an error was
10810    produced, false otherwise.  This is done so that we don't output the
10811    error or warning more than once.  */
10812 
10813 bool
10814 fold_builtin_next_arg (tree exp, bool va_start_p)
10815 {
10816   tree fntype = TREE_TYPE (current_function_decl);
10817   int nargs = call_expr_nargs (exp);
10818   tree arg;
10819   /* There is a good chance the current input_location points inside the
10820      definition of the va_start macro (perhaps on the token for the
10821      builtin) in a system header, so warnings will not be emitted.
10822      Use the location in real source code.  */
10823   location_t current_location =
10824     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10825 					      NULL);
10826 
10827   if (!stdarg_p (fntype))
10828     {
10829       error ("%<va_start%> used in function with fixed arguments");
10830       return true;
10831     }
10832 
10833   if (va_start_p)
10834     {
10835       if (nargs != 2)
10836 	{
10837 	  error ("wrong number of arguments to function %<va_start%>");
10838 	  return true;
10839 	}
10840       arg = CALL_EXPR_ARG (exp, 1);
10841     }
10842   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10843      when we checked the arguments and if needed issued a warning.  */
10844   else
10845     {
10846       if (nargs == 0)
10847 	{
10848 	  /* Evidently an out of date version of <stdarg.h>; can't validate
10849 	     va_start's second argument, but can still work as intended.  */
10850 	  warning_at (current_location,
10851 		      OPT_Wvarargs,
10852 		   "%<__builtin_next_arg%> called without an argument");
10853 	  return true;
10854 	}
10855       else if (nargs > 1)
10856 	{
10857 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
10858 	  return true;
10859 	}
10860       arg = CALL_EXPR_ARG (exp, 0);
10861     }
10862 
10863   if (TREE_CODE (arg) == SSA_NAME
10864       && SSA_NAME_VAR (arg))
10865     arg = SSA_NAME_VAR (arg);
10866 
10867   /* We destructively modify the call to be __builtin_va_start (ap, 0)
10868      or __builtin_next_arg (0) the first time we see it, after checking
10869      the arguments and if needed issuing a warning.  */
10870   if (!integer_zerop (arg))
10871     {
10872       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10873 
10874       /* Strip off all nops for the sake of the comparison.  This
10875 	 is not quite the same as STRIP_NOPS.  It does more.
10876 	 We must also strip off INDIRECT_EXPR for C++ reference
10877 	 parameters.  */
10878       while (CONVERT_EXPR_P (arg)
10879 	     || TREE_CODE (arg) == INDIRECT_REF)
10880 	arg = TREE_OPERAND (arg, 0);
10881       if (arg != last_parm)
10882 	{
10883 	  /* FIXME: Sometimes with the tree optimizers we can end up
10884 	     with something other than the last argument even though
10885 	     the user used the last argument.  We just warn here, so
10886 	     wrong code may still be generated because of
10887 	     it.  */
10888 	  warning_at (current_location,
10889 		      OPT_Wvarargs,
10890 		      "second parameter of %<va_start%> not last named argument");
10891 	}
10892 
10893       /* Undefined by C99 7.15.1.4p4 (va_start):
10894          "If the parameter parmN is declared with the register storage
10895          class, with a function or array type, or with a type that is
10896          not compatible with the type that results after application of
10897          the default argument promotions, the behavior is undefined."
10898       */
10899       else if (DECL_REGISTER (arg))
10900 	{
10901 	  warning_at (current_location,
10902 		      OPT_Wvarargs,
10903 		      "undefined behavior when second parameter of "
10904 		      "%<va_start%> is declared with %<register%> storage");
10905 	}
10906 
10907       /* We want to verify the second parameter just once before the tree
10908 	 optimizers are run and then avoid keeping it in the tree,
10909 	 as otherwise we could warn even for correct code like:
10910 	 void foo (int i, ...)
10911 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
10912       if (va_start_p)
10913 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10914       else
10915 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10916     }
10917   return false;
10918 }
10919 
10920 
10921 /* Expand a call EXP to __builtin_object_size.  */
10922 
10923 static rtx
10924 expand_builtin_object_size (tree exp)
10925 {
10926   tree ost;
10927   int object_size_type;
10928   tree fndecl = get_callee_fndecl (exp);
10929 
10930   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10931     {
10932       error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10933 	     exp, fndecl);
10934       expand_builtin_trap ();
10935       return const0_rtx;
10936     }
10937 
10938   ost = CALL_EXPR_ARG (exp, 1);
10939   STRIP_NOPS (ost);
10940 
10941   if (TREE_CODE (ost) != INTEGER_CST
10942       || tree_int_cst_sgn (ost) < 0
10943       || compare_tree_int (ost, 3) > 0)
10944     {
10945       error ("%Klast argument of %qD is not integer constant between 0 and 3",
10946 	     exp, fndecl);
10947       expand_builtin_trap ();
10948       return const0_rtx;
10949     }
10950 
10951   object_size_type = tree_to_shwi (ost);
10952 
10953   return object_size_type < 2 ? constm1_rtx : const0_rtx;
10954 }
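
/* Editorial example (not from the original source): by this point
   earlier folding has failed, so the expansion falls back to the
   documented "unknown" answers:

     __builtin_object_size (p, 0)  ->  (size_t) -1   maximum estimate
     __builtin_object_size (p, 1)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0    minimum estimate
     __builtin_object_size (p, 3)  ->  (size_t) 0  */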
10955 
10956 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10957    FCODE is the BUILT_IN_* to use.
10958    Return NULL_RTX if we failed; the caller should emit a normal call,
10959    otherwise try to get the result in TARGET, if convenient (and in
10960    mode MODE if that's convenient).  */
10961 
10962 static rtx
10963 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10964 			   enum built_in_function fcode)
10965 {
10966   if (!validate_arglist (exp,
10967 			 POINTER_TYPE,
10968 			 fcode == BUILT_IN_MEMSET_CHK
10969 			 ? INTEGER_TYPE : POINTER_TYPE,
10970 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10971     return NULL_RTX;
10972 
10973   tree dest = CALL_EXPR_ARG (exp, 0);
10974   tree src = CALL_EXPR_ARG (exp, 1);
10975   tree len = CALL_EXPR_ARG (exp, 2);
10976   tree size = CALL_EXPR_ARG (exp, 3);
10977 
10978   bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10979 				/*str=*/NULL_TREE, size);
10980 
10981   if (!tree_fits_uhwi_p (size))
10982     return NULL_RTX;
10983 
10984   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10985     {
10986       /* Avoid transforming the checking call to an ordinary one when
10987 	 an overflow has been detected or when the call couldn't be
10988 	 validated because the size is not constant.  */
10989       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10990 	return NULL_RTX;
10991 
10992       tree fn = NULL_TREE;
10993       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10994 	 mem{cpy,pcpy,move,set} is available.  */
10995       switch (fcode)
10996 	{
10997 	case BUILT_IN_MEMCPY_CHK:
10998 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10999 	  break;
11000 	case BUILT_IN_MEMPCPY_CHK:
11001 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11002 	  break;
11003 	case BUILT_IN_MEMMOVE_CHK:
11004 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11005 	  break;
11006 	case BUILT_IN_MEMSET_CHK:
11007 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11008 	  break;
11009 	default:
11010 	  break;
11011 	}
11012 
11013       if (! fn)
11014 	return NULL_RTX;
11015 
11016       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11017       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11018       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11019       return expand_expr (fn, target, mode, EXPAND_NORMAL);
11020     }
11021   else if (fcode == BUILT_IN_MEMSET_CHK)
11022     return NULL_RTX;
11023   else
11024     {
11025       unsigned int dest_align = get_pointer_alignment (dest);
11026 
11027       /* If DEST is not a pointer type, call the normal function.  */
11028       if (dest_align == 0)
11029 	return NULL_RTX;
11030 
11031       /* If SRC and DEST are the same (and not volatile), do nothing.  */
11032       if (operand_equal_p (src, dest, 0))
11033 	{
11034 	  tree expr;
11035 
11036 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
11037 	    {
11038 	      /* Evaluate and ignore LEN in case it has side-effects.  */
11039 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11040 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
11041 	    }
11042 
11043 	  expr = fold_build_pointer_plus (dest, len);
11044 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
11045 	}
11046 
11047       /* __memmove_chk special case.  */
11048       if (fcode == BUILT_IN_MEMMOVE_CHK)
11049 	{
11050 	  unsigned int src_align = get_pointer_alignment (src);
11051 
11052 	  if (src_align == 0)
11053 	    return NULL_RTX;
11054 
11055 	  /* If src is categorized for a readonly section we can use
11056 	     normal __memcpy_chk.  */
11057 	  if (readonly_data_expr (src))
11058 	    {
11059 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11060 	      if (!fn)
11061 		return NULL_RTX;
11062 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11063 					  dest, src, len, size);
11064 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11065 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11066 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
11067 	    }
11068 	}
11069       return NULL_RTX;
11070     }
11071 }
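
/* Editorial example (not from the original source): when the object
   size folds to (size_t) -1, i.e. it is unknown, the checking call

     __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0))

   degrades to a plain memcpy (d, s, n) by the code above, since no
   overflow can be proven.  */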
11072 
11073 /* Emit warning if a buffer overflow is detected at compile time.  */
11074 
11075 static void
11076 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11077 {
11078   /* The source string.  */
11079   tree srcstr = NULL_TREE;
11080   /* The size of the destination object.  */
11081   tree objsize = NULL_TREE;
11082   /* The string that is being concatenated with (as in __strcat_chk)
11083      or null if it isn't.  */
11084   tree catstr = NULL_TREE;
11085   /* The maximum length of the source sequence in a bounded operation
11086      (such as __strncat_chk) or null if the operation isn't bounded
11087      (such as __strcat_chk).  */
11088   tree maxread = NULL_TREE;
11089   /* The exact size of the access (such as in __strncpy_chk).  */
11090   tree size = NULL_TREE;
11091 
11092   switch (fcode)
11093     {
11094     case BUILT_IN_STRCPY_CHK:
11095     case BUILT_IN_STPCPY_CHK:
11096       srcstr = CALL_EXPR_ARG (exp, 1);
11097       objsize = CALL_EXPR_ARG (exp, 2);
11098       break;
11099 
11100     case BUILT_IN_STRCAT_CHK:
11101       /* For __strcat_chk the warning will be emitted only if overflowing
11102 	 by at least strlen (dest) + 1 bytes.  */
11103       catstr = CALL_EXPR_ARG (exp, 0);
11104       srcstr = CALL_EXPR_ARG (exp, 1);
11105       objsize = CALL_EXPR_ARG (exp, 2);
11106       break;
11107 
11108     case BUILT_IN_STRNCAT_CHK:
11109       catstr = CALL_EXPR_ARG (exp, 0);
11110       srcstr = CALL_EXPR_ARG (exp, 1);
11111       maxread = CALL_EXPR_ARG (exp, 2);
11112       objsize = CALL_EXPR_ARG (exp, 3);
11113       break;
11114 
11115     case BUILT_IN_STRNCPY_CHK:
11116     case BUILT_IN_STPNCPY_CHK:
11117       srcstr = CALL_EXPR_ARG (exp, 1);
11118       size = CALL_EXPR_ARG (exp, 2);
11119       objsize = CALL_EXPR_ARG (exp, 3);
11120       break;
11121 
11122     case BUILT_IN_SNPRINTF_CHK:
11123     case BUILT_IN_VSNPRINTF_CHK:
11124       maxread = CALL_EXPR_ARG (exp, 1);
11125       objsize = CALL_EXPR_ARG (exp, 3);
11126       break;
11127     default:
11128       gcc_unreachable ();
11129     }
11130 
11131   if (catstr && maxread)
11132     {
11133       /* Check __strncat_chk.  There is no way to determine the length
11134 	 of the string to which the source string is being appended so
11135 	 just warn when the length of the source string is not known.  */
11136       check_strncat_sizes (exp, objsize);
11137       return;
11138     }
11139 
11140   /* The destination argument is the first one for all built-ins above.  */
11141   tree dst = CALL_EXPR_ARG (exp, 0);
11142 
11143   check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
11144 }
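
/* Editorial example (not from the original source): given

     char d[4];
     __builtin___strcpy_chk (d, "abcde", __builtin_object_size (d, 0));

   srcstr is "abcde" and objsize folds to 4, so check_access above
   diagnoses the 6-byte write (including the terminating nul) into the
   4-byte object.  */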
11145 
11146 /* Emit warning if a buffer overflow is detected at compile time
11147    in __sprintf_chk/__vsprintf_chk calls.  */
11148 
11149 static void
11150 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11151 {
11152   tree size, len, fmt;
11153   const char *fmt_str;
11154   int nargs = call_expr_nargs (exp);
11155 
11156   /* Verify the required arguments in the original call.  */
11157 
11158   if (nargs < 4)
11159     return;
11160   size = CALL_EXPR_ARG (exp, 2);
11161   fmt = CALL_EXPR_ARG (exp, 3);
11162 
11163   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11164     return;
11165 
11166   /* Check whether the format is a literal string constant.  */
11167   fmt_str = c_getstr (fmt);
11168   if (fmt_str == NULL)
11169     return;
11170 
11171   if (!init_target_chars ())
11172     return;
11173 
11174   /* If the format doesn't contain % args or %%, we know its size.  */
11175   if (strchr (fmt_str, target_percent) == 0)
11176     len = build_int_cstu (size_type_node, strlen (fmt_str));
11177   /* If the format is "%s" and first ... argument is a string literal,
11178      we know it too.  */
11179   else if (fcode == BUILT_IN_SPRINTF_CHK
11180 	   && strcmp (fmt_str, target_percent_s) == 0)
11181     {
11182       tree arg;
11183 
11184       if (nargs < 5)
11185 	return;
11186       arg = CALL_EXPR_ARG (exp, 4);
11187       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11188 	return;
11189 
11190       len = c_strlen (arg, 1);
11191       if (!len || ! tree_fits_uhwi_p (len))
11192 	return;
11193     }
11194   else
11195     return;
11196 
11197   /* Add one for the terminating nul.  */
11198   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11199 
11200   check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11201 		/*maxread=*/NULL_TREE, len, size);
11202 }
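
/* Editorial example (not from the original source): given

     char d[4];
     __builtin___sprintf_chk (d, 0, sizeof d, "%s", "abcde");

   the "%s" format with a literal argument yields len = 5 + 1 for the
   terminating nul, which check_access reports as exceeding the size
   of 4.  */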
11203 
11204 /* Emit warning if a free is called with address of a variable.  */
11205 
11206 static void
11207 maybe_emit_free_warning (tree exp)
11208 {
11209   if (call_expr_nargs (exp) != 1)
11210     return;
11211 
11212   tree arg = CALL_EXPR_ARG (exp, 0);
11213 
11214   STRIP_NOPS (arg);
11215   if (TREE_CODE (arg) != ADDR_EXPR)
11216     return;
11217 
11218   arg = get_base_address (TREE_OPERAND (arg, 0));
11219   if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11220     return;
11221 
11222   if (SSA_VAR_P (arg))
11223     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11224 		"%Kattempt to free a non-heap object %qD", exp, arg);
11225   else
11226     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11227 		"%Kattempt to free a non-heap object", exp);
11228 }
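
/* Editorial example of code diagnosed above (not from the original
   source), controlled by -Wfree-nonheap-object:

     int x;
     free (&x);    // warning: attempt to free a non-heap object 'x'
   */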
11229 
11230 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11231    if possible.  */
11232 
11233 static tree
11234 fold_builtin_object_size (tree ptr, tree ost)
11235 {
11236   unsigned HOST_WIDE_INT bytes;
11237   int object_size_type;
11238 
11239   if (!validate_arg (ptr, POINTER_TYPE)
11240       || !validate_arg (ost, INTEGER_TYPE))
11241     return NULL_TREE;
11242 
11243   STRIP_NOPS (ost);
11244 
11245   if (TREE_CODE (ost) != INTEGER_CST
11246       || tree_int_cst_sgn (ost) < 0
11247       || compare_tree_int (ost, 3) > 0)
11248     return NULL_TREE;
11249 
11250   object_size_type = tree_to_shwi (ost);
11251 
11252   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11253      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11254      and (size_t) 0 for types 2 and 3.  */
11255   if (TREE_SIDE_EFFECTS (ptr))
11256     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11257 
11258   if (TREE_CODE (ptr) == ADDR_EXPR)
11259     {
11260       compute_builtin_object_size (ptr, object_size_type, &bytes);
11261       if (wi::fits_to_tree_p (bytes, size_type_node))
11262 	return build_int_cstu (size_type_node, bytes);
11263     }
11264   else if (TREE_CODE (ptr) == SSA_NAME)
11265     {
11266       /* If the object size is not known yet, delay folding until
11267        later.  Maybe subsequent passes will help determine
11268        it.  */
11269       if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11270 	  && wi::fits_to_tree_p (bytes, size_type_node))
11271 	return build_int_cstu (size_type_node, bytes);
11272     }
11273 
11274   return NULL_TREE;
11275 }
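
/* Editorial example (not from the original source): with a visible
   object the call folds to a constant here,

     char buf[64];
     ... __builtin_object_size (buf, 0) ...   // folds to 64

   while an SSA_NAME pointer of unknown provenance is deliberately left
   unfolded for later passes.  */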
11276 
11277 /* Builtins with folding operations that operate on "..." arguments
11278    need special handling; we need to store the arguments in a convenient
11279    data structure before attempting any folding.  Fortunately there are
11280    only a few builtins that fall into this category.  FNDECL is the
11281    function, EXP is the CALL_EXPR for the call.  */
11282 
11283 static tree
11284 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11285 {
11286   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11287   tree ret = NULL_TREE;
11288 
11289   switch (fcode)
11290     {
11291     case BUILT_IN_FPCLASSIFY:
11292       ret = fold_builtin_fpclassify (loc, args, nargs);
11293       break;
11294 
11295     default:
11296       break;
11297     }
11298   if (ret)
11299     {
11300       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11301       SET_EXPR_LOCATION (ret, loc);
11302       TREE_NO_WARNING (ret) = 1;
11303       return ret;
11304     }
11305   return NULL_TREE;
11306 }
11307 
11308 /* Initialize format string characters in the target charset.  */
11309 
11310 bool
11311 init_target_chars (void)
11312 {
11313   static bool init;
11314   if (!init)
11315     {
11316       target_newline = lang_hooks.to_target_charset ('\n');
11317       target_percent = lang_hooks.to_target_charset ('%');
11318       target_c = lang_hooks.to_target_charset ('c');
11319       target_s = lang_hooks.to_target_charset ('s');
11320       if (target_newline == 0 || target_percent == 0 || target_c == 0
11321 	  || target_s == 0)
11322 	return false;
11323 
11324       target_percent_c[0] = target_percent;
11325       target_percent_c[1] = target_c;
11326       target_percent_c[2] = '\0';
11327 
11328       target_percent_s[0] = target_percent;
11329       target_percent_s[1] = target_s;
11330       target_percent_s[2] = '\0';
11331 
11332       target_percent_s_newline[0] = target_percent;
11333       target_percent_s_newline[1] = target_s;
11334       target_percent_s_newline[2] = target_newline;
11335       target_percent_s_newline[3] = '\0';
11336 
11337       init = true;
11338     }
11339   return true;
11340 }
11341 
11342 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
11343    and no overflow/underflow occurred.  INEXACT is true if M was not
11344    exactly calculated.  TYPE is the tree type for the result.  This
11345    function assumes that you cleared the MPFR flags and then
11346    calculated M to see if anything subsequently set a flag prior to
11347    entering this function.  Return NULL_TREE if any checks fail.  */
11348 
11349 static tree
11350 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11351 {
11352   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11353      overflow/underflow occurred.  If -frounding-math, proceed iff the
11354      computation of M was exact.  */
11355   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11356       && (!flag_rounding_math || !inexact))
11357     {
11358       REAL_VALUE_TYPE rr;
11359 
11360       real_from_mpfr (&rr, m, type, MPFR_RNDN);
11361       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11362 	 checking for overflow/underflow.  If the REAL_VALUE_TYPE is zero
11363 	 but the mpfr_t is not, then we underflowed in the
11364 	 conversion.  */
11365       if (real_isfinite (&rr)
11366 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11367         {
11368 	  REAL_VALUE_TYPE rmode;
11369 
11370 	  real_convert (&rmode, TYPE_MODE (type), &rr);
11371 	  /* Proceed iff the specified mode can hold the value.  */
11372 	  if (real_identical (&rmode, &rr))
11373 	    return build_real (type, rmode);
11374 	}
11375     }
11376   return NULL_TREE;
11377 }
11378 
11379 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
11380    number and no overflow/underflow occurred.  INEXACT is true if M
11381    was not exactly calculated.  TYPE is the tree type for the result.
11382    This function assumes that you cleared the MPFR flags and then
11383    calculated M to see if anything subsequently set a flag prior to
11384    entering this function.  Return NULL_TREE if any checks fail; if
11385    FORCE_CONVERT is true, bypass the checks.  */
11386 
11387 static tree
11388 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11389 {
11390   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11391      overflow/underflow occurred.  If -frounding-math, proceed iff the
11392      computation of M was exact.  */
11393   if (force_convert
11394       || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11395 	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
11396 	  && (!flag_rounding_math || !inexact)))
11397     {
11398       REAL_VALUE_TYPE re, im;
11399 
11400       real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11401       real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11402       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11403 	 checking for overflow/underflow.  If the REAL_VALUE_TYPE is zero
11404 	 but the mpfr_t is not, then we underflowed in the
11405 	 conversion.  */
11406       if (force_convert
11407 	  || (real_isfinite (&re) && real_isfinite (&im)
11408 	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11409 	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11410         {
11411 	  REAL_VALUE_TYPE re_mode, im_mode;
11412 
11413 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11414 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11415 	  /* Proceed iff the specified mode can hold the value.  */
11416 	  if (force_convert
11417 	      || (real_identical (&re_mode, &re)
11418 		  && real_identical (&im_mode, &im)))
11419 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11420 				  build_real (TREE_TYPE (type), im_mode));
11421 	}
11422     }
11423   return NULL_TREE;
11424 }
11425 
11426 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11427    the value pointed to by ARG_QUO and return the result.  The type is taken
11428    from the type of ARG0 and is used for setting the precision of the
11429    calculation and results.  */
11430 
11431 static tree
11432 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11433 {
11434   tree const type = TREE_TYPE (arg0);
11435   tree result = NULL_TREE;
11436 
11437   STRIP_NOPS (arg0);
11438   STRIP_NOPS (arg1);
11439 
11440   /* To proceed, MPFR must exactly represent the target floating point
11441      format, which only happens when the target base equals two.  */
11442   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11443       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11444       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11445     {
11446       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11447       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11448 
11449       if (real_isfinite (ra0) && real_isfinite (ra1))
11450         {
11451 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11452 	  const int prec = fmt->p;
11453 	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11454 	  tree result_rem;
11455 	  long integer_quo;
11456 	  mpfr_t m0, m1;
11457 
11458 	  mpfr_inits2 (prec, m0, m1, NULL);
11459 	  mpfr_from_real (m0, ra0, MPFR_RNDN);
11460 	  mpfr_from_real (m1, ra1, MPFR_RNDN);
11461 	  mpfr_clear_flags ();
11462 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11463 	  /* Remquo is independent of the rounding mode, so pass
11464 	     inexact=0 to do_mpfr_ckconv().  */
11465 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11466 	  mpfr_clears (m0, m1, NULL);
11467 	  if (result_rem)
11468 	    {
11469 	      /* MPFR calculates quo in the host's long so it may
11470 		 return more bits in quo than the target int can hold
11471 		 if sizeof(host long) > sizeof(target int).  This can
11472 		 happen even for native compilers in LP64 mode.  In
11473 		 these cases, modulo the quo value with the largest
11474 		 number that the target int can hold while leaving one
11475 		 bit for the sign.  */
11476 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11477 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11478 
11479 	      /* Dereference the quo pointer argument.  */
11480 	      arg_quo = build_fold_indirect_ref (arg_quo);
11481 	      /* Proceed iff a valid pointer type was passed in.  */
11482 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11483 	        {
11484 		  /* Set the value. */
11485 		  tree result_quo
11486 		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11487 				   build_int_cst (TREE_TYPE (arg_quo),
11488 						  integer_quo));
11489 		  TREE_SIDE_EFFECTS (result_quo) = 1;
11490 		  /* Combine the quo assignment with the rem.  */
11491 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11492 						    result_quo, result_rem));
11493 		}
11494 	    }
11495 	}
11496     }
11497   return result;
11498 }
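
/* Editorial example (not from the original source): for constant
   arguments, remquo (5.0, 3.0, &q) folds via MPFR to the compound
   expression (q = 2, -1.0): the quotient 5.0/3.0 rounds to nearest 2
   and the remainder is 5.0 - 2*3.0 = -1.0.  */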
11499 
11500 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11501    resulting value as a tree with type TYPE.  The mpfr precision is
11502    set to the precision of TYPE.  We assume that this mpfr function
11503    returns zero if the result could be calculated exactly within the
11504    requested precision.  In addition, the integer pointer represented
11505    by ARG_SG will be dereferenced and set to the appropriate signgam
11506    (-1,1) value.  */
11507 
11508 static tree
11509 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11510 {
11511   tree result = NULL_TREE;
11512 
11513   STRIP_NOPS (arg);
11514 
11515   /* To proceed, MPFR must exactly represent the target floating point
11516      format, which only happens when the target base equals two.  Also
11517      verify ARG is a constant and that ARG_SG is an int pointer.  */
11518   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11519       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11520       && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11521       && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11522     {
11523       const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11524 
11525       /* In addition to NaN and Inf, the argument cannot be zero or a
11526 	 negative integer.  */
11527       if (real_isfinite (ra)
11528 	  && ra->cl != rvc_zero
11529 	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11530         {
11531 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11532 	  const int prec = fmt->p;
11533 	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11534 	  int inexact, sg;
11535 	  mpfr_t m;
11536 	  tree result_lg;
11537 
11538 	  mpfr_init2 (m, prec);
11539 	  mpfr_from_real (m, ra, MPFR_RNDN);
11540 	  mpfr_clear_flags ();
11541 	  inexact = mpfr_lgamma (m, &sg, m, rnd);
11542 	  result_lg = do_mpfr_ckconv (m, type, inexact);
11543 	  mpfr_clear (m);
11544 	  if (result_lg)
11545 	    {
11546 	      tree result_sg;
11547 
11548 	      /* Dereference the arg_sg pointer argument.  */
11549 	      arg_sg = build_fold_indirect_ref (arg_sg);
11550 	      /* Assign the signgam value into *arg_sg. */
11551 	      result_sg = fold_build2 (MODIFY_EXPR,
11552 				       TREE_TYPE (arg_sg), arg_sg,
11553 				       build_int_cst (TREE_TYPE (arg_sg), sg));
11554 	      TREE_SIDE_EFFECTS (result_sg) = 1;
11555 	      /* Combine the signgam assignment with the lgamma result.  */
11556 	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11557 						result_sg, result_lg));
11558 	    }
11559 	}
11560     }
11561 
11562   return result;
11563 }
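
/* Editorial example (not from the original source): for a constant
   argument, lgamma_r (0.5, &sg) folds to log (gamma (0.5)),
   that is log (sqrt (pi)), approximately 0.5724, with *sg set to 1
   because gamma (0.5) is positive.  */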
11564 
11565 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11566    mpc function FUNC on it and return the resulting value as a tree
11567    with type TYPE.  The mpfr precision is set to the precision of
11568    TYPE.  We assume that function FUNC returns zero if the result
11569    could be calculated exactly within the requested precision.  If
11570    DO_NONFINITE is true, then fold expressions containing Inf or NaN
11571    in the arguments and/or results.  */
11572 
11573 tree
11574 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11575 	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11576 {
11577   tree result = NULL_TREE;
11578 
11579   STRIP_NOPS (arg0);
11580   STRIP_NOPS (arg1);
11581 
11582   /* To proceed, MPFR must exactly represent the target floating point
11583      format, which only happens when the target base equals two.  */
11584   if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11585       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11586       && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11587       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11588       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11589     {
11590       const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11591       const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11592       const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11593       const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11594 
11595       if (do_nonfinite
11596 	  || (real_isfinite (re0) && real_isfinite (im0)
11597 	      && real_isfinite (re1) && real_isfinite (im1)))
11598         {
11599 	  const struct real_format *const fmt =
11600 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11601 	  const int prec = fmt->p;
11602 	  const mpfr_rnd_t rnd = fmt->round_towards_zero
11603 				 ? MPFR_RNDZ : MPFR_RNDN;
11604 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11605 	  int inexact;
11606 	  mpc_t m0, m1;
11607 
11608 	  mpc_init2 (m0, prec);
11609 	  mpc_init2 (m1, prec);
11610 	  mpfr_from_real (mpc_realref (m0), re0, rnd);
11611 	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
11612 	  mpfr_from_real (mpc_realref (m1), re1, rnd);
11613 	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
11614 	  mpfr_clear_flags ();
11615 	  inexact = func (m0, m0, m1, crnd);
11616 	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11617 	  mpc_clear (m0);
11618 	  mpc_clear (m1);
11619 	}
11620     }
11621 
11622   return result;
11623 }
11624 
11625 /* A wrapper function for builtin folding that prevents warnings for
11626    "statement without effect" and the like, caused by removing the
11627    call node earlier than the warning is generated.  */
11628 
11629 tree
11630 fold_call_stmt (gcall *stmt, bool ignore)
11631 {
11632   tree ret = NULL_TREE;
11633   tree fndecl = gimple_call_fndecl (stmt);
11634   location_t loc = gimple_location (stmt);
11635   if (fndecl && fndecl_built_in_p (fndecl)
11636       && !gimple_call_va_arg_pack_p (stmt))
11637     {
11638       int nargs = gimple_call_num_args (stmt);
11639       tree *args = (nargs > 0
11640 		    ? gimple_call_arg_ptr (stmt, 0)
11641 		    : &error_mark_node);
11642 
11643       if (avoid_folding_inline_builtin (fndecl))
11644 	return NULL_TREE;
11645       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11646         {
11647 	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
11648         }
11649       else
11650 	{
11651 	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11652 	  if (ret)
11653 	    {
11654 	      /* Propagate location information from original call to
11655 		 expansion of builtin.  Otherwise things like
11656 		 maybe_emit_chk_warning, that operate on the expansion
11657 		 of a builtin, will use the wrong location information.  */
11658 	      if (gimple_has_location (stmt))
11659                 {
11660 		  tree realret = ret;
11661 		  if (TREE_CODE (ret) == NOP_EXPR)
11662 		    realret = TREE_OPERAND (ret, 0);
11663 		  if (CAN_HAVE_LOCATION_P (realret)
11664 		      && !EXPR_HAS_LOCATION (realret))
11665 		    SET_EXPR_LOCATION (realret, loc);
11666                   return realret;
11667                 }
11668 	      return ret;
11669 	    }
11670 	}
11671     }
11672   return NULL_TREE;
11673 }
11674 
11675 /* Look up the function in builtin_decl that corresponds to DECL
11676    and set ASMSPEC as its user assembler name.  DECL must be a
11677    function decl that declares a builtin.  */
11678 
11679 void
11680 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11681 {
11682   gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11683 	      && asmspec != 0);
11684 
11685   tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11686   set_user_assembler_name (builtin, asmspec);
11687 
11688   if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11689       && INT_TYPE_SIZE < BITS_PER_WORD)
11690     {
11691       scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11692       set_user_assembler_libfunc ("ffs", asmspec);
11693       set_optab_libfunc (ffs_optab, mode, "ffs");
11694     }
11695 }
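
/* Editorial example (hypothetical symbol name, not from the original
   source): a user redeclaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   redirects the builtin to the my_ffs symbol; on targets where int is
   narrower than a word the ffs optab libfunc is pointed at the same
   name, as done above.  */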
11696 
11697 /* Return true if DECL is a builtin that expands to a constant or similarly
11698    simple code.  */
11699 bool
11700 is_simple_builtin (tree decl)
11701 {
11702   if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11703     switch (DECL_FUNCTION_CODE (decl))
11704       {
11705 	/* Builtins that expand to constants.  */
11706       case BUILT_IN_CONSTANT_P:
11707       case BUILT_IN_EXPECT:
11708       case BUILT_IN_OBJECT_SIZE:
11709       case BUILT_IN_UNREACHABLE:
11710 	/* Simple register moves or loads from stack.  */
11711       case BUILT_IN_ASSUME_ALIGNED:
11712       case BUILT_IN_RETURN_ADDRESS:
11713       case BUILT_IN_EXTRACT_RETURN_ADDR:
11714       case BUILT_IN_FROB_RETURN_ADDR:
11715       case BUILT_IN_RETURN:
11716       case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11717       case BUILT_IN_FRAME_ADDRESS:
11718       case BUILT_IN_VA_END:
11719       case BUILT_IN_STACK_SAVE:
11720       case BUILT_IN_STACK_RESTORE:
11721 	/* Exception state returns or moves registers around.  */
11722       case BUILT_IN_EH_FILTER:
11723       case BUILT_IN_EH_POINTER:
11724       case BUILT_IN_EH_COPY_VALUES:
11725 	return true;
11726 
11727       default:
11728 	return false;
11729       }
11730 
11731   return false;
11732 }
11733 
11734 /* Return true if DECL is a builtin that is not expensive, i.e. it is
11735    most probably expanded inline into reasonably simple code.  This is a
11736    superset of is_simple_builtin.  */
11737 bool
11738 is_inexpensive_builtin (tree decl)
11739 {
11740   if (!decl)
11741     return false;
11742   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11743     return true;
11744   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11745     switch (DECL_FUNCTION_CODE (decl))
11746       {
11747       case BUILT_IN_ABS:
11748       CASE_BUILT_IN_ALLOCA:
11749       case BUILT_IN_BSWAP16:
11750       case BUILT_IN_BSWAP32:
11751       case BUILT_IN_BSWAP64:
11752       case BUILT_IN_CLZ:
11753       case BUILT_IN_CLZIMAX:
11754       case BUILT_IN_CLZL:
11755       case BUILT_IN_CLZLL:
11756       case BUILT_IN_CTZ:
11757       case BUILT_IN_CTZIMAX:
11758       case BUILT_IN_CTZL:
11759       case BUILT_IN_CTZLL:
11760       case BUILT_IN_FFS:
11761       case BUILT_IN_FFSIMAX:
11762       case BUILT_IN_FFSL:
11763       case BUILT_IN_FFSLL:
11764       case BUILT_IN_IMAXABS:
11765       case BUILT_IN_FINITE:
11766       case BUILT_IN_FINITEF:
11767       case BUILT_IN_FINITEL:
11768       case BUILT_IN_FINITED32:
11769       case BUILT_IN_FINITED64:
11770       case BUILT_IN_FINITED128:
11771       case BUILT_IN_FPCLASSIFY:
11772       case BUILT_IN_ISFINITE:
11773       case BUILT_IN_ISINF_SIGN:
11774       case BUILT_IN_ISINF:
11775       case BUILT_IN_ISINFF:
11776       case BUILT_IN_ISINFL:
11777       case BUILT_IN_ISINFD32:
11778       case BUILT_IN_ISINFD64:
11779       case BUILT_IN_ISINFD128:
11780       case BUILT_IN_ISNAN:
11781       case BUILT_IN_ISNANF:
11782       case BUILT_IN_ISNANL:
11783       case BUILT_IN_ISNAND32:
11784       case BUILT_IN_ISNAND64:
11785       case BUILT_IN_ISNAND128:
11786       case BUILT_IN_ISNORMAL:
11787       case BUILT_IN_ISGREATER:
11788       case BUILT_IN_ISGREATEREQUAL:
11789       case BUILT_IN_ISLESS:
11790       case BUILT_IN_ISLESSEQUAL:
11791       case BUILT_IN_ISLESSGREATER:
11792       case BUILT_IN_ISUNORDERED:
11793       case BUILT_IN_VA_ARG_PACK:
11794       case BUILT_IN_VA_ARG_PACK_LEN:
11795       case BUILT_IN_VA_COPY:
11796       case BUILT_IN_TRAP:
11797       case BUILT_IN_SAVEREGS:
11798       case BUILT_IN_POPCOUNTL:
11799       case BUILT_IN_POPCOUNTLL:
11800       case BUILT_IN_POPCOUNTIMAX:
11801       case BUILT_IN_POPCOUNT:
11802       case BUILT_IN_PARITYL:
11803       case BUILT_IN_PARITYLL:
11804       case BUILT_IN_PARITYIMAX:
11805       case BUILT_IN_PARITY:
11806       case BUILT_IN_LABS:
11807       case BUILT_IN_LLABS:
11808       case BUILT_IN_PREFETCH:
11809       case BUILT_IN_ACC_ON_DEVICE:
11810 	return true;
11811 
11812       default:
11813 	return is_simple_builtin (decl);
11814       }
11815 
11816   return false;
11817 }
11818 
11819 /* Return true if T is a constant and the value cast to a target char
11820    can be represented by a host char.
11821    Store the resulting char constant in *P if so.  */
11822 
11823 bool
11824 target_char_cst_p (tree t, char *p)
11825 {
11826   if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11827     return false;
11828 
11829   *p = (char)tree_to_uhwi (t);
11830   return true;
11831 }
11832 
11833 /* Return true if the builtin DECL is implemented in a standard library.
11834    Otherwise return false, which does not guarantee that it is not (thus
11835    the list of handled builtins below may be incomplete).  */
11836 
11837 bool
11838 builtin_with_linkage_p (tree decl)
11839 {
11840   if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11841     switch (DECL_FUNCTION_CODE (decl))
11842     {
11843       CASE_FLT_FN (BUILT_IN_ACOS):
11844       CASE_FLT_FN (BUILT_IN_ACOSH):
11845       CASE_FLT_FN (BUILT_IN_ASIN):
11846       CASE_FLT_FN (BUILT_IN_ASINH):
11847       CASE_FLT_FN (BUILT_IN_ATAN):
11848       CASE_FLT_FN (BUILT_IN_ATANH):
11849       CASE_FLT_FN (BUILT_IN_ATAN2):
11850       CASE_FLT_FN (BUILT_IN_CBRT):
11851       CASE_FLT_FN (BUILT_IN_CEIL):
11852       CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11853       CASE_FLT_FN (BUILT_IN_COPYSIGN):
11854       CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11855       CASE_FLT_FN (BUILT_IN_COS):
11856       CASE_FLT_FN (BUILT_IN_COSH):
11857       CASE_FLT_FN (BUILT_IN_ERF):
11858       CASE_FLT_FN (BUILT_IN_ERFC):
11859       CASE_FLT_FN (BUILT_IN_EXP):
11860       CASE_FLT_FN (BUILT_IN_EXP2):
11861       CASE_FLT_FN (BUILT_IN_EXPM1):
11862       CASE_FLT_FN (BUILT_IN_FABS):
11863       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11864       CASE_FLT_FN (BUILT_IN_FDIM):
11865       CASE_FLT_FN (BUILT_IN_FLOOR):
11866       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11867       CASE_FLT_FN (BUILT_IN_FMA):
11868       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11869       CASE_FLT_FN (BUILT_IN_FMAX):
11870       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11871       CASE_FLT_FN (BUILT_IN_FMIN):
11872       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11873       CASE_FLT_FN (BUILT_IN_FMOD):
11874       CASE_FLT_FN (BUILT_IN_FREXP):
11875       CASE_FLT_FN (BUILT_IN_HYPOT):
11876       CASE_FLT_FN (BUILT_IN_ILOGB):
11877       CASE_FLT_FN (BUILT_IN_LDEXP):
11878       CASE_FLT_FN (BUILT_IN_LGAMMA):
11879       CASE_FLT_FN (BUILT_IN_LLRINT):
11880       CASE_FLT_FN (BUILT_IN_LLROUND):
11881       CASE_FLT_FN (BUILT_IN_LOG):
11882       CASE_FLT_FN (BUILT_IN_LOG10):
11883       CASE_FLT_FN (BUILT_IN_LOG1P):
11884       CASE_FLT_FN (BUILT_IN_LOG2):
11885       CASE_FLT_FN (BUILT_IN_LOGB):
11886       CASE_FLT_FN (BUILT_IN_LRINT):
11887       CASE_FLT_FN (BUILT_IN_LROUND):
11888       CASE_FLT_FN (BUILT_IN_MODF):
11889       CASE_FLT_FN (BUILT_IN_NAN):
11890       CASE_FLT_FN (BUILT_IN_NEARBYINT):
11891       CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11892       CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11893       CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11894       CASE_FLT_FN (BUILT_IN_POW):
11895       CASE_FLT_FN (BUILT_IN_REMAINDER):
11896       CASE_FLT_FN (BUILT_IN_REMQUO):
11897       CASE_FLT_FN (BUILT_IN_RINT):
11898       CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11899       CASE_FLT_FN (BUILT_IN_ROUND):
11900       CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11901       CASE_FLT_FN (BUILT_IN_SCALBLN):
11902       CASE_FLT_FN (BUILT_IN_SCALBN):
11903       CASE_FLT_FN (BUILT_IN_SIN):
11904       CASE_FLT_FN (BUILT_IN_SINH):
11905       CASE_FLT_FN (BUILT_IN_SINCOS):
11906       CASE_FLT_FN (BUILT_IN_SQRT):
11907       CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11908       CASE_FLT_FN (BUILT_IN_TAN):
11909       CASE_FLT_FN (BUILT_IN_TANH):
11910       CASE_FLT_FN (BUILT_IN_TGAMMA):
11911       CASE_FLT_FN (BUILT_IN_TRUNC):
11912       CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11913 	return true;
11914       default:
11915 	break;
11916     }
11917   return false;
11918 }
11919