/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   names a Cilk Plus runtime entry point (__cilkrts_detach or
   __cilkrts_pop_frame) when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
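
/* For illustration, assuming a C-family front end, the prefix checks
   above behave as

     is_builtin_name ("__builtin_memcpy")          => true
     is_builtin_name ("__sync_fetch_and_add_4")    => true
     is_builtin_name ("__atomic_load_8")           => true
     is_builtin_name ("memcpy")                    => false

   Only the prefix is inspected; nothing here checks that the rest of
   the name denotes an actual built-in.  */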


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
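
/* A worked example of the invariant above, assuming BITS_PER_UNIT == 8:
   if get_object_alignment_1 computes align == 64 and bitpos == 16 (both
   in bits), the object's byte address is known to be 2 mod 8.  The
   largest power of two guaranteed to divide that address is
   least_bit_hwi (16) == 16 bits, so a 2-byte alignment is returned.  */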

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
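
/* For example, with 4-byte elements (a 32-bit wchar_t string), the byte
   image of L"ab" plus its terminator on a little-endian target is

     61 00 00 00  62 00 00 00  00 00 00 00

   and string_length (ptr, 4, 3) returns 2, stopping at the all-zero
   element at index 2.  The memcmp against "\0\0\0\0" compares only
   ELTSIZE bytes, so the same loop serves 2- and 4-byte elements.  */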

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
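
/* As a sketch of the cases above: c_strlen on "foo\0bar" with a
   constant zero offset yields 3, while the same string with a
   non-constant offset returns NULL_TREE, because the embedded null
   makes the distance to the terminator depend on where the search
   starts.  */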

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
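
/* A sketch of the byte placement, assuming 8-bit units: reading "abcd"
   in SImode on a little-endian, little-words target puts 'a' (0x61) in
   the least significant byte, producing 0x64636261, while a big-endian
   target yields 0x61626364.  Once the terminating null has been copied,
   CH stays zero and the remaining bytes of the mode are zero-filled.  */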

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
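
/* For example, __builtin_return_address (0) arrives here with COUNT == 0
   and FNDECL_CODE == BUILT_IN_RETURN_ADDRESS: no frames are walked and
   the return address is read relative to the current frame, whereas
   __builtin_frame_address (2) follows the dynamic chain twice and then
   applies FRAME_ADDR_RTX to the result.  */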

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
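
/* To summarize the jmp_buf layout established above, in Pmode words:

     word 0:  the value of targetm.builtin_setjmp_frame_value ()
     word 1:  the address of RECEIVER_LABEL
     word 2+: the nonlocal stack save area (sa_mode)

   expand_builtin_longjmp below reads the words back in the same
   order.  */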

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
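
/* As an example of the conventions above, an expander for a
   memcpy-style builtin would check its call expression with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
		       INTEGER_TYPE, VOID_TYPE)

   accepting exactly three arguments of those codes and, through the
   POINTER_TYPE case, rejecting literal null pointers passed for
   arguments declared nonnull.  */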

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
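
/* A typical source-level use that reaches this expander is

     __builtin_prefetch (&a[i + 8], 0, 3);

   which requests a read prefetch with maximal temporal locality; on a
   target without a prefetch pattern the call expands to nothing beyond
   any side effects of evaluating the address argument.  */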

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

1298      from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1299   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1300     exp = TREE_OPERAND (exp, 0);
1301 
1302   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1303   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1304 
1305   /* Get an expression we can use to find the attributes to assign to MEM.
1306      First remove any nops.  */
1307   while (CONVERT_EXPR_P (exp)
1308 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1309     exp = TREE_OPERAND (exp, 0);
1310 
1311   /* Build a MEM_REF representing the whole accessed area as a byte blob,
1312      (as builtin stringops may alias with anything).  */
1313   exp = fold_build2 (MEM_REF,
1314 		     build_array_type (char_type_node,
1315 				       build_range_type (sizetype,
1316 							 size_one_node, len)),
1317 		     exp, build_int_cst (ptr_type_node, 0));
1318 
1319   /* If the MEM_REF has no acceptable address, try to get the base object
1320      from the original address we got, and build an all-aliasing
1321      unknown-sized access to that one.  */
1322   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1323     set_mem_attributes (mem, exp, 0);
1324   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1325 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1326 						     0))))
1327     {
1328       exp = build_fold_addr_expr (exp);
1329       exp = fold_build2 (MEM_REF,
1330 			 build_array_type (char_type_node,
1331 					   build_range_type (sizetype,
1332 							     size_zero_node,
1333 							     NULL)),
1334 			 exp, build_int_cst (ptr_type_node, 0));
1335       set_mem_attributes (mem, exp, 0);
1336     }
1337   set_mem_alias_set (mem, 0);
1338   return mem;
1339 }
1340 
1341 /* Built-in functions to perform an untyped call and return.  */
1342 
1343 #define apply_args_mode \
1344   (this_target_builtins->x_apply_args_mode)
1345 #define apply_result_mode \
1346   (this_target_builtins->x_apply_result_mode)
1347 
1348 /* Return the size required for the block returned by __builtin_apply_args,
1349    and initialize apply_args_mode.  */
1350 
1351 static int
1352 apply_args_size (void)
1353 {
1354   static int size = -1;
1355   int align;
1356   unsigned int regno;
1357   machine_mode mode;
1358 
1359   /* The values computed by this function never change.  */
1360   if (size < 0)
1361     {
1362       /* The first value is the incoming arg-pointer.  */
1363       size = GET_MODE_SIZE (Pmode);
1364 
1365       /* The second value is the structure value address unless this is
1366 	 passed as an "invisible" first argument.  */
1367       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1368 	size += GET_MODE_SIZE (Pmode);
1369 
1370       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1371 	if (FUNCTION_ARG_REGNO_P (regno))
1372 	  {
1373 	    mode = targetm.calls.get_raw_arg_mode (regno);
1374 
1375 	    gcc_assert (mode != VOIDmode);
1376 
1377 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1378 	    if (size % align != 0)
1379 	      size = CEIL (size, align) * align;
1380 	    size += GET_MODE_SIZE (mode);
1381 	    apply_args_mode[regno] = mode;
1382 	  }
1383 	else
1384 	  {
1385 	    apply_args_mode[regno] = VOIDmode;
1386 	  }
1387     }
1388   return size;
1389 }
1390 
1391 /* Return the size required for the block returned by __builtin_apply,
1392    and initialize apply_result_mode.  */
1393 
1394 static int
1395 apply_result_size (void)
1396 {
1397   static int size = -1;
1398   int align, regno;
1399   machine_mode mode;
1400 
1401   /* The values computed by this function never change.  */
1402   if (size < 0)
1403     {
1404       size = 0;
1405 
1406       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 	if (targetm.calls.function_value_regno_p (regno))
1408 	  {
1409 	    mode = targetm.calls.get_raw_result_mode (regno);
1410 
1411 	    gcc_assert (mode != VOIDmode);
1412 
1413 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1414 	    if (size % align != 0)
1415 	      size = CEIL (size, align) * align;
1416 	    size += GET_MODE_SIZE (mode);
1417 	    apply_result_mode[regno] = mode;
1418 	  }
1419 	else
1420 	  apply_result_mode[regno] = VOIDmode;
1421 
1422       /* Allow targets that use untyped_call and untyped_return to override
1423 	 the size so that machine-specific information can be stored here.  */
1424 #ifdef APPLY_RESULT_SIZE
1425       size = APPLY_RESULT_SIZE;
1426 #endif
1427     }
1428   return size;
1429 }
1430 
1431 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1432    the result block is used to save the values; otherwise it is used to
1433    restore the values.  */
1434 
1435 static rtx
1436 result_vector (int savep, rtx result)
1437 {
1438   int regno, size, align, nelts;
1439   machine_mode mode;
1440   rtx reg, mem;
1441   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1442 
1443   size = nelts = 0;
1444   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1445     if ((mode = apply_result_mode[regno]) != VOIDmode)
1446       {
1447 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1448 	if (size % align != 0)
1449 	  size = CEIL (size, align) * align;
1450 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1451 	mem = adjust_address (result, mode, size);
1452 	savevec[nelts++] = (savep
1453 			    ? gen_rtx_SET (mem, reg)
1454 			    : gen_rtx_SET (reg, mem));
1455 	size += GET_MODE_SIZE (mode);
1456       }
1457   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1458 }
1459 
1460 /* Save the state required to perform an untyped call with the same
1461    arguments as were passed to the current function.  */
1462 
1463 static rtx
1464 expand_builtin_apply_args_1 (void)
1465 {
1466   rtx registers, tem;
1467   int size, align, regno;
1468   machine_mode mode;
1469   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1470 
1471   /* Create a block where the arg-pointer, structure value address,
1472      and argument registers can be saved.  */
1473   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1474 
1475   /* Walk past the arg-pointer and structure value address.  */
1476   size = GET_MODE_SIZE (Pmode);
1477   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1478     size += GET_MODE_SIZE (Pmode);
1479 
1480   /* Save each register used in calling a function to the block.  */
1481   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482     if ((mode = apply_args_mode[regno]) != VOIDmode)
1483       {
1484 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 	if (size % align != 0)
1486 	  size = CEIL (size, align) * align;
1487 
1488 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1489 
1490 	emit_move_insn (adjust_address (registers, mode, size), tem);
1491 	size += GET_MODE_SIZE (mode);
1492       }
1493 
1494   /* Save the arg pointer to the block.  */
1495   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1496   /* We need the pointer as the caller actually passed them to us, not
1497      as we might have pretended they were passed.  Make sure it's a valid
1498      operand, as emit_move_insn isn't expected to handle a PLUS.  */
1499   if (STACK_GROWS_DOWNWARD)
1500     tem
1501       = force_operand (plus_constant (Pmode, tem,
1502 				      crtl->args.pretend_args_size),
1503 		       NULL_RTX);
1504   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1505 
1506   size = GET_MODE_SIZE (Pmode);
1507 
1508   /* Save the structure value address unless this is passed as an
1509      "invisible" first argument.  */
1510   if (struct_incoming_value)
1511     {
1512       emit_move_insn (adjust_address (registers, Pmode, size),
1513 		      copy_to_reg (struct_incoming_value));
1514       size += GET_MODE_SIZE (Pmode);
1515     }
1516 
1517   /* Return the address of the block.  */
1518   return copy_addr_to_reg (XEXP (registers, 0));
1519 }
1520 
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
1527 
1528 static rtx
1529 expand_builtin_apply_args (void)
1530 {
1531   /* Don't do __builtin_apply_args more than once in a function.
1532      Save the result of the first call and reuse it.  */
1533   if (apply_args_value != 0)
1534     return apply_args_value;
1535   {
1536     /* When this function is called, it means that registers must be
1537        saved on entry to this function.  So we migrate the
1538        call to the first insn of this function.  */
1539     rtx temp;
1540 
1541     start_sequence ();
1542     temp = expand_builtin_apply_args_1 ();
1543     rtx_insn *seq = get_insns ();
1544     end_sequence ();
1545 
1546     apply_args_value = temp;
1547 
1548     /* Put the insns after the NOTE that starts the function.
1549        If this is inside a start_sequence, make the outer-level insn
1550        chain current, so the code is placed at the start of the
1551        function.  If internal_arg_pointer is a non-virtual pseudo,
1552        it needs to be placed after the function that initializes
1553        that pseudo.  */
1554     push_topmost_sequence ();
1555     if (REG_P (crtl->args.internal_arg_pointer)
1556 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1557       emit_insn_before (seq, parm_birth_insn);
1558     else
1559       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1560     pop_topmost_sequence ();
1561     return temp;
1562   }
1563 }
1564 
1565 /* Perform an untyped call and save the state required to perform an
1566    untyped return of whatever value was returned by the given function.  */
1567 
1568 static rtx
1569 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1570 {
1571   int size, align, regno;
1572   machine_mode mode;
1573   rtx incoming_args, result, reg, dest, src;
1574   rtx_call_insn *call_insn;
1575   rtx old_stack_level = 0;
1576   rtx call_fusage = 0;
1577   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1578 
1579   arguments = convert_memory_address (Pmode, arguments);
1580 
1581   /* Create a block where the return registers can be saved.  */
1582   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1583 
1584   /* Fetch the arg pointer from the ARGUMENTS block.  */
1585   incoming_args = gen_reg_rtx (Pmode);
1586   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1587   if (!STACK_GROWS_DOWNWARD)
1588     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1589 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1590 
1591   /* Push a new argument block and copy the arguments.  Do not allow
1592      the (potential) memcpy call below to interfere with our stack
1593      manipulations.  */
1594   do_pending_stack_adjust ();
1595   NO_DEFER_POP;
1596 
1597   /* Save the stack, using the nonlocal save area if available.  */
1598   if (targetm.have_save_stack_nonlocal ())
1599     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1600   else
1601     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1602 
1603   /* Allocate a block of memory onto the stack and copy the memory
1604      arguments to the outgoing arguments address.  We can pass TRUE
1605      as the 4th argument because we just saved the stack pointer
1606      and will restore it right after the call.  */
1607   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1608 
1609   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1610      may have already set current_function_calls_alloca to true.
1611      current_function_calls_alloca won't be set if argsize is zero,
1612      so we have to guarantee need_drap is true here.  */
1613   if (SUPPORTS_STACK_ALIGNMENT)
1614     crtl->need_drap = true;
1615 
1616   dest = virtual_outgoing_args_rtx;
1617   if (!STACK_GROWS_DOWNWARD)
1618     {
1619       if (CONST_INT_P (argsize))
1620 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1621       else
1622 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1623     }
1624   dest = gen_rtx_MEM (BLKmode, dest);
1625   set_mem_align (dest, PARM_BOUNDARY);
1626   src = gen_rtx_MEM (BLKmode, incoming_args);
1627   set_mem_align (src, PARM_BOUNDARY);
1628   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1629 
1630   /* Refer to the argument block.  */
1631   apply_args_size ();
1632   arguments = gen_rtx_MEM (BLKmode, arguments);
1633   set_mem_align (arguments, PARM_BOUNDARY);
1634 
1635   /* Walk past the arg-pointer and structure value address.  */
1636   size = GET_MODE_SIZE (Pmode);
1637   if (struct_value)
1638     size += GET_MODE_SIZE (Pmode);
1639 
1640   /* Restore each of the registers previously saved.  Make USE insns
1641      for each of these registers for use in making the call.  */
1642   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1643     if ((mode = apply_args_mode[regno]) != VOIDmode)
1644       {
1645 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1646 	if (size % align != 0)
1647 	  size = CEIL (size, align) * align;
1648 	reg = gen_rtx_REG (mode, regno);
1649 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1650 	use_reg (&call_fusage, reg);
1651 	size += GET_MODE_SIZE (mode);
1652       }
1653 
1654   /* Restore the structure value address unless this is passed as an
1655      "invisible" first argument.  */
1656   size = GET_MODE_SIZE (Pmode);
1657   if (struct_value)
1658     {
1659       rtx value = gen_reg_rtx (Pmode);
1660       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1661       emit_move_insn (struct_value, value);
1662       if (REG_P (struct_value))
1663 	use_reg (&call_fusage, struct_value);
1664       size += GET_MODE_SIZE (Pmode);
1665     }
1666 
1667   /* All arguments and registers used for the call are set up by now!  */
1668   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1669 
1670   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
1671      work is needed, and we don't want to load it into a register as an
1672      optimization, because prepare_call_address already did it if needed.  */
1673   if (GET_CODE (function) != SYMBOL_REF)
1674     function = memory_address (FUNCTION_MODE, function);
1675 
1676   /* Generate the actual call instruction and save the return value.  */
1677   if (targetm.have_untyped_call ())
1678     {
1679       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1680       emit_call_insn (targetm.gen_untyped_call (mem, result,
1681 						result_vector (1, result)));
1682     }
1683   else if (targetm.have_call_value ())
1684     {
1685       rtx valreg = 0;
1686 
1687       /* Locate the unique return register.  It is not possible to
1688 	 express a call that sets more than one return register using
1689 	 call_value; use untyped_call for that.  In fact, untyped_call
1690 	 only needs to save the return registers in the given block.  */
1691       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1692 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1693 	  {
1694 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1695 
1696 	    valreg = gen_rtx_REG (mode, regno);
1697 	  }
1698 
1699       emit_insn (targetm.gen_call_value (valreg,
1700 					 gen_rtx_MEM (FUNCTION_MODE, function),
1701 					 const0_rtx, NULL_RTX, const0_rtx));
1702 
1703       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1704     }
1705   else
1706     gcc_unreachable ();
1707 
1708   /* Find the CALL insn we just emitted, and attach the register usage
1709      information.  */
1710   call_insn = last_call_insn ();
1711   add_function_usage_to (call_insn, call_fusage);
1712 
1713   /* Restore the stack.  */
1714   if (targetm.have_save_stack_nonlocal ())
1715     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1716   else
1717     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1718   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1719 
1720   OK_DEFER_POP;
1721 
1722   /* Return the address of the result block.  */
1723   result = copy_addr_to_reg (XEXP (result, 0));
1724   return convert_memory_address (ptr_mode, result);
1725 }
1726 
1727 /* Perform an untyped return.  */
1728 
1729 static void
1730 expand_builtin_return (rtx result)
1731 {
1732   int size, align, regno;
1733   machine_mode mode;
1734   rtx reg;
1735   rtx_insn *call_fusage = 0;
1736 
1737   result = convert_memory_address (Pmode, result);
1738 
1739   apply_result_size ();
1740   result = gen_rtx_MEM (BLKmode, result);
1741 
1742   if (targetm.have_untyped_return ())
1743     {
1744       rtx vector = result_vector (0, result);
1745       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1746       emit_barrier ();
1747       return;
1748     }
1749 
1750   /* Restore the return value and note that each value is used.  */
1751   size = 0;
1752   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1753     if ((mode = apply_result_mode[regno]) != VOIDmode)
1754       {
1755 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1756 	if (size % align != 0)
1757 	  size = CEIL (size, align) * align;
1758 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1759 	emit_move_insn (reg, adjust_address (result, mode, size));
1760 
1761 	push_to_sequence (call_fusage);
1762 	emit_use (reg);
1763 	call_fusage = get_insns ();
1764 	end_sequence ();
1765 	size += GET_MODE_SIZE (mode);
1766       }
1767 
1768   /* Put the USE insns before the return.  */
1769   emit_insn (call_fusage);
1770 
1771   /* Return whatever value was restored by jumping directly to the end
1772      of the function.  */
1773   expand_naked_return ();
1774 }
1775 
1776 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1777 
1778 static enum type_class
1779 type_to_class (tree type)
1780 {
1781   switch (TREE_CODE (type))
1782     {
1783     case VOID_TYPE:	   return void_type_class;
1784     case INTEGER_TYPE:	   return integer_type_class;
1785     case ENUMERAL_TYPE:	   return enumeral_type_class;
1786     case BOOLEAN_TYPE:	   return boolean_type_class;
1787     case POINTER_TYPE:	   return pointer_type_class;
1788     case REFERENCE_TYPE:   return reference_type_class;
1789     case OFFSET_TYPE:	   return offset_type_class;
1790     case REAL_TYPE:	   return real_type_class;
1791     case COMPLEX_TYPE:	   return complex_type_class;
1792     case FUNCTION_TYPE:	   return function_type_class;
1793     case METHOD_TYPE:	   return method_type_class;
1794     case RECORD_TYPE:	   return record_type_class;
1795     case UNION_TYPE:
1796     case QUAL_UNION_TYPE:  return union_type_class;
1797     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1798 				   ? string_type_class : array_type_class);
1799     case LANG_TYPE:	   return lang_type_class;
1800     default:		   return no_type_class;
1801     }
1802 }
1803 
1804 /* Expand a call EXP to __builtin_classify_type.  */
1805 
1806 static rtx
1807 expand_builtin_classify_type (tree exp)
1808 {
1809   if (call_expr_nargs (exp))
1810     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1811   return GEN_INT (no_type_class);
1812 }
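
/* A small sketch of what the expansion above folds to; the resulting
   constants come from the type_class enumeration in typeclass.h:

     int ic = __builtin_classify_type (0);           -> integer_type_class
     int pc = __builtin_classify_type ((void *) 0);  -> pointer_type_class

   Both calls become compile-time constants via the GEN_INT above.  */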
1813 
1814 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1815    determines which among a set of three builtin math functions is
1816    appropriate for a given type mode.  The `F' and `L' cases are
1817    automatically generated from the `double' case.  */
1818 #define CASE_MATHFN(MATHFN) \
1819   CASE_CFN_##MATHFN: \
1820   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1821   fcodel = BUILT_IN_##MATHFN##L ; break;
1822 /* Similar to above, but appends _R after any F/L suffix.  */
1823 #define CASE_MATHFN_REENT(MATHFN) \
1824   case CFN_BUILT_IN_##MATHFN##_R: \
1825   case CFN_BUILT_IN_##MATHFN##F_R: \
1826   case CFN_BUILT_IN_##MATHFN##L_R: \
1827   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1828   fcodel = BUILT_IN_##MATHFN##L_R ; break;
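
/* For illustration, CASE_MATHFN (SQRT) expands roughly to (the case
   list comes from the generated case-cfn-macros.h, which also covers
   the internal-function variant CFN_SQRT when one exists):

     case CFN_BUILT_IN_SQRT:
     case CFN_BUILT_IN_SQRTF:
     case CFN_BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so the switch below maps any variant of a function to the same
   (fcode, fcodef, fcodel) triple.  */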
1829 
1830 /* Return a function equivalent to FN but operating on floating-point
1831    values of type TYPE, or END_BUILTINS if no such function exists.
1832    This is purely an operation on function codes; it does not guarantee
1833    that the target actually has an implementation of the function.  */
1834 
1835 static built_in_function
1836 mathfn_built_in_2 (tree type, combined_fn fn)
1837 {
1838   built_in_function fcode, fcodef, fcodel;
1839 
1840   switch (fn)
1841     {
1842     CASE_MATHFN (ACOS)
1843     CASE_MATHFN (ACOSH)
1844     CASE_MATHFN (ASIN)
1845     CASE_MATHFN (ASINH)
1846     CASE_MATHFN (ATAN)
1847     CASE_MATHFN (ATAN2)
1848     CASE_MATHFN (ATANH)
1849     CASE_MATHFN (CBRT)
1850     CASE_MATHFN (CEIL)
1851     CASE_MATHFN (CEXPI)
1852     CASE_MATHFN (COPYSIGN)
1853     CASE_MATHFN (COS)
1854     CASE_MATHFN (COSH)
1855     CASE_MATHFN (DREM)
1856     CASE_MATHFN (ERF)
1857     CASE_MATHFN (ERFC)
1858     CASE_MATHFN (EXP)
1859     CASE_MATHFN (EXP10)
1860     CASE_MATHFN (EXP2)
1861     CASE_MATHFN (EXPM1)
1862     CASE_MATHFN (FABS)
1863     CASE_MATHFN (FDIM)
1864     CASE_MATHFN (FLOOR)
1865     CASE_MATHFN (FMA)
1866     CASE_MATHFN (FMAX)
1867     CASE_MATHFN (FMIN)
1868     CASE_MATHFN (FMOD)
1869     CASE_MATHFN (FREXP)
1870     CASE_MATHFN (GAMMA)
1871     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1872     CASE_MATHFN (HUGE_VAL)
1873     CASE_MATHFN (HYPOT)
1874     CASE_MATHFN (ILOGB)
1875     CASE_MATHFN (ICEIL)
1876     CASE_MATHFN (IFLOOR)
1877     CASE_MATHFN (INF)
1878     CASE_MATHFN (IRINT)
1879     CASE_MATHFN (IROUND)
1880     CASE_MATHFN (ISINF)
1881     CASE_MATHFN (J0)
1882     CASE_MATHFN (J1)
1883     CASE_MATHFN (JN)
1884     CASE_MATHFN (LCEIL)
1885     CASE_MATHFN (LDEXP)
1886     CASE_MATHFN (LFLOOR)
1887     CASE_MATHFN (LGAMMA)
1888     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1889     CASE_MATHFN (LLCEIL)
1890     CASE_MATHFN (LLFLOOR)
1891     CASE_MATHFN (LLRINT)
1892     CASE_MATHFN (LLROUND)
1893     CASE_MATHFN (LOG)
1894     CASE_MATHFN (LOG10)
1895     CASE_MATHFN (LOG1P)
1896     CASE_MATHFN (LOG2)
1897     CASE_MATHFN (LOGB)
1898     CASE_MATHFN (LRINT)
1899     CASE_MATHFN (LROUND)
1900     CASE_MATHFN (MODF)
1901     CASE_MATHFN (NAN)
1902     CASE_MATHFN (NANS)
1903     CASE_MATHFN (NEARBYINT)
1904     CASE_MATHFN (NEXTAFTER)
1905     CASE_MATHFN (NEXTTOWARD)
1906     CASE_MATHFN (POW)
1907     CASE_MATHFN (POWI)
1908     CASE_MATHFN (POW10)
1909     CASE_MATHFN (REMAINDER)
1910     CASE_MATHFN (REMQUO)
1911     CASE_MATHFN (RINT)
1912     CASE_MATHFN (ROUND)
1913     CASE_MATHFN (SCALB)
1914     CASE_MATHFN (SCALBLN)
1915     CASE_MATHFN (SCALBN)
1916     CASE_MATHFN (SIGNBIT)
1917     CASE_MATHFN (SIGNIFICAND)
1918     CASE_MATHFN (SIN)
1919     CASE_MATHFN (SINCOS)
1920     CASE_MATHFN (SINH)
1921     CASE_MATHFN (SQRT)
1922     CASE_MATHFN (TAN)
1923     CASE_MATHFN (TANH)
1924     CASE_MATHFN (TGAMMA)
1925     CASE_MATHFN (TRUNC)
1926     CASE_MATHFN (Y0)
1927     CASE_MATHFN (Y1)
1928     CASE_MATHFN (YN)
1929 
1930     default:
1931       return END_BUILTINS;
1932     }
1933 
1934   if (TYPE_MAIN_VARIANT (type) == double_type_node)
1935     return fcode;
1936   else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1937     return fcodef;
1938   else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1939     return fcodel;
1940   else
1941     return END_BUILTINS;
1942 }
1943 
1944 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1945    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
1946    otherwise use the explicit declaration.  If we can't do the conversion,
1947    return null.  */
1948 
1949 static tree
1950 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1951 {
1952   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1953   if (fcode2 == END_BUILTINS)
1954     return NULL_TREE;
1955 
1956   if (implicit_p && !builtin_decl_implicit_p (fcode2))
1957     return NULL_TREE;
1958 
1959   return builtin_decl_explicit (fcode2);
1960 }
1961 
1962 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
1963 
1964 tree
1965 mathfn_built_in (tree type, combined_fn fn)
1966 {
1967   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1968 }
1969 
1970 /* Like mathfn_built_in_1, but take a built_in_function and
1971    always use the implicit builtin declarations.  */
1972 
1973 tree
1974 mathfn_built_in (tree type, enum built_in_function fn)
1975 {
1976   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1977 }
1978 
1979 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1980    return its code, otherwise return IFN_LAST.  Note that this function
1981    only tests whether the function is defined in internal-fn.def, not whether
1982    it is actually available on the target.  */
1983 
1984 internal_fn
1985 associated_internal_fn (tree fndecl)
1986 {
1987   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1988   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1989   switch (DECL_FUNCTION_CODE (fndecl))
1990     {
1991 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1992     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1993 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1994     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1995 #include "internal-fn.def"
1996 
1997     CASE_FLT_FN (BUILT_IN_POW10):
1998       return IFN_EXP10;
1999 
2000     CASE_FLT_FN (BUILT_IN_DREM):
2001       return IFN_REMAINDER;
2002 
2003     CASE_FLT_FN (BUILT_IN_SCALBN):
2004     CASE_FLT_FN (BUILT_IN_SCALBLN):
2005       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2006 	return IFN_LDEXP;
2007       return IFN_LAST;
2008 
2009     default:
2010       return IFN_LAST;
2011     }
2012 }
2013 
2014 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2015    on the current target by a call to an internal function, return the
2016    code of that internal function, otherwise return IFN_LAST.  The caller
2017    is responsible for ensuring that any side-effects of the built-in
2018    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2019    must decide that the errno result isn't needed or make it available
2020    in some other way.  */
2021 
2022 internal_fn
2023 replacement_internal_fn (gcall *call)
2024 {
2025   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2026     {
2027       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2028       if (ifn != IFN_LAST)
2029 	{
2030 	  tree_pair types = direct_internal_fn_types (ifn, call);
2031 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2032 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2033 	    return ifn;
2034 	}
2035     }
2036   return IFN_LAST;
2037 }
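
/* Illustrative sketch of how a pass might use the above: a call to
   sqrt maps to IFN_SQRT, and when the target implements the sqrt optab
   the statement can be rebuilt as an internal call:

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ... replace CALL using gimple_build_call_internal (ifn, ...) ...

   Real callers must also handle the lhs and any errno side-effects,
   as noted above.  */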
2038 
2039 /* Expand a call to the builtin ternary math functions (fma).
2040    Return NULL_RTX if a normal call should be emitted rather than expanding the
2041    function in-line.  EXP is the expression that is a call to the builtin
2042    function; if convenient, the result should be placed in TARGET.
2043    SUBTARGET may be used as the target for computing one of EXP's
2044    operands.  */
2045 
2046 static rtx
2047 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2048 {
2049   optab builtin_optab;
2050   rtx op0, op1, op2, result;
2051   rtx_insn *insns;
2052   tree fndecl = get_callee_fndecl (exp);
2053   tree arg0, arg1, arg2;
2054   machine_mode mode;
2055 
2056   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2057     return NULL_RTX;
2058 
2059   arg0 = CALL_EXPR_ARG (exp, 0);
2060   arg1 = CALL_EXPR_ARG (exp, 1);
2061   arg2 = CALL_EXPR_ARG (exp, 2);
2062 
2063   switch (DECL_FUNCTION_CODE (fndecl))
2064     {
2065     CASE_FLT_FN (BUILT_IN_FMA):
2066       builtin_optab = fma_optab; break;
2067     default:
2068       gcc_unreachable ();
2069     }
2070 
2071   /* Make a suitable register to place result in.  */
2072   mode = TYPE_MODE (TREE_TYPE (exp));
2073 
2074   /* Before working hard, check whether the instruction is available.  */
2075   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2076     return NULL_RTX;
2077 
2078   result = gen_reg_rtx (mode);
2079 
2080   /* Always stabilize the argument list.  */
2081   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2082   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2083   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2084 
2085   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2086   op1 = expand_normal (arg1);
2087   op2 = expand_normal (arg2);
2088 
2089   start_sequence ();
2090 
2091   /* Compute into RESULT.
2092      Set RESULT to wherever the result comes back.  */
2093   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2094 			      result, 0);
2095 
2096   /* If we were unable to expand via the builtin, stop the sequence
2097      (without outputting the insns) and call to the library function
2098      with the stabilized argument list.  */
2099   if (result == 0)
2100     {
2101       end_sequence ();
2102       return expand_call (exp, target, target == const0_rtx);
2103     }
2104 
2105   /* Output the entire sequence.  */
2106   insns = get_insns ();
2107   end_sequence ();
2108   emit_insn (insns);
2109 
2110   return result;
2111 }
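
/* For reference, the ternary operation expanded above is the fused
   multiply-add, fma (a, b, c) == a * b + c with a single rounding, so

     double d = __builtin_fma (x, y, z);

   becomes a single hardware instruction when the fma optab is
   implemented, and a normal libcall otherwise.  */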
2112 
2113 /* Expand a call to the builtin sin and cos math functions.
2114    Return NULL_RTX if a normal call should be emitted rather than expanding the
2115    function in-line.  EXP is the expression that is a call to the builtin
2116    function; if convenient, the result should be placed in TARGET.
2117    SUBTARGET may be used as the target for computing one of EXP's
2118    operands.  */
2119 
2120 static rtx
2121 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2122 {
2123   optab builtin_optab;
2124   rtx op0;
2125   rtx_insn *insns;
2126   tree fndecl = get_callee_fndecl (exp);
2127   machine_mode mode;
2128   tree arg;
2129 
2130   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2131     return NULL_RTX;
2132 
2133   arg = CALL_EXPR_ARG (exp, 0);
2134 
2135   switch (DECL_FUNCTION_CODE (fndecl))
2136     {
2137     CASE_FLT_FN (BUILT_IN_SIN):
2138     CASE_FLT_FN (BUILT_IN_COS):
2139       builtin_optab = sincos_optab; break;
2140     default:
2141       gcc_unreachable ();
2142     }
2143 
2144   /* Make a suitable register to place result in.  */
2145   mode = TYPE_MODE (TREE_TYPE (exp));
2146 
2147   /* Check if the sincos insn is available; otherwise fall back
2148      to the sin or cos insn.  */
2149   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2150     switch (DECL_FUNCTION_CODE (fndecl))
2151       {
2152       CASE_FLT_FN (BUILT_IN_SIN):
2153 	builtin_optab = sin_optab; break;
2154       CASE_FLT_FN (BUILT_IN_COS):
2155 	builtin_optab = cos_optab; break;
2156       default:
2157 	gcc_unreachable ();
2158       }
2159 
2160   /* Before working hard, check whether the instruction is available.  */
2161   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2162     {
2163       rtx result = gen_reg_rtx (mode);
2164 
2165       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2166 	 need to expand the argument again.  This way, we will not perform
2167 	 side-effects more than once.  */
2168       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2169 
2170       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2171 
2172       start_sequence ();
2173 
2174       /* Compute into RESULT.
2175 	 Set RESULT to wherever the result comes back.  */
2176       if (builtin_optab == sincos_optab)
2177 	{
2178 	  int ok;
2179 
2180 	  switch (DECL_FUNCTION_CODE (fndecl))
2181 	    {
2182 	    CASE_FLT_FN (BUILT_IN_SIN):
2183 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2184 	      break;
2185 	    CASE_FLT_FN (BUILT_IN_COS):
2186 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2187 	      break;
2188 	    default:
2189 	      gcc_unreachable ();
2190 	    }
2191 	  gcc_assert (ok);
2192 	}
2193       else
2194 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2195 
2196       if (result != 0)
2197 	{
2198 	  /* Output the entire sequence.  */
2199 	  insns = get_insns ();
2200 	  end_sequence ();
2201 	  emit_insn (insns);
2202 	  return result;
2203 	}
2204 
2205       /* If we were unable to expand via the builtin, stop the sequence
2206 	 (without outputting the insns) and call to the library function
2207 	 with the stabilized argument list.  */
2208       end_sequence ();
2209     }
2210 
2211   return expand_call (exp, target, target == const0_rtx);
2212 }
2213 
2214 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2215    return an RTL instruction code that implements the functionality.
2216    If that isn't possible or available return CODE_FOR_nothing.  */
2217 
2218 static enum insn_code
2219 interclass_mathfn_icode (tree arg, tree fndecl)
2220 {
2221   bool errno_set = false;
2222   optab builtin_optab = unknown_optab;
2223   machine_mode mode;
2224 
2225   switch (DECL_FUNCTION_CODE (fndecl))
2226     {
2227     CASE_FLT_FN (BUILT_IN_ILOGB):
2228       errno_set = true; builtin_optab = ilogb_optab; break;
2229     CASE_FLT_FN (BUILT_IN_ISINF):
2230       builtin_optab = isinf_optab; break;
2231     case BUILT_IN_ISNORMAL:
2232     case BUILT_IN_ISFINITE:
2233     CASE_FLT_FN (BUILT_IN_FINITE):
2234     case BUILT_IN_FINITED32:
2235     case BUILT_IN_FINITED64:
2236     case BUILT_IN_FINITED128:
2237     case BUILT_IN_ISINFD32:
2238     case BUILT_IN_ISINFD64:
2239     case BUILT_IN_ISINFD128:
2240       /* These builtins have no optabs (yet).  */
2241       break;
2242     default:
2243       gcc_unreachable ();
2244     }
2245 
2246   /* There's no easy way to detect the case we need to set EDOM.  */
2247   if (flag_errno_math && errno_set)
2248     return CODE_FOR_nothing;
2249 
2250   /* Optab mode depends on the mode of the input argument.  */
2251   mode = TYPE_MODE (TREE_TYPE (arg));
2252 
2253   if (builtin_optab)
2254     return optab_handler (builtin_optab, mode);
2255   return CODE_FOR_nothing;
2256 }
2257 
2258 /* Expand a call to one of the builtin math functions that operate on
2259    a floating point argument and output an integer result (ilogb, isinf,
2260    isnan, etc).
2261    Return 0 if a normal call should be emitted rather than expanding the
2262    function in-line.  EXP is the expression that is a call to the builtin
2263    function; if convenient, the result should be placed in TARGET.  */
2264 
2265 static rtx
2266 expand_builtin_interclass_mathfn (tree exp, rtx target)
2267 {
2268   enum insn_code icode = CODE_FOR_nothing;
2269   rtx op0;
2270   tree fndecl = get_callee_fndecl (exp);
2271   machine_mode mode;
2272   tree arg;
2273 
2274   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2275     return NULL_RTX;
2276 
2277   arg = CALL_EXPR_ARG (exp, 0);
2278   icode = interclass_mathfn_icode (arg, fndecl);
2279   mode = TYPE_MODE (TREE_TYPE (arg));
2280 
2281   if (icode != CODE_FOR_nothing)
2282     {
2283       struct expand_operand ops[1];
2284       rtx_insn *last = get_last_insn ();
2285       tree orig_arg = arg;
2286 
2287       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2288 	 need to expand the argument again.  This way, we will not perform
2289 	 side-effects more than once.  */
2290       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2291 
2292       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2293 
2294       if (mode != GET_MODE (op0))
2295 	op0 = convert_to_mode (mode, op0, 0);
2296 
2297       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2298       if (maybe_legitimize_operands (icode, 0, 1, ops)
2299 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2300 	return ops[0].value;
2301 
2302       delete_insns_since (last);
2303       CALL_EXPR_ARG (exp, 0) = orig_arg;
2304     }
2305 
2306   return NULL_RTX;
2307 }
2308 
2309 /* Expand a call to the builtin sincos math function.
2310    Return NULL_RTX if a normal call should be emitted rather than expanding the
2311    function in-line.  EXP is the expression that is a call to the builtin
2312    function.  */
2313 
2314 static rtx
2315 expand_builtin_sincos (tree exp)
2316 {
2317   rtx op0, op1, op2, target1, target2;
2318   machine_mode mode;
2319   tree arg, sinp, cosp;
2320   int result;
2321   location_t loc = EXPR_LOCATION (exp);
2322   tree alias_type, alias_off;
2323 
2324   if (!validate_arglist (exp, REAL_TYPE,
2325  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2326     return NULL_RTX;
2327 
2328   arg = CALL_EXPR_ARG (exp, 0);
2329   sinp = CALL_EXPR_ARG (exp, 1);
2330   cosp = CALL_EXPR_ARG (exp, 2);
2331 
2332   /* Make a suitable register to place result in.  */
2333   mode = TYPE_MODE (TREE_TYPE (arg));
2334 
2335   /* Check if the sincos insn is available; if not, let the caller emit the call.  */
2336   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2337     return NULL_RTX;
2338 
2339   target1 = gen_reg_rtx (mode);
2340   target2 = gen_reg_rtx (mode);
2341 
2342   op0 = expand_normal (arg);
2343   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2344   alias_off = build_int_cst (alias_type, 0);
2345   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2346 					sinp, alias_off));
2347   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2348 					cosp, alias_off));
2349 
2350   /* Compute into target1 and target2.
2351      Set TARGET to wherever the result comes back.  */
2352   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2353   gcc_assert (result);
2354 
2355   /* Move target1 and target2 to the memory locations indicated
2356      by op1 and op2.  */
2357   emit_move_insn (op1, target1);
2358   emit_move_insn (op2, target2);
2359 
2360   return const0_rtx;
2361 }
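
/* Usage sketch of the builtin expanded above:

     double s, c;
     sincos (x, &s, &c);

   With a sincos insn both results come from a single instruction;
   otherwise the NULL_RTX return above makes the caller emit the
   normal library call.  */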
2362 
2363 /* Expand a call to the internal cexpi builtin to the sincos math function.
2364    EXP is the expression that is a call to the builtin function; if convenient,
2365    the result should be placed in TARGET.  */
2366 
2367 static rtx
2368 expand_builtin_cexpi (tree exp, rtx target)
2369 {
2370   tree fndecl = get_callee_fndecl (exp);
2371   tree arg, type;
2372   machine_mode mode;
2373   rtx op0, op1, op2;
2374   location_t loc = EXPR_LOCATION (exp);
2375 
2376   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2377     return NULL_RTX;
2378 
2379   arg = CALL_EXPR_ARG (exp, 0);
2380   type = TREE_TYPE (arg);
2381   mode = TYPE_MODE (TREE_TYPE (arg));
2382 
2383   /* Try expanding via a sincos optab, and fall back to emitting a libcall
2384      to sincos or cexp.  We are sure one of the two exists because cexpi
2385      is only generated from sincos or cexp, or when either of them is available.  */
2386   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2387     {
2388       op1 = gen_reg_rtx (mode);
2389       op2 = gen_reg_rtx (mode);
2390 
2391       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2392 
2393       /* Compute into op1 and op2.  */
2394       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2395     }
2396   else if (targetm.libc_has_function (function_sincos))
2397     {
2398       tree call, fn = NULL_TREE;
2399       tree top1, top2;
2400       rtx op1a, op2a;
2401 
2402       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2403 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2404       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2405 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2406       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2407 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2408       else
2409 	gcc_unreachable ();
2410 
2411       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2412       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2413       op1a = copy_addr_to_reg (XEXP (op1, 0));
2414       op2a = copy_addr_to_reg (XEXP (op2, 0));
2415       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2416       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2417 
2418       /* Make sure not to fold the sincos call again.  */
2419       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2420       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2421 				      call, 3, arg, top1, top2));
2422     }
2423   else
2424     {
2425       tree call, fn = NULL_TREE, narg;
2426       tree ctype = build_complex_type (type);
2427 
2428       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2429 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2430       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2431 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2432       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2433 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2434       else
2435 	gcc_unreachable ();
2436 
2437       /* If we don't have a decl for cexp, create one.  This is the
2438 	 friendliest fallback if the user calls __builtin_cexpi
2439 	 without full C99 function support on the target.  */
2440       if (fn == NULL_TREE)
2441 	{
2442 	  tree fntype;
2443 	  const char *name = NULL;
2444 
2445 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2446 	    name = "cexpf";
2447 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2448 	    name = "cexp";
2449 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2450 	    name = "cexpl";
2451 
2452 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2453 	  fn = build_fn_decl (name, fntype);
2454 	}
2455 
2456       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2457 			  build_real (type, dconst0), arg);
2458 
2459       /* Make sure not to fold the cexp call again.  */
2460       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2461       return expand_expr (build_call_nary (ctype, call, 1, narg),
2462 			  target, VOIDmode, EXPAND_NORMAL);
2463     }
2464 
2465   /* Now build the proper return type.  */
2466   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2467 			      make_tree (TREE_TYPE (arg), op2),
2468 			      make_tree (TREE_TYPE (arg), op1)),
2469 		      target, VOIDmode, EXPAND_NORMAL);
2470 }
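
/* The final fallback above relies on the identity
   cexpi (x) == cexp (I*x) == cos (x) + I*sin (x), which is why the
   argument is rebuilt as the complex value (0, x) before calling
   cexp; as a sketch:

     __builtin_cexpi (x)  ->  cexp (COMPLEX_EXPR <0.0, x>)
 */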
2471 
2472 /* Conveniently construct a function call expression.  FNDECL names the
2473    function to be called, N is the number of arguments, and the "..."
2474    parameters are the argument expressions.  Unlike build_call_expr,
2475    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2476 
2477 static tree
2478 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2479 {
2480   va_list ap;
2481   tree fntype = TREE_TYPE (fndecl);
2482   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2483 
2484   va_start (ap, n);
2485   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2486   va_end (ap);
2487   SET_EXPR_LOCATION (fn, loc);
2488   return fn;
2489 }
2490 
2491 /* Expand a call to one of the builtin rounding functions gcc defines
2492    as an extension (lfloor and lceil).  As these are gcc extensions we
2493    do not need to worry about setting errno to EDOM.
2494    If expanding via optab fails, lower expression to (int)(floor(x)).
2495    EXP is the expression that is a call to the builtin function;
2496    if convenient, the result should be placed in TARGET.  */
2497 
2498 static rtx
2499 expand_builtin_int_roundingfn (tree exp, rtx target)
2500 {
2501   convert_optab builtin_optab;
2502   rtx op0, tmp;
2503   rtx_insn *insns;
2504   tree fndecl = get_callee_fndecl (exp);
2505   enum built_in_function fallback_fn;
2506   tree fallback_fndecl;
2507   machine_mode mode;
2508   tree arg;
2509 
2510   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2511     gcc_unreachable ();
2512 
2513   arg = CALL_EXPR_ARG (exp, 0);
2514 
2515   switch (DECL_FUNCTION_CODE (fndecl))
2516     {
2517     CASE_FLT_FN (BUILT_IN_ICEIL):
2518     CASE_FLT_FN (BUILT_IN_LCEIL):
2519     CASE_FLT_FN (BUILT_IN_LLCEIL):
2520       builtin_optab = lceil_optab;
2521       fallback_fn = BUILT_IN_CEIL;
2522       break;
2523 
2524     CASE_FLT_FN (BUILT_IN_IFLOOR):
2525     CASE_FLT_FN (BUILT_IN_LFLOOR):
2526     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2527       builtin_optab = lfloor_optab;
2528       fallback_fn = BUILT_IN_FLOOR;
2529       break;
2530 
2531     default:
2532       gcc_unreachable ();
2533     }
2534 
2535   /* Make a suitable register to place result in.  */
2536   mode = TYPE_MODE (TREE_TYPE (exp));
2537 
2538   target = gen_reg_rtx (mode);
2539 
2540   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2541      need to expand the argument again.  This way, we will not perform
2542      side-effects more than once.  */
2543   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2544 
2545   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2546 
2547   start_sequence ();
2548 
2549   /* Compute into TARGET.  */
2550   if (expand_sfix_optab (target, op0, builtin_optab))
2551     {
2552       /* Output the entire sequence.  */
2553       insns = get_insns ();
2554       end_sequence ();
2555       emit_insn (insns);
2556       return target;
2557     }
2558 
2559   /* If we were unable to expand via the builtin, stop the sequence
2560      (without outputting the insns).  */
2561   end_sequence ();
2562 
2563   /* Fall back to floating point rounding optab.  */
2564   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2565 
2566   /* For non-C99 targets we may end up without a fallback fndecl here
2567      if the user called __builtin_lfloor directly.  In this case emit
2568      a call to the floor/ceil variants nevertheless.  This should result
2569      in the best user experience for targets without full C99 support.  */
2570   if (fallback_fndecl == NULL_TREE)
2571     {
2572       tree fntype;
2573       const char *name = NULL;
2574 
2575       switch (DECL_FUNCTION_CODE (fndecl))
2576 	{
2577 	case BUILT_IN_ICEIL:
2578 	case BUILT_IN_LCEIL:
2579 	case BUILT_IN_LLCEIL:
2580 	  name = "ceil";
2581 	  break;
2582 	case BUILT_IN_ICEILF:
2583 	case BUILT_IN_LCEILF:
2584 	case BUILT_IN_LLCEILF:
2585 	  name = "ceilf";
2586 	  break;
2587 	case BUILT_IN_ICEILL:
2588 	case BUILT_IN_LCEILL:
2589 	case BUILT_IN_LLCEILL:
2590 	  name = "ceill";
2591 	  break;
2592 	case BUILT_IN_IFLOOR:
2593 	case BUILT_IN_LFLOOR:
2594 	case BUILT_IN_LLFLOOR:
2595 	  name = "floor";
2596 	  break;
2597 	case BUILT_IN_IFLOORF:
2598 	case BUILT_IN_LFLOORF:
2599 	case BUILT_IN_LLFLOORF:
2600 	  name = "floorf";
2601 	  break;
2602 	case BUILT_IN_IFLOORL:
2603 	case BUILT_IN_LFLOORL:
2604 	case BUILT_IN_LLFLOORL:
2605 	  name = "floorl";
2606 	  break;
2607 	default:
2608 	  gcc_unreachable ();
2609 	}
2610 
2611       fntype = build_function_type_list (TREE_TYPE (arg),
2612 					 TREE_TYPE (arg), NULL_TREE);
2613       fallback_fndecl = build_fn_decl (name, fntype);
2614     }
2615 
2616   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2617 
2618   tmp = expand_normal (exp);
2619   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2620 
2621   /* Truncate the result of floating point optab to integer
2622      via expand_fix ().  */
2623   target = gen_reg_rtx (mode);
2624   expand_fix (target, tmp, 0);
2625 
2626   return target;
2627 }
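
/* Sketch of the fallback path above on a target without an lceil insn:

     long l = __builtin_lceil (x);

   is lowered to the equivalent of

     long l = (long) ceil (x);

   i.e. a call to the floating-point ceil followed by expand_fix.  */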
2628 
2629 /* Expand a call to one of the builtin math functions doing integer
2630    conversion (lrint).
2631    Return 0 if a normal call should be emitted rather than expanding the
2632    function in-line.  EXP is the expression that is a call to the builtin
2633    function; if convenient, the result should be placed in TARGET.  */
2634 
2635 static rtx
2636 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2637 {
2638   convert_optab builtin_optab;
2639   rtx op0;
2640   rtx_insn *insns;
2641   tree fndecl = get_callee_fndecl (exp);
2642   tree arg;
2643   machine_mode mode;
2644   enum built_in_function fallback_fn = BUILT_IN_NONE;
2645 
2646   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2647      gcc_unreachable ();
2648 
2649   arg = CALL_EXPR_ARG (exp, 0);
2650 
2651   switch (DECL_FUNCTION_CODE (fndecl))
2652     {
2653     CASE_FLT_FN (BUILT_IN_IRINT):
2654       fallback_fn = BUILT_IN_LRINT;
2655       gcc_fallthrough ();
2656     CASE_FLT_FN (BUILT_IN_LRINT):
2657     CASE_FLT_FN (BUILT_IN_LLRINT):
2658       builtin_optab = lrint_optab;
2659       break;
2660 
2661     CASE_FLT_FN (BUILT_IN_IROUND):
2662       fallback_fn = BUILT_IN_LROUND;
2663       gcc_fallthrough ();
2664     CASE_FLT_FN (BUILT_IN_LROUND):
2665     CASE_FLT_FN (BUILT_IN_LLROUND):
2666       builtin_optab = lround_optab;
2667       break;
2668 
2669     default:
2670       gcc_unreachable ();
2671     }
2672 
2673   /* There's no easy way to detect the case we need to set EDOM.  */
2674   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2675     return NULL_RTX;
2676 
2677   /* Make a suitable register to place result in.  */
2678   mode = TYPE_MODE (TREE_TYPE (exp));
2679 
2680   /* When errno does not need to be set, try expanding inline via the optab.  */
2681   if (!flag_errno_math)
2682     {
2683       rtx result = gen_reg_rtx (mode);
2684 
2685       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2686 	 need to expand the argument again.  This way, we will not perform
2687 	 side-effects more than once.  */
2688       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2689 
2690       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2691 
2692       start_sequence ();
2693 
2694       if (expand_sfix_optab (result, op0, builtin_optab))
2695 	{
2696 	  /* Output the entire sequence.  */
2697 	  insns = get_insns ();
2698 	  end_sequence ();
2699 	  emit_insn (insns);
2700 	  return result;
2701 	}
2702 
2703       /* If we were unable to expand via the builtin, stop the sequence
2704 	 (without outputting the insns) and call to the library function
2705 	 with the stabilized argument list.  */
2706       end_sequence ();
2707     }
2708 
2709   if (fallback_fn != BUILT_IN_NONE)
2710     {
2711       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2712 	 targets, (int) round (x) should never be transformed into
2713 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2714 	 a call to lround in the hope that the target provides at least some
2715 	 C99 functions.  This should result in the best user experience for
2716 	 targets without full C99 support.  */
2717       tree fallback_fndecl = mathfn_built_in_1
2718 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2719 
2720       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2721 				   fallback_fndecl, 1, arg);
2722 
2723       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2724       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2725       return convert_to_mode (mode, target, 0);
2726     }
2727 
2728   return expand_call (exp, target, target == const0_rtx);
2729 }
2730 
2731 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2732    a normal call should be emitted rather than expanding the function
2733    in-line.  EXP is the expression that is a call to the builtin
2734    function; if convenient, the result should be placed in TARGET.  */
2735 
2736 static rtx
2737 expand_builtin_powi (tree exp, rtx target)
2738 {
2739   tree arg0, arg1;
2740   rtx op0, op1;
2741   machine_mode mode;
2742   machine_mode mode2;
2743 
2744   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2745     return NULL_RTX;
2746 
2747   arg0 = CALL_EXPR_ARG (exp, 0);
2748   arg1 = CALL_EXPR_ARG (exp, 1);
2749   mode = TYPE_MODE (TREE_TYPE (exp));
2750 
2751   /* Emit a libcall to libgcc.  */
2752 
2753   /* Mode of the 2nd argument must match that of an int.  */
2754   mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2755 
2756   if (target == NULL_RTX)
2757     target = gen_reg_rtx (mode);
2758 
2759   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2760   if (GET_MODE (op0) != mode)
2761     op0 = convert_to_mode (mode, op0, 0);
2762   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2763   if (GET_MODE (op1) != mode2)
2764     op1 = convert_to_mode (mode2, op1, 0);
2765 
2766   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2767 				    target, LCT_CONST, mode, 2,
2768 				    op0, mode, op1, mode2);
2769 
2770   return target;
2771 }
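
/* The libcall emitted above resolves to libgcc's __powi helpers; as a
   sketch, for double

     double d = __builtin_powi (x, n);

   becomes a call to __powidf2 (x, n), with the exponent converted to
   int mode as required above.  */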
2772 
2773 /* Expand expression EXP which is a call to the strlen builtin.  Return
2774    NULL_RTX if we failed; in that case the caller should emit a normal
2775    call.  Otherwise try to get the result in TARGET, if convenient.  */
2776 
2777 static rtx
2778 expand_builtin_strlen (tree exp, rtx target,
2779 		       machine_mode target_mode)
2780 {
2781   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2782     return NULL_RTX;
2783   else
2784     {
2785       struct expand_operand ops[4];
2786       rtx pat;
2787       tree len;
2788       tree src = CALL_EXPR_ARG (exp, 0);
2789       rtx src_reg;
2790       rtx_insn *before_strlen;
2791       machine_mode insn_mode = target_mode;
2792       enum insn_code icode = CODE_FOR_nothing;
2793       unsigned int align;
2794 
2795       /* If the length can be computed at compile-time, return it.  */
2796       len = c_strlen (src, 0);
2797       if (len)
2798 	return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2799 
2800       /* If the length can be computed at compile-time and is a constant
2801 	 integer, but there are side-effects in src, evaluate
2802 	 src for side-effects, then return len.
2803 	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2804 	 can be optimized into: i++; x = 3;  */
2805       len = c_strlen (src, 1);
2806       if (len && TREE_CODE (len) == INTEGER_CST)
2807 	{
2808 	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2809 	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2810 	}
2811 
2812       align = get_pointer_alignment (src) / BITS_PER_UNIT;
2813 
2814       /* If SRC is not a pointer type, don't do this operation inline.  */
2815       if (align == 0)
2816 	return NULL_RTX;
2817 
2818       /* Bail out if we can't compute strlen in the right mode.  */
2819       while (insn_mode != VOIDmode)
2820 	{
2821 	  icode = optab_handler (strlen_optab, insn_mode);
2822 	  if (icode != CODE_FOR_nothing)
2823 	    break;
2824 
2825 	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2826 	}
2827       if (insn_mode == VOIDmode)
2828 	return NULL_RTX;
2829 
2830       /* Make a place to hold the source address.  We will not expand
2831 	 the actual source until we are sure that the expansion will
2832 	 not fail -- there are trees that cannot be expanded twice.  */
2833       src_reg = gen_reg_rtx (Pmode);
2834 
2835       /* Mark the beginning of the strlen sequence so we can emit the
2836 	 source operand later.  */
2837       before_strlen = get_last_insn ();
2838 
2839       create_output_operand (&ops[0], target, insn_mode);
2840       create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2841       create_integer_operand (&ops[2], 0);
2842       create_integer_operand (&ops[3], align);
2843       if (!maybe_expand_insn (icode, 4, ops))
2844 	return NULL_RTX;
2845 
2846       /* Now that we are assured of success, expand the source.  */
2847       start_sequence ();
2848       pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2849       if (pat != src_reg)
2850 	{
2851 #ifdef POINTERS_EXTEND_UNSIGNED
2852 	  if (GET_MODE (pat) != Pmode)
2853 	    pat = convert_to_mode (Pmode, pat,
2854 				   POINTERS_EXTEND_UNSIGNED);
2855 #endif
2856 	  emit_move_insn (src_reg, pat);
2857 	}
2858       pat = get_insns ();
2859       end_sequence ();
2860 
2861       if (before_strlen)
2862 	emit_insn_after (pat, before_strlen);
2863       else
2864 	emit_insn_before (pat, get_insns ());
2865 
2866       /* Return the value in the proper mode for this function.  */
2867       if (GET_MODE (ops[0].value) == target_mode)
2868 	target = ops[0].value;
2869       else if (target != 0)
2870 	convert_move (target, ops[0].value, 0);
2871       else
2872 	target = convert_to_mode (target_mode, ops[0].value, 0);
2873 
2874       return target;
2875     }
2876 }
2877 
2878 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
2879    bytes from constant string DATA + OFFSET and return it as target
2880    constant.  */
2881 
2882 static rtx
2883 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2884 			 machine_mode mode)
2885 {
2886   const char *str = (const char *) data;
2887 
2888   gcc_assert (offset >= 0
2889 	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2890 		  <= strlen (str) + 1));
2891 
2892   return c_readstr (str + offset, mode);
2893 }
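
/* Sketch of how store_by_pieces uses this callback: for

     memcpy (dst, "abcdefgh", 8);

   it may read the source back in, say, a DImode chunk via c_readstr
   and emit a single immediate store instead of a library call.  */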
2894 
2895 /* LEN specifies the length of the block of a memcpy/memset operation.
2896    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2897    In some cases we can make a very likely guess on the max size, which
2898    we then store in PROBABLE_MAX_SIZE.  */
2899 
2900 static void
2901 determine_block_size (tree len, rtx len_rtx,
2902 		      unsigned HOST_WIDE_INT *min_size,
2903 		      unsigned HOST_WIDE_INT *max_size,
2904 		      unsigned HOST_WIDE_INT *probable_max_size)
2905 {
2906   if (CONST_INT_P (len_rtx))
2907     {
2908       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2909       return;
2910     }
2911   else
2912     {
2913       wide_int min, max;
2914       enum value_range_type range_type = VR_UNDEFINED;
2915 
2916       /* Determine bounds from the type.  */
2917       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2918 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2919       else
2920 	*min_size = 0;
2921       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2922 	*probable_max_size = *max_size
2923 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2924       else
2925 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2926 
2927       if (TREE_CODE (len) == SSA_NAME)
2928 	range_type = get_range_info (len, &min, &max);
2929       if (range_type == VR_RANGE)
2930 	{
2931 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2932 	    *min_size = min.to_uhwi ();
2933 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2934 	    *probable_max_size = *max_size = max.to_uhwi ();
2935 	}
2936       else if (range_type == VR_ANTI_RANGE)
2937 	{
2938 	  /* An anti-range 0...N lets us determine the minimal size to be N+1.  */
2939 	  if (min == 0)
2940 	    {
2941 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2942 		*min_size = max.to_uhwi () + 1;
2943 	    }
2944 	  /* Code like
2945 
2946 	     int n;
2947 	     if (n < 100)
2948 	       memcpy (a, b, n)
2949 
2950 	     produces an anti-range allowing negative values of N.  We can
2951 	     still use that information and guess that N is not negative.
2952 	     */
2953 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2954 	    *probable_max_size = min.to_uhwi () - 1;
2955 	}
2956     }
2957   gcc_checking_assert (*max_size <=
2958 		       (unsigned HOST_WIDE_INT)
2959 			  GET_MODE_MASK (GET_MODE (len_rtx)));
2960 }
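
/* Worked example (a sketch): if LEN is an SSA name with recorded range
   [4, 16], the logic above yields

     *min_size = 4;  *max_size = *probable_max_size = 16;

   while the anti-range produced by `if (n < 100) memcpy (a, b, n)'
   only tightens *probable_max_size, to 99.  */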
2961 
2962 /* Helper function to do the actual work for expand_builtin_memcpy.  */
2963 
2964 static rtx
2965 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2966 {
2967   const char *src_str;
2968   unsigned int src_align = get_pointer_alignment (src);
2969   unsigned int dest_align = get_pointer_alignment (dest);
2970   rtx dest_mem, src_mem, dest_addr, len_rtx;
2971   HOST_WIDE_INT expected_size = -1;
2972   unsigned int expected_align = 0;
2973   unsigned HOST_WIDE_INT min_size;
2974   unsigned HOST_WIDE_INT max_size;
2975   unsigned HOST_WIDE_INT probable_max_size;
2976 
2977   /* If DEST is not a pointer type, call the normal function.  */
2978   if (dest_align == 0)
2979     return NULL_RTX;
2980 
2981   /* Likewise, if SRC is not a pointer type, don't do this
2982      operation in-line.  */
2983   if (src_align == 0)
2984     return NULL_RTX;
2985 
2986   if (currently_expanding_gimple_stmt)
2987     stringop_block_profile (currently_expanding_gimple_stmt,
2988 			    &expected_align, &expected_size);
2989 
2990   if (expected_align < dest_align)
2991     expected_align = dest_align;
2992   dest_mem = get_memory_rtx (dest, len);
2993   set_mem_align (dest_mem, dest_align);
2994   len_rtx = expand_normal (len);
2995   determine_block_size (len, len_rtx, &min_size, &max_size,
2996 			&probable_max_size);
2997   src_str = c_getstr (src);
2998 
2999   /* If SRC is a string constant and block move would be done
3000      by pieces, we can avoid loading the string from memory
3001      and instead store only the computed constants.  */
3002   if (src_str
3003       && CONST_INT_P (len_rtx)
3004       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3005       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3006 			      CONST_CAST (char *, src_str),
3007 			      dest_align, false))
3008     {
3009       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3010 				  builtin_memcpy_read_str,
3011 				  CONST_CAST (char *, src_str),
3012 				  dest_align, false, 0);
3013       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3014       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3015       return dest_mem;
3016     }
3017 
3018   src_mem = get_memory_rtx (src, len);
3019   set_mem_align (src_mem, src_align);
3020 
3021   /* Copy word part most expediently.  */
3022   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3023 				     CALL_EXPR_TAILCALL (exp)
3024 				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3025 				     expected_align, expected_size,
3026 				     min_size, max_size, probable_max_size);
3027 
3028   if (dest_addr == 0)
3029     {
3030       dest_addr = force_operand (XEXP (dest_mem, 0), target);
3031       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3032     }
3033 
3034   return dest_addr;
3035 }
3036 
3037 /* Try to verify that the sizes and lengths of the arguments to a string
3038    manipulation function given by EXP are within valid bounds and that
3039    the operation does not lead to buffer overflow.  Arguments other than
3040    EXP may be null.  When non-null, the arguments have the following
3041    meaning:
3042    SIZE is the user-supplied size argument to the function (such as in
3043    memcpy(d, s, SIZE) or strncpy(d, s, SIZE)).  It specifies the exact
3044    number of bytes to write.
3045    MAXLEN is the user-supplied bound on the length of the source sequence
3046    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3047    of bytes to write.
3048    STR is the source string (such as in strcpy(d, s)) when the expression
3049    EXP is a string function call (as opposed to a memory call like memcpy).
3050    As an exception, STR can also be an integer denoting the precomputed
3051    length of the source string.
3052    OBJSIZE is the size of the destination object specified by the last
3053    argument to the _chk builtins, typically resulting from the expansion
3054    of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3055    OBJSIZE).
3056 
3057    When SIZE is null, MAXLEN is checked to verify that it doesn't exceed
3058    SIZE_MAX.
3059 
3060    If the call is successfully verified as safe from buffer overflow
3061    the function returns true, otherwise false.  */
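
/* For example (an illustrative sketch), a call such as

     char d[4];
     __builtin___memcpy_chk (d, s, 8, __builtin_object_size (d, 0));

   reaches this routine with SIZE == 8 and OBJSIZE == 4, and triggers
   the "writing 8 bytes into a region of size 4" warning below.  */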
3062 
3063 static bool
3064 check_sizes (int opt, tree exp, tree size, tree maxlen, tree str, tree objsize)
3065 {
3066   /* The size of the largest object is half the address space, or
3067      SSIZE_MAX.  (This is way too permissive.)  */
3068   tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3069 
3070   tree slen = NULL_TREE;
3071 
3072   /* Set to true when the exact number of bytes written by a string
3073      function like strcpy is not known and the only thing that is
3074      known is that it must be at least one (for the terminating nul).  */
3075   bool at_least_one = false;
3076   if (str)
3077     {
3078       /* STR is normally a pointer to string but as a special case
3079 	 it can be an integer denoting the length of a string.  */
3080       if (POINTER_TYPE_P (TREE_TYPE (str)))
3081 	{
3082 	  /* Try to determine the range of lengths the source string
3083 	     refers to.  If it can be determined add one to it for
3084 	     the terminating nul.  Otherwise, set it to one for
3085 	     the same reason.  */
3086 	  tree lenrange[2];
3087 	  get_range_strlen (str, lenrange);
3088 	  if (lenrange[0])
3089 	    slen = fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3090 				size_one_node);
3091 	  else
3092 	    {
3093 	      at_least_one = true;
3094 	      slen = size_one_node;
3095 	    }
3096 	}
3097       else
3098 	slen = str;
3099     }
3100 
3101   if (!size && !maxlen)
3102     {
3103       /* When the only available piece of data is the object size
3104 	 there is nothing to do.  */
3105       if (!slen)
3106 	return true;
3107 
3108       /* Otherwise, when the length of the source sequence is known
3109 	 (as with strlen), set SIZE to it.  */
3110       size = slen;
3111     }
3112 
3113   if (!objsize)
3114     objsize = maxobjsize;
3115 
3116   /* The SIZE is exact if it's non-null, constant, and in range of
3117      unsigned HOST_WIDE_INT.  */
3118   bool exactsize = size && tree_fits_uhwi_p (size);
3119 
3120   tree range[2] = { NULL_TREE, NULL_TREE };
3121   if (size)
3122     get_size_range (size, range);
3123 
3124   /* First check the number of bytes to be written against the maximum
3125      object size.  */
3126   if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3127     {
3128       location_t loc = tree_nonartificial_location (exp);
3129 
3130       if (range[0] == range[1])
3131 	warning_at (loc, opt,
3132 		    "%K%qD: specified size %wu "
3133 		    "exceeds maximum object size %wu",
3134 		    exp, get_callee_fndecl (exp),
3135 		    tree_to_uhwi (range[0]),
3136 		    tree_to_uhwi (maxobjsize));
3137       else
3138 	warning_at (loc, opt,
3139 		    "%K%qD: specified size between %wu and %wu "
3140 		    "exceeds maximum object size %wu",
3141 		    exp, get_callee_fndecl (exp),
3142 		    tree_to_uhwi (range[0]),
3143 		    tree_to_uhwi (range[1]),
3144 		    tree_to_uhwi (maxobjsize));
3145       return false;
3146     }
3147 
3148   /* Next check the number of bytes to be written against the destination
3149      object size.  */
3150   if (range[0] || !exactsize || integer_all_onesp (size))
3151     {
3152       if (range[0]
3153 	  && ((tree_fits_uhwi_p (objsize)
3154 	       && tree_int_cst_lt (objsize, range[0]))
3155 	      || (tree_fits_uhwi_p (size)
3156 		  && tree_int_cst_lt (size, range[0]))))
3157 	{
3158 	  unsigned HOST_WIDE_INT uwir0 = tree_to_uhwi (range[0]);
3159 
3160 	  location_t loc = tree_nonartificial_location (exp);
3161 
3162 	  if (at_least_one)
3163 	    warning_at (loc, opt,
3164 			"%K%qD: writing at least %wu byte into a region "
3165 			"of size %wu overflows the destination",
3166 			exp, get_callee_fndecl (exp), uwir0,
3167 			tree_to_uhwi (objsize));
3168 	  else if (range[0] == range[1])
3169 	    warning_at (loc, opt,
3170 			(uwir0 == 1
3171 			 ? G_("%K%qD: writing %wu byte into a region "
3172 			      "of size %wu overflows the destination")
3173 			 : G_("%K%qD: writing %wu bytes into a region "
3174 			      "of size %wu overflows the destination")),
3175 			exp, get_callee_fndecl (exp), uwir0,
3176 			tree_to_uhwi (objsize));
3177 	  else
3178 	    warning_at (loc, opt,
3179 			"%K%qD: writing between %wu and %wu bytes "
3180 			"into a region of size %wu overflows "
3181 			"the destination",
3182 			exp, get_callee_fndecl (exp), uwir0,
3183 			tree_to_uhwi (range[1]), tree_to_uhwi (objsize));
3184 
3185 	  /* Return error when an overflow has been detected.  */
3186 	  return false;
3187 	}
3188     }
3189 
3190   /* Check the maximum length of the source sequence against the size
3191      of the destination object if known, or against the maximum size
3192      of an object.  */
3193   if (maxlen)
3194     {
3195       get_size_range (maxlen, range);
3196 
3197       if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3198 	{
3199 	  location_t loc = tree_nonartificial_location (exp);
3200 
3201 	  if (tree_int_cst_lt (maxobjsize, range[0]))
3202 	    {
3203 	      /* Warn about crazy big sizes first since that's more
3204 		 likely to be meaningful than saying that the bound
3205 		 is greater than the object size if both are big.  */
3206 	      if (range[0] == range[1])
3207 		warning_at (loc, opt,
3208 			    "%K%qD: specified bound %wu "
3209 			    "exceeds maximum object size %wu",
3210 			    exp, get_callee_fndecl (exp),
3211 			    tree_to_uhwi (range[0]),
3212 			    tree_to_uhwi (maxobjsize));
3213 	      else
3214 		warning_at (loc, opt,
3215 			    "%K%qD: specified bound between %wu and %wu "
3216 			    " exceeds maximum object size %wu",
3217 			    exp, get_callee_fndecl (exp),
3218 			    tree_to_uhwi (range[0]),
3219 			    tree_to_uhwi (range[1]),
3220 			    tree_to_uhwi (maxobjsize));
3221 
3222 	      return false;
3223 	    }
3224 
3225 	  if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3226 	    {
3227 	      if (range[0] == range[1])
3228 		warning_at (loc, opt,
3229 			    "%K%qD: specified bound %wu "
3230 			    "exceeds the size %wu of the destination",
3231 			    exp, get_callee_fndecl (exp),
3232 			    tree_to_uhwi (range[0]),
3233 			    tree_to_uhwi (objsize));
3234 	      else
3235 		warning_at (loc, opt,
3236 			    "%K%qD: specified bound between %wu and %wu "
3237 			    " exceeds the size %wu of the destination",
3238 			    exp, get_callee_fndecl (exp),
3239 			    tree_to_uhwi (range[0]),
3240 			    tree_to_uhwi (range[1]),
3241 			    tree_to_uhwi (objsize));
3242 	      return false;
3243 	    }
3244 	}
3245     }
3246 
3247   return true;
3248 }
3249 
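/* Illustration (not part of GCC): a user-level call of the kind the
   checker above diagnoses.  The buffer and sizes are hypothetical.

     #include <string.h>

     void
     demo (void)
     {
       char d[4];
       memcpy (d, "abcdef", 6);   // warning: writing 6 bytes into a
                                  // region of size 4 overflows the
                                  // destination
     }
*/
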
3250 /* Helper to compute the size of the object referenced by the DEST
3251    expression which must be of pointer type, using Object Size type
3252    OSTYPE (only the least significant 2 bits are used).  Return
3253    the size of the object if successful or NULL when the size cannot
3254    be determined.  */
3255 
3256 static inline tree
3257 compute_dest_size (tree dest, int ostype)
3258 {
3259   unsigned HOST_WIDE_INT size;
3260   if (compute_builtin_object_size (dest, ostype & 3, &size))
3261     return build_int_cst (sizetype, size);
3262 
3263   return NULL_TREE;
3264 }
3265 
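/* Illustration (not part of GCC): compute_dest_size above follows the
   __builtin_object_size conventions.  For a hypothetical object:

     struct S { char a[4]; char b[4]; } s;

     __builtin_object_size (s.a, 0);   // 8: bytes to the end of s
     __builtin_object_size (s.a, 1);   // 4: bytes in the subobject s.a
*/
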
3266 /* Helper to determine and check the sizes of the source and the destination
3267    of calls to __builtin_{bzero,memcpy,memset}.  Use Object Size type-0
3268    regardless of the OPT_Wstringop_overflow_ setting.  Returns true on success
3269    (no overflow or invalid sizes), false otherwise.  */
3270 
3271 static bool
3272 check_memop_sizes (tree exp, tree dest, tree size)
3273 {
3274   if (!warn_stringop_overflow)
3275     return true;
3276 
3277   /* For functions like memset and memcpy that operate on raw memory
3278      try to determine the size of the largest destination object using
3279      type-0 Object Size regardless of the object size type specified
3280      by the option.  */
3281   tree objsize = compute_dest_size (dest, 0);
3282 
3283   return check_sizes (OPT_Wstringop_overflow_, exp,
3284 		      size, /*maxlen=*/NULL_TREE, /*str=*/NULL_TREE, objsize);
3285 }
3286 
3287 /* Expand a call EXP to the memcpy builtin.
3288    Return NULL_RTX if we failed; the caller should emit a normal call,
3289    otherwise try to get the result in TARGET, if convenient (and in
3290    mode MODE if that's convenient).  */
3291 
3292 static rtx
3293 expand_builtin_memcpy (tree exp, rtx target)
3294 {
3295   if (!validate_arglist (exp,
3296  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3297     return NULL_RTX;
3298 
3299   tree dest = CALL_EXPR_ARG (exp, 0);
3300   tree src = CALL_EXPR_ARG (exp, 1);
3301   tree len = CALL_EXPR_ARG (exp, 2);
3302 
3303   check_memop_sizes (exp, dest, len);
3304 
3305   return expand_builtin_memcpy_args (dest, src, len, target, exp);
3306 }
3307 
3308 /* Expand an instrumented call EXP to the memcpy builtin.
3309    Return NULL_RTX if we failed; the caller should emit a normal call,
3310    otherwise try to get the result in TARGET, if convenient (and in
3311    mode MODE if that's convenient).  */
3312 
3313 static rtx
3314 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3315 {
3316   if (!validate_arglist (exp,
3317 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3318 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3319 			 INTEGER_TYPE, VOID_TYPE))
3320     return NULL_RTX;
3321   else
3322     {
3323       tree dest = CALL_EXPR_ARG (exp, 0);
3324       tree src = CALL_EXPR_ARG (exp, 2);
3325       tree len = CALL_EXPR_ARG (exp, 4);
3326       rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3327 
3328       /* Return src bounds with the result.  */
3329       if (res)
3330 	{
3331 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3332 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3333 	  res = chkp_join_splitted_slot (res, bnd);
3334 	}
3335       return res;
3336     }
3337 }
3338 
3339 /* Expand a call EXP to the mempcpy builtin.
3340    Return NULL_RTX if we failed; the caller should emit a normal call,
3341    otherwise try to get the result in TARGET, if convenient (and in
3342    mode MODE if that's convenient).  If ENDP is 0 return the
3343    destination pointer, if ENDP is 1 return the end pointer ala
3344    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3345    stpcpy.  */
3346 
3347 static rtx
3348 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3349 {
3350   if (!validate_arglist (exp,
3351  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3352     return NULL_RTX;
3353 
3354   tree dest = CALL_EXPR_ARG (exp, 0);
3355   tree src = CALL_EXPR_ARG (exp, 1);
3356   tree len = CALL_EXPR_ARG (exp, 2);
3357 
3358   /* Avoid expanding mempcpy into memcpy when the call is determined
3359      to overflow the buffer.  This also prevents the same overflow
3360      from being diagnosed again when expanding memcpy.  */
3361   if (!check_memop_sizes (exp, dest, len))
3362     return NULL_RTX;
3363 
3364   return expand_builtin_mempcpy_args (dest, src, len,
3365 				      target, mode, /*endp=*/ 1,
3366 				      exp);
3367 }
3368 
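/* Illustration (not part of GCC): the ENDP conventions used throughout,
   for a hypothetical 4-byte copy into D:

     ENDP == 0   returns D        (memcpy style)
     ENDP == 1   returns D + 4    (mempcpy style)
     ENDP == 2   returns D + 3    (stpcpy style: points at the NUL)

   e.g.  char d[8]; char *p = __builtin_mempcpy (d, "abc", 4);  // p == d + 4
*/
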
3369 /* Expand an instrumented call EXP to the mempcpy builtin.
3370    Return NULL_RTX if we failed; the caller should emit a normal call,
3371    otherwise try to get the result in TARGET, if convenient (and in
3372    mode MODE if that's convenient).  */
3373 
3374 static rtx
3375 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3376 {
3377   if (!validate_arglist (exp,
3378 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3379 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3380 			 INTEGER_TYPE, VOID_TYPE))
3381     return NULL_RTX;
3382   else
3383     {
3384       tree dest = CALL_EXPR_ARG (exp, 0);
3385       tree src = CALL_EXPR_ARG (exp, 2);
3386       tree len = CALL_EXPR_ARG (exp, 4);
3387       rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3388 					     mode, 1, exp);
3389 
3390       /* Return src bounds with the result.  */
3391       if (res)
3392 	{
3393 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3394 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3395 	  res = chkp_join_splitted_slot (res, bnd);
3396 	}
3397       return res;
3398     }
3399 }
3400 
3401 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3402    arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3403    so that this can also be called without constructing an actual CALL_EXPR.
3404    The other arguments and return value are the same as for
3405    expand_builtin_mempcpy.  */
3406 
3407 static rtx
3408 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3409 			     rtx target, machine_mode mode, int endp,
3410 			     tree orig_exp)
3411 {
3412   tree fndecl = get_callee_fndecl (orig_exp);
3413 
3414   /* If return value is ignored, transform mempcpy into memcpy.  */
3415   if (target == const0_rtx
3416       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3417       && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3418     {
3419       tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3420       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3421 					   dest, src, len);
3422       return expand_expr (result, target, mode, EXPAND_NORMAL);
3423     }
3424   else if (target == const0_rtx
3425 	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3426     {
3427       tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3428       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3429 					   dest, src, len);
3430       return expand_expr (result, target, mode, EXPAND_NORMAL);
3431     }
3432   else
3433     {
3434       const char *src_str;
3435       unsigned int src_align = get_pointer_alignment (src);
3436       unsigned int dest_align = get_pointer_alignment (dest);
3437       rtx dest_mem, src_mem, len_rtx;
3438 
3439       /* If either SRC or DEST is not a pointer type, don't do this
3440 	 operation in-line.  */
3441       if (dest_align == 0 || src_align == 0)
3442 	return NULL_RTX;
3443 
3444       /* If LEN is not constant, call the normal function.  */
3445       if (! tree_fits_uhwi_p (len))
3446 	return NULL_RTX;
3447 
3448       len_rtx = expand_normal (len);
3449       src_str = c_getstr (src);
3450 
3451       /* If SRC is a string constant and block move would be done
3452 	 by pieces, we can avoid loading the string from memory
3453 	 and only store the computed constants.  */
3454       if (src_str
3455 	  && CONST_INT_P (len_rtx)
3456 	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3457 	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3458 				  CONST_CAST (char *, src_str),
3459 				  dest_align, false))
3460 	{
3461 	  dest_mem = get_memory_rtx (dest, len);
3462 	  set_mem_align (dest_mem, dest_align);
3463 	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3464 				      builtin_memcpy_read_str,
3465 				      CONST_CAST (char *, src_str),
3466 				      dest_align, false, endp);
3467 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3469 	  return dest_mem;
3470 	}
3471 
3472       if (CONST_INT_P (len_rtx)
3473 	  && can_move_by_pieces (INTVAL (len_rtx),
3474 				 MIN (dest_align, src_align)))
3475 	{
3476 	  dest_mem = get_memory_rtx (dest, len);
3477 	  set_mem_align (dest_mem, dest_align);
3478 	  src_mem = get_memory_rtx (src, len);
3479 	  set_mem_align (src_mem, src_align);
3480 	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3481 				     MIN (dest_align, src_align), endp);
3482 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3483 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3484 	  return dest_mem;
3485 	}
3486 
3487       return NULL_RTX;
3488     }
3489 }
3490 
3491 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3492    we failed, the caller should emit a normal call, otherwise try to
3493    get the result in TARGET, if convenient.  If ENDP is 0 return the
3494    destination pointer, if ENDP is 1 return the end pointer ala
3495    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3496    stpcpy.  */
3497 
3498 static rtx
3499 expand_movstr (tree dest, tree src, rtx target, int endp)
3500 {
3501   struct expand_operand ops[3];
3502   rtx dest_mem;
3503   rtx src_mem;
3504 
3505   if (!targetm.have_movstr ())
3506     return NULL_RTX;
3507 
3508   dest_mem = get_memory_rtx (dest, NULL);
3509   src_mem = get_memory_rtx (src, NULL);
3510   if (!endp)
3511     {
3512       target = force_reg (Pmode, XEXP (dest_mem, 0));
3513       dest_mem = replace_equiv_address (dest_mem, target);
3514     }
3515 
3516   create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3517   create_fixed_operand (&ops[1], dest_mem);
3518   create_fixed_operand (&ops[2], src_mem);
3519   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3520     return NULL_RTX;
3521 
3522   if (endp && target != const0_rtx)
3523     {
3524       target = ops[0].value;
3525       /* movstr is supposed to set end to the address of the NUL
3526 	 terminator.  If the caller requested a mempcpy-like return value,
3527 	 adjust it.  */
3528       if (endp == 1)
3529 	{
3530 	  rtx tem = plus_constant (GET_MODE (target),
3531 				   gen_lowpart (GET_MODE (target), target), 1);
3532 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3533 	}
3534     }
3535   return target;
3536 }
3537 
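/* Illustration (not part of GCC): why expand_movstr adjusts by one for
   ENDP == 1.  A movstr pattern leaves its output pointing at the copied
   NUL (the stpcpy convention); mempcpy must return one past it:

     char d[8];
     char *q = stpcpy (d, "abc");    // q == d + 3, *q == '\0'
     // mempcpy (d, "abc", 4) would return d + 4 == q + 1
*/
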
3538 /* Do some very basic size validation of a call to the strcat builtin
3539    given by EXP.  Return NULL_RTX to have the built-in expand to a call
3540    to the library function.  */
3541 
3542 static rtx
3543 expand_builtin_strcat (tree exp, rtx)
3544 {
3545   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3546       || !warn_stringop_overflow)
3547     return NULL_RTX;
3548 
3549   tree dest = CALL_EXPR_ARG (exp, 0);
3550   tree src = CALL_EXPR_ARG (exp, 1);
3551 
3552   /* There is no way here to determine the length of the string in
3553      the destination to which the SRC string is being appended, so
3554      just diagnose cases when the source string is longer than
3555      the destination object.  */
3556 
3557   tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3558 
3559   check_sizes (OPT_Wstringop_overflow_,
3560 	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3561 
3562   return NULL_RTX;
3563 }
3564 
3565 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3566    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3567    try to get the result in TARGET, if convenient (and in mode MODE if that's
3568    convenient).  */
3569 
3570 static rtx
3571 expand_builtin_strcpy (tree exp, rtx target)
3572 {
3573   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3574     return NULL_RTX;
3575 
3576   tree dest = CALL_EXPR_ARG (exp, 0);
3577   tree src = CALL_EXPR_ARG (exp, 1);
3578 
3579   if (warn_stringop_overflow)
3580     {
3581       tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3582       check_sizes (OPT_Wstringop_overflow_,
3583 		   exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3584     }
3585 
3586   return expand_builtin_strcpy_args (dest, src, target);
3587 }
3588 
3589 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3590    arguments to the builtin_strcpy call DEST and SRC are broken out
3591    so that this can also be called without constructing an actual CALL_EXPR.
3592    The other arguments and return value are the same as for
3593    expand_builtin_strcpy.  */
3594 
3595 static rtx
3596 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3597 {
3598   return expand_movstr (dest, src, target, /*endp=*/0);
3599 }
3600 
3601 /* Expand a call EXP to the stpcpy builtin.
3602    Return NULL_RTX if we failed; the caller should emit a normal call,
3603    otherwise try to get the result in TARGET, if convenient (and in
3604    mode MODE if that's convenient).  */
3605 
3606 static rtx
3607 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3608 {
3609   tree dst, src;
3610   location_t loc = EXPR_LOCATION (exp);
3611 
3612   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3613     return NULL_RTX;
3614 
3615   dst = CALL_EXPR_ARG (exp, 0);
3616   src = CALL_EXPR_ARG (exp, 1);
3617 
3618   /* If return value is ignored, transform stpcpy into strcpy.  */
3619   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3620     {
3621       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3622       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3623       return expand_expr (result, target, mode, EXPAND_NORMAL);
3624     }
3625   else
3626     {
3627       tree len, lenp1;
3628       rtx ret;
3629 
3630       /* Ensure we get an actual string whose length can be evaluated at
3631 	 compile-time, not an expression containing a string.  This is
3632 	 because the latter will potentially produce pessimized code
3633 	 when used to produce the return value.  */
3634       if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3635 	return expand_movstr (dst, src, target, /*endp=*/2);
3636 
3637       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3638       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3639 					 target, mode, /*endp=*/2,
3640 					 exp);
3641 
3642       if (ret)
3643 	return ret;
3644 
3645       if (TREE_CODE (len) == INTEGER_CST)
3646 	{
3647 	  rtx len_rtx = expand_normal (len);
3648 
3649 	  if (CONST_INT_P (len_rtx))
3650 	    {
3651 	      ret = expand_builtin_strcpy_args (dst, src, target);
3652 
3653 	      if (ret)
3654 		{
3655 		  if (! target)
3656 		    {
3657 		      if (mode != VOIDmode)
3658 			target = gen_reg_rtx (mode);
3659 		      else
3660 			target = gen_reg_rtx (GET_MODE (ret));
3661 		    }
3662 		  if (GET_MODE (target) != GET_MODE (ret))
3663 		    ret = gen_lowpart (GET_MODE (target), ret);
3664 
3665 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3666 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3667 		  gcc_assert (ret);
3668 
3669 		  return target;
3670 		}
3671 	    }
3672 	}
3673 
3674       return expand_movstr (dst, src, target, /*endp=*/2);
3675     }
3676 }
3677 
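/* Illustration (not part of GCC): the stpcpy strategies above, in source
   terms (hypothetical user code):

     stpcpy (d, s);            // result unused: expanded as strcpy (d, s)
     p = stpcpy (d, "abc");    // constant source: expanded roughly as
                               // mempcpy (d, "abc", 3 + 1) - 1
*/
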
3678 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3679    bytes from constant string DATA + OFFSET and return it as target
3680    constant.  */
3681 
3682 rtx
3683 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3684 			  machine_mode mode)
3685 {
3686   const char *str = (const char *) data;
3687 
3688   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3689     return const0_rtx;
3690 
3691   return c_readstr (str + offset, mode);
3692 }
3693 
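/* Illustration (not part of GCC): builtin_strncpy_read_str supplies the
   zero padding strncpy requires.  For DATA == "ab" and a hypothetical
   4-byte MODE:

     offset 0:  bytes 'a' 'b' 0 0   // c_readstr zero-fills past the NUL
     offset 4:  const0_rtx          // offset beyond strlen: all zeros
*/
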
3694 /* Helper to check the sizes of sequences and the destination of calls
3695    to __builtin_strncat and __builtin___strncat_chk.  Returns true on
3696    success (no overflow or invalid sizes), false otherwise.  */
3697 
3698 static bool
3699 check_strncat_sizes (tree exp, tree objsize)
3700 {
3701   tree dest = CALL_EXPR_ARG (exp, 0);
3702   tree src = CALL_EXPR_ARG (exp, 1);
3703   tree maxlen = CALL_EXPR_ARG (exp, 2);
3704 
3705   /* Try to determine the range of lengths that the source expression
3706      refers to.  */
3707   tree lenrange[2];
3708   get_range_strlen (src, lenrange);
3709 
3710   /* Try to verify that the destination is big enough for the shortest
3711      string.  */
3712 
3713   if (!objsize && warn_stringop_overflow)
3714     {
3715       /* If it hasn't been provided by __strncat_chk, try to determine
3716 	 the size of the destination object into which the source is
3717 	 being copied.  */
3718       objsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3719     }
3720 
3721   /* Add one for the terminating nul.  */
3722   tree srclen = (lenrange[0]
3723 		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3724 				size_one_node)
3725 		 : NULL_TREE);
3726 
3727   /* Strncat copies at most MAXLEN bytes and always appends the terminating
3728      nul so the specified upper bound should never be equal to (or greater
3729      than) the size of the destination.  */
3730   if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3731       && tree_int_cst_equal (objsize, maxlen))
3732     {
3733       warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3734 		  "specified bound %wu "
3735 		  "equals the size of the destination",
3736 		  tree_to_uhwi (maxlen));
3737 
3738       return false;
3739     }
3740 
3741   if (!srclen
3742       || (maxlen && tree_fits_uhwi_p (maxlen)
3743 	  && tree_fits_uhwi_p (srclen)
3744 	  && tree_int_cst_lt (maxlen, srclen)))
3745     srclen = maxlen;
3746 
3747   /* The upper bound on the number of bytes to write is MAXLEN, but
3748      check_sizes will also check SRCLEN if MAXLEN's value isn't known.  */
3749   return check_sizes (OPT_Wstringop_overflow_,
3750 		      exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3751 }
3752 
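/* Illustration (not part of GCC): the bound check above, in user terms
   (hypothetical buffer):

     char d[8] = "";
     strncat (d, s, sizeof d);    // warning: bound 8 equals the destination
                                  // size, leaving no room for the NUL
     strncat (d, s, sizeof d - strlen (d) - 1);   // conventional idiom
*/
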
3753 /* Similar to expand_builtin_strcat, do some very basic size validation
3754    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
3755    the built-in expand to a call to the library function.  */
3756 
3757 static rtx
3758 expand_builtin_strncat (tree exp, rtx)
3759 {
3760   if (!validate_arglist (exp,
3761 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3762       || !warn_stringop_overflow)
3763     return NULL_RTX;
3764 
3765   tree dest = CALL_EXPR_ARG (exp, 0);
3766   tree src = CALL_EXPR_ARG (exp, 1);
3767   /* The upper bound on the number of bytes to write.  */
3768   tree maxlen = CALL_EXPR_ARG (exp, 2);
3769   /* The length of the source sequence.  */
3770   tree slen = c_strlen (src, 1);
3771 
3772   /* Try to determine the range of lengths that the source expression
3773      refers to.  */
3774   tree lenrange[2];
3775   if (slen)
3776     lenrange[0] = lenrange[1] = slen;
3777   else
3778     get_range_strlen (src, lenrange);
3779 
3780   /* Try to verify that the destination is big enough for the shortest
3781      string.  First try to determine the size of the destination object
3782      into which the source is being copied.  */
3783   tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3784 
3785   /* Add one for the terminating nul.  */
3786   tree srclen = (lenrange[0]
3787 		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3788 				size_one_node)
3789 		 : NULL_TREE);
3790 
3791   /* Strncat copies at most MAXLEN bytes and always appends the terminating
3792      nul so the specified upper bound should never be equal to (or greater
3793      than) the size of the destination.  */
3794   if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3795       && tree_int_cst_equal (destsize, maxlen))
3796     {
3797       warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3798 		  "specified bound %wu "
3799 		  "equals the size of the destination",
3800 		  tree_to_uhwi (maxlen));
3801 
3802       return NULL_RTX;
3803     }
3804 
3805   if (!srclen
3806       || (maxlen && tree_fits_uhwi_p (maxlen)
3807 	  && tree_fits_uhwi_p (srclen)
3808 	  && tree_int_cst_lt (maxlen, srclen)))
3809     srclen = maxlen;
3810 
3811   /* The upper bound on the number of bytes to write is MAXLEN, but
3812      check_sizes will also check SRCLEN if MAXLEN's value isn't known.  */
3813   check_sizes (OPT_Wstringop_overflow_,
3814 	       exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3815 
3816   return NULL_RTX;
3817 }
3818 
3819 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3820    NULL_RTX if we failed; the caller should emit a normal call.  */
3821 
3822 static rtx
3823 expand_builtin_strncpy (tree exp, rtx target)
3824 {
3825   location_t loc = EXPR_LOCATION (exp);
3826 
3827   if (validate_arglist (exp,
3828  			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3829     {
3830       tree dest = CALL_EXPR_ARG (exp, 0);
3831       tree src = CALL_EXPR_ARG (exp, 1);
3832       /* The number of bytes to write (not the maximum).  */
3833       tree len = CALL_EXPR_ARG (exp, 2);
3834       /* The length of the source sequence.  */
3835       tree slen = c_strlen (src, 1);
3836 
3837       if (warn_stringop_overflow)
3838 	{
3839 	  /* Try to determine the range of lengths that the source expression
3840 	     refers to.  */
3841 	  tree lenrange[2];
3842 	  if (slen)
3843 	    lenrange[0] = lenrange[1] = slen;
3844 	  else
3845 	    {
3846 	      get_range_strlen (src, lenrange);
3847 	      slen = lenrange[0];
3848 	    }
3849 
3850 	  tree destsize = compute_dest_size (dest,
3851 					     warn_stringop_overflow - 1);
3852 
3853 	  /* The number of bytes to write is LEN but check_sizes will also
3854 	     check SLEN if LEN's value isn't known.  */
3855 	  check_sizes (OPT_Wstringop_overflow_,
3856 		       exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3857 	}
3858 
3859       /* We must be passed a constant len and src parameter.  */
3860       if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3861 	return NULL_RTX;
3862 
3863       slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3864 
3865       /* We're required to pad with trailing zeros if the requested
3866 	 len is greater than strlen(s2)+1.  In that case try to
3867 	 use store_by_pieces; if it fails, punt.  */
3868       if (tree_int_cst_lt (slen, len))
3869 	{
3870 	  unsigned int dest_align = get_pointer_alignment (dest);
3871 	  const char *p = c_getstr (src);
3872 	  rtx dest_mem;
3873 
3874 	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3875 	      || !can_store_by_pieces (tree_to_uhwi (len),
3876 				       builtin_strncpy_read_str,
3877 				       CONST_CAST (char *, p),
3878 				       dest_align, false))
3879 	    return NULL_RTX;
3880 
3881 	  dest_mem = get_memory_rtx (dest, len);
3882 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
3883 			   builtin_strncpy_read_str,
3884 			   CONST_CAST (char *, p), dest_align, false, 0);
3885 	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
3886 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3887 	  return dest_mem;
3888 	}
3889     }
3890   return NULL_RTX;
3891 }
3892 
3893 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3894    bytes from constant string DATA + OFFSET and return it as target
3895    constant.  */
3896 
3897 rtx
3898 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3899 			 machine_mode mode)
3900 {
3901   const char *c = (const char *) data;
3902   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3903 
3904   memset (p, *c, GET_MODE_SIZE (mode));
3905 
3906   return c_readstr (p, mode);
3907 }
3908 
3909 /* Callback routine for store_by_pieces.  Return the RTL of a register
3910    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3911    char value given in the RTL register data.  For example, if mode is
3912    4 bytes wide, return the RTL for 0x01010101*data.  */
3913 
3914 static rtx
3915 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3916 			machine_mode mode)
3917 {
3918   rtx target, coeff;
3919   size_t size;
3920   char *p;
3921 
3922   size = GET_MODE_SIZE (mode);
3923   if (size == 1)
3924     return (rtx) data;
3925 
3926   p = XALLOCAVEC (char, size);
3927   memset (p, 1, size);
3928   coeff = c_readstr (p, mode);
3929 
3930   target = convert_to_mode (mode, (rtx) data, 1);
3931   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3932   return force_reg (mode, target);
3933 }
3934 
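/* Illustration (not part of GCC): the broadcast trick above, as a plain C
   sketch for a hypothetical 4-byte mode:

     #include <stdint.h>

     uint32_t
     broadcast (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // e.g. 0xab -> 0xabababab
     }
*/
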
3935 /* Expand expression EXP, which is a call to the memset builtin.  Return
3936    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3937    try to get the result in TARGET, if convenient (and in mode MODE if that's
3938    convenient).  */
3939 
3940 static rtx
3941 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3942 {
3943   if (!validate_arglist (exp,
3944  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3945     return NULL_RTX;
3946 
3947   tree dest = CALL_EXPR_ARG (exp, 0);
3948   tree val = CALL_EXPR_ARG (exp, 1);
3949   tree len = CALL_EXPR_ARG (exp, 2);
3950 
3951   check_memop_sizes (exp, dest, len);
3952 
3953   return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3954 }
3955 
3956 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3957    Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3958    try to get the result in TARGET, if convenient (and in mode MODE if that's
3959    convenient).  */
3960 
3961 static rtx
3962 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3963 {
3964   if (!validate_arglist (exp,
3965 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3966 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3967     return NULL_RTX;
3968   else
3969     {
3970       tree dest = CALL_EXPR_ARG (exp, 0);
3971       tree val = CALL_EXPR_ARG (exp, 2);
3972       tree len = CALL_EXPR_ARG (exp, 3);
3973       rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3974 
3975       /* Return src bounds with the result.  */
3976       if (res)
3977 	{
3978 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3979 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3980 	  res = chkp_join_splitted_slot (res, bnd);
3981 	}
3982       return res;
3983     }
3984 }
3985 
3986 /* Helper function to do the actual work for expand_builtin_memset.  The
3987    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3988    so that this can also be called without constructing an actual CALL_EXPR.
3989    The other arguments and return value are the same as for
3990    expand_builtin_memset.  */
3991 
3992 static rtx
3993 expand_builtin_memset_args (tree dest, tree val, tree len,
3994 			    rtx target, machine_mode mode, tree orig_exp)
3995 {
3996   tree fndecl, fn;
3997   enum built_in_function fcode;
3998   machine_mode val_mode;
3999   char c;
4000   unsigned int dest_align;
4001   rtx dest_mem, dest_addr, len_rtx;
4002   HOST_WIDE_INT expected_size = -1;
4003   unsigned int expected_align = 0;
4004   unsigned HOST_WIDE_INT min_size;
4005   unsigned HOST_WIDE_INT max_size;
4006   unsigned HOST_WIDE_INT probable_max_size;
4007 
4008   dest_align = get_pointer_alignment (dest);
4009 
4010   /* If DEST is not a pointer type, don't do this operation in-line.  */
4011   if (dest_align == 0)
4012     return NULL_RTX;
4013 
4014   if (currently_expanding_gimple_stmt)
4015     stringop_block_profile (currently_expanding_gimple_stmt,
4016 			    &expected_align, &expected_size);
4017 
4018   if (expected_align < dest_align)
4019     expected_align = dest_align;
4020 
4021   /* If the LEN parameter is zero, return DEST.  */
4022   if (integer_zerop (len))
4023     {
4024       /* Evaluate and ignore VAL in case it has side-effects.  */
4025       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4026       return expand_expr (dest, target, mode, EXPAND_NORMAL);
4027     }
4028 
4029   /* Stabilize the arguments in case we fail.  */
4030   dest = builtin_save_expr (dest);
4031   val = builtin_save_expr (val);
4032   len = builtin_save_expr (len);
4033 
4034   len_rtx = expand_normal (len);
4035   determine_block_size (len, len_rtx, &min_size, &max_size,
4036 			&probable_max_size);
4037   dest_mem = get_memory_rtx (dest, len);
4038   val_mode = TYPE_MODE (unsigned_char_type_node);
4039 
4040   if (TREE_CODE (val) != INTEGER_CST)
4041     {
4042       rtx val_rtx;
4043 
4044       val_rtx = expand_normal (val);
4045       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4046 
4047       /* Assume that we can memset by pieces if we can store
4048 	 the coefficients by pieces (in the required modes).
4049 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4050       c = 1;
4051       if (tree_fits_uhwi_p (len)
4052 	  && can_store_by_pieces (tree_to_uhwi (len),
4053 				  builtin_memset_read_str, &c, dest_align,
4054 				  true))
4055 	{
4056 	  val_rtx = force_reg (val_mode, val_rtx);
4057 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4058 			   builtin_memset_gen_str, val_rtx, dest_align,
4059 			   true, 0);
4060 	}
4061       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4062 					dest_align, expected_align,
4063 					expected_size, min_size, max_size,
4064 					probable_max_size))
4065 	goto do_libcall;
4066 
4067       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4068       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4069       return dest_mem;
4070     }
4071 
4072   if (target_char_cast (val, &c))
4073     goto do_libcall;
4074 
4075   if (c)
4076     {
4077       if (tree_fits_uhwi_p (len)
4078 	  && can_store_by_pieces (tree_to_uhwi (len),
4079 				  builtin_memset_read_str, &c, dest_align,
4080 				  true))
4081 	store_by_pieces (dest_mem, tree_to_uhwi (len),
4082 			 builtin_memset_read_str, &c, dest_align, true, 0);
4083       else if (!set_storage_via_setmem (dest_mem, len_rtx,
4084 					gen_int_mode (c, val_mode),
4085 					dest_align, expected_align,
4086 					expected_size, min_size, max_size,
4087 					probable_max_size))
4088 	goto do_libcall;
4089 
4090       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4091       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4092       return dest_mem;
4093     }
4094 
4095   set_mem_align (dest_mem, dest_align);
4096   dest_addr = clear_storage_hints (dest_mem, len_rtx,
4097 				   CALL_EXPR_TAILCALL (orig_exp)
4098 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4099 				   expected_align, expected_size,
4100 				   min_size, max_size,
4101 				   probable_max_size);
4102 
4103   if (dest_addr == 0)
4104     {
4105       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4106       dest_addr = convert_memory_address (ptr_mode, dest_addr);
4107     }
4108 
4109   return dest_addr;
4110 
4111  do_libcall:
4112   fndecl = get_callee_fndecl (orig_exp);
4113   fcode = DECL_FUNCTION_CODE (fndecl);
4114   if (fcode == BUILT_IN_MEMSET
4115       || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4116     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4117 				dest, val, len);
4118   else if (fcode == BUILT_IN_BZERO)
4119     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4120 				dest, len);
4121   else
4122     gcc_unreachable ();
4123   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4124   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4125   return expand_call (fn, target, target == const0_rtx);
4126 }
4127 
4128 /* Expand expression EXP, which is a call to the bzero builtin.  Return
4129    NULL_RTX if we failed; the caller should emit a normal call.  */
4130 
4131 static rtx
4132 expand_builtin_bzero (tree exp)
4133 {
4134   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4135     return NULL_RTX;
4136 
4137   tree dest = CALL_EXPR_ARG (exp, 0);
4138   tree size = CALL_EXPR_ARG (exp, 1);
4139 
4140   check_memop_sizes (exp, dest, size);
4141 
4142   /* New argument list transforming bzero(ptr x, int y) to
4143      memset(ptr x, int 0, size_t y).  This is done this way
4144      so that if it isn't expanded inline, we fall back to
4145      calling bzero instead of memset.  */
4146 
4147   location_t loc = EXPR_LOCATION (exp);
4148 
4149   return expand_builtin_memset_args (dest, integer_zero_node,
4150 				     fold_convert_loc (loc,
4151 						       size_type_node, size),
4152 				     const0_rtx, VOIDmode, exp);
4153 }
4154 
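/* Illustration (not part of GCC): the rewrite above, in source terms:

     bzero (p, n);   // expanded as memset (p, 0, (size_t) n); if that is
                     // not inlined, the emitted call is still to bzero
*/
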
4155 /* Try to expand cmpstr operation ICODE with the given operands.
4156    Return the result rtx on success, otherwise return null.  */
4157 
4158 static rtx
4159 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4160 	       HOST_WIDE_INT align)
4161 {
4162   machine_mode insn_mode = insn_data[icode].operand[0].mode;
4163 
4164   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4165     target = NULL_RTX;
4166 
4167   struct expand_operand ops[4];
4168   create_output_operand (&ops[0], target, insn_mode);
4169   create_fixed_operand (&ops[1], arg1_rtx);
4170   create_fixed_operand (&ops[2], arg2_rtx);
4171   create_integer_operand (&ops[3], align);
4172   if (maybe_expand_insn (icode, 4, ops))
4173     return ops[0].value;
4174   return NULL_RTX;
4175 }
4176 
4177 /* Expand expression EXP, which is a call to the memcmp built-in function.
4178    Return NULL_RTX if we failed and the caller should emit a normal call,
4179    otherwise try to get the result in TARGET, if convenient.
4180    RESULT_EQ is true if we can relax the returned value to be either zero
4181    or nonzero, without caring about the sign.  */
4182 
4183 static rtx
4184 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4185 {
4186   if (!validate_arglist (exp,
4187  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4188     return NULL_RTX;
4189 
4190   tree arg1 = CALL_EXPR_ARG (exp, 0);
4191   tree arg2 = CALL_EXPR_ARG (exp, 1);
4192   tree len = CALL_EXPR_ARG (exp, 2);
4193   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4194   location_t loc = EXPR_LOCATION (exp);
4195 
4196   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4197   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4198 
4199   /* If we don't have POINTER_TYPE, call the function.  */
4200   if (arg1_align == 0 || arg2_align == 0)
4201     return NULL_RTX;
4202 
4203   rtx arg1_rtx = get_memory_rtx (arg1, len);
4204   rtx arg2_rtx = get_memory_rtx (arg2, len);
4205   rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4206 
4207   /* Set MEM_SIZE as appropriate.  */
4208   if (CONST_INT_P (len_rtx))
4209     {
4210       set_mem_size (arg1_rtx, INTVAL (len_rtx));
4211       set_mem_size (arg2_rtx, INTVAL (len_rtx));
4212     }
4213 
4214   by_pieces_constfn constfn = NULL;
4215 
4216   const char *src_str = c_getstr (arg2);
4217   if (result_eq && src_str == NULL)
4218     {
4219       src_str = c_getstr (arg1);
4220       if (src_str != NULL)
4221 	std::swap (arg1_rtx, arg2_rtx);
4222     }
4223 
4224   /* If SRC is a string constant and the block comparison would be done
4225      by pieces, we can avoid loading the string from memory
4226      and only store the computed constants.  */
4227   if (src_str
4228       && CONST_INT_P (len_rtx)
4229       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4230     constfn = builtin_memcpy_read_str;
4231 
4232   rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4233 				     TREE_TYPE (len), target,
4234 				     result_eq, constfn,
4235 				     CONST_CAST (char *, src_str));
4236 
4237   if (result)
4238     {
4239       /* Return the value in the proper mode for this function.  */
4240       if (GET_MODE (result) == mode)
4241 	return result;
4242 
4243       if (target != 0)
4244 	{
4245 	  convert_move (target, result, 0);
4246 	  return target;
4247 	}
4248 
4249       return convert_to_mode (mode, result, 0);
4250     }
4251 
4252   return NULL_RTX;
4253 }
4254 
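/* Illustration (not part of GCC): RESULT_EQ above distinguishes the two
   ways user code consumes memcmp:

     if (memcmp (a, b, n) == 0)   // equality only: any nonzero result
       ...                        // is acceptable (RESULT_EQ)
     int r = memcmp (a, b, n);    // ordered use: the sign of the result
                                  // matters, so it cannot be relaxed
*/
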
4255 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4256    if we failed; the caller should emit a normal call, otherwise try to get
4257    the result in TARGET, if convenient.  */
4258 
4259 static rtx
4260 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4261 {
4262   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4263     return NULL_RTX;
4264 
4265   insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4266   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4267   if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4268     {
4269       rtx arg1_rtx, arg2_rtx;
4270       tree fndecl, fn;
4271       tree arg1 = CALL_EXPR_ARG (exp, 0);
4272       tree arg2 = CALL_EXPR_ARG (exp, 1);
4273       rtx result = NULL_RTX;
4274 
4275       unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4276       unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4277 
4278       /* If we don't have POINTER_TYPE, call the function.  */
4279       if (arg1_align == 0 || arg2_align == 0)
4280 	return NULL_RTX;
4281 
4282       /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
4283       arg1 = builtin_save_expr (arg1);
4284       arg2 = builtin_save_expr (arg2);
4285 
4286       arg1_rtx = get_memory_rtx (arg1, NULL);
4287       arg2_rtx = get_memory_rtx (arg2, NULL);
4288 
4289       /* Try to call cmpstrsi.  */
4290       if (cmpstr_icode != CODE_FOR_nothing)
4291 	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4292 				MIN (arg1_align, arg2_align));
4293 
4294       /* Try to determine at least one length and call cmpstrnsi.  */
4295       if (!result && cmpstrn_icode != CODE_FOR_nothing)
4296 	{
4297 	  tree len;
4298 	  rtx arg3_rtx;
4299 
4300 	  tree len1 = c_strlen (arg1, 1);
4301 	  tree len2 = c_strlen (arg2, 1);
4302 
4303 	  if (len1)
4304 	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4305 	  if (len2)
4306 	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4307 
4308 	  /* If we don't have a constant length for the first, use the length
4309 	     of the second, if we know it.  We don't require a constant for
4310 	     this case; some cost analysis could be done if both are available
4311 	     but neither is constant.  For now, assume they're equally cheap,
4312 	     unless one has side effects.  If both strings have constant lengths,
4313 	     use the smaller.  */
4314 
4315 	  if (!len1)
4316 	    len = len2;
4317 	  else if (!len2)
4318 	    len = len1;
4319 	  else if (TREE_SIDE_EFFECTS (len1))
4320 	    len = len2;
4321 	  else if (TREE_SIDE_EFFECTS (len2))
4322 	    len = len1;
4323 	  else if (TREE_CODE (len1) != INTEGER_CST)
4324 	    len = len2;
4325 	  else if (TREE_CODE (len2) != INTEGER_CST)
4326 	    len = len1;
4327 	  else if (tree_int_cst_lt (len1, len2))
4328 	    len = len1;
4329 	  else
4330 	    len = len2;
4331 
4332 	  /* If both arguments have side effects, we cannot optimize.  */
4333 	  if (len && !TREE_SIDE_EFFECTS (len))
4334 	    {
4335 	      arg3_rtx = expand_normal (len);
4336 	      result = expand_cmpstrn_or_cmpmem
4337 		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4338 		 arg3_rtx, MIN (arg1_align, arg2_align));
4339 	    }
4340 	}
4341 
4342       if (result)
4343 	{
4344 	  /* Return the value in the proper mode for this function.  */
4345 	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4346 	  if (GET_MODE (result) == mode)
4347 	    return result;
4348 	  if (target == 0)
4349 	    return convert_to_mode (mode, result, 0);
4350 	  convert_move (target, result, 0);
4351 	  return target;
4352 	}
4353 
4354       /* Expand the library call ourselves using a stabilized argument
4355 	 list to avoid re-evaluating the function's arguments twice.  */
4356       fndecl = get_callee_fndecl (exp);
4357       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4358       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4359       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4360       return expand_call (fn, target, target == const0_rtx);
4361     }
4362   return NULL_RTX;
4363 }
4364 
4365 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4366    NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4367    the result in TARGET, if convenient.  */
4368 
4369 static rtx
4370 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4371 			ATTRIBUTE_UNUSED machine_mode mode)
4372 {
4373   location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4374 
4375   if (!validate_arglist (exp,
4376  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4377     return NULL_RTX;
4378 
4379   /* If c_strlen can determine an expression for one of the string
4380      lengths, and it doesn't have side effects, then emit cmpstrnsi
4381      using length MIN(strlen(string)+1, arg3).  */
4382   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4383   if (cmpstrn_icode != CODE_FOR_nothing)
4384   {
4385     tree len, len1, len2, len3;
4386     rtx arg1_rtx, arg2_rtx, arg3_rtx;
4387     rtx result;
4388     tree fndecl, fn;
4389     tree arg1 = CALL_EXPR_ARG (exp, 0);
4390     tree arg2 = CALL_EXPR_ARG (exp, 1);
4391     tree arg3 = CALL_EXPR_ARG (exp, 2);
4392 
4393     unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4394     unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4395 
4396     len1 = c_strlen (arg1, 1);
4397     len2 = c_strlen (arg2, 1);
4398 
4399     if (len1)
4400       len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4401     if (len2)
4402       len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4403 
4404     len3 = fold_convert_loc (loc, sizetype, arg3);
4405 
4406     /* If we don't have a constant length for the first, use the length
4407        of the second, if we know it.  If neither string is constant length,
4408        use the given length argument.  We don't require a constant for
4409        this case; some cost analysis could be done if both are available
4410        but neither is constant.  For now, assume they're equally cheap,
4411        unless one has side effects.  If both strings have constant lengths,
4412        use the smaller.  */
4413 
4414     if (!len1 && !len2)
4415       len = len3;
4416     else if (!len1)
4417       len = len2;
4418     else if (!len2)
4419       len = len1;
4420     else if (TREE_SIDE_EFFECTS (len1))
4421       len = len2;
4422     else if (TREE_SIDE_EFFECTS (len2))
4423       len = len1;
4424     else if (TREE_CODE (len1) != INTEGER_CST)
4425       len = len2;
4426     else if (TREE_CODE (len2) != INTEGER_CST)
4427       len = len1;
4428     else if (tree_int_cst_lt (len1, len2))
4429       len = len1;
4430     else
4431       len = len2;
4432 
4433     /* If we are not using the given length, we must incorporate it here.
4434        The actual new length parameter will be MIN(len,arg3) in this case.  */
4435     if (len != len3)
4436       len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4437     arg1_rtx = get_memory_rtx (arg1, len);
4438     arg2_rtx = get_memory_rtx (arg2, len);
4439     arg3_rtx = expand_normal (len);
4440     result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4441 				       arg2_rtx, TREE_TYPE (len), arg3_rtx,
4442 				       MIN (arg1_align, arg2_align));
4443     if (result)
4444       {
4445 	/* Return the value in the proper mode for this function.  */
4446 	mode = TYPE_MODE (TREE_TYPE (exp));
4447 	if (GET_MODE (result) == mode)
4448 	  return result;
4449 	if (target == 0)
4450 	  return convert_to_mode (mode, result, 0);
4451 	convert_move (target, result, 0);
4452 	return target;
4453       }
4454 
4455     /* Expand the library call ourselves using a stabilized argument
4456        list to avoid re-evaluating the function's arguments twice.  */
4457     fndecl = get_callee_fndecl (exp);
4458     fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4459 				arg1, arg2, len);
4460     gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4461     CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4462     return expand_call (fn, target, target == const0_rtx);
4463   }
4464   return NULL_RTX;
4465 }
4466 
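/* Illustration (not part of GCC): the length selection above.  For a
   hypothetical call

     strncmp (s, "abc", 100)

   len2 == strlen ("abc") + 1 == 4, so the comparison is emitted with
   MIN (4, 100) == 4 bytes: the NUL in the constant string bounds the
   compare regardless of the larger user-supplied limit.  */
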
4467 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4468    if that's convenient.  */
4469 
4470 rtx
4471 expand_builtin_saveregs (void)
4472 {
4473   rtx val;
4474   rtx_insn *seq;
4475 
4476   /* Don't do __builtin_saveregs more than once in a function.
4477      Save the result of the first call and reuse it.  */
4478   if (saveregs_value != 0)
4479     return saveregs_value;
4480 
4481   /* When this function is called, it means that registers must be
4482      saved on entry to this function.  So we migrate the call to the
4483      first insn of this function.  */
4484 
4485   start_sequence ();
4486 
4487   /* Do whatever the machine needs done in this case.  */
4488   val = targetm.calls.expand_builtin_saveregs ();
4489 
4490   seq = get_insns ();
4491   end_sequence ();
4492 
4493   saveregs_value = val;
4494 
4495   /* Put the insns after the NOTE that starts the function.  If this
4496      is inside a start_sequence, make the outer-level insn chain current, so
4497      the code is placed at the start of the function.  */
4498   push_topmost_sequence ();
4499   emit_insn_after (seq, entry_of_function ());
4500   pop_topmost_sequence ();
4501 
4502   return val;
4503 }
4504 
4505 /* Expand a call to __builtin_next_arg.  */
4506 
4507 static rtx
4508 expand_builtin_next_arg (void)
4509 {
4510   /* Checking arguments is already done in fold_builtin_next_arg,
4511      which must be called before this function.  */
4512   return expand_binop (ptr_mode, add_optab,
4513 		       crtl->args.internal_arg_pointer,
4514 		       crtl->args.arg_offset_rtx,
4515 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4516 }
4517 
4518 /* Make it easier for the backends by protecting the valist argument
4519    from multiple evaluations.  */
4520 
4521 static tree
4522 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4523 {
4524   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4525 
4526   /* The current way of determining the type of valist is completely
4527      bogus.  We should have the information on the va builtin instead.  */
4528   if (!vatype)
4529     vatype = targetm.fn_abi_va_list (cfun->decl);
4530 
4531   if (TREE_CODE (vatype) == ARRAY_TYPE)
4532     {
4533       if (TREE_SIDE_EFFECTS (valist))
4534 	valist = save_expr (valist);
4535 
4536       /* For this case, the backends will be expecting a pointer to
4537 	 vatype, but it's possible we've actually been given an array
4538 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4539 	 So fix it.  */
4540       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4541 	{
4542 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
4543 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4544 	}
4545     }
4546   else
4547     {
4548       tree pt = build_pointer_type (vatype);
4549 
4550       if (! needs_lvalue)
4551 	{
4552 	  if (! TREE_SIDE_EFFECTS (valist))
4553 	    return valist;
4554 
4555 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4556 	  TREE_SIDE_EFFECTS (valist) = 1;
4557 	}
4558 
4559       if (TREE_SIDE_EFFECTS (valist))
4560 	valist = save_expr (valist);
4561       valist = fold_build2_loc (loc, MEM_REF,
4562 				vatype, valist, build_int_cst (pt, 0));
4563     }
4564 
4565   return valist;
4566 }
4567 
4568 /* The "standard" definition of va_list is void*.  */
4569 
4570 tree
4571 std_build_builtin_va_list (void)
4572 {
4573   return ptr_type_node;
4574 }
4575 
4576 /* The "standard" abi va_list is va_list_type_node.  */
4577 
4578 tree
4579 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4580 {
4581   return va_list_type_node;
4582 }
4583 
4584 /* The "standard" type of va_list is va_list_type_node.  */
4585 
4586 tree
4587 std_canonical_va_list_type (tree type)
4588 {
4589   tree wtype, htype;
4590 
4591   wtype = va_list_type_node;
4592   htype = type;
4593 
4594   if (TREE_CODE (wtype) == ARRAY_TYPE)
4595     {
4596       /* If va_list is an array type, the argument may have decayed
4597 	 to a pointer type, e.g. by being passed to another function.
4598 	 In that case, unwrap both types so that we can compare the
4599 	 underlying records.  */
4600       if (TREE_CODE (htype) == ARRAY_TYPE
4601 	  || POINTER_TYPE_P (htype))
4602 	{
4603 	  wtype = TREE_TYPE (wtype);
4604 	  htype = TREE_TYPE (htype);
4605 	}
4606     }
4607   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4608     return va_list_type_node;
4609 
4610   return NULL_TREE;
4611 }
4612 
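/* Illustration (not part of GCC): why the unwrapping above is needed on
   targets whose va_list is an array type (hypothetical ABI):

     typedef struct __va_tag va_list[1];
     void g (va_list ap);    // the parameter decays to struct __va_tag *

   Both the array and the decayed pointer must canonicalize to the same
   va_list type.  */
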
4613 /* The "standard" implementation of va_start: just assign `nextarg' to
4614    the variable.  */
4615 
4616 void
4617 std_expand_builtin_va_start (tree valist, rtx nextarg)
4618 {
4619   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4620   convert_move (va_r, nextarg, 0);
4621 
4622   /* We do not have any valid bounds for the pointer, so
4623      just store zero bounds for it.  */
4624   if (chkp_function_instrumented_p (current_function_decl))
4625     chkp_expand_bounds_reset_for_mem (valist,
4626 				      make_tree (TREE_TYPE (valist),
4627 						 nextarg));
4628 }
4629 
4630 /* Expand EXP, a call to __builtin_va_start.  */
4631 
4632 static rtx
4633 expand_builtin_va_start (tree exp)
4634 {
4635   rtx nextarg;
4636   tree valist;
4637   location_t loc = EXPR_LOCATION (exp);
4638 
4639   if (call_expr_nargs (exp) < 2)
4640     {
4641       error_at (loc, "too few arguments to function %<va_start%>");
4642       return const0_rtx;
4643     }
4644 
4645   if (fold_builtin_next_arg (exp, true))
4646     return const0_rtx;
4647 
4648   nextarg = expand_builtin_next_arg ();
4649   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4650 
4651   if (targetm.expand_builtin_va_start)
4652     targetm.expand_builtin_va_start (valist, nextarg);
4653   else
4654     std_expand_builtin_va_start (valist, nextarg);
4655 
4656   return const0_rtx;
4657 }
4658 
4659 /* Expand EXP, a call to __builtin_va_end.  */
4660 
4661 static rtx
4662 expand_builtin_va_end (tree exp)
4663 {
4664   tree valist = CALL_EXPR_ARG (exp, 0);
4665 
4666   /* Evaluate for side effects, if needed.  I hate macros that don't
4667      do that.  */
4668   if (TREE_SIDE_EFFECTS (valist))
4669     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4670 
4671   return const0_rtx;
4672 }
4673 
4674 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
4675    builtin rather than just as an assignment in stdarg.h because of the
4676    nastiness of array-type va_list types.  */
4677 
4678 static rtx
4679 expand_builtin_va_copy (tree exp)
4680 {
4681   tree dst, src, t;
4682   location_t loc = EXPR_LOCATION (exp);
4683 
4684   dst = CALL_EXPR_ARG (exp, 0);
4685   src = CALL_EXPR_ARG (exp, 1);
4686 
4687   dst = stabilize_va_list_loc (loc, dst, 1);
4688   src = stabilize_va_list_loc (loc, src, 0);
4689 
4690   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4691 
4692   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4693     {
4694       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4695       TREE_SIDE_EFFECTS (t) = 1;
4696       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4697     }
4698   else
4699     {
4700       rtx dstb, srcb, size;
4701 
4702       /* Evaluate to pointers.  */
4703       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4704       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4705       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4706 			  NULL_RTX, VOIDmode, EXPAND_NORMAL);
4707 
4708       dstb = convert_memory_address (Pmode, dstb);
4709       srcb = convert_memory_address (Pmode, srcb);
4710 
4711       /* "Dereference" to BLKmode memories.  */
4712       dstb = gen_rtx_MEM (BLKmode, dstb);
4713       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4714       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4715       srcb = gen_rtx_MEM (BLKmode, srcb);
4716       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4717       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4718 
4719       /* Copy.  */
4720       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4721     }
4722 
4723   return const0_rtx;
4724 }
4725 
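/* Illustrative sketch, not part of GCC: the user-level behavior the
   expander above implements.  va_copy must be a builtin precisely so
   that it also works on targets whose va_list is an array type, where
   a plain assignment in a stdarg.h macro would be ill-formed:

     #include <stdarg.h>

     int
     sum_twice (int n, ...)
     {
       va_list ap, aq;
       int s = 0;
       va_start (ap, n);
       va_copy (aq, ap);            two independent cursors
       for (int i = 0; i < n; i++)
         s += va_arg (ap, int);
       for (int i = 0; i < n; i++)
         s += va_arg (aq, int);     re-reads the same arguments
       va_end (ap);
       va_end (aq);
       return s;
     }  */
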
4726 /* Expand a call to one of the builtin functions __builtin_frame_address or
4727    __builtin_return_address.  */
4728 
4729 static rtx
4730 expand_builtin_frame_address (tree fndecl, tree exp)
4731 {
4732   /* The argument must be a nonnegative integer constant.
4733      It counts the number of frames to scan up the stack.
4734      The value is either the frame pointer value or the return
4735      address saved in that frame.  */
4736   if (call_expr_nargs (exp) == 0)
4737     /* Warning about missing arg was already issued.  */
4738     return const0_rtx;
4739   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4740     {
4741       error ("invalid argument to %qD", fndecl);
4742       return const0_rtx;
4743     }
4744   else
4745     {
4746       /* Number of frames to scan up the stack.  */
4747       unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4748 
4749       rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4750 
4751       /* Some ports cannot access arbitrary stack frames.  */
4752       if (tem == NULL)
4753 	{
4754 	  warning (0, "unsupported argument to %qD", fndecl);
4755 	  return const0_rtx;
4756 	}
4757 
4758       if (count)
4759 	{
4760 	  /* Warn since no effort is made to ensure that any frame
4761 	     beyond the current one exists or can be safely reached.  */
4762 	  warning (OPT_Wframe_address, "calling %qD with "
4763 		   "a nonzero argument is unsafe", fndecl);
4764 	}
4765 
4766       /* For __builtin_frame_address, return what we've got.  */
4767       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4768 	return tem;
4769 
4770       if (!REG_P (tem)
4771 	  && ! CONSTANT_P (tem))
4772 	tem = copy_addr_to_reg (tem);
4773       return tem;
4774     }
4775 }
4776 
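/* Illustrative sketch, not part of GCC: typical uses of the two builtins
   handled above.  Only a zero count is reliably safe, which is what the
   -Wframe-address warning above is about:

     void *
     whoami (void)
     {
       return __builtin_return_address (0);   address we will return to
     }

     void *
     myframe (void)
     {
       return __builtin_frame_address (0);    this function's own frame
     }  */
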
4777 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
4778    failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
4779    is the same as for allocate_dynamic_stack_space.  */
4780 
4781 static rtx
4782 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4783 {
4784   rtx op0;
4785   rtx result;
4786   unsigned int align;
4787   tree fndecl = get_callee_fndecl (exp);
4788   bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4789 			    == BUILT_IN_ALLOCA_WITH_ALIGN);
4790 
4791   bool valid_arglist
4792     = (alloca_with_align
4793        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4794        : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4795 
4796   if (!valid_arglist)
4797     return NULL_RTX;
4798 
4799   if ((alloca_with_align && !warn_vla_limit)
4800       || (!alloca_with_align && !warn_alloca_limit))
4801     {
4802       /* -Walloca-larger-than and -Wvla-larger-than settings override
4803 	 the more general -Walloc-size-larger-than so unless either of
4804 	 the former options is specified check the alloca arguments for
4805 	 overflow.  */
4806       tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4807       int idx[] = { 0, -1 };
4808       maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4809     }
4810 
4811   /* Compute the argument.  */
4812   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4813 
4814   /* Compute the alignment.  */
4815   align = (alloca_with_align
4816 	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4817 	   : BIGGEST_ALIGNMENT);
4818 
4819   /* Allocate the desired space.  */
4820   result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4821   result = convert_memory_address (ptr_mode, result);
4822 
4823   return result;
4824 }
4825 
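/* Illustrative sketch, not part of GCC: calls that reach the expander
   above.  For __builtin_alloca_with_align the second argument is a
   constant alignment in bits (hence the raw TREE_INT_CST_LOW above),
   so 128 below requests a 16-byte boundary:

     void
     demo (unsigned n)
     {
       char *buf = __builtin_alloca (n);
       char *abuf = __builtin_alloca_with_align (n, 128);
       buf[0] = abuf[0] = 0;
     }

   Both blocks live on the stack and vanish when demo returns.  */
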
4826 /* Expand a call to bswap builtin in EXP.
4827    Return NULL_RTX if a normal call should be emitted rather than expanding the
4828    function in-line.  If convenient, the result should be placed in TARGET.
4829    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4830 
4831 static rtx
4832 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4833 		      rtx subtarget)
4834 {
4835   tree arg;
4836   rtx op0;
4837 
4838   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4839     return NULL_RTX;
4840 
4841   arg = CALL_EXPR_ARG (exp, 0);
4842   op0 = expand_expr (arg,
4843 		     subtarget && GET_MODE (subtarget) == target_mode
4844 		     ? subtarget : NULL_RTX,
4845 		     target_mode, EXPAND_NORMAL);
4846   if (GET_MODE (op0) != target_mode)
4847     op0 = convert_to_mode (target_mode, op0, 1);
4848 
4849   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4850 
4851   gcc_assert (target);
4852 
4853   return convert_to_mode (target_mode, target, 1);
4854 }
4855 
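/* Illustrative sketch, not part of GCC: the semantics of the builtin
   expanded above, for the 32-bit case:

     unsigned int
     swap32 (unsigned int x)
     {
       return __builtin_bswap32 (x);   0x11223344 becomes 0x44332211
     }  */
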
4856 /* Expand a call to a unary builtin in EXP.
4857    Return NULL_RTX if a normal call should be emitted rather than expanding the
4858    function in-line.  If convenient, the result should be placed in TARGET.
4859    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4860 
4861 static rtx
4862 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4863 		     rtx subtarget, optab op_optab)
4864 {
4865   rtx op0;
4866 
4867   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4868     return NULL_RTX;
4869 
4870   /* Compute the argument.  */
4871   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4872 		     (subtarget
4873 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4874 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4875 		     VOIDmode, EXPAND_NORMAL);
4876   /* Compute op, into TARGET if possible.
4877      Set TARGET to wherever the result comes back.  */
4878   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4879 			op_optab, op0, target, op_optab != clrsb_optab);
4880   gcc_assert (target);
4881 
4882   return convert_to_mode (target_mode, target, 0);
4883 }
4884 
4885 /* Expand a call to __builtin_expect.  We just return our argument
4886    as the builtin_expect semantics should already have been handled by
4887    the tree branch prediction pass.  */
4888 
4889 static rtx
4890 expand_builtin_expect (tree exp, rtx target)
4891 {
4892   tree arg;
4893 
4894   if (call_expr_nargs (exp) < 2)
4895     return const0_rtx;
4896   arg = CALL_EXPR_ARG (exp, 0);
4897 
4898   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4899   /* When guessing was done, the hints should be already stripped away.  */
4900   gcc_assert (!flag_guess_branch_prob
4901 	      || optimize == 0 || seen_error ());
4902   return target;
4903 }
4904 
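/* Illustrative sketch, not part of GCC: a typical use of the builtin
   handled above.  By expansion time the hint has been consumed by the
   branch predictor, so only the first argument survives:

     if (__builtin_expect (p == NULL, 0))   "p is almost never null"
       slow_path ();

   and this expands exactly like a plain "if (p == NULL)".  */
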
4905 /* Expand a call to __builtin_assume_aligned.  We just return our first
4906    argument as the builtin_assume_aligned semantics should already have
4907    been handled by CCP.  */
4908 
4909 static rtx
4910 expand_builtin_assume_aligned (tree exp, rtx target)
4911 {
4912   if (call_expr_nargs (exp) < 2)
4913     return const0_rtx;
4914   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4915 			EXPAND_NORMAL);
4916   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4917 	      && (call_expr_nargs (exp) < 3
4918 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4919   return target;
4920 }
4921 
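/* Illustrative sketch, not part of GCC: a typical use of the builtin
   handled above; CCP has already propagated the alignment promise into
   the pointer's alignment info, so only the first argument is expanded:

     double *
     as_aligned16 (void *p)
     {
       return __builtin_assume_aligned (p, 16);
     }  */
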
4922 void
4923 expand_builtin_trap (void)
4924 {
4925   if (targetm.have_trap ())
4926     {
4927       rtx_insn *insn = emit_insn (targetm.gen_trap ());
4928       /* For trap insns when not accumulating outgoing args force
4929 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
4930 	 different args sizes.  */
4931       if (!ACCUMULATE_OUTGOING_ARGS)
4932 	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4933     }
4934   else
4935     {
4936       tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4937       tree call_expr = build_call_expr (fn, 0);
4938       expand_call (call_expr, NULL_RTX, false);
4939     }
4940 
4941   emit_barrier ();
4942 }
4943 
4944 /* Expand a call to __builtin_unreachable.  We do nothing except emit
4945    a barrier saying that control flow will not pass here.
4946 
4947    It is the responsibility of the program being compiled to ensure
4948    that control flow never reaches __builtin_unreachable.  */
4949 static void
4950 expand_builtin_unreachable (void)
4951 {
4952   emit_barrier ();
4953 }
4954 
4955 /* Expand EXP, a call to fabs, fabsf or fabsl.
4956    Return NULL_RTX if a normal call should be emitted rather than expanding
4957    the function inline.  If convenient, the result should be placed
4958    in TARGET.  SUBTARGET may be used as the target for computing
4959    the operand.  */
4960 
4961 static rtx
4962 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4963 {
4964   machine_mode mode;
4965   tree arg;
4966   rtx op0;
4967 
4968   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4969     return NULL_RTX;
4970 
4971   arg = CALL_EXPR_ARG (exp, 0);
4972   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4973   mode = TYPE_MODE (TREE_TYPE (arg));
4974   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4975   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4976 }
4977 
4978 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4979    Return NULL if a normal call should be emitted rather than expanding the
4980    function inline.  If convenient, the result should be placed in TARGET.
4981    SUBTARGET may be used as the target for computing the operand.  */
4982 
4983 static rtx
4984 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4985 {
4986   rtx op0, op1;
4987   tree arg;
4988 
4989   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4990     return NULL_RTX;
4991 
4992   arg = CALL_EXPR_ARG (exp, 0);
4993   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4994 
4995   arg = CALL_EXPR_ARG (exp, 1);
4996   op1 = expand_normal (arg);
4997 
4998   return expand_copysign (op0, op1, target);
4999 }
5000 
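/* Illustrative sketch, not part of GCC: expand_copysign implements the
   usual semantics, magnitude from the first operand and sign from the
   second, so for example:

     double d = __builtin_copysign (3.0, -0.0);   d is -3.0

   Note that the sign of a negative zero is honored, which a comparison
   against 0.0 could not detect.  */
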
5001 /* Expand a call to __builtin___clear_cache.  */
5002 
5003 static rtx
5004 expand_builtin___clear_cache (tree exp)
5005 {
5006   if (!targetm.code_for_clear_cache)
5007     {
5008 #ifdef CLEAR_INSN_CACHE
5009       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5010 	 does something.  Just do the default expansion to a call to
5011 	 __clear_cache().  */
5012       return NULL_RTX;
5013 #else
5014       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5015 	 does nothing.  There is no need to call it.  Do nothing.  */
5016       return const0_rtx;
5017 #endif /* CLEAR_INSN_CACHE */
5018     }
5019 
5020   /* We have a "clear_cache" insn, and it will handle everything.  */
5021   tree begin, end;
5022   rtx begin_rtx, end_rtx;
5023 
5024   /* We must not expand to a library call.  If we did, any
5025      fallback library function in libgcc that might contain a call to
5026      __builtin___clear_cache() would recurse infinitely.  */
5027   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5028     {
5029       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5030       return const0_rtx;
5031     }
5032 
5033   if (targetm.have_clear_cache ())
5034     {
5035       struct expand_operand ops[2];
5036 
5037       begin = CALL_EXPR_ARG (exp, 0);
5038       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5039 
5040       end = CALL_EXPR_ARG (exp, 1);
5041       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5042 
5043       create_address_operand (&ops[0], begin_rtx);
5044       create_address_operand (&ops[1], end_rtx);
5045       if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5046 	return const0_rtx;
5047     }
5048   return const0_rtx;
5049 }
5050 
5051 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5052 
5053 static rtx
5054 round_trampoline_addr (rtx tramp)
5055 {
5056   rtx temp, addend, mask;
5057 
5058   /* If we don't need too much alignment, we'll have been guaranteed
5059      proper alignment by get_trampoline_type.  */
5060   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5061     return tramp;
5062 
5063   /* Round address up to desired boundary.  */
5064   temp = gen_reg_rtx (Pmode);
5065   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5066   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5067 
5068   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5069 			       temp, 0, OPTAB_LIB_WIDEN);
5070   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5071 			       temp, 0, OPTAB_LIB_WIDEN);
5072 
5073   return tramp;
5074 }
5075 
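/* Illustrative sketch, not part of GCC: the PLUS/AND pair emitted above
   is the classic round-up-to-a-power-of-two idiom, in plain C:

     rounded = (addr + align - 1) & -align;    align is a power of two

   e.g. with align == 16, addresses 0x1001 through 0x1010 all round up
   to 0x1010.  */
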
5076 static rtx
5077 expand_builtin_init_trampoline (tree exp, bool onstack)
5078 {
5079   tree t_tramp, t_func, t_chain;
5080   rtx m_tramp, r_tramp, r_chain, tmp;
5081 
5082   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5083 			 POINTER_TYPE, VOID_TYPE))
5084     return NULL_RTX;
5085 
5086   t_tramp = CALL_EXPR_ARG (exp, 0);
5087   t_func = CALL_EXPR_ARG (exp, 1);
5088   t_chain = CALL_EXPR_ARG (exp, 2);
5089 
5090   r_tramp = expand_normal (t_tramp);
5091   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5092   MEM_NOTRAP_P (m_tramp) = 1;
5093 
5094   /* If ONSTACK, the TRAMP argument should be the address of a field
5095      within the local function's FRAME decl.  Either way, let's see if
5096      we can fill in the MEM_ATTRs for this memory.  */
5097   if (TREE_CODE (t_tramp) == ADDR_EXPR)
5098     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5099 
5100   /* The creator of a heap trampoline is responsible for making sure the
5101      address is aligned to at least STACK_BOUNDARY.  Normally malloc
5102      will ensure this anyhow.  */
5103   tmp = round_trampoline_addr (r_tramp);
5104   if (tmp != r_tramp)
5105     {
5106       m_tramp = change_address (m_tramp, BLKmode, tmp);
5107       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5108       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5109     }
5110 
5111   /* The FUNC argument should be the address of the nested function.
5112      Extract the actual function decl to pass to the hook.  */
5113   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5114   t_func = TREE_OPERAND (t_func, 0);
5115   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5116 
5117   r_chain = expand_normal (t_chain);
5118 
5119   /* Generate insns to initialize the trampoline.  */
5120   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5121 
5122   if (onstack)
5123     {
5124       trampolines_created = 1;
5125 
5126       if (targetm.calls.custom_function_descriptors != 0)
5127 	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5128 		    "trampoline generated for nested function %qD", t_func);
5129     }
5130 
5131   return const0_rtx;
5132 }
5133 
5134 static rtx
5135 expand_builtin_adjust_trampoline (tree exp)
5136 {
5137   rtx tramp;
5138 
5139   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5140     return NULL_RTX;
5141 
5142   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5143   tramp = round_trampoline_addr (tramp);
5144   if (targetm.calls.trampoline_adjust_address)
5145     tramp = targetm.calls.trampoline_adjust_address (tramp);
5146 
5147   return tramp;
5148 }
5149 
5150 /* Expand a call to the builtin descriptor initialization routine.
5151    A descriptor is made up of a pair of pointers: the static
5152    chain and the code entry, in that order.  */
5153 
5154 static rtx
5155 expand_builtin_init_descriptor (tree exp)
5156 {
5157   tree t_descr, t_func, t_chain;
5158   rtx m_descr, r_descr, r_func, r_chain;
5159 
5160   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5161 			 VOID_TYPE))
5162     return NULL_RTX;
5163 
5164   t_descr = CALL_EXPR_ARG (exp, 0);
5165   t_func = CALL_EXPR_ARG (exp, 1);
5166   t_chain = CALL_EXPR_ARG (exp, 2);
5167 
5168   r_descr = expand_normal (t_descr);
5169   m_descr = gen_rtx_MEM (BLKmode, r_descr);
5170   MEM_NOTRAP_P (m_descr) = 1;
5171 
5172   r_func = expand_normal (t_func);
5173   r_chain = expand_normal (t_chain);
5174 
5175   /* Generate insns to initialize the descriptor.  */
5176   emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5177   emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5178 				     POINTER_SIZE / BITS_PER_UNIT), r_func);
5179 
5180   return const0_rtx;
5181 }
5182 
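/* Illustrative sketch, not part of GCC: the two stores above lay the
   descriptor out as if it were the following pair, with the static
   chain first:

     struct descriptor
     {
       void *static_chain;   written at offset 0
       void *entry_point;    written at POINTER_SIZE / BITS_PER_UNIT
     };  */
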
5183 /* Expand a call to the builtin descriptor adjustment routine.  */
5184 
5185 static rtx
5186 expand_builtin_adjust_descriptor (tree exp)
5187 {
5188   rtx tramp;
5189 
5190   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5191     return NULL_RTX;
5192 
5193   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5194 
5195   /* Unalign the descriptor to allow runtime identification.  */
5196   tramp = plus_constant (ptr_mode, tramp,
5197 			 targetm.calls.custom_function_descriptors);
5198 
5199   return force_operand (tramp, NULL_RTX);
5200 }
5201 
5202 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5203    function.  The function first checks whether the back end provides
5204    an insn to implement signbit for the respective mode.  If not, it
5205    checks whether the floating point format of the value is such that
5206    the sign bit can be extracted.  If that is not the case, error out.
5207    EXP is the expression that is a call to the builtin function; if
5208    convenient, the result should be placed in TARGET.  */
5209 static rtx
5210 expand_builtin_signbit (tree exp, rtx target)
5211 {
5212   const struct real_format *fmt;
5213   machine_mode fmode, imode, rmode;
5214   tree arg;
5215   int word, bitpos;
5216   enum insn_code icode;
5217   rtx temp;
5218   location_t loc = EXPR_LOCATION (exp);
5219 
5220   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5221     return NULL_RTX;
5222 
5223   arg = CALL_EXPR_ARG (exp, 0);
5224   fmode = TYPE_MODE (TREE_TYPE (arg));
5225   rmode = TYPE_MODE (TREE_TYPE (exp));
5226   fmt = REAL_MODE_FORMAT (fmode);
5227 
5228   arg = builtin_save_expr (arg);
5229 
5230   /* Expand the argument, yielding an RTX expression.  */
5231   temp = expand_normal (arg);
5232 
5233   /* Check if the back end provides an insn that handles signbit for the
5234      argument's mode. */
5235   icode = optab_handler (signbit_optab, fmode);
5236   if (icode != CODE_FOR_nothing)
5237     {
5238       rtx_insn *last = get_last_insn ();
5239       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5240       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5241 	return target;
5242       delete_insns_since (last);
5243     }
5244 
5245   /* For floating point formats without a sign bit, implement signbit
5246      as "ARG < 0.0".  */
5247   bitpos = fmt->signbit_ro;
5248   if (bitpos < 0)
5249   {
5250     /* But we can't do this if the format supports signed zero.  */
5251     gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5252 
5253     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5254 		       build_real (TREE_TYPE (arg), dconst0));
5255     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5256   }
5257 
5258   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5259     {
5260       imode = int_mode_for_mode (fmode);
5261       gcc_assert (imode != BLKmode);
5262       temp = gen_lowpart (imode, temp);
5263     }
5264   else
5265     {
5266       imode = word_mode;
5267       /* Handle targets with different FP word orders.  */
5268       if (FLOAT_WORDS_BIG_ENDIAN)
5269 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5270       else
5271 	word = bitpos / BITS_PER_WORD;
5272       temp = operand_subword_force (temp, word, fmode);
5273       bitpos = bitpos % BITS_PER_WORD;
5274     }
5275 
5276   /* Force the intermediate word_mode (or narrower) result into a
5277      register.  This avoids attempting to create paradoxical SUBREGs
5278      of floating point modes below.  */
5279   temp = force_reg (imode, temp);
5280 
5281   /* If the bitpos is within the "result mode" lowpart, the operation
5282      can be implemented with a single bitwise AND.  Otherwise, we need
5283      a right shift and an AND.  */
5284 
5285   if (bitpos < GET_MODE_BITSIZE (rmode))
5286     {
5287       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5288 
5289       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5290 	temp = gen_lowpart (rmode, temp);
5291       temp = expand_binop (rmode, and_optab, temp,
5292 			   immed_wide_int_const (mask, rmode),
5293 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5294     }
5295   else
5296     {
5297       /* Perform a logical right shift to place the signbit in the least
5298 	 significant bit, then truncate the result to the desired mode
5299 	 and mask just this bit.  */
5300       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5301       temp = gen_lowpart (rmode, temp);
5302       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5303 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5304     }
5305 
5306   return temp;
5307 }
5308 
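/* Illustrative sketch, not part of GCC: the shift-and-mask fallback
   above, written as plain C for the common case of an IEEE double whose
   sign is bit 63 of the representation:

     #include <string.h>
     #include <stdint.h>

     int
     my_signbit (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);   reinterpret, no conversion
       return (bits >> 63) & 1;
     }

   Unlike an "x < 0.0" test, this distinguishes -0.0 from 0.0.  */
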
5309 /* Expand fork or exec calls.  TARGET is the desired target of the
5310    call.  EXP is the call.  FN is the
5311    declaration of the actual function.  IGNORE is nonzero if the
5312    value is to be ignored.  */
5313 
5314 static rtx
5315 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5316 {
5317   tree id, decl;
5318   tree call;
5319 
5320   /* If we are not profiling, just call the function.  */
5321   if (!profile_arc_flag)
5322     return NULL_RTX;
5323 
5324   /* Otherwise call the wrapper.  This should be equivalent for the rest of
5325      the compiler, so the code does not diverge, and the wrapper may run the
5326      code necessary for keeping the profiling sane.  */
5327 
5328   switch (DECL_FUNCTION_CODE (fn))
5329     {
5330     case BUILT_IN_FORK:
5331       id = get_identifier ("__gcov_fork");
5332       break;
5333 
5334     case BUILT_IN_EXECL:
5335       id = get_identifier ("__gcov_execl");
5336       break;
5337 
5338     case BUILT_IN_EXECV:
5339       id = get_identifier ("__gcov_execv");
5340       break;
5341 
5342     case BUILT_IN_EXECLP:
5343       id = get_identifier ("__gcov_execlp");
5344       break;
5345 
5346     case BUILT_IN_EXECLE:
5347       id = get_identifier ("__gcov_execle");
5348       break;
5349 
5350     case BUILT_IN_EXECVP:
5351       id = get_identifier ("__gcov_execvp");
5352       break;
5353 
5354     case BUILT_IN_EXECVE:
5355       id = get_identifier ("__gcov_execve");
5356       break;
5357 
5358     default:
5359       gcc_unreachable ();
5360     }
5361 
5362   decl = build_decl (DECL_SOURCE_LOCATION (fn),
5363 		     FUNCTION_DECL, id, TREE_TYPE (fn));
5364   DECL_EXTERNAL (decl) = 1;
5365   TREE_PUBLIC (decl) = 1;
5366   DECL_ARTIFICIAL (decl) = 1;
5367   TREE_NOTHROW (decl) = 1;
5368   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5369   DECL_VISIBILITY_SPECIFIED (decl) = 1;
5370   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5371   return expand_call (call, target, ignore);
5372 }
5373 
5374 
5375 
5376 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5377    the pointer in these functions is void*, the tree optimizers may remove
5378    casts.  The mode computed in expand_builtin isn't reliable either, due
5379    to __sync_bool_compare_and_swap.
5380 
5381    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5382    group of builtins.  This gives us log2 of the mode size.  */
5383 
5384 static inline machine_mode
5385 get_builtin_sync_mode (int fcode_diff)
5386 {
5387   /* The size is not negotiable, so ask not to get BLKmode in return
5388      if the target indicates that a smaller size would be better.  */
5389   return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5390 }
5391 
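/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 minus the _1 base code for
   its group gives FCODE_DIFF == 2, so the lookup above requests a mode
   of 8 << 2 == 32 bits, i.e. SImode on most targets.  */
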
5392 /* Expand the memory expression LOC and return the appropriate memory operand
5393    for the builtin_sync operations.  */
5394 
5395 static rtx
5396 get_builtin_sync_mem (tree loc, machine_mode mode)
5397 {
5398   rtx addr, mem;
5399 
5400   addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5401   addr = convert_memory_address (Pmode, addr);
5402 
5403   /* Note that we explicitly do not want any alias information for this
5404      memory, so that we kill all other live memories.  Otherwise we don't
5405      satisfy the full barrier semantics of the intrinsic.  */
5406   mem = validize_mem (gen_rtx_MEM (mode, addr));
5407 
5408   /* The alignment needs to be at least that of the mode.  */
5409   set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5410 			   get_pointer_alignment (loc)));
5411   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5412   MEM_VOLATILE_P (mem) = 1;
5413 
5414   return mem;
5415 }
5416 
5417 /* Make sure an argument is in the right mode.
5418    EXP is the tree argument.
5419    MODE is the mode it should be in.  */
5420 
5421 static rtx
5422 expand_expr_force_mode (tree exp, machine_mode mode)
5423 {
5424   rtx val;
5425   machine_mode old_mode;
5426 
5427   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5428   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5429      of CONST_INTs, where we know the old_mode only from the call argument.  */
5430 
5431   old_mode = GET_MODE (val);
5432   if (old_mode == VOIDmode)
5433     old_mode = TYPE_MODE (TREE_TYPE (exp));
5434   val = convert_modes (mode, old_mode, val, 1);
5435   return val;
5436 }
5437 
5438 
5439 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5440    EXP is the CALL_EXPR.  CODE is the rtx code
5441    that corresponds to the arithmetic or logical operation from the name;
5442    an exception here is that NOT actually means NAND.  TARGET is an optional
5443    place for us to store the results; AFTER is true if this is the
5444    fetch_and_xxx form.  */
5445 
5446 static rtx
5447 expand_builtin_sync_operation (machine_mode mode, tree exp,
5448 			       enum rtx_code code, bool after,
5449 			       rtx target)
5450 {
5451   rtx val, mem;
5452   location_t loc = EXPR_LOCATION (exp);
5453 
5454   if (code == NOT && warn_sync_nand)
5455     {
5456       tree fndecl = get_callee_fndecl (exp);
5457       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5458 
5459       static bool warned_f_a_n, warned_n_a_f;
5460 
5461       switch (fcode)
5462 	{
5463 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5464 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5465 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5466 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5467 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5468 	  if (warned_f_a_n)
5469 	    break;
5470 
5471 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5472 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5473 	  warned_f_a_n = true;
5474 	  break;
5475 
5476 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5477 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5478 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5479 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5480 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5481 	  if (warned_n_a_f)
5482 	    break;
5483 
5484 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5485 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5486 	  warned_n_a_f = true;
5487 	  break;
5488 
5489 	default:
5490 	  gcc_unreachable ();
5491 	}
5492     }
5493 
5494   /* Expand the operands.  */
5495   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5496   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497 
5498   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5499 				 after);
5500 }
5501 
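/* Illustrative sketch, not part of GCC: the two user-visible forms the
   expander above handles, distinguished only by the AFTER parameter:

     int old_v = __sync_fetch_and_add (&counter, 1);   value before
     int new_v = __sync_add_and_fetch (&counter, 1);   value after

   Both act as full barriers, matching the MEMMODEL_SYNC_SEQ_CST model
   passed above.  */
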
5502 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5503    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
5504    true if this is the boolean form.  TARGET is a place for us to store the
5505    results; this is NOT optional if IS_BOOL is true.  */
5506 
5507 static rtx
5508 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5509 				 bool is_bool, rtx target)
5510 {
5511   rtx old_val, new_val, mem;
5512   rtx *pbool, *poval;
5513 
5514   /* Expand the operands.  */
5515   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5517   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5518 
5519   pbool = poval = NULL;
5520   if (target != const0_rtx)
5521     {
5522       if (is_bool)
5523 	pbool = &target;
5524       else
5525 	poval = &target;
5526     }
5527   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5528 				       false, MEMMODEL_SYNC_SEQ_CST,
5529 				       MEMMODEL_SYNC_SEQ_CST))
5530     return NULL_RTX;
5531 
5532   return target;
5533 }
5534 
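/* Illustrative sketch, not part of GCC: the two forms selected by
   IS_BOOL above:

     int prev = __sync_val_compare_and_swap (&v, expected, desired);
     int done = __sync_bool_compare_and_swap (&v, expected, desired);

   The val form returns the prior contents of v whether or not the swap
   happened; the bool form returns nonzero exactly when it did.  */
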
5535 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5536    general form is actually an atomic exchange, and some targets only
5537    support a reduced form with the second argument being a constant 1.
5538    EXP is the CALL_EXPR; TARGET is an optional place for us to store
5539    the results.  */
5540 
5541 static rtx
5542 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5543 				       rtx target)
5544 {
5545   rtx val, mem;
5546 
5547   /* Expand the operands.  */
5548   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5549   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5550 
5551   return expand_sync_lock_test_and_set (target, mem, val);
5552 }
5553 
5554 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5555 
5556 static void
5557 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5558 {
5559   rtx mem;
5560 
5561   /* Expand the operands.  */
5562   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5563 
5564   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5565 }
5566 
5567 /* Given an integer representing an ``enum memmodel'', verify its
5568    correctness and return the memory model enum.  */
5569 
5570 static enum memmodel
5571 get_memmodel (tree exp)
5572 {
5573   rtx op;
5574   unsigned HOST_WIDE_INT val;
5575   source_location loc
5576     = expansion_point_location_if_in_system_header (input_location);
5577 
5578   /* If the parameter is not a constant, it's a run time value so we'll just
5579      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
5580   if (TREE_CODE (exp) != INTEGER_CST)
5581     return MEMMODEL_SEQ_CST;
5582 
5583   op = expand_normal (exp);
5584 
5585   val = INTVAL (op);
5586   if (targetm.memmodel_check)
5587     val = targetm.memmodel_check (val);
5588   else if (val & ~MEMMODEL_MASK)
5589     {
5590       warning_at (loc, OPT_Winvalid_memory_model,
5591 		  "unknown architecture specifier in memory model to builtin");
5592       return MEMMODEL_SEQ_CST;
5593     }
5594 
5595   /* We should never see a user-explicit SYNC memory model, so >= LAST works.  */
5596   if (memmodel_base (val) >= MEMMODEL_LAST)
5597     {
5598       warning_at (loc, OPT_Winvalid_memory_model,
5599 		  "invalid memory model argument to builtin");
5600       return MEMMODEL_SEQ_CST;
5601     }
5602 
5603   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5604      be conservative and promote consume to acquire.  */
5605   if (val == MEMMODEL_CONSUME)
5606     val = MEMMODEL_ACQUIRE;
5607 
5608   return (enum memmodel) val;
5609 }
5610 
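/* Illustrative sketch, not part of GCC: from the user's side, the
   argument validated above is one of the __ATOMIC_* constants, e.g.:

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);

   Per the checks above, an unknown or run-time model is conservatively
   treated as __ATOMIC_SEQ_CST, and __ATOMIC_CONSUME is promoted to
   __ATOMIC_ACQUIRE.  */
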
5611 /* Expand the __atomic_exchange intrinsic:
5612    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5613    EXP is the CALL_EXPR.
5614    TARGET is an optional place for us to store the results.  */
5615 
5616 static rtx
5617 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5618 {
5619   rtx val, mem;
5620   enum memmodel model;
5621 
5622   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5623 
5624   if (!flag_inline_atomics)
5625     return NULL_RTX;
5626 
5627   /* Expand the operands.  */
5628   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5630 
5631   return expand_atomic_exchange (target, mem, val, model);
5632 }
5633 
5634 /* Expand the __atomic_compare_exchange intrinsic:
5635    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5636 					TYPE desired, BOOL weak,
5637 					enum memmodel success,
5638 					enum memmodel failure)
5639    EXP is the CALL_EXPR.
5640    TARGET is an optional place for us to store the results.  */
5641 
5642 static rtx
5643 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5644 					rtx target)
5645 {
5646   rtx expect, desired, mem, oldval;
5647   rtx_code_label *label;
5648   enum memmodel success, failure;
5649   tree weak;
5650   bool is_weak;
5651   source_location loc
5652     = expansion_point_location_if_in_system_header (input_location);
5653 
5654   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5655   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5656 
5657   if (failure > success)
5658     {
5659       warning_at (loc, OPT_Winvalid_memory_model,
5660 		  "failure memory model cannot be stronger than success "
5661 		  "memory model for %<__atomic_compare_exchange%>");
5662       success = MEMMODEL_SEQ_CST;
5663     }
5664 
5665   if (is_mm_release (failure) || is_mm_acq_rel (failure))
5666     {
5667       warning_at (loc, OPT_Winvalid_memory_model,
5668 		  "invalid failure memory model for "
5669 		  "%<__atomic_compare_exchange%>");
5670       failure = MEMMODEL_SEQ_CST;
5671       success = MEMMODEL_SEQ_CST;
5672     }
5673 
5674 
5675   if (!flag_inline_atomics)
5676     return NULL_RTX;
5677 
5678   /* Expand the operands.  */
5679   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5680 
5681   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5682   expect = convert_memory_address (Pmode, expect);
5683   expect = gen_rtx_MEM (mode, expect);
5684   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5685 
5686   weak = CALL_EXPR_ARG (exp, 3);
5687   is_weak = false;
5688   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5689     is_weak = true;
5690 
5691   if (target == const0_rtx)
5692     target = NULL;
5693 
5694   /* Lest the rtl backend create a race condition with an improper store
5695      to memory, always create a new pseudo for OLDVAL.  */
5696   oldval = NULL;
5697 
5698   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5699 				       is_weak, success, failure))
5700     return NULL_RTX;
5701 
5702   /* Conditionally store back to EXPECT, lest we create a race condition
5703      with an improper store to memory.  */
5704   /* ??? With a rearrangement of atomics at the gimple level, we can handle
5705      the normal case where EXPECT is totally private, i.e. a register.  At
5706      which point the store can be unconditional.  */
5707   label = gen_label_rtx ();
5708   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5709 			   GET_MODE (target), 1, label);
5710   emit_move_insn (expect, oldval);
5711   emit_label (label);
5712 
5713   return target;
5714 }
5715 
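/* Illustrative sketch, not part of GCC: the canonical user-level loop
   built on the builtin expanded above.  On failure the value actually
   observed is stored back through the second argument, which is what
   the conditional store to EXPECT implements:

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          0, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   expected has been refreshed; retry with the new value

   The fourth argument selects the weak form (IS_WEAK above), which may
   fail spuriously but can be cheaper inside a loop.  */
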
5716 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5717    internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5718    call.  The weak parameter must be dropped to match the expected parameter
5719    list, and the expected argument changed from a value to a pointer to a
5720    memory slot.  */
5721 
5722 static void
5723 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5724 {
5725   unsigned int z;
5726   vec<tree, va_gc> *vec;
5727 
5728   vec_alloc (vec, 5);
5729   vec->quick_push (gimple_call_arg (call, 0));
5730   tree expected = gimple_call_arg (call, 1);
5731   rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5732 				      TREE_TYPE (expected));
5733   rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5734   if (expd != x)
5735     emit_move_insn (x, expd);
5736   tree v = make_tree (TREE_TYPE (expected), x);
5737   vec->quick_push (build1 (ADDR_EXPR,
5738 			   build_pointer_type (TREE_TYPE (expected)), v));
5739   vec->quick_push (gimple_call_arg (call, 2));
5740   /* Skip the boolean weak parameter.  */
5741   for (z = 4; z < 6; z++)
5742     vec->quick_push (gimple_call_arg (call, z));
5743   built_in_function fncode
5744     = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5745 			   + exact_log2 (GET_MODE_SIZE (mode)));
5746   tree fndecl = builtin_decl_explicit (fncode);
5747   tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5748 		    fndecl);
5749   tree exp = build_call_vec (boolean_type_node, fn, vec);
5750   tree lhs = gimple_call_lhs (call);
5751   rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5752   if (lhs)
5753     {
5754       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5755       if (GET_MODE (boolret) != mode)
5756 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5757       x = force_reg (mode, x);
5758       write_complex_part (target, boolret, true);
5759       write_complex_part (target, x, false);
5760     }
5761 }
5762 
5763 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */
5764 
5765 void
5766 expand_ifn_atomic_compare_exchange (gcall *call)
5767 {
5768   int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5769   gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5770   machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5771   rtx expect, desired, mem, oldval, boolret;
5772   enum memmodel success, failure;
5773   tree lhs;
5774   bool is_weak;
5775   source_location loc
5776     = expansion_point_location_if_in_system_header (gimple_location (call));
5777 
5778   success = get_memmodel (gimple_call_arg (call, 4));
5779   failure = get_memmodel (gimple_call_arg (call, 5));
5780 
5781   if (failure > success)
5782     {
5783       warning_at (loc, OPT_Winvalid_memory_model,
5784 		  "failure memory model cannot be stronger than success "
5785 		  "memory model for %<__atomic_compare_exchange%>");
5786       success = MEMMODEL_SEQ_CST;
5787     }
5788 
5789   if (is_mm_release (failure) || is_mm_acq_rel (failure))
5790     {
5791       warning_at (loc, OPT_Winvalid_memory_model,
5792 		  "invalid failure memory model for "
5793 		  "%<__atomic_compare_exchange%>");
5794       failure = MEMMODEL_SEQ_CST;
5795       success = MEMMODEL_SEQ_CST;
5796     }
5797 
5798   if (!flag_inline_atomics)
5799     {
5800       expand_ifn_atomic_compare_exchange_into_call (call, mode);
5801       return;
5802     }
5803 
5804   /* Expand the operands.  */
5805   mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5806 
5807   expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5808   desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5809 
5810   is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5811 
5812   boolret = NULL;
5813   oldval = NULL;
5814 
5815   if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5816 				       is_weak, success, failure))
5817     {
5818       expand_ifn_atomic_compare_exchange_into_call (call, mode);
5819       return;
5820     }
5821 
5822   lhs = gimple_call_lhs (call);
5823   if (lhs)
5824     {
5825       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5826       if (GET_MODE (boolret) != mode)
5827 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5828       write_complex_part (target, boolret, true);
5829       write_complex_part (target, oldval, false);
5830     }
5831 }
5832 
5833 /* Expand the __atomic_load intrinsic:
5834    	TYPE __atomic_load (TYPE *object, enum memmodel)
5835    EXP is the CALL_EXPR.
5836    TARGET is an optional place for us to store the results.  */
5837 
5838 static rtx
5839 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5840 {
5841   rtx mem;
5842   enum memmodel model;
5843 
5844   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5845   if (is_mm_release (model) || is_mm_acq_rel (model))
5846     {
5847       source_location loc
5848 	= expansion_point_location_if_in_system_header (input_location);
5849       warning_at (loc, OPT_Winvalid_memory_model,
5850 		  "invalid memory model for %<__atomic_load%>");
5851       model = MEMMODEL_SEQ_CST;
5852     }
5853 
5854   if (!flag_inline_atomics)
5855     return NULL_RTX;
5856 
5857   /* Expand the operand.  */
5858   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5859 
5860   return expand_atomic_load (target, mem, model);
5861 }
5862 
5863 
5864 /* Expand the __atomic_store intrinsic:
5865    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5866    EXP is the CALL_EXPR.
5867    TARGET is an optional place for us to store the results.  */
5868 
5869 static rtx
5870 expand_builtin_atomic_store (machine_mode mode, tree exp)
5871 {
5872   rtx mem, val;
5873   enum memmodel model;
5874 
5875   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5876   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5877 	|| is_mm_release (model)))
5878     {
5879       source_location loc
5880 	= expansion_point_location_if_in_system_header (input_location);
5881       warning_at (loc, OPT_Winvalid_memory_model,
5882 		  "invalid memory model for %<__atomic_store%>");
5883       model = MEMMODEL_SEQ_CST;
5884     }
5885 
5886   if (!flag_inline_atomics)
5887     return NULL_RTX;
5888 
5889   /* Expand the operands.  */
5890   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5891   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5892 
5893   return expand_atomic_store (mem, val, model, false);
5894 }
5895 
5896 /* Expand the __atomic_fetch_XXX intrinsic:
5897    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5898    EXP is the CALL_EXPR.
5899    TARGET is an optional place for us to store the results.
5900    CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
5901    FETCH_AFTER is true if returning the result of the operation.
5902    FETCH_AFTER is false if returning the value before the operation.
5903    IGNORE is true if the result is not used.
5904    EXT_CALL is the correct builtin for an external call if this cannot be
5905    resolved to an instruction sequence.  */
5906 
5907 static rtx
5908 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5909 				enum rtx_code code, bool fetch_after,
5910 				bool ignore, enum built_in_function ext_call)
5911 {
5912   rtx val, mem, ret;
5913   enum memmodel model;
5914   tree fndecl;
5915   tree addr;
5916 
5917   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5918 
5919   /* Expand the operands.  */
5920   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5921   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5922 
5923   /* Only try generating instructions if inlining is turned on.  */
5924   if (flag_inline_atomics)
5925     {
5926       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5927       if (ret)
5928 	return ret;
5929     }
5930 
5931   /* Punt if no alternate routine is available for the library call.  */
5932   if (ext_call == BUILT_IN_NONE)
5933     return NULL_RTX;
5934 
5935   /* Change the call to the specified function.  */
5936   fndecl = get_callee_fndecl (exp);
5937   addr = CALL_EXPR_FN (exp);
5938   STRIP_NOPS (addr);
5939 
5940   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5941   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5942 
5943   /* If we will emit code after the call, the call cannot be a tail call.
5944      If it is emitted as a tail call, a barrier is emitted after it, and
5945      then all trailing code is removed.  */
5946   if (!ignore)
5947     CALL_EXPR_TAILCALL (exp) = 0;
5948 
5949   /* Expand the call here so we can emit trailing code.  */
5950   ret = expand_call (exp, target, ignore);
5951 
5952   /* Replace the original function just in case it matters.  */
5953   TREE_OPERAND (addr, 0) = fndecl;
5954 
5955   /* Then issue the arithmetic correction to return the right result.  */
5956   if (!ignore)
5957     {
5958       if (code == NOT)
5959 	{
5960 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5961 				     OPTAB_LIB_WIDEN);
5962 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
5963 	}
5964       else
5965 	ret = expand_simple_binop (mode, code, ret, val, target, true,
5966 				   OPTAB_LIB_WIDEN);
5967     }
5968   return ret;
5969 }
5970 
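/* Illustrative sketch, not part of GCC: the arithmetic correction above
   recovers an OP-then-fetch result from a library routine that only
   returns the old value.  For __atomic_add_fetch lowered to the
   __atomic_fetch_add library call:

     new_v = __atomic_fetch_add (p, val, model) + val;

   and for the NAND case, matching the AND/NOT pair emitted above:

     new_v = ~(__atomic_fetch_nand (p, val, model) & val);  */
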
5971 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */
5972 
5973 void
5974 expand_ifn_atomic_bit_test_and (gcall *call)
5975 {
5976   tree ptr = gimple_call_arg (call, 0);
5977   tree bit = gimple_call_arg (call, 1);
5978   tree flag = gimple_call_arg (call, 2);
5979   tree lhs = gimple_call_lhs (call);
5980   enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5981   machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5982   enum rtx_code code;
5983   optab optab;
5984   struct expand_operand ops[5];
5985 
5986   gcc_assert (flag_inline_atomics);
5987 
5988   if (gimple_call_num_args (call) == 4)
5989     model = get_memmodel (gimple_call_arg (call, 3));
5990 
5991   rtx mem = get_builtin_sync_mem (ptr, mode);
5992   rtx val = expand_expr_force_mode (bit, mode);
5993 
5994   switch (gimple_call_internal_fn (call))
5995     {
5996     case IFN_ATOMIC_BIT_TEST_AND_SET:
5997       code = IOR;
5998       optab = atomic_bit_test_and_set_optab;
5999       break;
6000     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6001       code = XOR;
6002       optab = atomic_bit_test_and_complement_optab;
6003       break;
6004     case IFN_ATOMIC_BIT_TEST_AND_RESET:
6005       code = AND;
6006       optab = atomic_bit_test_and_reset_optab;
6007       break;
6008     default:
6009       gcc_unreachable ();
6010     }
6011 
6012   if (lhs == NULL_TREE)
6013     {
6014       val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6015 				 val, NULL_RTX, true, OPTAB_DIRECT);
6016       if (code == AND)
6017 	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6018       expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6019       return;
6020     }
6021 
6022   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6023   enum insn_code icode = direct_optab_handler (optab, mode);
6024   gcc_assert (icode != CODE_FOR_nothing);
6025   create_output_operand (&ops[0], target, mode);
6026   create_fixed_operand (&ops[1], mem);
6027   create_convert_operand_to (&ops[2], val, mode, true);
6028   create_integer_operand (&ops[3], model);
6029   create_integer_operand (&ops[4], integer_onep (flag));
6030   if (maybe_expand_insn (icode, 5, ops))
6031     return;
6032 
6033   rtx bitval = val;
6034   val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6035 			     val, NULL_RTX, true, OPTAB_DIRECT);
6036   rtx maskval = val;
6037   if (code == AND)
6038     val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6039   rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6040 				       code, model, false);
6041   if (integer_onep (flag))
6042     {
6043       result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6044 				    NULL_RTX, true, OPTAB_DIRECT);
6045       result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6046 				    true, OPTAB_DIRECT);
6047     }
6048   else
6049     result = expand_simple_binop (mode, AND, result, maskval, target, true,
6050 				  OPTAB_DIRECT);
6051   if (result != target)
6052     emit_move_insn (target, result);
6053 }
6054 
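/* Illustrative sketch, not part of GCC: the kind of source pattern the
   gimple passes fold to IFN_ATOMIC_BIT_TEST_AND_SET, which lands here:

     int
     set_bit5 (unsigned *p)
     {
       return (__atomic_fetch_or (p, 1u << 5, __ATOMIC_SEQ_CST) >> 5) & 1;
     }

   Where the target provides atomic_bit_test_and_set_optab (e.g. a
   "lock bts" style instruction on x86), this expands without a full
   fetch-or; otherwise the fallback path above recomputes the bit from
   the plain atomic fetch-op result.  */
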
6055 /* Expand an atomic clear operation.
6056 	void __atomic_clear (BOOL *obj, enum memmodel)
6057    EXP is the call expression.  */
6058 
6059 static rtx
6060 expand_builtin_atomic_clear (tree exp)
6061 {
6062   machine_mode mode;
6063   rtx mem, ret;
6064   enum memmodel model;
6065 
6066   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6067   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6068   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6069 
6070   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6071     {
6072       source_location loc
6073 	= expansion_point_location_if_in_system_header (input_location);
6074       warning_at (loc, OPT_Winvalid_memory_model,
6075 		  "invalid memory model for %<__atomic_store%>");
6076       model = MEMMODEL_SEQ_CST;
6077     }
6078 
6079   /* Try issuing an __atomic_store, with a fallback to __sync_lock_release.
6080      Failing both, a plain store is issued.  The only way this can
6081      fail is if the bool type is larger than a word size.  Unlikely, but
6082      handle it anyway for completeness.  Assume a single threaded model since
6083      there is no atomic support in this case, and no barriers are required.  */
6084   ret = expand_atomic_store (mem, const0_rtx, model, true);
6085   if (!ret)
6086     emit_move_insn (mem, const0_rtx);
6087   return const0_rtx;
6088 }
6089 
6090 /* Expand an atomic test_and_set operation.
6091 	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6092    EXP is the call expression.  */
6093 
6094 static rtx
6095 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6096 {
6097   rtx mem;
6098   enum memmodel model;
6099   machine_mode mode;
6100 
6101   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6102   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6103   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6104 
6105   return expand_atomic_test_and_set (target, mem, model);
6106 }
6107 
6108 
6109 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6110    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
6111 
6112 static tree
6113 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6114 {
6115   int size;
6116   machine_mode mode;
6117   unsigned int mode_align, type_align;
6118 
6119   if (TREE_CODE (arg0) != INTEGER_CST)
6120     return NULL_TREE;
6121 
6122   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6123   mode = mode_for_size (size, MODE_INT, 0);
6124   mode_align = GET_MODE_ALIGNMENT (mode);
6125 
6126   if (TREE_CODE (arg1) == INTEGER_CST)
6127     {
6128       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6129 
6130       /* Either this argument is null, or it's a fake pointer encoding
6131          the alignment of the object.  */
6132       val = least_bit_hwi (val);
6133       val *= BITS_PER_UNIT;
6134 
6135       if (val == 0 || mode_align < val)
6136         type_align = mode_align;
6137       else
6138         type_align = val;
6139     }
6140   else
6141     {
6142       tree ttype = TREE_TYPE (arg1);
6143 
6144       /* This function is usually invoked and folded immediately by the front
6145 	 end before anything else has a chance to look at it.  The pointer
6146 	 parameter at this point is usually cast to a void *, so check for that
6147 	 and look past the cast.  */
6148       if (CONVERT_EXPR_P (arg1)
6149 	  && POINTER_TYPE_P (ttype)
6150 	  && VOID_TYPE_P (TREE_TYPE (ttype))
6151 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6152 	arg1 = TREE_OPERAND (arg1, 0);
6153 
6154       ttype = TREE_TYPE (arg1);
6155       gcc_assert (POINTER_TYPE_P (ttype));
6156 
6157       /* Get the underlying type of the object.  */
6158       ttype = TREE_TYPE (ttype);
6159       type_align = TYPE_ALIGN (ttype);
6160     }
6161 
6162   /* If the object has smaller alignment, the lock free routines cannot
6163      be used.  */
6164   if (type_align < mode_align)
6165     return boolean_false_node;
6166 
6167   /* Check if a compare_and_swap pattern exists for the mode which represents
6168      the required size.  The pattern is not allowed to fail, so the existence
6169      of the pattern indicates support is present.  Also require that an
6170      atomic load exists for the required size.  */
6171   if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6172     return boolean_true_node;
6173   else
6174     return boolean_false_node;
6175 }
6176 
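/* Illustrative sketch, not part of GCC: how the two arguments arrive
   from user code.  The second is either a real object pointer or a
   null pointer meaning "assume typical alignment for the size":

     _Bool a = __atomic_always_lock_free (sizeof (long), 0);
     _Bool b = __atomic_always_lock_free (sizeof x, &x);   some object x

   The first form folds from the size and typical alignment alone; the
   second also checks the alignment of x against the mode's requirement,
   as done above.  */
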
6177 /* Return true if the parameters to call EXP represent an object which will
6178    always generate lock free instructions.  The first argument represents the
6179    size of the object, and the second parameter is a pointer to the object
6180    itself.  If NULL is passed for the object, then the result is based on
6181    typical alignment for an object of the specified size.  Otherwise return
6182    false.  */
6183 
6184 static rtx
6185 expand_builtin_atomic_always_lock_free (tree exp)
6186 {
6187   tree size;
6188   tree arg0 = CALL_EXPR_ARG (exp, 0);
6189   tree arg1 = CALL_EXPR_ARG (exp, 1);
6190 
6191   if (TREE_CODE (arg0) != INTEGER_CST)
6192     {
6193       error ("non-constant argument 1 to __atomic_always_lock_free");
6194       return const0_rtx;
6195     }
6196 
6197   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6198   if (size == boolean_true_node)
6199     return const1_rtx;
6200   return const0_rtx;
6201 }
6202 
6203 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6204    is lock free on this architecture.  */
6205 
6206 static tree
6207 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6208 {
6209   if (!flag_inline_atomics)
6210     return NULL_TREE;
6211 
6212   /* If it isn't always lock free, don't generate a result.  */
6213   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6214     return boolean_true_node;
6215 
6216   return NULL_TREE;
6217 }
6218 
6219 /* Return true if the parameters to call EXP represent an object which will
6220    always generate lock free instructions.  The first argument represents the
6221    size of the object, and the second parameter is a pointer to the object
6222    itself.  If NULL is passed for the object, then the result is based on
6223    typical alignment for an object of the specified size.  Otherwise return
6224    NULL.  */
6225 
6226 static rtx
6227 expand_builtin_atomic_is_lock_free (tree exp)
6228 {
6229   tree size;
6230   tree arg0 = CALL_EXPR_ARG (exp, 0);
6231   tree arg1 = CALL_EXPR_ARG (exp, 1);
6232 
6233   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6234     {
6235       error ("non-integer argument 1 to __atomic_is_lock_free");
6236       return NULL_RTX;
6237     }
6238 
6239   if (!flag_inline_atomics)
6240     return NULL_RTX;
6241 
6242   /* If the value is known at compile time, return the RTX for it.  */
6243   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6244   if (size == boolean_true_node)
6245     return const1_rtx;
6246 
6247   return NULL_RTX;
6248 }
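
/* Illustrative contrast (hypothetical user code): __atomic_is_lock_free
   only folds when the object is provably always lock free.  When the
   expander above returns NULL_RTX, a call such as

     _Bool g (void *p) { return __atomic_is_lock_free (8, p); }

   falls through to the runtime library (libatomic's
   __atomic_is_lock_free), which can consider the runtime value of P.  */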
6249 
6250 /* Expand the __atomic_thread_fence intrinsic:
6251    	void __atomic_thread_fence (enum memmodel)
6252    EXP is the CALL_EXPR.  */
6253 
6254 static void
6255 expand_builtin_atomic_thread_fence (tree exp)
6256 {
6257   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6258   expand_mem_thread_fence (model);
6259 }
6260 
6261 /* Expand the __atomic_signal_fence intrinsic:
6262    	void __atomic_signal_fence (enum memmodel)
6263    EXP is the CALL_EXPR.  */
6264 
6265 static void
6266 expand_builtin_atomic_signal_fence (tree exp)
6267 {
6268   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6269   expand_mem_signal_fence (model);
6270 }
6271 
6272 /* Expand the __sync_synchronize intrinsic.  */
6273 
6274 static void
6275 expand_builtin_sync_synchronize (void)
6276 {
6277   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6278 }
6279 
6280 static rtx
6281 expand_builtin_thread_pointer (tree exp, rtx target)
6282 {
6283   enum insn_code icode;
6284   if (!validate_arglist (exp, VOID_TYPE))
6285     return const0_rtx;
6286   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6287   if (icode != CODE_FOR_nothing)
6288     {
6289       struct expand_operand op;
6290 	      /* If the target is not suitable, create a new target.  */
6291       if (target == NULL_RTX
6292 	  || !REG_P (target)
6293 	  || GET_MODE (target) != Pmode)
6294 	target = gen_reg_rtx (Pmode);
6295       create_output_operand (&op, target, Pmode);
6296       expand_insn (icode, 1, &op);
6297       return target;
6298     }
6299   error ("__builtin_thread_pointer is not supported on this target");
6300   return const0_rtx;
6301 }
6302 
6303 static void
6304 expand_builtin_set_thread_pointer (tree exp)
6305 {
6306   enum insn_code icode;
6307   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6308     return;
6309   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6310   if (icode != CODE_FOR_nothing)
6311     {
6312       struct expand_operand op;
6313       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6314 			     Pmode, EXPAND_NORMAL);
6315       create_input_operand (&op, val, Pmode);
6316       expand_insn (icode, 1, &op);
6317       return;
6318     }
6319   error ("__builtin_set_thread_pointer is not supported on this target");
6320 }
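
/* Illustrative example (hypothetical user code): on a target providing
   the get_thread_pointer/set_thread_pointer optabs,

     void *tp (void) { return __builtin_thread_pointer (); }

   expands to a single Pmode register read; without the optab the call is
   diagnosed as unsupported instead of being expanded.  */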
6321 
6322 
6323 /* Emit code to restore the current value of the stack pointer.  */
6324 
6325 static void
6326 expand_stack_restore (tree var)
6327 {
6328   rtx_insn *prev;
6329   rtx sa = expand_normal (var);
6330 
6331   sa = convert_memory_address (Pmode, sa);
6332 
6333   prev = get_last_insn ();
6334   emit_stack_restore (SAVE_BLOCK, sa);
6335 
6336   record_new_stack_level ();
6337 
6338   fixup_args_size_notes (prev, get_last_insn (), 0);
6339 }
6340 
6341 /* Emit code to save the current value of the stack pointer.  */
6342 
6343 static rtx
6344 expand_stack_save (void)
6345 {
6346   rtx ret = NULL_RTX;
6347 
6348   emit_stack_save (SAVE_BLOCK, &ret);
6349   return ret;
6350 }
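
/* Illustrative sketch (gimplifier pseudocode, not user-writable calls):
   these two expanders back the stack save/restore pairs emitted around
   blocks containing variable-length arrays, roughly

     sp = __builtin_stack_save ();
     { char vla[n]; ... }
     __builtin_stack_restore (sp);

   so the VLA's stack space is reclaimed when the block exits.  */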
6351 
6352 
6353 /* Expand an expression EXP that calls a built-in function,
6354    with result going to TARGET if that's convenient
6355    (and in mode MODE if that's convenient).
6356    SUBTARGET may be used as the target for computing one of EXP's operands.
6357    IGNORE is nonzero if the value is to be ignored.  */
6358 
6359 rtx
6360 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6361 		int ignore)
6362 {
6363   tree fndecl = get_callee_fndecl (exp);
6364   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6365   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6366   int flags;
6367 
6368   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6369     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6370 
6371   /* When ASan is enabled, we don't want to expand some memory/string
6372      builtins and rely on libsanitizer's hooks.  This allows us to avoid
6373      redundant checks and be sure that a possible overflow will be detected
6374      by ASan.  */
6375 
6376   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6377     return expand_call (exp, target, ignore);
6378 
6379   /* When not optimizing, generate calls to library functions for a certain
6380      set of builtins.  */
6381   if (!optimize
6382       && !called_as_built_in (fndecl)
6383       && fcode != BUILT_IN_FORK
6384       && fcode != BUILT_IN_EXECL
6385       && fcode != BUILT_IN_EXECV
6386       && fcode != BUILT_IN_EXECLP
6387       && fcode != BUILT_IN_EXECLE
6388       && fcode != BUILT_IN_EXECVP
6389       && fcode != BUILT_IN_EXECVE
6390       && fcode != BUILT_IN_ALLOCA
6391       && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6392       && fcode != BUILT_IN_FREE
6393       && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6394       && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6395       && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6396       && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6397       && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6398       && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6399       && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6400       && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6401       && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6402       && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6403       && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6404       && fcode != BUILT_IN_CHKP_BNDRET)
6405     return expand_call (exp, target, ignore);
6406 
6407   /* The built-in function expanders test for target == const0_rtx
6408      to determine whether the function's result will be ignored.  */
6409   if (ignore)
6410     target = const0_rtx;
6411 
6412   /* If the result of a pure or const built-in function is ignored, and
6413      none of its arguments are volatile, we can avoid expanding the
6414      built-in call and just evaluate the arguments for side-effects.  */
6415   if (target == const0_rtx
6416       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6417       && !(flags & ECF_LOOPING_CONST_OR_PURE))
6418     {
6419       bool volatilep = false;
6420       tree arg;
6421       call_expr_arg_iterator iter;
6422 
6423       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6424 	if (TREE_THIS_VOLATILE (arg))
6425 	  {
6426 	    volatilep = true;
6427 	    break;
6428 	  }
6429 
6430       if (! volatilep)
6431 	{
6432 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6433 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6434 	  return const0_rtx;
6435 	}
6436     }
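
  /* Illustrative example (hypothetical user code): for a statement such as

       (void) __builtin_strlen (p);

     strlen is pure and its result is unused, so the code above expands P
     only for side effects and returns const0_rtx without emitting the
     call.  */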
6437 
6438   /* expand_builtin_with_bounds is supposed to be used for
6439      instrumented builtin calls.  */
6440   gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6441 
6442   switch (fcode)
6443     {
6444     CASE_FLT_FN (BUILT_IN_FABS):
6445     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6446     case BUILT_IN_FABSD32:
6447     case BUILT_IN_FABSD64:
6448     case BUILT_IN_FABSD128:
6449       target = expand_builtin_fabs (exp, target, subtarget);
6450       if (target)
6451 	return target;
6452       break;
6453 
6454     CASE_FLT_FN (BUILT_IN_COPYSIGN):
6455     CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6456       target = expand_builtin_copysign (exp, target, subtarget);
6457       if (target)
6458 	return target;
6459       break;
6460 
6461       /* Just do a normal library call if we were unable to fold
6462 	 the values.  */
6463     CASE_FLT_FN (BUILT_IN_CABS):
6464       break;
6465 
6466     CASE_FLT_FN (BUILT_IN_FMA):
6467       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6468       if (target)
6469 	return target;
6470       break;
6471 
6472     CASE_FLT_FN (BUILT_IN_ILOGB):
6473       if (! flag_unsafe_math_optimizations)
6474 	break;
6475       gcc_fallthrough ();
6476     CASE_FLT_FN (BUILT_IN_ISINF):
6477     CASE_FLT_FN (BUILT_IN_FINITE):
6478     case BUILT_IN_ISFINITE:
6479     case BUILT_IN_ISNORMAL:
6480       target = expand_builtin_interclass_mathfn (exp, target);
6481       if (target)
6482 	return target;
6483       break;
6484 
6485     CASE_FLT_FN (BUILT_IN_ICEIL):
6486     CASE_FLT_FN (BUILT_IN_LCEIL):
6487     CASE_FLT_FN (BUILT_IN_LLCEIL):
6488     CASE_FLT_FN (BUILT_IN_LFLOOR):
6489     CASE_FLT_FN (BUILT_IN_IFLOOR):
6490     CASE_FLT_FN (BUILT_IN_LLFLOOR):
6491       target = expand_builtin_int_roundingfn (exp, target);
6492       if (target)
6493 	return target;
6494       break;
6495 
6496     CASE_FLT_FN (BUILT_IN_IRINT):
6497     CASE_FLT_FN (BUILT_IN_LRINT):
6498     CASE_FLT_FN (BUILT_IN_LLRINT):
6499     CASE_FLT_FN (BUILT_IN_IROUND):
6500     CASE_FLT_FN (BUILT_IN_LROUND):
6501     CASE_FLT_FN (BUILT_IN_LLROUND):
6502       target = expand_builtin_int_roundingfn_2 (exp, target);
6503       if (target)
6504 	return target;
6505       break;
6506 
6507     CASE_FLT_FN (BUILT_IN_POWI):
6508       target = expand_builtin_powi (exp, target);
6509       if (target)
6510 	return target;
6511       break;
6512 
6513     CASE_FLT_FN (BUILT_IN_CEXPI):
6514       target = expand_builtin_cexpi (exp, target);
6515       gcc_assert (target);
6516       return target;
6517 
6518     CASE_FLT_FN (BUILT_IN_SIN):
6519     CASE_FLT_FN (BUILT_IN_COS):
6520       if (! flag_unsafe_math_optimizations)
6521 	break;
6522       target = expand_builtin_mathfn_3 (exp, target, subtarget);
6523       if (target)
6524 	return target;
6525       break;
6526 
6527     CASE_FLT_FN (BUILT_IN_SINCOS):
6528       if (! flag_unsafe_math_optimizations)
6529 	break;
6530       target = expand_builtin_sincos (exp);
6531       if (target)
6532 	return target;
6533       break;
6534 
6535     case BUILT_IN_APPLY_ARGS:
6536       return expand_builtin_apply_args ();
6537 
6538       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6539 	 FUNCTION with a copy of the parameters described by
6540 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
6541 	 allocated on the stack into which is stored all the registers
6542 	 that might possibly be used for returning the result of a
6543 	 function.  ARGUMENTS is the value returned by
6544 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
6545 	 arguments that must be copied.  ??? How should this value be
6546 	 computed?  We'll also need a safe worst case value for varargs
6547 	 functions.  */
6548     case BUILT_IN_APPLY:
6549       if (!validate_arglist (exp, POINTER_TYPE,
6550 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6551 	  && !validate_arglist (exp, REFERENCE_TYPE,
6552 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6553 	return const0_rtx;
6554       else
6555 	{
6556 	  rtx ops[3];
6557 
6558 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6559 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6560 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6561 
6562 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
6563 	}
6564 
6565       /* __builtin_return (RESULT) causes the function to return the
6566 	 value described by RESULT.  RESULT is address of the block of
6567 	 memory returned by __builtin_apply.  */
6568     case BUILT_IN_RETURN:
6569       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6570 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6571       return const0_rtx;
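
      /* Illustrative example (hypothetical user code): the three builtins
	 combine into a forwarding wrapper, roughly

	   void *args = __builtin_apply_args ();
	   void *res = __builtin_apply ((void (*) ()) callee, args, 64);
	   __builtin_return (res);

	 where CALLEE is a hypothetical function being wrapped and 64 is a
	 guessed upper bound on the argument bytes (see the ??? above).  */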
6572 
6573     case BUILT_IN_SAVEREGS:
6574       return expand_builtin_saveregs ();
6575 
6576     case BUILT_IN_VA_ARG_PACK:
6577       /* All valid uses of __builtin_va_arg_pack () are removed during
6578 	 inlining.  */
6579       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6580       return const0_rtx;
6581 
6582     case BUILT_IN_VA_ARG_PACK_LEN:
6583       /* All valid uses of __builtin_va_arg_pack_len () are removed during
6584 	 inlining.  */
6585       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6586       return const0_rtx;
6587 
6588       /* Return the address of the first anonymous stack arg.  */
6589     case BUILT_IN_NEXT_ARG:
6590       if (fold_builtin_next_arg (exp, false))
6591 	return const0_rtx;
6592       return expand_builtin_next_arg ();
6593 
6594     case BUILT_IN_CLEAR_CACHE:
6595       target = expand_builtin___clear_cache (exp);
6596       if (target)
6597         return target;
6598       break;
6599 
6600     case BUILT_IN_CLASSIFY_TYPE:
6601       return expand_builtin_classify_type (exp);
6602 
6603     case BUILT_IN_CONSTANT_P:
6604       return const0_rtx;
6605 
6606     case BUILT_IN_FRAME_ADDRESS:
6607     case BUILT_IN_RETURN_ADDRESS:
6608       return expand_builtin_frame_address (fndecl, exp);
6609 
6610     /* Returns the address of the area where the structure is returned,
6611        or 0 otherwise.  */
6612     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6613       if (call_expr_nargs (exp) != 0
6614 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6615 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6616 	return const0_rtx;
6617       else
6618 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6619 
6620     case BUILT_IN_ALLOCA:
6621     case BUILT_IN_ALLOCA_WITH_ALIGN:
6622       /* If the allocation stems from the declaration of a variable-sized
6623 	 object, it cannot accumulate.  */
6624       target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6625       if (target)
6626 	return target;
6627       break;
6628 
6629     case BUILT_IN_STACK_SAVE:
6630       return expand_stack_save ();
6631 
6632     case BUILT_IN_STACK_RESTORE:
6633       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6634       return const0_rtx;
6635 
6636     case BUILT_IN_BSWAP16:
6637     case BUILT_IN_BSWAP32:
6638     case BUILT_IN_BSWAP64:
6639       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6640       if (target)
6641 	return target;
6642       break;
6643 
6644     CASE_INT_FN (BUILT_IN_FFS):
6645       target = expand_builtin_unop (target_mode, exp, target,
6646 				    subtarget, ffs_optab);
6647       if (target)
6648 	return target;
6649       break;
6650 
6651     CASE_INT_FN (BUILT_IN_CLZ):
6652       target = expand_builtin_unop (target_mode, exp, target,
6653 				    subtarget, clz_optab);
6654       if (target)
6655 	return target;
6656       break;
6657 
6658     CASE_INT_FN (BUILT_IN_CTZ):
6659       target = expand_builtin_unop (target_mode, exp, target,
6660 				    subtarget, ctz_optab);
6661       if (target)
6662 	return target;
6663       break;
6664 
6665     CASE_INT_FN (BUILT_IN_CLRSB):
6666       target = expand_builtin_unop (target_mode, exp, target,
6667 				    subtarget, clrsb_optab);
6668       if (target)
6669 	return target;
6670       break;
6671 
6672     CASE_INT_FN (BUILT_IN_POPCOUNT):
6673       target = expand_builtin_unop (target_mode, exp, target,
6674 				    subtarget, popcount_optab);
6675       if (target)
6676 	return target;
6677       break;
6678 
6679     CASE_INT_FN (BUILT_IN_PARITY):
6680       target = expand_builtin_unop (target_mode, exp, target,
6681 				    subtarget, parity_optab);
6682       if (target)
6683 	return target;
6684       break;
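
      /* Illustrative example (hypothetical user code): each unary builtin
	 above maps onto one optab when the target defines it, e.g.

	   int pc (unsigned x) { return __builtin_popcount (x); }

	 becomes a single instruction via popcount_optab where available,
	 and otherwise a libgcc call such as __popcountsi2.  */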
6685 
6686     case BUILT_IN_STRLEN:
6687       target = expand_builtin_strlen (exp, target, target_mode);
6688       if (target)
6689 	return target;
6690       break;
6691 
6692     case BUILT_IN_STRCAT:
6693       target = expand_builtin_strcat (exp, target);
6694       if (target)
6695 	return target;
6696       break;
6697 
6698     case BUILT_IN_STRCPY:
6699       target = expand_builtin_strcpy (exp, target);
6700       if (target)
6701 	return target;
6702       break;
6703 
6704     case BUILT_IN_STRNCAT:
6705       target = expand_builtin_strncat (exp, target);
6706       if (target)
6707 	return target;
6708       break;
6709 
6710     case BUILT_IN_STRNCPY:
6711       target = expand_builtin_strncpy (exp, target);
6712       if (target)
6713 	return target;
6714       break;
6715 
6716     case BUILT_IN_STPCPY:
6717       target = expand_builtin_stpcpy (exp, target, mode);
6718       if (target)
6719 	return target;
6720       break;
6721 
6722     case BUILT_IN_MEMCPY:
6723       target = expand_builtin_memcpy (exp, target);
6724       if (target)
6725 	return target;
6726       break;
6727 
6728     case BUILT_IN_MEMPCPY:
6729       target = expand_builtin_mempcpy (exp, target, mode);
6730       if (target)
6731 	return target;
6732       break;
6733 
6734     case BUILT_IN_MEMSET:
6735       target = expand_builtin_memset (exp, target, mode);
6736       if (target)
6737 	return target;
6738       break;
6739 
6740     case BUILT_IN_BZERO:
6741       target = expand_builtin_bzero (exp);
6742       if (target)
6743 	return target;
6744       break;
6745 
6746     case BUILT_IN_STRCMP:
6747       target = expand_builtin_strcmp (exp, target);
6748       if (target)
6749 	return target;
6750       break;
6751 
6752     case BUILT_IN_STRNCMP:
6753       target = expand_builtin_strncmp (exp, target, mode);
6754       if (target)
6755 	return target;
6756       break;
6757 
6758     case BUILT_IN_BCMP:
6759     case BUILT_IN_MEMCMP:
6760     case BUILT_IN_MEMCMP_EQ:
6761       target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6762       if (target)
6763 	return target;
6764       if (fcode == BUILT_IN_MEMCMP_EQ)
6765 	{
6766 	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6767 	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6768 	}
6769       break;
6770 
6771     case BUILT_IN_SETJMP:
6772       /* This should have been lowered to the builtins below.  */
6773       gcc_unreachable ();
6774 
6775     case BUILT_IN_SETJMP_SETUP:
6776       /* __builtin_setjmp_setup is passed a pointer to an array of five words
6777           and the receiver label.  */
6778       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6779 	{
6780 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6781 				      VOIDmode, EXPAND_NORMAL);
6782 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6783 	  rtx_insn *label_r = label_rtx (label);
6784 
6785 	  /* This is copied from the handling of non-local gotos.  */
6786 	  expand_builtin_setjmp_setup (buf_addr, label_r);
6787 	  nonlocal_goto_handler_labels
6788 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
6789 				 nonlocal_goto_handler_labels);
6790 	  /* ??? Do not let expand_label treat us as such since we would
6791 	     not want to be both on the list of non-local labels and on
6792 	     the list of forced labels.  */
6793 	  FORCED_LABEL (label) = 0;
6794 	  return const0_rtx;
6795 	}
6796       break;
6797 
6798     case BUILT_IN_SETJMP_RECEIVER:
6799        /* __builtin_setjmp_receiver is passed the receiver label.  */
6800       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6801 	{
6802 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6803 	  rtx_insn *label_r = label_rtx (label);
6804 
6805 	  expand_builtin_setjmp_receiver (label_r);
6806 	  return const0_rtx;
6807 	}
6808       break;
6809 
6810       /* __builtin_longjmp is passed a pointer to an array of five words.
6811 	 It's similar to the C library longjmp function but works with
6812 	 __builtin_setjmp above.  */
6813     case BUILT_IN_LONGJMP:
6814       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6815 	{
6816 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6817 				      VOIDmode, EXPAND_NORMAL);
6818 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6819 
6820 	  if (value != const1_rtx)
6821 	    {
6822 	      error ("%<__builtin_longjmp%> second argument must be 1");
6823 	      return const0_rtx;
6824 	    }
6825 
6826 	  expand_builtin_longjmp (buf_addr, value);
6827 	  return const0_rtx;
6828 	}
6829       break;
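
      /* Illustrative example (hypothetical user code): the builtin pair
	 takes a bare five-word buffer rather than a jmp_buf, and the
	 longjmp value must be the literal 1:

	   void *buf[5];
	   if (__builtin_setjmp (buf) == 0)
	     __builtin_longjmp (buf, 1);

	 Any other second argument is rejected by the check above.  */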
6830 
6831     case BUILT_IN_NONLOCAL_GOTO:
6832       target = expand_builtin_nonlocal_goto (exp);
6833       if (target)
6834 	return target;
6835       break;
6836 
6837       /* This updates the setjmp buffer that is its argument with the value
6838 	 of the current stack pointer.  */
6839     case BUILT_IN_UPDATE_SETJMP_BUF:
6840       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6841 	{
6842 	  rtx buf_addr
6843 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
6844 
6845 	  expand_builtin_update_setjmp_buf (buf_addr);
6846 	  return const0_rtx;
6847 	}
6848       break;
6849 
6850     case BUILT_IN_TRAP:
6851       expand_builtin_trap ();
6852       return const0_rtx;
6853 
6854     case BUILT_IN_UNREACHABLE:
6855       expand_builtin_unreachable ();
6856       return const0_rtx;
6857 
6858     CASE_FLT_FN (BUILT_IN_SIGNBIT):
6859     case BUILT_IN_SIGNBITD32:
6860     case BUILT_IN_SIGNBITD64:
6861     case BUILT_IN_SIGNBITD128:
6862       target = expand_builtin_signbit (exp, target);
6863       if (target)
6864 	return target;
6865       break;
6866 
6867       /* Various hooks for the DWARF 2 __throw routine.  */
6868     case BUILT_IN_UNWIND_INIT:
6869       expand_builtin_unwind_init ();
6870       return const0_rtx;
6871     case BUILT_IN_DWARF_CFA:
6872       return virtual_cfa_rtx;
6873 #ifdef DWARF2_UNWIND_INFO
6874     case BUILT_IN_DWARF_SP_COLUMN:
6875       return expand_builtin_dwarf_sp_column ();
6876     case BUILT_IN_INIT_DWARF_REG_SIZES:
6877       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6878       return const0_rtx;
6879 #endif
6880     case BUILT_IN_FROB_RETURN_ADDR:
6881       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6882     case BUILT_IN_EXTRACT_RETURN_ADDR:
6883       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6884     case BUILT_IN_EH_RETURN:
6885       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6886 				CALL_EXPR_ARG (exp, 1));
6887       return const0_rtx;
6888     case BUILT_IN_EH_RETURN_DATA_REGNO:
6889       return expand_builtin_eh_return_data_regno (exp);
6890     case BUILT_IN_EXTEND_POINTER:
6891       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6892     case BUILT_IN_EH_POINTER:
6893       return expand_builtin_eh_pointer (exp);
6894     case BUILT_IN_EH_FILTER:
6895       return expand_builtin_eh_filter (exp);
6896     case BUILT_IN_EH_COPY_VALUES:
6897       return expand_builtin_eh_copy_values (exp);
6898 
6899     case BUILT_IN_VA_START:
6900       return expand_builtin_va_start (exp);
6901     case BUILT_IN_VA_END:
6902       return expand_builtin_va_end (exp);
6903     case BUILT_IN_VA_COPY:
6904       return expand_builtin_va_copy (exp);
6905     case BUILT_IN_EXPECT:
6906       return expand_builtin_expect (exp, target);
6907     case BUILT_IN_ASSUME_ALIGNED:
6908       return expand_builtin_assume_aligned (exp, target);
6909     case BUILT_IN_PREFETCH:
6910       expand_builtin_prefetch (exp);
6911       return const0_rtx;
6912 
6913     case BUILT_IN_INIT_TRAMPOLINE:
6914       return expand_builtin_init_trampoline (exp, true);
6915     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6916       return expand_builtin_init_trampoline (exp, false);
6917     case BUILT_IN_ADJUST_TRAMPOLINE:
6918       return expand_builtin_adjust_trampoline (exp);
6919 
6920     case BUILT_IN_INIT_DESCRIPTOR:
6921       return expand_builtin_init_descriptor (exp);
6922     case BUILT_IN_ADJUST_DESCRIPTOR:
6923       return expand_builtin_adjust_descriptor (exp);
6924 
6925     case BUILT_IN_FORK:
6926     case BUILT_IN_EXECL:
6927     case BUILT_IN_EXECV:
6928     case BUILT_IN_EXECLP:
6929     case BUILT_IN_EXECLE:
6930     case BUILT_IN_EXECVP:
6931     case BUILT_IN_EXECVE:
6932       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6933       if (target)
6934 	return target;
6935       break;
6936 
6937     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6938     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6939     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6940     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6941     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6942       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6943       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6944       if (target)
6945 	return target;
6946       break;
6947 
6948     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6949     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6950     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6951     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6952     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6953       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6954       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6955       if (target)
6956 	return target;
6957       break;
6958 
6959     case BUILT_IN_SYNC_FETCH_AND_OR_1:
6960     case BUILT_IN_SYNC_FETCH_AND_OR_2:
6961     case BUILT_IN_SYNC_FETCH_AND_OR_4:
6962     case BUILT_IN_SYNC_FETCH_AND_OR_8:
6963     case BUILT_IN_SYNC_FETCH_AND_OR_16:
6964       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6965       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6966       if (target)
6967 	return target;
6968       break;
6969 
6970     case BUILT_IN_SYNC_FETCH_AND_AND_1:
6971     case BUILT_IN_SYNC_FETCH_AND_AND_2:
6972     case BUILT_IN_SYNC_FETCH_AND_AND_4:
6973     case BUILT_IN_SYNC_FETCH_AND_AND_8:
6974     case BUILT_IN_SYNC_FETCH_AND_AND_16:
6975       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6976       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6977       if (target)
6978 	return target;
6979       break;
6980 
6981     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6982     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6983     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6984     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6985     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6986       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6987       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6988       if (target)
6989 	return target;
6990       break;
6991 
6992     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6993     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6994     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6995     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6996     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6997       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6998       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6999       if (target)
7000 	return target;
7001       break;
7002 
7003     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7004     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7005     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7006     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7007     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7008       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7009       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7010       if (target)
7011 	return target;
7012       break;
7013 
7014     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7015     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7016     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7017     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7018     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7019       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7020       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7021       if (target)
7022 	return target;
7023       break;
7024 
7025     case BUILT_IN_SYNC_OR_AND_FETCH_1:
7026     case BUILT_IN_SYNC_OR_AND_FETCH_2:
7027     case BUILT_IN_SYNC_OR_AND_FETCH_4:
7028     case BUILT_IN_SYNC_OR_AND_FETCH_8:
7029     case BUILT_IN_SYNC_OR_AND_FETCH_16:
7030       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7031       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7032       if (target)
7033 	return target;
7034       break;
7035 
7036     case BUILT_IN_SYNC_AND_AND_FETCH_1:
7037     case BUILT_IN_SYNC_AND_AND_FETCH_2:
7038     case BUILT_IN_SYNC_AND_AND_FETCH_4:
7039     case BUILT_IN_SYNC_AND_AND_FETCH_8:
7040     case BUILT_IN_SYNC_AND_AND_FETCH_16:
7041       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7042       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7043       if (target)
7044 	return target;
7045       break;
7046 
7047     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7048     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7049     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7050     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7051     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7052       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7053       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7054       if (target)
7055 	return target;
7056       break;
7057 
7058     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7059     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7060     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7061     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7062     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7063       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7064       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7065       if (target)
7066 	return target;
7067       break;
7068 
7069     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7070     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7071     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7072     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7073     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7074       if (mode == VOIDmode)
7075 	mode = TYPE_MODE (boolean_type_node);
7076       if (!target || !register_operand (target, mode))
7077 	target = gen_reg_rtx (mode);
7078 
7079       mode = get_builtin_sync_mode
7080 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7081       target = expand_builtin_compare_and_swap (mode, exp, true, target);
7082       if (target)
7083 	return target;
7084       break;
7085 
7086     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7087     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7088     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7089     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7090     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7091       mode = get_builtin_sync_mode
7092 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7093       target = expand_builtin_compare_and_swap (mode, exp, false, target);
7094       if (target)
7095 	return target;
7096       break;
7097 
7098     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7099     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7100     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7101     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7102     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7103       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7104       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7105       if (target)
7106 	return target;
7107       break;
7108 
7109     case BUILT_IN_SYNC_LOCK_RELEASE_1:
7110     case BUILT_IN_SYNC_LOCK_RELEASE_2:
7111     case BUILT_IN_SYNC_LOCK_RELEASE_4:
7112     case BUILT_IN_SYNC_LOCK_RELEASE_8:
7113     case BUILT_IN_SYNC_LOCK_RELEASE_16:
7114       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7115       expand_builtin_sync_lock_release (mode, exp);
7116       return const0_rtx;
7117 
7118     case BUILT_IN_SYNC_SYNCHRONIZE:
7119       expand_builtin_sync_synchronize ();
7120       return const0_rtx;
7121 
7122     case BUILT_IN_ATOMIC_EXCHANGE_1:
7123     case BUILT_IN_ATOMIC_EXCHANGE_2:
7124     case BUILT_IN_ATOMIC_EXCHANGE_4:
7125     case BUILT_IN_ATOMIC_EXCHANGE_8:
7126     case BUILT_IN_ATOMIC_EXCHANGE_16:
7127       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7128       target = expand_builtin_atomic_exchange (mode, exp, target);
7129       if (target)
7130 	return target;
7131       break;
7132 
7133     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7134     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7135     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7136     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7137     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7138       {
7139 	unsigned int nargs, z;
7140 	vec<tree, va_gc> *vec;
7141 
7142 	mode
7143 	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7144 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7145 	if (target)
7146 	  return target;
7147 
7148 	/* If this is turned into an external library call, the weak parameter
7149 	   must be dropped to match the expected parameter list.  */
7150 	nargs = call_expr_nargs (exp);
7151 	vec_alloc (vec, nargs - 1);
7152 	for (z = 0; z < 3; z++)
7153 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
7154 	/* Skip the boolean weak parameter.  */
7155 	for (z = 4; z < 6; z++)
7156 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
7157 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7158 	break;
7159       }
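
      /* Illustrative note: the user-level form

	   __atomic_compare_exchange_n (ptr, expected, desired, weak,
					success_order, failure_order)

	 has six arguments while the libatomic entry point has five; the
	 loops above rebuild the CALL_EXPR without argument 3 (WEAK) so a
	 fallback library call matches the library prototype.  */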
7160 
7161     case BUILT_IN_ATOMIC_LOAD_1:
7162     case BUILT_IN_ATOMIC_LOAD_2:
7163     case BUILT_IN_ATOMIC_LOAD_4:
7164     case BUILT_IN_ATOMIC_LOAD_8:
7165     case BUILT_IN_ATOMIC_LOAD_16:
7166       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7167       target = expand_builtin_atomic_load (mode, exp, target);
7168       if (target)
7169 	return target;
7170       break;
7171 
7172     case BUILT_IN_ATOMIC_STORE_1:
7173     case BUILT_IN_ATOMIC_STORE_2:
7174     case BUILT_IN_ATOMIC_STORE_4:
7175     case BUILT_IN_ATOMIC_STORE_8:
7176     case BUILT_IN_ATOMIC_STORE_16:
7177       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7178       target = expand_builtin_atomic_store (mode, exp);
7179       if (target)
7180 	return const0_rtx;
7181       break;
7182 
7183     case BUILT_IN_ATOMIC_ADD_FETCH_1:
7184     case BUILT_IN_ATOMIC_ADD_FETCH_2:
7185     case BUILT_IN_ATOMIC_ADD_FETCH_4:
7186     case BUILT_IN_ATOMIC_ADD_FETCH_8:
7187     case BUILT_IN_ATOMIC_ADD_FETCH_16:
7188       {
7189 	enum built_in_function lib;
7190 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7191 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7192 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7193 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7194 						 ignore, lib);
7195 	if (target)
7196 	  return target;
7197 	break;
7198       }
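
      /* Illustrative note: when no inline expansion is possible, an
	 op-fetch builtin falls back to its fetch-op library sibling;
	 e.g. __atomic_add_fetch_4 (p, v, order) is emitted as
	 __atomic_fetch_add_4 (p, v, order) + v.  The LIB computation in
	 each case picks that sibling by offsetting into the builtin
	 enum, relying on the two families being laid out in the same
	 order.  */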
7199     case BUILT_IN_ATOMIC_SUB_FETCH_1:
7200     case BUILT_IN_ATOMIC_SUB_FETCH_2:
7201     case BUILT_IN_ATOMIC_SUB_FETCH_4:
7202     case BUILT_IN_ATOMIC_SUB_FETCH_8:
7203     case BUILT_IN_ATOMIC_SUB_FETCH_16:
7204       {
7205 	enum built_in_function lib;
7206 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7207 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7208 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7209 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7210 						 ignore, lib);
7211 	if (target)
7212 	  return target;
7213 	break;
7214       }
7215     case BUILT_IN_ATOMIC_AND_FETCH_1:
7216     case BUILT_IN_ATOMIC_AND_FETCH_2:
7217     case BUILT_IN_ATOMIC_AND_FETCH_4:
7218     case BUILT_IN_ATOMIC_AND_FETCH_8:
7219     case BUILT_IN_ATOMIC_AND_FETCH_16:
7220       {
7221 	enum built_in_function lib;
7222 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7223 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7224 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7225 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7226 						 ignore, lib);
7227 	if (target)
7228 	  return target;
7229 	break;
7230       }
7231     case BUILT_IN_ATOMIC_NAND_FETCH_1:
7232     case BUILT_IN_ATOMIC_NAND_FETCH_2:
7233     case BUILT_IN_ATOMIC_NAND_FETCH_4:
7234     case BUILT_IN_ATOMIC_NAND_FETCH_8:
7235     case BUILT_IN_ATOMIC_NAND_FETCH_16:
7236       {
7237 	enum built_in_function lib;
7238 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7239 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7240 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7241 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7242 						 ignore, lib);
7243 	if (target)
7244 	  return target;
7245 	break;
7246       }
7247     case BUILT_IN_ATOMIC_XOR_FETCH_1:
7248     case BUILT_IN_ATOMIC_XOR_FETCH_2:
7249     case BUILT_IN_ATOMIC_XOR_FETCH_4:
7250     case BUILT_IN_ATOMIC_XOR_FETCH_8:
7251     case BUILT_IN_ATOMIC_XOR_FETCH_16:
7252       {
7253 	enum built_in_function lib;
7254 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7255 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7256 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7257 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7258 						 ignore, lib);
7259 	if (target)
7260 	  return target;
7261 	break;
7262       }
7263     case BUILT_IN_ATOMIC_OR_FETCH_1:
7264     case BUILT_IN_ATOMIC_OR_FETCH_2:
7265     case BUILT_IN_ATOMIC_OR_FETCH_4:
7266     case BUILT_IN_ATOMIC_OR_FETCH_8:
7267     case BUILT_IN_ATOMIC_OR_FETCH_16:
7268       {
7269 	enum built_in_function lib;
7270 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7271 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7272 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7273 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7274 						 ignore, lib);
7275 	if (target)
7276 	  return target;
7277 	break;
7278       }
7279     case BUILT_IN_ATOMIC_FETCH_ADD_1:
7280     case BUILT_IN_ATOMIC_FETCH_ADD_2:
7281     case BUILT_IN_ATOMIC_FETCH_ADD_4:
7282     case BUILT_IN_ATOMIC_FETCH_ADD_8:
7283     case BUILT_IN_ATOMIC_FETCH_ADD_16:
7284       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7285       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7286 					       ignore, BUILT_IN_NONE);
7287       if (target)
7288 	return target;
7289       break;
7290 
7291     case BUILT_IN_ATOMIC_FETCH_SUB_1:
7292     case BUILT_IN_ATOMIC_FETCH_SUB_2:
7293     case BUILT_IN_ATOMIC_FETCH_SUB_4:
7294     case BUILT_IN_ATOMIC_FETCH_SUB_8:
7295     case BUILT_IN_ATOMIC_FETCH_SUB_16:
7296       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7297       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7298 					       ignore, BUILT_IN_NONE);
7299       if (target)
7300 	return target;
7301       break;
7302 
7303     case BUILT_IN_ATOMIC_FETCH_AND_1:
7304     case BUILT_IN_ATOMIC_FETCH_AND_2:
7305     case BUILT_IN_ATOMIC_FETCH_AND_4:
7306     case BUILT_IN_ATOMIC_FETCH_AND_8:
7307     case BUILT_IN_ATOMIC_FETCH_AND_16:
7308       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7309       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7310 					       ignore, BUILT_IN_NONE);
7311       if (target)
7312 	return target;
7313       break;
7314 
7315     case BUILT_IN_ATOMIC_FETCH_NAND_1:
7316     case BUILT_IN_ATOMIC_FETCH_NAND_2:
7317     case BUILT_IN_ATOMIC_FETCH_NAND_4:
7318     case BUILT_IN_ATOMIC_FETCH_NAND_8:
7319     case BUILT_IN_ATOMIC_FETCH_NAND_16:
7320       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7321       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7322 					       ignore, BUILT_IN_NONE);
7323       if (target)
7324 	return target;
7325       break;
7326 
7327     case BUILT_IN_ATOMIC_FETCH_XOR_1:
7328     case BUILT_IN_ATOMIC_FETCH_XOR_2:
7329     case BUILT_IN_ATOMIC_FETCH_XOR_4:
7330     case BUILT_IN_ATOMIC_FETCH_XOR_8:
7331     case BUILT_IN_ATOMIC_FETCH_XOR_16:
7332       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7333       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7334 					       ignore, BUILT_IN_NONE);
7335       if (target)
7336 	return target;
7337       break;
7338 
7339     case BUILT_IN_ATOMIC_FETCH_OR_1:
7340     case BUILT_IN_ATOMIC_FETCH_OR_2:
7341     case BUILT_IN_ATOMIC_FETCH_OR_4:
7342     case BUILT_IN_ATOMIC_FETCH_OR_8:
7343     case BUILT_IN_ATOMIC_FETCH_OR_16:
7344       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7345       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7346 					       ignore, BUILT_IN_NONE);
7347       if (target)
7348 	return target;
7349       break;
7350 
7351     case BUILT_IN_ATOMIC_TEST_AND_SET:
7352       return expand_builtin_atomic_test_and_set (exp, target);
7353 
7354     case BUILT_IN_ATOMIC_CLEAR:
7355       return expand_builtin_atomic_clear (exp);
7356 
7357     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7358       return expand_builtin_atomic_always_lock_free (exp);
7359 
7360     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7361       target = expand_builtin_atomic_is_lock_free (exp);
7362       if (target)
7363         return target;
7364       break;
7365 
7366     case BUILT_IN_ATOMIC_THREAD_FENCE:
7367       expand_builtin_atomic_thread_fence (exp);
7368       return const0_rtx;
7369 
7370     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7371       expand_builtin_atomic_signal_fence (exp);
7372       return const0_rtx;
7373 
7374     case BUILT_IN_OBJECT_SIZE:
7375       return expand_builtin_object_size (exp);
7376 
7377     case BUILT_IN_MEMCPY_CHK:
7378     case BUILT_IN_MEMPCPY_CHK:
7379     case BUILT_IN_MEMMOVE_CHK:
7380     case BUILT_IN_MEMSET_CHK:
7381       target = expand_builtin_memory_chk (exp, target, mode, fcode);
7382       if (target)
7383 	return target;
7384       break;
7385 
7386     case BUILT_IN_STRCPY_CHK:
7387     case BUILT_IN_STPCPY_CHK:
7388     case BUILT_IN_STRNCPY_CHK:
7389     case BUILT_IN_STPNCPY_CHK:
7390     case BUILT_IN_STRCAT_CHK:
7391     case BUILT_IN_STRNCAT_CHK:
7392     case BUILT_IN_SNPRINTF_CHK:
7393     case BUILT_IN_VSNPRINTF_CHK:
7394       maybe_emit_chk_warning (exp, fcode);
7395       break;
7396 
7397     case BUILT_IN_SPRINTF_CHK:
7398     case BUILT_IN_VSPRINTF_CHK:
7399       maybe_emit_sprintf_chk_warning (exp, fcode);
7400       break;
7401 
7402     case BUILT_IN_FREE:
7403       if (warn_free_nonheap_object)
7404 	maybe_emit_free_warning (exp);
7405       break;
7406 
7407     case BUILT_IN_THREAD_POINTER:
7408       return expand_builtin_thread_pointer (exp, target);
7409 
7410     case BUILT_IN_SET_THREAD_POINTER:
7411       expand_builtin_set_thread_pointer (exp);
7412       return const0_rtx;
7413 
7414     case BUILT_IN_CILK_DETACH:
7415       expand_builtin_cilk_detach (exp);
7416       return const0_rtx;
7417 
7418     case BUILT_IN_CILK_POP_FRAME:
7419       expand_builtin_cilk_pop_frame (exp);
7420       return const0_rtx;
7421 
7422     case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7423     case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7424     case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7425     case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7426     case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7427     case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7428     case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7429     case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7430     case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7431     case BUILT_IN_CHKP_GET_PTR_LBOUND:
7432     case BUILT_IN_CHKP_GET_PTR_UBOUND:
7433       /* We allow user CHKP builtins if Pointer Bounds
7434 	 Checker is off.  */
7435       if (!chkp_function_instrumented_p (current_function_decl))
7436 	{
7437 	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7438 	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7439 	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7440 	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7441 	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7442 	    return expand_normal (CALL_EXPR_ARG (exp, 0));
7443 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7444 	    return expand_normal (size_zero_node);
7445 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7446 	    return expand_normal (size_int (-1));
7447 	  else
7448 	    return const0_rtx;
7449 	}
7450       /* FALLTHROUGH */
7451 
7452     case BUILT_IN_CHKP_BNDMK:
7453     case BUILT_IN_CHKP_BNDSTX:
7454     case BUILT_IN_CHKP_BNDCL:
7455     case BUILT_IN_CHKP_BNDCU:
7456     case BUILT_IN_CHKP_BNDLDX:
7457     case BUILT_IN_CHKP_BNDRET:
7458     case BUILT_IN_CHKP_INTERSECT:
7459     case BUILT_IN_CHKP_NARROW:
7460     case BUILT_IN_CHKP_EXTRACT_LOWER:
7461     case BUILT_IN_CHKP_EXTRACT_UPPER:
7462       /* A software implementation of Pointer Bounds Checker is not yet
7463 	 implemented; target support is required.  */
7464       error ("Your target platform does not support -fcheck-pointer-bounds");
7465       break;
7466 
7467     case BUILT_IN_ACC_ON_DEVICE:
7468       /* Do library call, if we failed to expand the builtin when
7469 	 folding.  */
7470       break;
7471 
7472     default:	/* Just do a library call for an unknown builtin.  */
7473       break;
7474     }
7475 
7476   /* The switch statement above can drop through to cause the function
7477      to be called normally.  */
7478   return expand_call (exp, target, ignore);
7479 }
7480 
7481 /* Similar to expand_builtin but is used for instrumented calls.  */
7482 
7483 rtx
7484 expand_builtin_with_bounds (tree exp, rtx target,
7485 			    rtx subtarget ATTRIBUTE_UNUSED,
7486 			    machine_mode mode, int ignore)
7487 {
7488   tree fndecl = get_callee_fndecl (exp);
7489   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7490 
7491   gcc_assert (CALL_WITH_BOUNDS_P (exp));
7492 
7493   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7494     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7495 
7496   gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7497 	      && fcode < END_CHKP_BUILTINS);
7498 
7499   switch (fcode)
7500     {
7501     case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7502       target = expand_builtin_memcpy_with_bounds (exp, target);
7503       if (target)
7504 	return target;
7505       break;
7506 
7507     case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7508       target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7509       if (target)
7510 	return target;
7511       break;
7512 
7513     case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7514       target = expand_builtin_memset_with_bounds (exp, target, mode);
7515       if (target)
7516 	return target;
7517       break;
7518 
7519     default:
7520       break;
7521     }
7522 
7523   /* The switch statement above can drop through to cause the function
7524      to be called normally.  */
7525   return expand_call (exp, target, ignore);
7526 }
7527 
7528 /* Determine whether a tree node represents a call to a built-in
7529    function.  If the tree T is a call to a built-in function with
7530    the right number of arguments of the appropriate types, return
7531    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7532    Otherwise the return value is END_BUILTINS.  */
7533 
7534 enum built_in_function
7535 builtin_mathfn_code (const_tree t)
7536 {
7537   const_tree fndecl, arg, parmlist;
7538   const_tree argtype, parmtype;
7539   const_call_expr_arg_iterator iter;
7540 
7541   if (TREE_CODE (t) != CALL_EXPR)
7542     return END_BUILTINS;
7543 
7544   fndecl = get_callee_fndecl (t);
7545   if (fndecl == NULL_TREE
7546       || TREE_CODE (fndecl) != FUNCTION_DECL
7547       || ! DECL_BUILT_IN (fndecl)
7548       || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7549     return END_BUILTINS;
7550 
7551   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7552   init_const_call_expr_arg_iterator (t, &iter);
7553   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7554     {
7555       /* If a function doesn't take a variable number of arguments,
7556 	 the last element in the list will have type `void'.  */
7557       parmtype = TREE_VALUE (parmlist);
7558       if (VOID_TYPE_P (parmtype))
7559 	{
7560 	  if (more_const_call_expr_args_p (&iter))
7561 	    return END_BUILTINS;
7562 	  return DECL_FUNCTION_CODE (fndecl);
7563 	}
7564 
7565       if (! more_const_call_expr_args_p (&iter))
7566 	return END_BUILTINS;
7567 
7568       arg = next_const_call_expr_arg (&iter);
7569       argtype = TREE_TYPE (arg);
7570 
7571       if (SCALAR_FLOAT_TYPE_P (parmtype))
7572 	{
7573 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
7574 	    return END_BUILTINS;
7575 	}
7576       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7577 	{
7578 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
7579 	    return END_BUILTINS;
7580 	}
7581       else if (POINTER_TYPE_P (parmtype))
7582 	{
7583 	  if (! POINTER_TYPE_P (argtype))
7584 	    return END_BUILTINS;
7585 	}
7586       else if (INTEGRAL_TYPE_P (parmtype))
7587 	{
7588 	  if (! INTEGRAL_TYPE_P (argtype))
7589 	    return END_BUILTINS;
7590 	}
7591       else
7592 	return END_BUILTINS;
7593     }
7594 
7595   /* Variable-length argument list.  */
7596   return DECL_FUNCTION_CODE (fndecl);
7597 }
7598 
7599 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7600    evaluate to a constant.  */
7601 
7602 static tree
7603 fold_builtin_constant_p (tree arg)
7604 {
7605   /* We return 1 for a numeric type that's known to be a constant
7606      value at compile-time or for an aggregate type that's a
7607      literal constant.  */
7608   STRIP_NOPS (arg);
7609 
7610   /* If we know this is a constant, return the constant one.  */
7611   if (CONSTANT_CLASS_P (arg)
7612       || (TREE_CODE (arg) == CONSTRUCTOR
7613 	  && TREE_CONSTANT (arg)))
7614     return integer_one_node;
7615   if (TREE_CODE (arg) == ADDR_EXPR)
7616     {
7617        tree op = TREE_OPERAND (arg, 0);
7618        if (TREE_CODE (op) == STRING_CST
7619 	   || (TREE_CODE (op) == ARRAY_REF
7620 	       && integer_zerop (TREE_OPERAND (op, 1))
7621 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7622 	 return integer_one_node;
7623     }
7624 
7625   /* If this expression has side effects, show we don't know it to be a
7626      constant.  Likewise if it's a pointer or aggregate type since in
7627      those cases we only want literals, since those are only optimized
7628      when generating RTL, not later.
7629      And finally, if we are compiling an initializer, not code, we
7630      need to return a definite result now; there's not going to be any
7631      more optimization done.  */
7632   if (TREE_SIDE_EFFECTS (arg)
7633       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7634       || POINTER_TYPE_P (TREE_TYPE (arg))
7635       || cfun == 0
7636       || folding_initializer
7637       || force_folding_builtin_constant_p)
7638     return integer_zero_node;
7639 
7640   return NULL_TREE;
7641 }
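
/* Illustrative examples (hypothetical user code) of the cases above:

     __builtin_constant_p (42)     folds to 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  folds to 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (p)      folds to 0 (pointer type, literals only)

   and NULL_TREE keeps the call alive so later passes may still prove the
   argument constant.  */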
7642 
7643 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7644    return it as a truthvalue.  */
7645 
7646 static tree
7647 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7648 				tree predictor)
7649 {
7650   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7651 
7652   fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7653   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7654   ret_type = TREE_TYPE (TREE_TYPE (fn));
7655   pred_type = TREE_VALUE (arg_types);
7656   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7657 
7658   pred = fold_convert_loc (loc, pred_type, pred);
7659   expected = fold_convert_loc (loc, expected_type, expected);
7660   call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7661 				   predictor);
7662 
7663   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7664 		 build_int_cst (ret_type, 0));
7665 }
7666 
7667 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.  Return
7668    NULL_TREE if no simplification is possible.  */
7669 
7670 tree
7671 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7672 {
7673   tree inner, fndecl, inner_arg0;
7674   enum tree_code code;
7675 
7676   /* Distribute the expected value over short-circuiting operators.
7677      See through the cast from truthvalue_type_node to long.  */
7678   inner_arg0 = arg0;
7679   while (CONVERT_EXPR_P (inner_arg0)
7680 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7681 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7682     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7683 
7684   /* If this is a builtin_expect within a builtin_expect keep the
7685      inner one.  See through a comparison against a constant.  It
7686      might have been added to create a truthvalue.  */
7687   inner = inner_arg0;
7688 
7689   if (COMPARISON_CLASS_P (inner)
7690       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7691     inner = TREE_OPERAND (inner, 0);
7692 
7693   if (TREE_CODE (inner) == CALL_EXPR
7694       && (fndecl = get_callee_fndecl (inner))
7695       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7696       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7697     return arg0;
7698 
7699   inner = inner_arg0;
7700   code = TREE_CODE (inner);
7701   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7702     {
7703       tree op0 = TREE_OPERAND (inner, 0);
7704       tree op1 = TREE_OPERAND (inner, 1);
7705 
7706       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7707       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7708       inner = build2 (code, TREE_TYPE (inner), op0, op1);
7709 
7710       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7711     }
7712 
7713   /* If the argument isn't invariant then there's nothing else we can do.  */
7714   if (!TREE_CONSTANT (inner_arg0))
7715     return NULL_TREE;
7716 
7717   /* If we expect that a comparison against the argument will fold to
7718      a constant return the constant.  In practice, this means a true
7719      constant or the address of a non-weak symbol.  */
7720   inner = inner_arg0;
7721   STRIP_NOPS (inner);
7722   if (TREE_CODE (inner) == ADDR_EXPR)
7723     {
7724       do
7725 	{
7726 	  inner = TREE_OPERAND (inner, 0);
7727 	}
7728       while (TREE_CODE (inner) == COMPONENT_REF
7729 	     || TREE_CODE (inner) == ARRAY_REF);
7730       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7731 	return NULL_TREE;
7732     }
7733 
7734   /* Otherwise, ARG0 already has the proper type for the return value.  */
7735   return arg0;
7736 }
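
/* Illustrative example (hypothetical user code): for

     if (__builtin_expect (a && b, 1)) ...

   the TRUTH_ANDIF_EXPR case above distributes the hint, yielding roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own prediction.  */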
7737 
7738 /* Fold a call to __builtin_classify_type with argument ARG.  */
7739 
7740 static tree
7741 fold_builtin_classify_type (tree arg)
7742 {
7743   if (arg == 0)
7744     return build_int_cst (integer_type_node, no_type_class);
7745 
7746   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7747 }
7748 
7749 /* Fold a call to __builtin_strlen with argument ARG.  */
7750 
7751 static tree
7752 fold_builtin_strlen (location_t loc, tree type, tree arg)
7753 {
7754   if (!validate_arg (arg, POINTER_TYPE))
7755     return NULL_TREE;
7756   else
7757     {
7758       tree len = c_strlen (arg, 0);
7759 
7760       if (len)
7761 	return fold_convert_loc (loc, type, len);
7762 
7763       return NULL_TREE;
7764     }
7765 }
7766 
7767 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
7768 
7769 static tree
7770 fold_builtin_inf (location_t loc, tree type, int warn)
7771 {
7772   REAL_VALUE_TYPE real;
7773 
7774   /* __builtin_inff is intended to be usable to define INFINITY on all
7775      targets.  If an infinity is not available, INFINITY expands "to a
7776      positive constant of type float that overflows at translation
7777      time", footnote "In this case, using INFINITY will violate the
7778      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7779      Thus we pedwarn to ensure this constraint violation is
7780      diagnosed.  */
7781   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7782     pedwarn (loc, 0, "target format does not support infinity");
7783 
7784   real_inf (&real);
7785   return build_real (type, real);
7786 }
7787 
7788 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
7789    NULL_TREE if no simplification can be made.  */
7790 
7791 static tree
7792 fold_builtin_sincos (location_t loc,
7793 		     tree arg0, tree arg1, tree arg2)
7794 {
7795   tree type;
7796   tree fndecl, call = NULL_TREE;
7797 
7798   if (!validate_arg (arg0, REAL_TYPE)
7799       || !validate_arg (arg1, POINTER_TYPE)
7800       || !validate_arg (arg2, POINTER_TYPE))
7801     return NULL_TREE;
7802 
7803   type = TREE_TYPE (arg0);
7804 
7805   /* Calculate the result when the argument is a constant.  */
7806   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7807   if (fn == END_BUILTINS)
7808     return NULL_TREE;
7809 
7810   /* Canonicalize sincos to cexpi.  */
7811   if (TREE_CODE (arg0) == REAL_CST)
7812     {
7813       tree complex_type = build_complex_type (type);
7814       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7815     }
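  /* Otherwise (non-constant argument, or the constant folding above
     failed), canonicalize the sincos call to a call to cexpi.  */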
7816   if (!call)
7817     {
7818       if (!targetm.libc_has_function (function_c99_math_complex)
7819 	  || !builtin_decl_implicit_p (fn))
7820 	return NULL_TREE;
7821       fndecl = builtin_decl_explicit (fn);
7822       call = build_call_expr_loc (loc, fndecl, 1, arg0);
7823       call = builtin_save_expr (call);
7824     }
7825 
7826   return build2 (COMPOUND_EXPR, void_type_node,
7827 		 build2 (MODIFY_EXPR, void_type_node,
7828 			 build_fold_indirect_ref_loc (loc, arg1),
7829 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7830 		 build2 (MODIFY_EXPR, void_type_node,
7831 			 build_fold_indirect_ref_loc (loc, arg2),
7832 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7833 }
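
/* For illustration, the canonical form built above is roughly

     t = cexpi (x); *sinp = __imag__ t; *cosp = __real__ t;

   for a call sincos (x, sinp, cosp), with the cexpi call wrapped in a
   SAVE_EXPR so it is evaluated only once (names illustrative).  */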
7834 
7835 /* Fold a function call to builtin memcmp with arguments ARG1, ARG2
7836    and LEN.  Return NULL_TREE if no simplification can be made.  */
7837 
7838 static tree
7839 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7840 {
7841   if (!validate_arg (arg1, POINTER_TYPE)
7842       || !validate_arg (arg2, POINTER_TYPE)
7843       || !validate_arg (len, INTEGER_TYPE))
7844     return NULL_TREE;
7845 
7846   /* If the LEN parameter is zero, return zero.  */
7847   if (integer_zerop (len))
7848     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7849 			      arg1, arg2);
7850 
7851   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
7852   if (operand_equal_p (arg1, arg2, 0))
7853     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7854 
7855   /* If the LEN parameter is one, return an expression corresponding to
7856      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7857   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7858     {
7859       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7860       tree cst_uchar_ptr_node
7861 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7862 
7863       tree ind1
7864 	= fold_convert_loc (loc, integer_type_node,
7865 			    build1 (INDIRECT_REF, cst_uchar_node,
7866 				    fold_convert_loc (loc,
7867 						      cst_uchar_ptr_node,
7868 						      arg1)));
7869       tree ind2
7870 	= fold_convert_loc (loc, integer_type_node,
7871 			    build1 (INDIRECT_REF, cst_uchar_node,
7872 				    fold_convert_loc (loc,
7873 						      cst_uchar_ptr_node,
7874 						      arg2)));
7875       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7876     }
7877 
7878   return NULL_TREE;
7879 }
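
/* Examples of the memcmp folds above (illustrative):

     memcmp (p, q, 0)  ->  0, with p and q still evaluated
     memcmp (p, p, n)  ->  0, with n still evaluated
     memcmp (p, q, 1)  ->  *(const unsigned char *) p
                           - *(const unsigned char *) q  */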
7880 
7881 /* Fold a call to builtin isascii with argument ARG.  */
7882 
7883 static tree
7884 fold_builtin_isascii (location_t loc, tree arg)
7885 {
7886   if (!validate_arg (arg, INTEGER_TYPE))
7887     return NULL_TREE;
7888   else
7889     {
7890       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
7891       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7892 			 build_int_cst (integer_type_node,
7893 					~ (unsigned HOST_WIDE_INT) 0x7f));
7894       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7895 			      arg, integer_zero_node);
7896     }
7897 }
7898 
7899 /* Fold a call to builtin toascii with argument ARG.  */
7900 
7901 static tree
7902 fold_builtin_toascii (location_t loc, tree arg)
7903 {
7904   if (!validate_arg (arg, INTEGER_TYPE))
7905     return NULL_TREE;
7906 
7907   /* Transform toascii(c) -> (c & 0x7f).  */
7908   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7909 			  build_int_cst (integer_type_node, 0x7f));
7910 }
7911 
7912 /* Fold a call to builtin isdigit with argument ARG.  */
7913 
7914 static tree
7915 fold_builtin_isdigit (location_t loc, tree arg)
7916 {
7917   if (!validate_arg (arg, INTEGER_TYPE))
7918     return NULL_TREE;
7919   else
7920     {
7921       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
7922       /* According to the C standard, isdigit is unaffected by locale.
7923 	 However, it definitely is affected by the target character set.  */
7924       unsigned HOST_WIDE_INT target_digit0
7925 	= lang_hooks.to_target_charset ('0');
7926 
7927       if (target_digit0 == 0)
7928 	return NULL_TREE;
7929 
7930       arg = fold_convert_loc (loc, unsigned_type_node, arg);
7931       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7932 			 build_int_cst (unsigned_type_node, target_digit0));
7933       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7934 			  build_int_cst (unsigned_type_node, 9));
7935     }
7936 }
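
/* E.g. for a target character set where '0' is 0x30 (as in ASCII),
   isdigit (c) becomes (unsigned) c - 0x30 <= 9, a single unsigned
   comparison instead of a library call (illustrative).  */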
7937 
7938 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
7939 
7940 static tree
7941 fold_builtin_fabs (location_t loc, tree arg, tree type)
7942 {
7943   if (!validate_arg (arg, REAL_TYPE))
7944     return NULL_TREE;
7945 
7946   arg = fold_convert_loc (loc, type, arg);
7947   return fold_build1_loc (loc, ABS_EXPR, type, arg);
7948 }
7949 
7950 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
7951 
7952 static tree
7953 fold_builtin_abs (location_t loc, tree arg, tree type)
7954 {
7955   if (!validate_arg (arg, INTEGER_TYPE))
7956     return NULL_TREE;
7957 
7958   arg = fold_convert_loc (loc, type, arg);
7959   return fold_build1_loc (loc, ABS_EXPR, type, arg);
7960 }
7961 
7962 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */
7963 
7964 static tree
7965 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7966 {
7967   /* ??? Only expand to FMA_EXPR if it's directly supported.  */
7968   if (validate_arg (arg0, REAL_TYPE)
7969       && validate_arg (arg1, REAL_TYPE)
7970       && validate_arg (arg2, REAL_TYPE)
7971       && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7972     return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7973 
7974   return NULL_TREE;
7975 }
7976 
7977 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
7978 
7979 static tree
7980 fold_builtin_carg (location_t loc, tree arg, tree type)
7981 {
7982   if (validate_arg (arg, COMPLEX_TYPE)
7983       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7984     {
7985       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7986 
7987       if (atan2_fn)
7988         {
7989 	  tree new_arg = builtin_save_expr (arg);
7990 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7991 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7992 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7993 	}
7994     }
7995 
7996   return NULL_TREE;
7997 }
7998 
7999 /* Fold a call to builtin frexp; we can assume the base is 2.  */
8000 
8001 static tree
8002 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8003 {
8004   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8005     return NULL_TREE;
8006 
8007   STRIP_NOPS (arg0);
8008 
8009   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8010     return NULL_TREE;
8011 
8012   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8013 
8014   /* Proceed if a valid pointer type was passed in.  */
8015   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8016     {
8017       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8018       tree frac, exp;
8019 
8020       switch (value->cl)
8021       {
8022       case rvc_zero:
8023 	/* For +-0, return (*exp = 0, +-0).  */
8024 	exp = integer_zero_node;
8025 	frac = arg0;
8026 	break;
8027       case rvc_nan:
8028       case rvc_inf:
8029 	/* For +-NaN or +-Inf, *exp is unspecified; return arg0.  */
8030 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
8031       case rvc_normal:
8032 	{
8033 	  /* Since the frexp function always expects base 2, and in
8034 	     GCC normalized significands are already in the range
8035 	     [0.5, 1.0), we have exactly what frexp wants.  */
8036 	  REAL_VALUE_TYPE frac_rvt = *value;
8037 	  SET_REAL_EXP (&frac_rvt, 0);
8038 	  frac = build_real (rettype, frac_rvt);
8039 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
8040 	}
8041 	break;
8042       default:
8043 	gcc_unreachable ();
8044       }
8045 
8046       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8047       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8048       TREE_SIDE_EFFECTS (arg1) = 1;
8049       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8050     }
8051 
8052   return NULL_TREE;
8053 }
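
/* Worked example of the constant folding above (illustrative):
   frexp (8.0, &e) folds to (*e = 4, 0.5), since 8.0 == 0.5 * 2**4
   with the significand normalized to [0.5, 1.0).  */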
8054 
8055 /* Fold a call to builtin modf.  */
8056 
8057 static tree
8058 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8059 {
8060   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8061     return NULL_TREE;
8062 
8063   STRIP_NOPS (arg0);
8064 
8065   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8066     return NULL_TREE;
8067 
8068   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8069 
8070   /* Proceed if a valid pointer type was passed in.  */
8071   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8072     {
8073       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8074       REAL_VALUE_TYPE trunc, frac;
8075 
8076       switch (value->cl)
8077       {
8078       case rvc_nan:
8079       case rvc_zero:
8080 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
8081 	trunc = frac = *value;
8082 	break;
8083       case rvc_inf:
8084 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
8085 	frac = dconst0;
8086 	frac.sign = value->sign;
8087 	trunc = *value;
8088 	break;
8089       case rvc_normal:
8090 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
8091 	real_trunc (&trunc, VOIDmode, value);
8092 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8093 	/* If the original number was negative and already
8094 	   integral, then the fractional part is -0.0.  */
8095 	if (value->sign && frac.cl == rvc_zero)
8096 	  frac.sign = value->sign;
8097 	break;
8098       }
8099 
8100       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8101       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8102 			  build_real (rettype, trunc));
8103       TREE_SIDE_EFFECTS (arg1) = 1;
8104       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8105 			  build_real (rettype, frac));
8106     }
8107 
8108   return NULL_TREE;
8109 }
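
/* Worked examples of the constant folding above (illustrative):

     modf (2.5, &i)   ->  (*i = 2.0, 0.5)
     modf (-2.0, &i)  ->  (*i = -2.0, -0.0)  */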
8110 
8111 /* Given a location LOC, an interclass builtin function decl FNDECL
8112    and its single argument ARG, return a folded expression computing
8113    the same value, or NULL_TREE if we either couldn't or didn't want
8114    to fold (the latter happens when an RTL instruction is available).  */
8115 
8116 static tree
8117 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8118 {
8119   machine_mode mode;
8120 
8121   if (!validate_arg (arg, REAL_TYPE))
8122     return NULL_TREE;
8123 
8124   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8125     return NULL_TREE;
8126 
8127   mode = TYPE_MODE (TREE_TYPE (arg));
8128 
8129   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8130 
8131   /* If there is no optab, try generic code.  */
8132   switch (DECL_FUNCTION_CODE (fndecl))
8133     {
8134       tree result;
8135 
8136     CASE_FLT_FN (BUILT_IN_ISINF):
8137       {
8138 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
8139 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8140 	tree type = TREE_TYPE (arg);
8141 	REAL_VALUE_TYPE r;
8142 	char buf[128];
8143 
8144 	if (is_ibm_extended)
8145 	  {
8146 	    /* NaN and Inf are encoded in the high-order double value
8147 	       only.  The low-order value is not significant.  */
8148 	    type = double_type_node;
8149 	    mode = DFmode;
8150 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8151 	  }
8152 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8153 	real_from_string (&r, buf);
8154 	result = build_call_expr (isgr_fn, 2,
8155 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8156 				  build_real (type, r));
8157 	return result;
8158       }
8159     CASE_FLT_FN (BUILT_IN_FINITE):
8160     case BUILT_IN_ISFINITE:
8161       {
8162 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
8163 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8164 	tree type = TREE_TYPE (arg);
8165 	REAL_VALUE_TYPE r;
8166 	char buf[128];
8167 
8168 	if (is_ibm_extended)
8169 	  {
8170 	    /* NaN and Inf are encoded in the high-order double value
8171 	       only.  The low-order value is not significant.  */
8172 	    type = double_type_node;
8173 	    mode = DFmode;
8174 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8175 	  }
8176 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8177 	real_from_string (&r, buf);
8178 	result = build_call_expr (isle_fn, 2,
8179 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8180 				  build_real (type, r));
8181 	/*result = fold_build2_loc (loc, UNGT_EXPR,
8182 				  TREE_TYPE (TREE_TYPE (fndecl)),
8183 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8184 				  build_real (type, r));
8185 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8186 				  TREE_TYPE (TREE_TYPE (fndecl)),
8187 				  result);*/
8188 	return result;
8189       }
8190     case BUILT_IN_ISNORMAL:
8191       {
8192 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8193 	   islessequal(fabs(x),DBL_MAX).  */
8194 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8195 	tree type = TREE_TYPE (arg);
8196 	tree orig_arg, max_exp, min_exp;
8197 	machine_mode orig_mode = mode;
8198 	REAL_VALUE_TYPE rmax, rmin;
8199 	char buf[128];
8200 
8201 	orig_arg = arg = builtin_save_expr (arg);
8202 	if (is_ibm_extended)
8203 	  {
8204 	    /* Use double to test the normal range of IBM extended
8205 	       precision.  Emin for IBM extended precision is
8206 	       different to emin for IEEE double, being 53 higher
8207 	       since the low double exponent is at least 53 lower
8208 	       than the high double exponent.  */
8209 	    type = double_type_node;
8210 	    mode = DFmode;
8211 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8212 	  }
8213 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8214 
8215 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8216 	real_from_string (&rmax, buf);
8217 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8218 	real_from_string (&rmin, buf);
8219 	max_exp = build_real (type, rmax);
8220 	min_exp = build_real (type, rmin);
8221 
8222 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8223 	if (is_ibm_extended)
8224 	  {
8225 	    /* Testing the high end of the range is done just using
8226 	       the high double, using the same test as isfinite().
8227 	       For the subnormal end of the range we first test the
8228 	       high double, then if its magnitude is equal to the
8229 	       limit of 0x1p-969, we test whether the low double is
8230 	       non-zero and opposite sign to the high double.  */
8231 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8232 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8233 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8234 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8235 				       arg, min_exp);
8236 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
8237 				      complex_double_type_node, orig_arg);
8238 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8239 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8240 	    tree zero = build_real (type, dconst0);
8241 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8242 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8243 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8244 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8245 				      fold_build3 (COND_EXPR,
8246 						   integer_type_node,
8247 						   hilt, logt, lolt));
8248 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8249 				  eq_min, ok_lo);
8250 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8251 				   gt_min, eq_min);
8252 	  }
8253 	else
8254 	  {
8255 	    tree const isge_fn
8256 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8257 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8258 	  }
8259 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8260 			      max_exp, min_exp);
8261 	return result;
8262       }
8263     default:
8264       break;
8265     }
8266 
8267   return NULL_TREE;
8268 }
8269 
8270 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8271    ARG is the argument for the call and BUILTIN_INDEX identifies which
8272    of these classification built-ins is being folded.  */
8272 
8273 static tree
8274 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8275 {
8276   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8277 
8278   if (!validate_arg (arg, REAL_TYPE))
8279     return NULL_TREE;
8280 
8281   switch (builtin_index)
8282     {
8283     case BUILT_IN_ISINF:
8284       if (!HONOR_INFINITIES (arg))
8285 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8286 
8287       return NULL_TREE;
8288 
8289     case BUILT_IN_ISINF_SIGN:
8290       {
8291 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8292 	/* In a boolean context, GCC will fold the inner COND_EXPR to
8293 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
8294 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8295 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8296 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8297 	tree tmp = NULL_TREE;
8298 
8299 	arg = builtin_save_expr (arg);
8300 
8301 	if (signbit_fn && isinf_fn)
8302 	  {
8303 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8304 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8305 
8306 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8307 					signbit_call, integer_zero_node);
8308 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8309 				      isinf_call, integer_zero_node);
8310 
8311 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8312 			       integer_minus_one_node, integer_one_node);
8313 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8314 			       isinf_call, tmp,
8315 			       integer_zero_node);
8316 	  }
8317 
8318 	return tmp;
8319       }
8320 
8321     case BUILT_IN_ISFINITE:
8322       if (!HONOR_NANS (arg)
8323 	  && !HONOR_INFINITIES (arg))
8324 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
8325 
8326       return NULL_TREE;
8327 
8328     case BUILT_IN_ISNAN:
8329       if (!HONOR_NANS (arg))
8330 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8331 
8332       {
8333 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8334 	if (is_ibm_extended)
8335 	  {
8336 	    /* NaN and Inf are encoded in the high-order double value
8337 	       only.  The low-order value is not significant.  */
8338 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8339 	  }
8340       }
8341       arg = builtin_save_expr (arg);
8342       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8343 
8344     default:
8345       gcc_unreachable ();
8346     }
8347 }
8348 
8349 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8350    This builtin will generate code to return the appropriate floating
8351    point classification depending on the value of the floating point
8352    number passed in.  The possible return values must be supplied as
8353    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8354    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
8355    one floating point argument which is "type generic".  */
8356 
8357 static tree
8358 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8359 {
8360   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8361     arg, type, res, tmp;
8362   machine_mode mode;
8363   REAL_VALUE_TYPE r;
8364   char buf[128];
8365 
8366   /* Verify the required arguments in the original call.  */
8367   if (nargs != 6
8368       || !validate_arg (args[0], INTEGER_TYPE)
8369       || !validate_arg (args[1], INTEGER_TYPE)
8370       || !validate_arg (args[2], INTEGER_TYPE)
8371       || !validate_arg (args[3], INTEGER_TYPE)
8372       || !validate_arg (args[4], INTEGER_TYPE)
8373       || !validate_arg (args[5], REAL_TYPE))
8374     return NULL_TREE;
8375 
8376   fp_nan = args[0];
8377   fp_infinite = args[1];
8378   fp_normal = args[2];
8379   fp_subnormal = args[3];
8380   fp_zero = args[4];
8381   arg = args[5];
8382   type = TREE_TYPE (arg);
8383   mode = TYPE_MODE (type);
8384   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8385 
8386   /* fpclassify(x) ->
8387        isnan(x) ? FP_NAN :
8388          (fabs(x) == Inf ? FP_INFINITE :
8389 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
8390 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
8391 
8392   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8393 		     build_real (type, dconst0));
8394   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8395 		     tmp, fp_zero, fp_subnormal);
8396 
8397   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8398   real_from_string (&r, buf);
8399   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8400 		     arg, build_real (type, r));
8401   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8402 
8403   if (HONOR_INFINITIES (mode))
8404     {
8405       real_inf (&r);
8406       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8407 			 build_real (type, r));
8408       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8409 			 fp_infinite, res);
8410     }
8411 
8412   if (HONOR_NANS (mode))
8413     {
8414       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8415       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8416     }
8417 
8418   return res;
8419 }
8420 
8421 /* Fold a call to an unordered comparison function such as
8422    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
8423    being called and ARG0 and ARG1 are the arguments for the call.
8424    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8425    the opposite of the desired result.  UNORDERED_CODE is used
8426    for modes that can hold NaNs and ORDERED_CODE is used for
8427    the rest.  */
8428 
8429 static tree
8430 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8431 			    enum tree_code unordered_code,
8432 			    enum tree_code ordered_code)
8433 {
8434   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8435   enum tree_code code;
8436   tree type0, type1;
8437   enum tree_code code0, code1;
8438   tree cmp_type = NULL_TREE;
8439 
8440   type0 = TREE_TYPE (arg0);
8441   type1 = TREE_TYPE (arg1);
8442 
8443   code0 = TREE_CODE (type0);
8444   code1 = TREE_CODE (type1);
8445 
8446   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8447     /* Choose the wider of two real types.  */
8448     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8449       ? type0 : type1;
8450   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8451     cmp_type = type0;
8452   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8453     cmp_type = type1;
8454 
8455   arg0 = fold_convert_loc (loc, cmp_type, arg0);
8456   arg1 = fold_convert_loc (loc, cmp_type, arg1);
8457 
8458   if (unordered_code == UNORDERED_EXPR)
8459     {
8460       if (!HONOR_NANS (arg0))
8461 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8462       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8463     }
8464 
8465   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8466   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8467 		      fold_build2_loc (loc, code, type, arg0, arg1));
8468 }
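
/* E.g. __builtin_isgreater (x, y) folds to !UNLE_EXPR (x, y) when
   NaNs are honored (UNLE_EXPR being the unordered-or-less-or-equal
   comparison) and to !(x <= y) otherwise (illustrative).  */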
8469 
8470 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8471    arithmetic if it can never overflow, or into internal functions that
8472    return both the result of the arithmetic and an overflowed boolean
8473    flag in a complex integer result, or some other check for overflow.
8474    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8475    checking part of that.  */
8476 
8477 static tree
8478 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8479 			     tree arg0, tree arg1, tree arg2)
8480 {
8481   enum internal_fn ifn = IFN_LAST;
8482   /* The code of the expression corresponding to the type-generic
8483      built-in, or ERROR_MARK for the type-specific ones.  */
8484   enum tree_code opcode = ERROR_MARK;
8485   bool ovf_only = false;
8486 
8487   switch (fcode)
8488     {
8489     case BUILT_IN_ADD_OVERFLOW_P:
8490       ovf_only = true;
8491       /* FALLTHRU */
8492     case BUILT_IN_ADD_OVERFLOW:
8493       opcode = PLUS_EXPR;
8494       /* FALLTHRU */
8495     case BUILT_IN_SADD_OVERFLOW:
8496     case BUILT_IN_SADDL_OVERFLOW:
8497     case BUILT_IN_SADDLL_OVERFLOW:
8498     case BUILT_IN_UADD_OVERFLOW:
8499     case BUILT_IN_UADDL_OVERFLOW:
8500     case BUILT_IN_UADDLL_OVERFLOW:
8501       ifn = IFN_ADD_OVERFLOW;
8502       break;
8503     case BUILT_IN_SUB_OVERFLOW_P:
8504       ovf_only = true;
8505       /* FALLTHRU */
8506     case BUILT_IN_SUB_OVERFLOW:
8507       opcode = MINUS_EXPR;
8508       /* FALLTHRU */
8509     case BUILT_IN_SSUB_OVERFLOW:
8510     case BUILT_IN_SSUBL_OVERFLOW:
8511     case BUILT_IN_SSUBLL_OVERFLOW:
8512     case BUILT_IN_USUB_OVERFLOW:
8513     case BUILT_IN_USUBL_OVERFLOW:
8514     case BUILT_IN_USUBLL_OVERFLOW:
8515       ifn = IFN_SUB_OVERFLOW;
8516       break;
8517     case BUILT_IN_MUL_OVERFLOW_P:
8518       ovf_only = true;
8519       /* FALLTHRU */
8520     case BUILT_IN_MUL_OVERFLOW:
8521       opcode = MULT_EXPR;
8522       /* FALLTHRU */
8523     case BUILT_IN_SMUL_OVERFLOW:
8524     case BUILT_IN_SMULL_OVERFLOW:
8525     case BUILT_IN_SMULLL_OVERFLOW:
8526     case BUILT_IN_UMUL_OVERFLOW:
8527     case BUILT_IN_UMULL_OVERFLOW:
8528     case BUILT_IN_UMULLL_OVERFLOW:
8529       ifn = IFN_MUL_OVERFLOW;
8530       break;
8531     default:
8532       gcc_unreachable ();
8533     }
8534 
8535   /* For the "generic" overloads, the first two arguments can have different
8536      types and the last argument determines the target type to use to check
8537      for overflow.  The arguments of the other overloads all have the same
8538      type.  */
8539   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8540 
8541   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8542      arguments are constant, attempt to fold the built-in call into a constant
8543      expression indicating whether or not it detected an overflow.  */
8544   if (ovf_only
8545       && TREE_CODE (arg0) == INTEGER_CST
8546       && TREE_CODE (arg1) == INTEGER_CST)
8547     /* Perform the computation in the target type and check for overflow.  */
8548     return omit_one_operand_loc (loc, boolean_type_node,
8549 				 arith_overflowed_p (opcode, type, arg0, arg1)
8550 				 ? boolean_true_node : boolean_false_node,
8551 				 arg2);
8552 
8553   tree ctype = build_complex_type (type);
8554   tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8555 					    2, arg0, arg1);
8556   tree tgt = save_expr (call);
8557   tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8558   tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8559   ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8560 
8561   if (ovf_only)
8562     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8563 
8564   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8565   tree store
8566     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8567   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8568 }
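
/* E.g. __builtin_add_overflow (a, b, &r) is folded to roughly

     t = IFN_ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <t>; (bool) IMAGPART_EXPR <t>;

   while __builtin_add_overflow_p with two constant operands folds
   straight to true or false (illustrative).  */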
8569 
8570 /* Fold a call to __builtin_FILE to a constant string.  */
8571 
8572 static inline tree
8573 fold_builtin_FILE (location_t loc)
8574 {
8575   if (const char *fname = LOCATION_FILE (loc))
8576     return build_string_literal (strlen (fname) + 1, fname);
8577 
8578   return build_string_literal (1, "");
8579 }
8580 
8581 /* Fold a call to __builtin_FUNCTION to a constant string.  */
8582 
8583 static inline tree
8584 fold_builtin_FUNCTION ()
8585 {
8586   if (current_function_decl)
8587     {
8588       const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8589       return build_string_literal (strlen (name) + 1, name);
8590     }
8591 
8592   return build_string_literal (1, "");
8593 }
8594 
8595 /* Fold a call to __builtin_LINE to an integer constant.  */
8596 
8597 static inline tree
8598 fold_builtin_LINE (location_t loc, tree type)
8599 {
8600   return build_int_cst (type, LOCATION_LINE (loc));
8601 }
8602 
8603 /* Fold a call to built-in function FNDECL with 0 arguments.
8604    This function returns NULL_TREE if no simplification was possible.  */
8605 
8606 static tree
8607 fold_builtin_0 (location_t loc, tree fndecl)
8608 {
8609   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8610   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8611   switch (fcode)
8612     {
8613     case BUILT_IN_FILE:
8614       return fold_builtin_FILE (loc);
8615 
8616     case BUILT_IN_FUNCTION:
8617       return fold_builtin_FUNCTION ();
8618 
8619     case BUILT_IN_LINE:
8620       return fold_builtin_LINE (loc, type);
8621 
8622     CASE_FLT_FN (BUILT_IN_INF):
8623     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8624     case BUILT_IN_INFD32:
8625     case BUILT_IN_INFD64:
8626     case BUILT_IN_INFD128:
8627       return fold_builtin_inf (loc, type, true);
8628 
8629     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8630     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8631       return fold_builtin_inf (loc, type, false);
8632 
8633     case BUILT_IN_CLASSIFY_TYPE:
8634       return fold_builtin_classify_type (NULL_TREE);
8635 
8636     default:
8637       break;
8638     }
8639   return NULL_TREE;
8640 }
8641 
8642 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8643    This function returns NULL_TREE if no simplification was possible.  */
8644 
8645 static tree
8646 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8647 {
8648   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8649   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8650 
8651   if (TREE_CODE (arg0) == ERROR_MARK)
8652     return NULL_TREE;
8653 
8654   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8655     return ret;
8656 
8657   switch (fcode)
8658     {
8659     case BUILT_IN_CONSTANT_P:
8660       {
8661 	tree val = fold_builtin_constant_p (arg0);
8662 
8663 	/* Gimplification will pull the CALL_EXPR for the builtin out of
8664 	   an if condition.  When not optimizing, we'll not CSE it back.
8665 	   To avoid regressions such as link errors, return false now.  */
8666 	if (!val && !optimize)
8667 	  val = integer_zero_node;
8668 
8669 	return val;
8670       }
8671 
8672     case BUILT_IN_CLASSIFY_TYPE:
8673       return fold_builtin_classify_type (arg0);
8674 
8675     case BUILT_IN_STRLEN:
8676       return fold_builtin_strlen (loc, type, arg0);
8677 
8678     CASE_FLT_FN (BUILT_IN_FABS):
8679     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8680     case BUILT_IN_FABSD32:
8681     case BUILT_IN_FABSD64:
8682     case BUILT_IN_FABSD128:
8683       return fold_builtin_fabs (loc, arg0, type);
8684 
8685     case BUILT_IN_ABS:
8686     case BUILT_IN_LABS:
8687     case BUILT_IN_LLABS:
8688     case BUILT_IN_IMAXABS:
8689       return fold_builtin_abs (loc, arg0, type);
8690 
8691     CASE_FLT_FN (BUILT_IN_CONJ):
8692       if (validate_arg (arg0, COMPLEX_TYPE)
8693 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8694 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8695     break;
8696 
8697     CASE_FLT_FN (BUILT_IN_CREAL):
8698       if (validate_arg (arg0, COMPLEX_TYPE)
8699 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8700 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8701     break;
8702 
8703     CASE_FLT_FN (BUILT_IN_CIMAG):
8704       if (validate_arg (arg0, COMPLEX_TYPE)
8705 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8706 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8707     break;
8708 
8709     CASE_FLT_FN (BUILT_IN_CARG):
8710       return fold_builtin_carg (loc, arg0, type);
8711 
8712     case BUILT_IN_ISASCII:
8713       return fold_builtin_isascii (loc, arg0);
8714 
8715     case BUILT_IN_TOASCII:
8716       return fold_builtin_toascii (loc, arg0);
8717 
8718     case BUILT_IN_ISDIGIT:
8719       return fold_builtin_isdigit (loc, arg0);
8720 
8721     CASE_FLT_FN (BUILT_IN_FINITE):
8722     case BUILT_IN_FINITED32:
8723     case BUILT_IN_FINITED64:
8724     case BUILT_IN_FINITED128:
8725     case BUILT_IN_ISFINITE:
8726       {
8727 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8728 	if (ret)
8729 	  return ret;
8730 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8731       }
8732 
8733     CASE_FLT_FN (BUILT_IN_ISINF):
8734     case BUILT_IN_ISINFD32:
8735     case BUILT_IN_ISINFD64:
8736     case BUILT_IN_ISINFD128:
8737       {
8738 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8739 	if (ret)
8740 	  return ret;
8741 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8742       }
8743 
8744     case BUILT_IN_ISNORMAL:
8745       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8746 
8747     case BUILT_IN_ISINF_SIGN:
8748       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8749 
8750     CASE_FLT_FN (BUILT_IN_ISNAN):
8751     case BUILT_IN_ISNAND32:
8752     case BUILT_IN_ISNAND64:
8753     case BUILT_IN_ISNAND128:
8754       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8755 
8756     case BUILT_IN_FREE:
8757       if (integer_zerop (arg0))
8758 	return build_empty_stmt (loc);
8759       break;
8760 
8761     default:
8762       break;
8763     }
8764 
8765   return NULL_TREE;
8766 
8767 }
8768 
8769 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8770    This function returns NULL_TREE if no simplification was possible.  */
8771 
8772 static tree
8773 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8774 {
8775   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8776   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8777 
8778   if (TREE_CODE (arg0) == ERROR_MARK
8779       || TREE_CODE (arg1) == ERROR_MARK)
8780     return NULL_TREE;
8781 
8782   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8783     return ret;
8784 
8785   switch (fcode)
8786     {
8787     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8788     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8789       if (validate_arg (arg0, REAL_TYPE)
8790 	  && validate_arg (arg1, POINTER_TYPE))
8791 	return do_mpfr_lgamma_r (arg0, arg1, type);
8792     break;
8793 
8794     CASE_FLT_FN (BUILT_IN_FREXP):
8795       return fold_builtin_frexp (loc, arg0, arg1, type);
8796 
8797     CASE_FLT_FN (BUILT_IN_MODF):
8798       return fold_builtin_modf (loc, arg0, arg1, type);
8799 
8800     case BUILT_IN_STRSPN:
8801       return fold_builtin_strspn (loc, arg0, arg1);
8802 
8803     case BUILT_IN_STRCSPN:
8804       return fold_builtin_strcspn (loc, arg0, arg1);
8805 
8806     case BUILT_IN_STRPBRK:
8807       return fold_builtin_strpbrk (loc, arg0, arg1, type);
8808 
8809     case BUILT_IN_EXPECT:
8810       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8811 
8812     case BUILT_IN_ISGREATER:
8813       return fold_builtin_unordered_cmp (loc, fndecl,
8814 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
8815     case BUILT_IN_ISGREATEREQUAL:
8816       return fold_builtin_unordered_cmp (loc, fndecl,
8817 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
8818     case BUILT_IN_ISLESS:
8819       return fold_builtin_unordered_cmp (loc, fndecl,
8820 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
8821     case BUILT_IN_ISLESSEQUAL:
8822       return fold_builtin_unordered_cmp (loc, fndecl,
8823 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
8824     case BUILT_IN_ISLESSGREATER:
8825       return fold_builtin_unordered_cmp (loc, fndecl,
8826 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8827     case BUILT_IN_ISUNORDERED:
8828       return fold_builtin_unordered_cmp (loc, fndecl,
8829 					 arg0, arg1, UNORDERED_EXPR,
8830 					 NOP_EXPR);
8831 
8832       /* We do the folding for va_start in the expander.  */
8833     case BUILT_IN_VA_START:
8834       break;
8835 
8836     case BUILT_IN_OBJECT_SIZE:
8837       return fold_builtin_object_size (arg0, arg1);
8838 
8839     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8840       return fold_builtin_atomic_always_lock_free (arg0, arg1);
8841 
8842     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8843       return fold_builtin_atomic_is_lock_free (arg0, arg1);
8844 
8845     default:
8846       break;
8847     }
8848   return NULL_TREE;
8849 }
8850 
8851 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8852    and ARG2.
8853    This function returns NULL_TREE if no simplification was possible.  */
8854 
8855 static tree
8856 fold_builtin_3 (location_t loc, tree fndecl,
8857 		tree arg0, tree arg1, tree arg2)
8858 {
8859   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8860   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8861 
8862   if (TREE_CODE (arg0) == ERROR_MARK
8863       || TREE_CODE (arg1) == ERROR_MARK
8864       || TREE_CODE (arg2) == ERROR_MARK)
8865     return NULL_TREE;
8866 
8867   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8868 				  arg0, arg1, arg2))
8869     return ret;
8870 
8871   switch (fcode)
8872     {
8873 
8874     CASE_FLT_FN (BUILT_IN_SINCOS):
8875       return fold_builtin_sincos (loc, arg0, arg1, arg2);
8876 
8877     CASE_FLT_FN (BUILT_IN_FMA):
8878       return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8879 
8880     CASE_FLT_FN (BUILT_IN_REMQUO):
8881       if (validate_arg (arg0, REAL_TYPE)
8882 	  && validate_arg (arg1, REAL_TYPE)
8883 	  && validate_arg (arg2, POINTER_TYPE))
8884 	return do_mpfr_remquo (arg0, arg1, arg2);
8885     break;
8886 
8887     case BUILT_IN_BCMP:
8888     case BUILT_IN_MEMCMP:
8889       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8890 
8891     case BUILT_IN_EXPECT:
8892       return fold_builtin_expect (loc, arg0, arg1, arg2);
8893 
8894     case BUILT_IN_ADD_OVERFLOW:
8895     case BUILT_IN_SUB_OVERFLOW:
8896     case BUILT_IN_MUL_OVERFLOW:
8897     case BUILT_IN_ADD_OVERFLOW_P:
8898     case BUILT_IN_SUB_OVERFLOW_P:
8899     case BUILT_IN_MUL_OVERFLOW_P:
8900     case BUILT_IN_SADD_OVERFLOW:
8901     case BUILT_IN_SADDL_OVERFLOW:
8902     case BUILT_IN_SADDLL_OVERFLOW:
8903     case BUILT_IN_SSUB_OVERFLOW:
8904     case BUILT_IN_SSUBL_OVERFLOW:
8905     case BUILT_IN_SSUBLL_OVERFLOW:
8906     case BUILT_IN_SMUL_OVERFLOW:
8907     case BUILT_IN_SMULL_OVERFLOW:
8908     case BUILT_IN_SMULLL_OVERFLOW:
8909     case BUILT_IN_UADD_OVERFLOW:
8910     case BUILT_IN_UADDL_OVERFLOW:
8911     case BUILT_IN_UADDLL_OVERFLOW:
8912     case BUILT_IN_USUB_OVERFLOW:
8913     case BUILT_IN_USUBL_OVERFLOW:
8914     case BUILT_IN_USUBLL_OVERFLOW:
8915     case BUILT_IN_UMUL_OVERFLOW:
8916     case BUILT_IN_UMULL_OVERFLOW:
8917     case BUILT_IN_UMULLL_OVERFLOW:
8918       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8919 
8920     default:
8921       break;
8922     }
8923   return NULL_TREE;
8924 }
8925 
8926 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
8927    arguments.  IGNORE is true if the result of the
8928    function call is ignored.  This function returns NULL_TREE if no
8929    simplification was possible.  */
8930 
8931 tree
8932 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8933 {
8934   tree ret = NULL_TREE;
8935 
8936   switch (nargs)
8937     {
8938     case 0:
8939       ret = fold_builtin_0 (loc, fndecl);
8940       break;
8941     case 1:
8942       ret = fold_builtin_1 (loc, fndecl, args[0]);
8943       break;
8944     case 2:
8945       ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8946       break;
8947     case 3:
8948       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8949       break;
8950     default:
8951       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8952       break;
8953     }
8954   if (ret)
8955     {
8956       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8957       SET_EXPR_LOCATION (ret, loc);
8958       TREE_NO_WARNING (ret) = 1;
8959       return ret;
8960     }
8961   return NULL_TREE;
8962 }
8963 
8964 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8965    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
8966    of arguments in ARGS to be omitted.  OLDNARGS is the number of
8967    elements in ARGS.  */
8968 
8969 static tree
8970 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8971 			  int skip, tree fndecl, int n, va_list newargs)
8972 {
8973   int nargs = oldnargs - skip + n;
8974   tree *buffer;
8975 
8976   if (n > 0)
8977     {
8978       int i, j;
8979 
8980       buffer = XALLOCAVEC (tree, nargs);
8981       for (i = 0; i < n; i++)
8982 	buffer[i] = va_arg (newargs, tree);
8983       for (j = skip; j < oldnargs; j++, i++)
8984 	buffer[i] = args[j];
8985     }
8986   else
8987     buffer = args + skip;
8988 
8989   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8990 }
8991 
8992 /* Return true if FNDECL shouldn't be folded right now.
8993    If a built-in function has an always_inline wrapper, defer
8994    folding it until after always_inline functions have been
8995    inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8996    might not be performed.  */
8997 
8998 bool
8999 avoid_folding_inline_builtin (tree fndecl)
9000 {
9001   return (DECL_DECLARED_INLINE_P (fndecl)
9002 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9003 	  && cfun
9004 	  && !cfun->always_inline_functions_inlined
9005 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9006 }
9007 
9008 /* A wrapper function for builtin folding that prevents warnings for
9009    "statement without effect" and the like, caused by removing the
9010    call node earlier than the warning is generated.  */
9011 
9012 tree
9013 fold_call_expr (location_t loc, tree exp, bool ignore)
9014 {
9015   tree ret = NULL_TREE;
9016   tree fndecl = get_callee_fndecl (exp);
9017   if (fndecl
9018       && TREE_CODE (fndecl) == FUNCTION_DECL
9019       && DECL_BUILT_IN (fndecl)
9020       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9021 	 yet.  Defer folding until we see all the arguments
9022 	 (after inlining).  */
9023       && !CALL_EXPR_VA_ARG_PACK (exp))
9024     {
9025       int nargs = call_expr_nargs (exp);
9026 
9027       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9028 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
9029 	 even in that case, until arguments are finalized.  */
9030       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9031 	{
9032 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9033 	  if (fndecl2
9034 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
9035 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9036 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9037 	    return NULL_TREE;
9038 	}
9039 
9040       if (avoid_folding_inline_builtin (fndecl))
9041 	return NULL_TREE;
9042 
9043       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9044         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9045 				     CALL_EXPR_ARGP (exp), ignore);
9046       else
9047 	{
9048 	  tree *args = CALL_EXPR_ARGP (exp);
9049 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9050 	  if (ret)
9051 	    return ret;
9052 	}
9053     }
9054   return NULL_TREE;
9055 }
9056 
9057 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9058    N arguments are passed in the array ARGARRAY.  Return a folded
9059    expression or NULL_TREE if no simplification was possible.  */
9060 
9061 tree
9062 fold_builtin_call_array (location_t loc, tree,
9063 			 tree fn,
9064 			 int n,
9065 			 tree *argarray)
9066 {
9067   if (TREE_CODE (fn) != ADDR_EXPR)
9068     return NULL_TREE;
9069 
9070   tree fndecl = TREE_OPERAND (fn, 0);
9071   if (TREE_CODE (fndecl) == FUNCTION_DECL
9072       && DECL_BUILT_IN (fndecl))
9073     {
9074       /* If last argument is __builtin_va_arg_pack (), arguments to this
9075 	 function are not finalized yet.  Defer folding until they are.  */
9076       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9077 	{
9078 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9079 	  if (fndecl2
9080 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
9081 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9082 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9083 	    return NULL_TREE;
9084 	}
9085       if (avoid_folding_inline_builtin (fndecl))
9086 	return NULL_TREE;
9087       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9088 	return targetm.fold_builtin (fndecl, n, argarray, false);
9089       else
9090 	return fold_builtin_n (loc, fndecl, argarray, n, false);
9091     }
9092 
9093   return NULL_TREE;
9094 }
9095 
9096 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9097    along with N new arguments specified as the "..." parameters.  SKIP
9098    is the number of arguments in EXP to be omitted.  This function is used
9099    to do varargs-to-varargs transformations.  */
9100 
9101 static tree
9102 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9103 {
9104   va_list ap;
9105   tree t;
9106 
9107   va_start (ap, n);
9108   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9109 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9110   va_end (ap);
9111 
9112   return t;
9113 }
9114 
9115 /* Validate a single argument ARG against a tree code CODE representing
9116    a type.  Return true when the argument is valid.  */
9117 
9118 static bool
9119 validate_arg (const_tree arg, enum tree_code code)
9120 {
9121   if (!arg)
9122     return false;
9123   else if (code == POINTER_TYPE)
9124     return POINTER_TYPE_P (TREE_TYPE (arg));
9125   else if (code == INTEGER_TYPE)
9126     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9127   return code == TREE_CODE (TREE_TYPE (arg));
9128 }
9129 
9130 /* This function validates the types of a function call argument list
9131    against a specified list of tree_codes.  If the last specifier is a 0,
9132    that represents an ellipses, otherwise the last specifier must be a
9133    VOID_TYPE.
9134 
9135    This is the GIMPLE version of validate_arglist.  Eventually we want to
9136    completely convert builtins.c to work from GIMPLEs and the tree based
9137    validate_arglist will then be removed.  */
9138 
9139 bool
9140 validate_gimple_arglist (const gcall *call, ...)
9141 {
9142   enum tree_code code;
9143   bool res = false;
9144   va_list ap;
9145   const_tree arg;
9146   size_t i;
9147 
9148   va_start (ap, call);
9149   i = 0;
9150 
9151   do
9152     {
9153       code = (enum tree_code) va_arg (ap, int);
9154       switch (code)
9155 	{
9156 	case 0:
9157 	  /* This signifies an ellipsis; any further arguments are all OK.  */
9158 	  res = true;
9159 	  goto end;
9160 	case VOID_TYPE:
9161 	  /* This signifies an endlink, if no arguments remain, return
9162 	     true, otherwise return false.  */
9163 	  res = (i == gimple_call_num_args (call));
9164 	  goto end;
9165 	default:
9166 	  /* If no parameters remain or the parameter's code does not
9167 	     match the specified code, return false.  Otherwise continue
9168 	     checking any remaining arguments.  */
9169 	  arg = gimple_call_arg (call, i++);
9170 	  if (!validate_arg (arg, code))
9171 	    goto end;
9172 	  break;
9173 	}
9174     }
9175   while (1);
9176 
9177   /* We need gotos here so that va_end is reached exactly once on
9178      every path out of the loop above.  */
9179  end: ;
9180   va_end (ap);
9181 
9182   return res;
9183 }
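
/* A typical use (illustrative), checking for one real argument
   followed by two pointer arguments and nothing else, as for sincos:

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                              POINTER_TYPE, VOID_TYPE)  */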
9184 
9185 /* Default target-specific builtin expander that does nothing.  */
9186 
9187 rtx
9188 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9189 			rtx target ATTRIBUTE_UNUSED,
9190 			rtx subtarget ATTRIBUTE_UNUSED,
9191 			machine_mode mode ATTRIBUTE_UNUSED,
9192 			int ignore ATTRIBUTE_UNUSED)
9193 {
9194   return NULL_RTX;
9195 }
9196 
9197 /* Returns true if EXP represents data that would potentially reside
9198    in a readonly section.  */
9199 
9200 bool
9201 readonly_data_expr (tree exp)
9202 {
9203   STRIP_NOPS (exp);
9204 
9205   if (TREE_CODE (exp) != ADDR_EXPR)
9206     return false;
9207 
9208   exp = get_base_address (TREE_OPERAND (exp, 0));
9209   if (!exp)
9210     return false;
9211 
9212   /* Make sure we call decl_readonly_section only for trees it
9213      can handle (since it returns true for everything it doesn't
9214      understand).  */
9215   if (TREE_CODE (exp) == STRING_CST
9216       || TREE_CODE (exp) == CONSTRUCTOR
9217       || (VAR_P (exp) && TREE_STATIC (exp)))
9218     return decl_readonly_section (exp, 0);
9219   else
9220     return false;
9221 }
9222 
9223 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
9224    to the call, and TYPE is its return type.
9225 
9226    Return NULL_TREE if no simplification was possible, otherwise return the
9227    simplified form of the call as a tree.
9228 
9229    The simplified form may be a constant or other expression which
9230    computes the same value, but in a more efficient manner (including
9231    calls to other builtin functions).
9232 
9233    The call may contain arguments which need to be evaluated, but
9234    which are not useful to determine the result of the call.  In
9235    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9236    COMPOUND_EXPR will be an argument which must be evaluated.
9237    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9238    COMPOUND_EXPR in the chain will contain the tree for the simplified
9239    form of the builtin function call.  */
9240 
9241 static tree
9242 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9243 {
9244   if (!validate_arg (s1, POINTER_TYPE)
9245       || !validate_arg (s2, POINTER_TYPE))
9246     return NULL_TREE;
9247   else
9248     {
9249       tree fn;
9250       const char *p1, *p2;
9251 
9252       p2 = c_getstr (s2);
9253       if (p2 == NULL)
9254 	return NULL_TREE;
9255 
9256       p1 = c_getstr (s1);
9257       if (p1 != NULL)
9258 	{
9259 	  const char *r = strpbrk (p1, p2);
9260 	  tree tem;
9261 
9262 	  if (r == NULL)
9263 	    return build_int_cst (TREE_TYPE (s1), 0);
9264 
9265 	  /* Return an offset into the constant string argument.  */
9266 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9267 	  return fold_convert_loc (loc, type, tem);
9268 	}
9269 
9270       if (p2[0] == '\0')
9271 	/* strpbrk(x, "") == NULL.
9272 	   Evaluate and ignore s1 in case it had side-effects.  */
9273 	return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9274 
9275       if (p2[1] != '\0')
9276 	return NULL_TREE;  /* Really call strpbrk.  */
9277 
9278       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9279       if (!fn)
9280 	return NULL_TREE;
9281 
9282       /* New argument list transforming strpbrk(s1, s2) to
9283 	 strchr(s1, s2[0]).  */
9284       return build_call_expr_loc (loc, fn, 2, s1,
9285 				  build_int_cst (integer_type_node, p2[0]));
9286     }
9287 }
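
/* Examples of the strpbrk folds above (illustrative):

     strpbrk ("abc", "xb")  ->  "abc" + 1
     strpbrk (s, "")        ->  NULL, with s still evaluated
     strpbrk (s, "c")       ->  strchr (s, 'c')  */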
9288 
9289 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
9290    to the call.
9291 
9292    Return NULL_TREE if no simplification was possible, otherwise return the
9293    simplified form of the call as a tree.
9294 
9295    The simplified form may be a constant or other expression which
9296    computes the same value, but in a more efficient manner (including
9297    calls to other builtin functions).
9298 
9299    The call may contain arguments which need to be evaluated, but
9300    which are not useful to determine the result of the call.  In
9301    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9302    COMPOUND_EXPR will be an argument which must be evaluated.
9303    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9304    COMPOUND_EXPR in the chain will contain the tree for the simplified
9305    form of the builtin function call.  */
9306 
9307 static tree
9308 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9309 {
9310   if (!validate_arg (s1, POINTER_TYPE)
9311       || !validate_arg (s2, POINTER_TYPE))
9312     return NULL_TREE;
9313   else
9314     {
9315       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9316 
9317       /* If either argument is "", return NULL_TREE.  */
9318       if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9319 	/* Evaluate and ignore both arguments in case either one has
9320 	   side-effects.  */
9321 	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9322 				  s1, s2);
9323       return NULL_TREE;
9324     }
9325 }
9326 
9327 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
9328    to the call.
9329 
9330    Return NULL_TREE if no simplification was possible, otherwise return the
9331    simplified form of the call as a tree.
9332 
9333    The simplified form may be a constant or other expression which
9334    computes the same value, but in a more efficient manner (including
9335    calls to other builtin functions).
9336 
9337    The call may contain arguments which need to be evaluated, but
9338    which are not useful to determine the result of the call.  In
9339    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9340    COMPOUND_EXPR will be an argument which must be evaluated.
9341    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9342    COMPOUND_EXPR in the chain will contain the tree for the simplified
9343    form of the builtin function call.  */
9344 
9345 static tree
9346 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9347 {
9348   if (!validate_arg (s1, POINTER_TYPE)
9349       || !validate_arg (s2, POINTER_TYPE))
9350     return NULL_TREE;
9351   else
9352     {
9353       /* If the first argument is "", return NULL_TREE.  */
9354       const char *p1 = c_getstr (s1);
9355       if (p1 && *p1 == '\0')
9356 	{
9357 	  /* Evaluate and ignore argument s2 in case it has
9358 	     side-effects.  */
9359 	  return omit_one_operand_loc (loc, size_type_node,
9360 				   size_zero_node, s2);
9361 	}
9362 
9363       /* If the second argument is "", return __builtin_strlen(s1).  */
9364       const char *p2 = c_getstr (s2);
9365       if (p2 && *p2 == '\0')
9366 	{
9367 	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9368 
9369 	  /* If the replacement _DECL isn't initialized, don't do the
9370 	     transformation.  */
9371 	  if (!fn)
9372 	    return NULL_TREE;
9373 
9374 	  return build_call_expr_loc (loc, fn, 1, s1);
9375 	}
9376       return NULL_TREE;
9377     }
9378 }
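
/* Examples of the strspn/strcspn folds above (illustrative):

     strspn ("", s)   ->  0          strspn (s, "")   ->  0
     strcspn ("", s)  ->  0          strcspn (s, "")  ->  strlen (s)  */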
9379 
9380 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9381    produced, false otherwise.  This is done so that we don't output the error
9382    or warning twice or three times.  */
9383 
9384 bool
9385 fold_builtin_next_arg (tree exp, bool va_start_p)
9386 {
9387   tree fntype = TREE_TYPE (current_function_decl);
9388   int nargs = call_expr_nargs (exp);
9389   tree arg;
9390   /* There is a good chance the current input_location points inside the
9391      definition of the va_start macro (perhaps on the token for the
9392      builtin) in a system header, so warnings will not be emitted.
9393      Use the location in real source code.  */
9394   source_location current_location =
9395     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9396 					      NULL);
9397 
9398   if (!stdarg_p (fntype))
9399     {
9400       error ("%<va_start%> used in function with fixed args");
9401       return true;
9402     }
9403 
9404   if (va_start_p)
9405     {
9406       if (nargs != 2)
9407 	{
9408 	  error ("wrong number of arguments to function %<va_start%>");
9409 	  return true;
9410 	}
9411       arg = CALL_EXPR_ARG (exp, 1);
9412     }
9413   /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
9414      we have checked the arguments and, if needed, issued a warning.  */
9415   else
9416     {
9417       if (nargs == 0)
9418 	{
9419 	  /* Evidently an out-of-date version of <stdarg.h>; can't validate
9420 	     va_start's second argument, but can still work as intended.  */
9421 	  warning_at (current_location,
9422 		      OPT_Wvarargs,
9423 		      "%<__builtin_next_arg%> called without an argument");
9424 	  return true;
9425 	}
9426       else if (nargs > 1)
9427 	{
9428 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
9429 	  return true;
9430 	}
9431       arg = CALL_EXPR_ARG (exp, 0);
9432     }
9433 
9434   if (TREE_CODE (arg) == SSA_NAME)
9435     arg = SSA_NAME_VAR (arg);
9436 
9437   /* We destructively modify the call to be __builtin_va_start (ap, 0)
9438      or __builtin_next_arg (0) the first time we see it, after checking
9439      the arguments and if needed issuing a warning.  */
9440   if (!integer_zerop (arg))
9441     {
9442       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9443 
9444       /* Strip off all nops for the sake of the comparison.  This
9445	 is not quite the same as STRIP_NOPS.  It does more.
9446	 We must also strip off INDIRECT_REF for C++ reference
9447	 parameters.  */
9448       while (CONVERT_EXPR_P (arg)
9449 	     || TREE_CODE (arg) == INDIRECT_REF)
9450 	arg = TREE_OPERAND (arg, 0);
9451       if (arg != last_parm)
9452 	{
9453 	  /* FIXME: Sometimes with the tree optimizers we can get
9454 	     something other than the last argument even though the user
9455 	     used the last argument.  We just warn and set the arg to be
9456 	     the last argument so that we will not get wrong code
9457 	     because of it.  */
9458 	  warning_at (current_location,
9459 		      OPT_Wvarargs,
9460 		      "second parameter of %<va_start%> not last named argument");
9461 	}
9462 
9463       /* Undefined by C99 7.15.1.4p4 (va_start):
9464          "If the parameter parmN is declared with the register storage
9465          class, with a function or array type, or with a type that is
9466          not compatible with the type that results after application of
9467          the default argument promotions, the behavior is undefined."
9468       */
9469       else if (DECL_REGISTER (arg))
9470 	{
9471 	  warning_at (current_location,
9472 		      OPT_Wvarargs,
9473 		      "undefined behavior when second parameter of "
9474 		      "%<va_start%> is declared with %<register%> storage");
9475 	}
9476 
9477       /* We want to verify the second parameter just once before the tree
9478 	 optimizers are run and then avoid keeping it in the tree,
9479 	 as otherwise we could warn even for correct code like:
9480 	 void foo (int i, ...)
9481 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
9482       if (va_start_p)
9483 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9484       else
9485 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9486     }
9487   return false;
9488 }
9489 
9490 
9491 /* Expand a call EXP to __builtin_object_size.  */
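
/* When no size information is available here, the expansion below falls
   back to the documented "unknown" results, e.g. (illustrative):

     char *p;			       size not known statically
     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0  */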
9492 
9493 static rtx
9494 expand_builtin_object_size (tree exp)
9495 {
9496   tree ost;
9497   int object_size_type;
9498   tree fndecl = get_callee_fndecl (exp);
9499 
9500   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9501     {
9502       error ("%Kfirst argument of %D must be a pointer, second integer constant",
9503 	     exp, fndecl);
9504       expand_builtin_trap ();
9505       return const0_rtx;
9506     }
9507 
9508   ost = CALL_EXPR_ARG (exp, 1);
9509   STRIP_NOPS (ost);
9510 
9511   if (TREE_CODE (ost) != INTEGER_CST
9512       || tree_int_cst_sgn (ost) < 0
9513       || compare_tree_int (ost, 3) > 0)
9514     {
9515       error ("%Klast argument of %D is not integer constant between 0 and 3",
9516 	     exp, fndecl);
9517       expand_builtin_trap ();
9518       return const0_rtx;
9519     }
9520 
9521   object_size_type = tree_to_shwi (ost);
9522 
9523   return object_size_type < 2 ? constm1_rtx : const0_rtx;
9524 }
9525 
9526 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9527    FCODE is the BUILT_IN_* to use.
9528    Return NULL_RTX if we failed; the caller should emit a normal call,
9529    otherwise try to get the result in TARGET, if convenient (and in
9530    mode MODE if that's convenient).  */
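
/* For instance, a checked call whose length and destination size are
   both known and in range, such as (sketch)

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   is expanded below as a plain memcpy (buf, src, 4).  */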
9531 
9532 static rtx
9533 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9534 			   enum built_in_function fcode)
9535 {
9536   tree dest, src, len, size;
9537 
9538   if (!validate_arglist (exp,
9539 			 POINTER_TYPE,
9540 			 fcode == BUILT_IN_MEMSET_CHK
9541 			 ? INTEGER_TYPE : POINTER_TYPE,
9542 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9543     return NULL_RTX;
9544 
9545   dest = CALL_EXPR_ARG (exp, 0);
9546   src = CALL_EXPR_ARG (exp, 1);
9547   len = CALL_EXPR_ARG (exp, 2);
9548   size = CALL_EXPR_ARG (exp, 3);
9549 
9550   bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9551 			       exp, len, /*maxlen=*/NULL_TREE,
9552 			       /*str=*/NULL_TREE, size);
9553 
9554   if (!tree_fits_uhwi_p (size))
9555     return NULL_RTX;
9556 
9557   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9558     {
9559       /* Avoid transforming the checking call to an ordinary one when
9560 	 an overflow has been detected or when the call couldn't be
9561 	 validated because the size is not constant.  */
9562       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9563 	return NULL_RTX;
9564 
9565       tree fn = NULL_TREE;
9566       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9567 	 mem{cpy,pcpy,move,set} is available.  */
9568       switch (fcode)
9569 	{
9570 	case BUILT_IN_MEMCPY_CHK:
9571 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9572 	  break;
9573 	case BUILT_IN_MEMPCPY_CHK:
9574 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9575 	  break;
9576 	case BUILT_IN_MEMMOVE_CHK:
9577 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9578 	  break;
9579 	case BUILT_IN_MEMSET_CHK:
9580 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9581 	  break;
9582 	default:
9583 	  break;
9584 	}
9585 
9586       if (! fn)
9587 	return NULL_RTX;
9588 
9589       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9590       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9591       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9592       return expand_expr (fn, target, mode, EXPAND_NORMAL);
9593     }
9594   else if (fcode == BUILT_IN_MEMSET_CHK)
9595     return NULL_RTX;
9596   else
9597     {
9598       unsigned int dest_align = get_pointer_alignment (dest);
9599 
9600       /* If DEST is not a pointer type, call the normal function.  */
9601       if (dest_align == 0)
9602 	return NULL_RTX;
9603 
9604       /* If SRC and DEST are the same (and not volatile), do nothing.  */
9605       if (operand_equal_p (src, dest, 0))
9606 	{
9607 	  tree expr;
9608 
9609 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
9610 	    {
9611 	      /* Evaluate and ignore LEN in case it has side-effects.  */
9612 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9613 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
9614 	    }
9615 
9616 	  expr = fold_build_pointer_plus (dest, len);
9617 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
9618 	}
9619 
9620       /* __memmove_chk special case.  */
9621       if (fcode == BUILT_IN_MEMMOVE_CHK)
9622 	{
9623 	  unsigned int src_align = get_pointer_alignment (src);
9624 
9625 	  if (src_align == 0)
9626 	    return NULL_RTX;
9627 
9628 	  /* If src is categorized for a readonly section we can use
9629 	     normal __memcpy_chk.  */
9630 	  if (readonly_data_expr (src))
9631 	    {
9632 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9633 	      if (!fn)
9634 		return NULL_RTX;
9635 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9636 					  dest, src, len, size);
9637 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9638 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9639 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
9640 	    }
9641 	}
9642       return NULL_RTX;
9643     }
9644 }
9645 
9646 /* Emit warning if a buffer overflow is detected at compile time.  */
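
/* E.g. (illustrative), given

     char d[4];
     __builtin___strcpy_chk (d, "too long", __builtin_object_size (d, 0));

   copying nine bytes (including the terminating nul) into a four-byte
   object is diagnosed with -Wstringop-overflow.  */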
9647 
9648 static void
9649 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9650 {
9651   /* The source string.  */
9652   tree srcstr = NULL_TREE;
9653   /* The size of the destination object.  */
9654   tree objsize = NULL_TREE;
9655   /* The string being appended to (as in __strcat_chk), or null if
9656      the call isn't a concatenation.  */
9657   tree catstr = NULL_TREE;
9658   /* The maximum length of the source sequence in a bounded operation
9659      (such as __strncat_chk) or null if the operation isn't bounded
9660      (such as __strcat_chk).  */
9661   tree maxlen = NULL_TREE;
9662 
9663   switch (fcode)
9664     {
9665     case BUILT_IN_STRCPY_CHK:
9666     case BUILT_IN_STPCPY_CHK:
9667       srcstr = CALL_EXPR_ARG (exp, 1);
9668       objsize = CALL_EXPR_ARG (exp, 2);
9669       break;
9670 
9671     case BUILT_IN_STRCAT_CHK:
9672       /* For __strcat_chk the warning will be emitted only if overflowing
9673 	 by at least strlen (dest) + 1 bytes.  */
9674       catstr = CALL_EXPR_ARG (exp, 0);
9675       srcstr = CALL_EXPR_ARG (exp, 1);
9676       objsize = CALL_EXPR_ARG (exp, 2);
9677       break;
9678 
9679     case BUILT_IN_STRNCAT_CHK:
9680       catstr = CALL_EXPR_ARG (exp, 0);
9681       srcstr = CALL_EXPR_ARG (exp, 1);
9682       maxlen = CALL_EXPR_ARG (exp, 2);
9683       objsize = CALL_EXPR_ARG (exp, 3);
9684       break;
9685 
9686     case BUILT_IN_STRNCPY_CHK:
9687     case BUILT_IN_STPNCPY_CHK:
9688       srcstr = CALL_EXPR_ARG (exp, 1);
9689       maxlen = CALL_EXPR_ARG (exp, 2);
9690       objsize = CALL_EXPR_ARG (exp, 3);
9691       break;
9692 
9693     case BUILT_IN_SNPRINTF_CHK:
9694     case BUILT_IN_VSNPRINTF_CHK:
9695       maxlen = CALL_EXPR_ARG (exp, 1);
9696       objsize = CALL_EXPR_ARG (exp, 3);
9697       break;
9698     default:
9699       gcc_unreachable ();
9700     }
9701 
9702   if (catstr && maxlen)
9703     {
9704       /* Check __strncat_chk.  There is no way to determine the length
9705 	 of the string to which the source string is being appended, so
9706 	 just warn when the length of the source string is not known.  */
9707       if (!check_strncat_sizes (exp, objsize))
9708 	return;
9709     }
9710 
9711   check_sizes (OPT_Wstringop_overflow_, exp,
9712 	       /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9713 }
9714 
9715 /* Emit warning if a buffer overflow is detected at compile time
9716    in __sprintf_chk/__vsprintf_chk calls.  */
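
/* For example (sketch), in

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 0),
			      "%s", "abcdef");

   the known result length 6 plus the terminating nul exceeds the
   four-byte destination, so a warning is emitted below.  */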
9717 
9718 static void
9719 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9720 {
9721   tree size, len, fmt;
9722   const char *fmt_str;
9723   int nargs = call_expr_nargs (exp);
9724 
9725   /* Verify the required arguments in the original call.  */
9726 
9727   if (nargs < 4)
9728     return;
9729   size = CALL_EXPR_ARG (exp, 2);
9730   fmt = CALL_EXPR_ARG (exp, 3);
9731 
9732   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9733     return;
9734 
9735   /* Check whether the format is a literal string constant.  */
9736   fmt_str = c_getstr (fmt);
9737   if (fmt_str == NULL)
9738     return;
9739 
9740   if (!init_target_chars ())
9741     return;
9742 
9743   /* If the format doesn't contain % args or %%, we know its size.  */
9744   if (strchr (fmt_str, target_percent) == 0)
9745     len = build_int_cstu (size_type_node, strlen (fmt_str));
9746   /* If the format is "%s" and the first variadic argument is a string
9747      literal, we know the length too.  */
9748   else if (fcode == BUILT_IN_SPRINTF_CHK
9749 	   && strcmp (fmt_str, target_percent_s) == 0)
9750     {
9751       tree arg;
9752 
9753       if (nargs < 5)
9754 	return;
9755       arg = CALL_EXPR_ARG (exp, 4);
9756       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9757 	return;
9758 
9759       len = c_strlen (arg, 1);
9760       if (!len || ! tree_fits_uhwi_p (len))
9761 	return;
9762     }
9763   else
9764     return;
9765 
9766   /* Add one for the terminating nul.  */
9767   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9768   check_sizes (OPT_Wstringop_overflow_,
9769 	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9770 }
9771 
9772 /* Emit a warning if free is called with the address of a variable.  */
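
/* E.g. (illustrative):

     int i;
     free (&i);     warns: attempt to free a non-heap object 'i'  */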
9773 
9774 static void
9775 maybe_emit_free_warning (tree exp)
9776 {
9777   tree arg = CALL_EXPR_ARG (exp, 0);
9778 
9779   STRIP_NOPS (arg);
9780   if (TREE_CODE (arg) != ADDR_EXPR)
9781     return;
9782 
9783   arg = get_base_address (TREE_OPERAND (arg, 0));
9784   if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9785     return;
9786 
9787   if (SSA_VAR_P (arg))
9788     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9789 		"%Kattempt to free a non-heap object %qD", exp, arg);
9790   else
9791     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9792 		"%Kattempt to free a non-heap object", exp);
9793 }
9794 
9795 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9796    if possible.  */
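
/* Note in particular the side-effects rule implemented below: e.g.
   __builtin_object_size (f (), 0) folds to (size_t) -1 without
   evaluating the call (illustrative; f is any function returning a
   pointer).  */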
9797 
9798 static tree
9799 fold_builtin_object_size (tree ptr, tree ost)
9800 {
9801   unsigned HOST_WIDE_INT bytes;
9802   int object_size_type;
9803 
9804   if (!validate_arg (ptr, POINTER_TYPE)
9805       || !validate_arg (ost, INTEGER_TYPE))
9806     return NULL_TREE;
9807 
9808   STRIP_NOPS (ost);
9809 
9810   if (TREE_CODE (ost) != INTEGER_CST
9811       || tree_int_cst_sgn (ost) < 0
9812       || compare_tree_int (ost, 3) > 0)
9813     return NULL_TREE;
9814 
9815   object_size_type = tree_to_shwi (ost);
9816 
9817   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9818      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9819      and (size_t) 0 for types 2 and 3.  */
9820   if (TREE_SIDE_EFFECTS (ptr))
9821     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9822 
9823   if (TREE_CODE (ptr) == ADDR_EXPR)
9824     {
9825       compute_builtin_object_size (ptr, object_size_type, &bytes);
9826       if (wi::fits_to_tree_p (bytes, size_type_node))
9827 	return build_int_cstu (size_type_node, bytes);
9828     }
9829   else if (TREE_CODE (ptr) == SSA_NAME)
9830     {
9831       /* If the object size is not known yet, delay folding until
9832	  later.  Maybe subsequent passes will help to determine
9833	  it.  */
9834       if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9835 	  && wi::fits_to_tree_p (bytes, size_type_node))
9836 	return build_int_cstu (size_type_node, bytes);
9837     }
9838 
9839   return NULL_TREE;
9840 }
9841 
9842 /* Builtins with folding operations that operate on "..." arguments
9843    need special handling; we need to store the arguments in a convenient
9844    data structure before attempting any folding.  Fortunately there are
9845    only a few builtins that fall into this category.  FNDECL is the
9846    function, ARGS is the array of its NARGS arguments.  */
9847 
9848 static tree
9849 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9850 {
9851   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9852   tree ret = NULL_TREE;
9853 
9854   switch (fcode)
9855     {
9856     case BUILT_IN_FPCLASSIFY:
9857       ret = fold_builtin_fpclassify (loc, args, nargs);
9858       break;
9859 
9860     default:
9861       break;
9862     }
9863   if (ret)
9864     {
9865       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9866       SET_EXPR_LOCATION (ret, loc);
9867       TREE_NO_WARNING (ret) = 1;
9868       return ret;
9869     }
9870   return NULL_TREE;
9871 }
9872 
9873 /* Initialize format string characters in the target charset.  */
9874 
9875 bool
9876 init_target_chars (void)
9877 {
9878   static bool init;
9879   if (!init)
9880     {
9881       target_newline = lang_hooks.to_target_charset ('\n');
9882       target_percent = lang_hooks.to_target_charset ('%');
9883       target_c = lang_hooks.to_target_charset ('c');
9884       target_s = lang_hooks.to_target_charset ('s');
9885       if (target_newline == 0 || target_percent == 0 || target_c == 0
9886 	  || target_s == 0)
9887 	return false;
9888 
9889       target_percent_c[0] = target_percent;
9890       target_percent_c[1] = target_c;
9891       target_percent_c[2] = '\0';
9892 
9893       target_percent_s[0] = target_percent;
9894       target_percent_s[1] = target_s;
9895       target_percent_s[2] = '\0';
9896 
9897       target_percent_s_newline[0] = target_percent;
9898       target_percent_s_newline[1] = target_s;
9899       target_percent_s_newline[2] = target_newline;
9900       target_percent_s_newline[3] = '\0';
9901 
9902       init = true;
9903     }
9904   return true;
9905 }
9906 
9907 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
9908    and no overflow/underflow occurred.  INEXACT is true if M was not
9909    exactly calculated.  TYPE is the tree type for the result.  This
9910    function assumes that you cleared the MPFR flags before calculating
9911    M, so that any flag set since then was raised by that calculation.
9912    Return NULL_TREE if any checks fail.  */
9913 
9914 static tree
9915 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9916 {
9917   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9918      overflow/underflow occurred.  If -frounding-math, proceed iff the
9919      result of calling FUNC was exact.  */
9920   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9921       && (!flag_rounding_math || !inexact))
9922     {
9923       REAL_VALUE_TYPE rr;
9924 
9925       real_from_mpfr (&rr, m, type, GMP_RNDN);
9926       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9927	 checking for overflow/underflow.  If the REAL_VALUE_TYPE is
9928	 zero but the mpfr_t is not, then we underflowed in the
9929	 conversion.  */
9930       if (real_isfinite (&rr)
9931 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9932         {
9933 	  REAL_VALUE_TYPE rmode;
9934 
9935 	  real_convert (&rmode, TYPE_MODE (type), &rr);
9936 	  /* Proceed iff the specified mode can hold the value.  */
9937 	  if (real_identical (&rmode, &rr))
9938 	    return build_real (type, rmode);
9939 	}
9940     }
9941   return NULL_TREE;
9942 }
9943 
9944 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
9945    number and no overflow/underflow occurred.  INEXACT is true if M
9946    was not exactly calculated.  TYPE is the tree type for the result.
9947    This function assumes that you cleared the MPFR flags before
9948    calculating M, so that any flag set since then was raised by that
9949    calculation.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
9950    is true, bypass the checks.  */
9951 
9952 static tree
9953 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9954 {
9955   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9956      overflow/underflow occurred.  If -frounding-math, proceed iff the
9957      result of calling FUNC was exact.  */
9958   if (force_convert
9959       || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9960 	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
9961 	  && (!flag_rounding_math || !inexact)))
9962     {
9963       REAL_VALUE_TYPE re, im;
9964 
9965       real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9966       real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9967       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9968	 checking for overflow/underflow.  If the REAL_VALUE_TYPE is
9969	 zero but the mpfr_t is not, then we underflowed in the
9970	 conversion.  */
9971       if (force_convert
9972 	  || (real_isfinite (&re) && real_isfinite (&im)
9973 	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9974 	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9975         {
9976 	  REAL_VALUE_TYPE re_mode, im_mode;
9977 
9978 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9979 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9980 	  /* Proceed iff the specified mode can hold the value.  */
9981 	  if (force_convert
9982 	      || (real_identical (&re_mode, &re)
9983 		  && real_identical (&im_mode, &im)))
9984 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9985 				  build_real (TREE_TYPE (type), im_mode));
9986 	}
9987     }
9988   return NULL_TREE;
9989 }
9990 
9991 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9992    the integer pointed to by ARG_QUO and return the remainder.  The
9993    type is taken from the type of ARG0 and is used for setting the
9994    precision of the calculation and results.  */
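
/* E.g. (sketch), with constant arguments

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds to r = -1.0 with q set to 2, since 5 = 2*3 - 1 when the
   quotient is rounded to nearest.  */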
9995 
9996 static tree
9997 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9998 {
9999   tree const type = TREE_TYPE (arg0);
10000   tree result = NULL_TREE;
10001 
10002   STRIP_NOPS (arg0);
10003   STRIP_NOPS (arg1);
10004 
10005   /* To proceed, MPFR must exactly represent the target floating point
10006      format, which only happens when the target base equals two.  */
10007   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10008       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10009       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10010     {
10011       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10012       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10013 
10014       if (real_isfinite (ra0) && real_isfinite (ra1))
10015         {
10016 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10017 	  const int prec = fmt->p;
10018 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10019 	  tree result_rem;
10020 	  long integer_quo;
10021 	  mpfr_t m0, m1;
10022 
10023 	  mpfr_inits2 (prec, m0, m1, NULL);
10024 	  mpfr_from_real (m0, ra0, GMP_RNDN);
10025 	  mpfr_from_real (m1, ra1, GMP_RNDN);
10026 	  mpfr_clear_flags ();
10027 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10028 	  /* Remquo is independent of the rounding mode, so pass
10029 	     inexact=0 to do_mpfr_ckconv().  */
10030 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10031 	  mpfr_clears (m0, m1, NULL);
10032 	  if (result_rem)
10033 	    {
10034 	      /* MPFR calculates quo in the host's long, so it may
10035 		 return more bits in quo than the target int can hold
10036 		 if sizeof(host long) > sizeof(target int).  This can
10037 		 happen even for native compilers in LP64 mode.  In
10038 		 these cases, reduce the quo value modulo the largest
10039 		 number that the target int can hold, leaving one
10040 		 bit for the sign.  */
10041 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10042 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10043 
10044 	      /* Dereference the quo pointer argument.  */
10045 	      arg_quo = build_fold_indirect_ref (arg_quo);
10046 	      /* Proceed iff a valid pointer type was passed in.  */
10047 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10048 	        {
10049 		  /* Set the value. */
10050 		  tree result_quo
10051 		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10052 				   build_int_cst (TREE_TYPE (arg_quo),
10053 						  integer_quo));
10054 		  TREE_SIDE_EFFECTS (result_quo) = 1;
10055 		  /* Combine the quo assignment with the rem.  */
10056 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10057 						    result_quo, result_rem));
10058 		}
10059 	    }
10060 	}
10061     }
10062   return result;
10063 }
10064 
10065 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10066    resulting value as a tree with type TYPE.  The mpfr precision is
10067    set to the precision of TYPE.  We assume that this mpfr function
10068    returns zero if the result could be calculated exactly within the
10069    requested precision.  In addition, the integer pointer represented
10070    by ARG_SG will be dereferenced and set to the appropriate signgam
10071    (-1,1) value.  */
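
/* E.g. lgamma_r (0.5, &sg) can fold to the constant log (sqrt (pi))
   with *sg set to 1, since gamma (0.5) = sqrt (pi) is positive
   (illustrative).  */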
10072 
10073 static tree
10074 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10075 {
10076   tree result = NULL_TREE;
10077 
10078   STRIP_NOPS (arg);
10079 
10080   /* To proceed, MPFR must exactly represent the target floating point
10081      format, which only happens when the target base equals two.  Also
10082      verify ARG is a constant and that ARG_SG is an int pointer.  */
10083   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10084       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10085       && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10086       && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10087     {
10088       const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10089 
10090       /* In addition to NaN and Inf, the argument cannot be zero or a
10091 	 negative integer.  */
10092       if (real_isfinite (ra)
10093 	  && ra->cl != rvc_zero
10094 	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10095         {
10096 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10097 	  const int prec = fmt->p;
10098 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10099 	  int inexact, sg;
10100 	  mpfr_t m;
10101 	  tree result_lg;
10102 
10103 	  mpfr_init2 (m, prec);
10104 	  mpfr_from_real (m, ra, GMP_RNDN);
10105 	  mpfr_clear_flags ();
10106 	  inexact = mpfr_lgamma (m, &sg, m, rnd);
10107 	  result_lg = do_mpfr_ckconv (m, type, inexact);
10108 	  mpfr_clear (m);
10109 	  if (result_lg)
10110 	    {
10111 	      tree result_sg;
10112 
10113 	      /* Dereference the arg_sg pointer argument.  */
10114 	      arg_sg = build_fold_indirect_ref (arg_sg);
10115 	      /* Assign the signgam value into *arg_sg. */
10116 	      result_sg = fold_build2 (MODIFY_EXPR,
10117 				       TREE_TYPE (arg_sg), arg_sg,
10118 				       build_int_cst (TREE_TYPE (arg_sg), sg));
10119 	      TREE_SIDE_EFFECTS (result_sg) = 1;
10120 	      /* Combine the signgam assignment with the lgamma result.  */
10121 	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10122 						result_sg, result_lg));
10123 	    }
10124 	}
10125     }
10126 
10127   return result;
10128 }
10129 
10130 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
10131    mpc function FUNC on it and return the resulting value as a tree
10132    with type TYPE.  The mpfr precision is set to the precision of
10133    TYPE.  We assume that function FUNC returns zero if the result
10134    could be calculated exactly within the requested precision.  If
10135    DO_NONFINITE is true, then fold expressions containing Inf or NaN
10136    in the arguments and/or results.  */
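
/* E.g. FUNC may be mpc_pow, in which case a call such as cpow (z0, z1)
   with both arguments constant can be evaluated here at compile time
   in the precision of TYPE (a sketch of one caller's use).  */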
10137 
10138 tree
10139 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10140 	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10141 {
10142   tree result = NULL_TREE;
10143 
10144   STRIP_NOPS (arg0);
10145   STRIP_NOPS (arg1);
10146 
10147   /* To proceed, MPFR must exactly represent the target floating point
10148      format, which only happens when the target base equals two.  */
10149   if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10150       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10151       && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10152       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10153       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10154     {
10155       const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10156       const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10157       const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10158       const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10159 
10160       if (do_nonfinite
10161 	  || (real_isfinite (re0) && real_isfinite (im0)
10162 	      && real_isfinite (re1) && real_isfinite (im1)))
10163         {
10164 	  const struct real_format *const fmt =
10165 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10166 	  const int prec = fmt->p;
10167 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10168 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10169 	  int inexact;
10170 	  mpc_t m0, m1;
10171 
10172 	  mpc_init2 (m0, prec);
10173 	  mpc_init2 (m1, prec);
10174 	  mpfr_from_real (mpc_realref (m0), re0, rnd);
10175 	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
10176 	  mpfr_from_real (mpc_realref (m1), re1, rnd);
10177 	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
10178 	  mpfr_clear_flags ();
10179 	  inexact = func (m0, m0, m1, crnd);
10180 	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10181 	  mpc_clear (m0);
10182 	  mpc_clear (m1);
10183 	}
10184     }
10185 
10186   return result;
10187 }
10188 
10189 /* A wrapper function for builtin folding that prevents warnings for
10190    "statement without effect" and the like, caused by removing the
10191    call node before the warning is generated.  */
10192 
10193 tree
10194 fold_call_stmt (gcall *stmt, bool ignore)
10195 {
10196   tree ret = NULL_TREE;
10197   tree fndecl = gimple_call_fndecl (stmt);
10198   location_t loc = gimple_location (stmt);
10199   if (fndecl
10200       && TREE_CODE (fndecl) == FUNCTION_DECL
10201       && DECL_BUILT_IN (fndecl)
10202       && !gimple_call_va_arg_pack_p (stmt))
10203     {
10204       int nargs = gimple_call_num_args (stmt);
10205       tree *args = (nargs > 0
10206 		    ? gimple_call_arg_ptr (stmt, 0)
10207 		    : &error_mark_node);
10208 
10209       if (avoid_folding_inline_builtin (fndecl))
10210 	return NULL_TREE;
10211       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10212         {
10213 	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
10214         }
10215       else
10216 	{
10217 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10218 	  if (ret)
10219 	    {
10220 	      /* Propagate location information from original call to
10221 		 expansion of builtin.  Otherwise things like
10222 		 maybe_emit_chk_warning, that operate on the expansion
10223 		 of a builtin, will use the wrong location information.  */
10224 	      if (gimple_has_location (stmt))
10225                 {
10226 		  tree realret = ret;
10227 		  if (TREE_CODE (ret) == NOP_EXPR)
10228 		    realret = TREE_OPERAND (ret, 0);
10229 		  if (CAN_HAVE_LOCATION_P (realret)
10230 		      && !EXPR_HAS_LOCATION (realret))
10231 		    SET_EXPR_LOCATION (realret, loc);
10232                   return realret;
10233                 }
10234 	      return ret;
10235 	    }
10236 	}
10237     }
10238   return NULL_TREE;
10239 }
10240 
10241 /* Look up the function in builtin_decl that corresponds to DECL
10242    and set ASMSPEC as its user assembler name.  DECL must be a
10243    function decl that declares a builtin.  */
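
/* E.g. (illustrative) a user declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   redirects the builtin, and on targets where int is narrower than a
   word also the ffs optab libfunc, to the assembler name "my_ffs".  */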
10244 
10245 void
10246 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10247 {
10248   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10249 	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10250 	      && asmspec != 0);
10251 
10252   tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10253   set_user_assembler_name (builtin, asmspec);
10254 
10255   if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10256       && INT_TYPE_SIZE < BITS_PER_WORD)
10257     {
10258       set_user_assembler_libfunc ("ffs", asmspec);
10259       set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10260 			 "ffs");
10261     }
10262 }
10263 
10264 /* Return true if DECL is a builtin that expands to a constant or similarly
10265    simple code.  */
10266 bool
10267 is_simple_builtin (tree decl)
10268 {
10269   if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10270     switch (DECL_FUNCTION_CODE (decl))
10271       {
10272 	/* Builtins that expand to constants.  */
10273       case BUILT_IN_CONSTANT_P:
10274       case BUILT_IN_EXPECT:
10275       case BUILT_IN_OBJECT_SIZE:
10276       case BUILT_IN_UNREACHABLE:
10277 	/* Simple register moves or loads from stack.  */
10278       case BUILT_IN_ASSUME_ALIGNED:
10279       case BUILT_IN_RETURN_ADDRESS:
10280       case BUILT_IN_EXTRACT_RETURN_ADDR:
10281       case BUILT_IN_FROB_RETURN_ADDR:
10282       case BUILT_IN_RETURN:
10283       case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10284       case BUILT_IN_FRAME_ADDRESS:
10285       case BUILT_IN_VA_END:
10286       case BUILT_IN_STACK_SAVE:
10287       case BUILT_IN_STACK_RESTORE:
10288 	/* Exception state returns or moves registers around.  */
10289       case BUILT_IN_EH_FILTER:
10290       case BUILT_IN_EH_POINTER:
10291       case BUILT_IN_EH_COPY_VALUES:
10292 	return true;
10293 
10294       default:
10295 	return false;
10296       }
10297 
10298   return false;
10299 }
10300 
10301 /* Return true if DECL is a builtin that is not expensive, i.e. one
10302    that is most probably expanded inline into reasonably simple code.
10303    This is a superset of is_simple_builtin.  */
10304 bool
10305 is_inexpensive_builtin (tree decl)
10306 {
10307   if (!decl)
10308     return false;
10309   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10310     return true;
10311   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10312     switch (DECL_FUNCTION_CODE (decl))
10313       {
10314       case BUILT_IN_ABS:
10315       case BUILT_IN_ALLOCA:
10316       case BUILT_IN_ALLOCA_WITH_ALIGN:
10317       case BUILT_IN_BSWAP16:
10318       case BUILT_IN_BSWAP32:
10319       case BUILT_IN_BSWAP64:
10320       case BUILT_IN_CLZ:
10321       case BUILT_IN_CLZIMAX:
10322       case BUILT_IN_CLZL:
10323       case BUILT_IN_CLZLL:
10324       case BUILT_IN_CTZ:
10325       case BUILT_IN_CTZIMAX:
10326       case BUILT_IN_CTZL:
10327       case BUILT_IN_CTZLL:
10328       case BUILT_IN_FFS:
10329       case BUILT_IN_FFSIMAX:
10330       case BUILT_IN_FFSL:
10331       case BUILT_IN_FFSLL:
10332       case BUILT_IN_IMAXABS:
10333       case BUILT_IN_FINITE:
10334       case BUILT_IN_FINITEF:
10335       case BUILT_IN_FINITEL:
10336       case BUILT_IN_FINITED32:
10337       case BUILT_IN_FINITED64:
10338       case BUILT_IN_FINITED128:
10339       case BUILT_IN_FPCLASSIFY:
10340       case BUILT_IN_ISFINITE:
10341       case BUILT_IN_ISINF_SIGN:
10342       case BUILT_IN_ISINF:
10343       case BUILT_IN_ISINFF:
10344       case BUILT_IN_ISINFL:
10345       case BUILT_IN_ISINFD32:
10346       case BUILT_IN_ISINFD64:
10347       case BUILT_IN_ISINFD128:
10348       case BUILT_IN_ISNAN:
10349       case BUILT_IN_ISNANF:
10350       case BUILT_IN_ISNANL:
10351       case BUILT_IN_ISNAND32:
10352       case BUILT_IN_ISNAND64:
10353       case BUILT_IN_ISNAND128:
10354       case BUILT_IN_ISNORMAL:
10355       case BUILT_IN_ISGREATER:
10356       case BUILT_IN_ISGREATEREQUAL:
10357       case BUILT_IN_ISLESS:
10358       case BUILT_IN_ISLESSEQUAL:
10359       case BUILT_IN_ISLESSGREATER:
10360       case BUILT_IN_ISUNORDERED:
10361       case BUILT_IN_VA_ARG_PACK:
10362       case BUILT_IN_VA_ARG_PACK_LEN:
10363       case BUILT_IN_VA_COPY:
10364       case BUILT_IN_TRAP:
10365       case BUILT_IN_SAVEREGS:
10366       case BUILT_IN_POPCOUNTL:
10367       case BUILT_IN_POPCOUNTLL:
10368       case BUILT_IN_POPCOUNTIMAX:
10369       case BUILT_IN_POPCOUNT:
10370       case BUILT_IN_PARITYL:
10371       case BUILT_IN_PARITYLL:
10372       case BUILT_IN_PARITYIMAX:
10373       case BUILT_IN_PARITY:
10374       case BUILT_IN_LABS:
10375       case BUILT_IN_LLABS:
10376       case BUILT_IN_PREFETCH:
10377       case BUILT_IN_ACC_ON_DEVICE:
10378 	return true;
10379 
10380       default:
10381 	return is_simple_builtin (decl);
10382       }
10383 
10384   return false;
10385 }
10386 
10387 /* Return true if T is a constant and its value cast to a target char
10388    can be represented by a host char.
10389    Store the cast char constant in *P if so.  */
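
/* E.g. for a tree constant holding 'a' (97 in ASCII) this stores
   (char) 97 in *P and returns true, provided target and host chars
   have the same width.  */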
10390 
10391 bool
10392 target_char_cst_p (tree t, char *p)
10393 {
10394   if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10395     return false;
10396 
10397   *p = (char)tree_to_uhwi (t);
10398   return true;
10399 }
10400