/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
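
/* Illustrative examples of the prefix checks above (documentation
   only, not calls made anywhere in this file):

     is_builtin_name ("__builtin_memcpy")     => true
     is_builtin_name ("__sync_fetch_and_add") => true
     is_builtin_name ("__atomic_load_n")      => true
     is_builtin_name ("memcpy")               => false  */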

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
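
/* Illustrative example (hypothetical numbers, documentation only):
   if get_object_alignment_1 reports align == 8 and bitpos == 2,
   i.e. ptr & 7 == 2, the least significant set bit of bitpos limits
   the usable alignment, so get_object_alignment returns 2.  */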

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
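
/* Illustrative example (documentation only): for ELTSIZE == 2 and a
   buffer holding the three 2-byte elements { 'a', 'b', 0 }, the loop
   above finds the all-zero element at index 2, so string_length
   returns 2.  */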

/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because the argument was declared at DECL as a
   character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
     {
       if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
       return lendata.decl;
     }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
800 
801 /* Return a constant integer corresponding to target reading
802    GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
803    NULL_TERMINATED_P, reading stops after '\0' character, all further ones
804    are assumed to be zero, otherwise it reads as many characters
805    as needed.  */
806 
807 rtx
808 c_readstr (const char *str, scalar_int_mode mode,
809 	   bool null_terminated_p/*=true*/)
810 {
811   HOST_WIDE_INT ch;
812   unsigned int i, j;
813   HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
814 
815   gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
816   unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
817     / HOST_BITS_PER_WIDE_INT;
818 
819   gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
820   for (i = 0; i < len; i++)
821     tmp[i] = 0;
822 
823   ch = 1;
824   for (i = 0; i < GET_MODE_SIZE (mode); i++)
825     {
826       j = i;
827       if (WORDS_BIG_ENDIAN)
828 	j = GET_MODE_SIZE (mode) - i - 1;
829       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
830 	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
831 	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
832       j *= BITS_PER_UNIT;
833 
834       if (ch || !null_terminated_p)
835 	ch = (unsigned char) str[i];
836       tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
837     }
838 
839   wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
840   return immed_wide_int_const (c, mode);
841 }
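
/* Illustrative example (documentation only): reading "abcd" in a
   32-bit integer mode yields the constant 0x64636261 when the target
   is little-endian ('a' in the least significant byte) and 0x61626364
   when it is big-endian.  */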

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
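
/* Illustrative example (documentation only): for the INTEGER_CST 65
   this stores 'A' through P and returns 0.  A nonzero return is only
   possible when the target char is wider than the host char, e.g. a
   16-bit target char holding 0x1234 on a host with 8-bit chars.  */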

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
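
/* Illustrative mapping (documentation only): __builtin_return_address (0)
   reaches here with COUNT == 0 and needs no frame walk, whereas
   __builtin_frame_address (2) follows DYNAMIC_CHAIN_ADDRESS twice
   before FRAME_ADDR_RTX is applied.  */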

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
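
/* Illustrative source-level usage of the pair expanded above
   (documentation only):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument must be the constant 1; as cautioned above,
   these builtins are for internal exception-handling use only.  */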

/* Return true if the CALL_EXPR argument iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
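
/* Illustrative call (documentation only): a hypothetical builtin taking
   (void *, const void *, size_t) and nothing else would be checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
		       INTEGER_TYPE, VOID_TYPE);

   where the trailing VOID_TYPE terminates the fixed argument list; a
   trailing 0 would instead accept arbitrary further arguments.  */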

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
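
/* Illustrative source-level usage (documentation only):

     __builtin_prefetch (p);		prefetch for read, locality 3
     __builtin_prefetch (p, 1, 0);	prefetch for write, no locality

   Non-constant or out-of-range flag arguments are diagnosed above and
   replaced with zero.  */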

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ...).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1536 
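/* Editorial worked example of the layout computed above, assuming an
   8-byte Pmode and two hypothetical argument registers of modes
   DImode (8 bytes, align 8) and SFmode (4 bytes, align 4):

     size = 8                           incoming arg pointer
     size = CEIL (8, 8) * 8 + 8 = 16    DImode register slot
     size = CEIL (16, 4) * 4 + 4 = 20   SFmode register slot

   so every saved register starts at an offset aligned for its mode.  */
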
1537 /* Return the size required for the block returned by __builtin_apply,
1538    and initialize apply_result_mode.  */
1539 
1540 static int
1541 apply_result_size (void)
1542 {
1543   static int size = -1;
1544   int align, regno;
1545 
1546   /* The values computed by this function never change.  */
1547   if (size < 0)
1548     {
1549       size = 0;
1550 
1551       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1552 	if (targetm.calls.function_value_regno_p (regno))
1553 	  {
1554 	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1555 
1556 	    gcc_assert (mode != VOIDmode);
1557 
1558 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 	    if (size % align != 0)
1560 	      size = CEIL (size, align) * align;
1561 	    size += GET_MODE_SIZE (mode);
1562 	    apply_result_mode[regno] = mode;
1563 	  }
1564 	else
1565 	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1566 
1567       /* Allow targets that use untyped_call and untyped_return to override
1568 	 the size so that machine-specific information can be stored here.  */
1569 #ifdef APPLY_RESULT_SIZE
1570       size = APPLY_RESULT_SIZE;
1571 #endif
1572     }
1573   return size;
1574 }
1575 
1576 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1577    the result block is used to save the values; otherwise it is used to
1578    restore the values.  */
1579 
1580 static rtx
1581 result_vector (int savep, rtx result)
1582 {
1583   int regno, size, align, nelts;
1584   fixed_size_mode mode;
1585   rtx reg, mem;
1586   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1587 
1588   size = nelts = 0;
1589   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590     if ((mode = apply_result_mode[regno]) != VOIDmode)
1591       {
1592 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1593 	if (size % align != 0)
1594 	  size = CEIL (size, align) * align;
1595 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1596 	mem = adjust_address (result, mode, size);
1597 	savevec[nelts++] = (savep
1598 			    ? gen_rtx_SET (mem, reg)
1599 			    : gen_rtx_SET (reg, mem));
1600 	size += GET_MODE_SIZE (mode);
1601       }
1602   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1603 }
1604 
1605 /* Save the state required to perform an untyped call with the same
1606    arguments as were passed to the current function.  */
1607 
1608 static rtx
1609 expand_builtin_apply_args_1 (void)
1610 {
1611   rtx registers, tem;
1612   int size, align, regno;
1613   fixed_size_mode mode;
1614   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1615 
1616   /* Create a block where the arg-pointer, structure value address,
1617      and argument registers can be saved.  */
1618   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1619 
1620   /* Walk past the arg-pointer and structure value address.  */
1621   size = GET_MODE_SIZE (Pmode);
1622   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1623     size += GET_MODE_SIZE (Pmode);
1624 
1625   /* Save each register used in calling a function to the block.  */
1626   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627     if ((mode = apply_args_mode[regno]) != VOIDmode)
1628       {
1629 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 	if (size % align != 0)
1631 	  size = CEIL (size, align) * align;
1632 
1633 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1634 
1635 	emit_move_insn (adjust_address (registers, mode, size), tem);
1636 	size += GET_MODE_SIZE (mode);
1637       }
1638 
1639   /* Save the arg pointer to the block.  */
1640   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1641   /* We need the pointer as the caller actually passed the arguments
1642      to us, not as we might have pretended they were passed.  Make sure
1643      it's a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1644   if (STACK_GROWS_DOWNWARD)
1645     tem
1646       = force_operand (plus_constant (Pmode, tem,
1647 				      crtl->args.pretend_args_size),
1648 		       NULL_RTX);
1649   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1650 
1651   size = GET_MODE_SIZE (Pmode);
1652 
1653   /* Save the structure value address unless this is passed as an
1654      "invisible" first argument.  */
1655   if (struct_incoming_value)
1656     {
1657       emit_move_insn (adjust_address (registers, Pmode, size),
1658 		      copy_to_reg (struct_incoming_value));
1659       size += GET_MODE_SIZE (Pmode);
1660     }
1661 
1662   /* Return the address of the block.  */
1663   return copy_addr_to_reg (XEXP (registers, 0));
1664 }
1665 
1666 /* __builtin_apply_args returns a block of memory allocated on
1667    the stack into which is stored the arg pointer, structure
1668    value address, static chain, and all the registers that might
1669    possibly be used in performing a function call.  The code is
1670    moved to the start of the function so the incoming values are
1671    saved.  */
1672 
1673 static rtx
1674 expand_builtin_apply_args (void)
1675 {
1676   /* Don't do __builtin_apply_args more than once in a function.
1677      Save the result of the first call and reuse it.  */
1678   if (apply_args_value != 0)
1679     return apply_args_value;
1680   {
1681     /* When this function is called, it means that registers must be
1682        saved on entry to this function.  So we migrate the
1683        call to the first insn of this function.  */
1684     rtx temp;
1685 
1686     start_sequence ();
1687     temp = expand_builtin_apply_args_1 ();
1688     rtx_insn *seq = get_insns ();
1689     end_sequence ();
1690 
1691     apply_args_value = temp;
1692 
1693     /* Put the insns after the NOTE that starts the function.
1694        If this is inside a start_sequence, make the outer-level insn
1695        chain current, so the code is placed at the start of the
1696        function.  If internal_arg_pointer is a non-virtual pseudo,
1697        it needs to be placed after the insn that initializes
1698        that pseudo.  */
1699     push_topmost_sequence ();
1700     if (REG_P (crtl->args.internal_arg_pointer)
1701 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1702       emit_insn_before (seq, parm_birth_insn);
1703     else
1704       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1705     pop_topmost_sequence ();
1706     return temp;
1707   }
1708 }
1709 
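/* Editorial usage sketch: the untyped call builtins are meant for
   call forwarding in user code, per the GCC extension documentation:

     double target (int a, double b);

     double forward (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target, args, 64);
       __builtin_return (res);
     }

   where 64 is a caller-chosen upper bound on the size of the pushed
   argument block (the ARGSIZE parameter of expand_builtin_apply).  */
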
1710 /* Perform an untyped call and save the state required to perform an
1711    untyped return of whatever value was returned by the given function.  */
1712 
1713 static rtx
1714 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1715 {
1716   int size, align, regno;
1717   fixed_size_mode mode;
1718   rtx incoming_args, result, reg, dest, src;
1719   rtx_call_insn *call_insn;
1720   rtx old_stack_level = 0;
1721   rtx call_fusage = 0;
1722   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1723 
1724   arguments = convert_memory_address (Pmode, arguments);
1725 
1726   /* Create a block where the return registers can be saved.  */
1727   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1728 
1729   /* Fetch the arg pointer from the ARGUMENTS block.  */
1730   incoming_args = gen_reg_rtx (Pmode);
1731   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1732   if (!STACK_GROWS_DOWNWARD)
1733     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1734 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1735 
1736   /* Push a new argument block and copy the arguments.  Do not allow
1737      the (potential) memcpy call below to interfere with our stack
1738      manipulations.  */
1739   do_pending_stack_adjust ();
1740   NO_DEFER_POP;
1741 
1742   /* Save the stack with nonlocal if available.  */
1743   if (targetm.have_save_stack_nonlocal ())
1744     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1745   else
1746     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1747 
1748   /* Allocate a block of memory onto the stack and copy the memory
1749      arguments to the outgoing arguments address.  We can pass TRUE
1750      as the last argument because we just saved the stack pointer
1751      and will restore it right after the call.  */
1752   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1753 
1754   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1755      may have already set current_function_calls_alloca to true.
1756      current_function_calls_alloca won't be set if argsize is zero,
1757      so we have to guarantee need_drap is true here.  */
1758   if (SUPPORTS_STACK_ALIGNMENT)
1759     crtl->need_drap = true;
1760 
1761   dest = virtual_outgoing_args_rtx;
1762   if (!STACK_GROWS_DOWNWARD)
1763     {
1764       if (CONST_INT_P (argsize))
1765 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1766       else
1767 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1768     }
1769   dest = gen_rtx_MEM (BLKmode, dest);
1770   set_mem_align (dest, PARM_BOUNDARY);
1771   src = gen_rtx_MEM (BLKmode, incoming_args);
1772   set_mem_align (src, PARM_BOUNDARY);
1773   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1774 
1775   /* Refer to the argument block.  */
1776   apply_args_size ();
1777   arguments = gen_rtx_MEM (BLKmode, arguments);
1778   set_mem_align (arguments, PARM_BOUNDARY);
1779 
1780   /* Walk past the arg-pointer and structure value address.  */
1781   size = GET_MODE_SIZE (Pmode);
1782   if (struct_value)
1783     size += GET_MODE_SIZE (Pmode);
1784 
1785   /* Restore each of the registers previously saved.  Make USE insns
1786      for each of these registers for use in making the call.  */
1787   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1788     if ((mode = apply_args_mode[regno]) != VOIDmode)
1789       {
1790 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1791 	if (size % align != 0)
1792 	  size = CEIL (size, align) * align;
1793 	reg = gen_rtx_REG (mode, regno);
1794 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1795 	use_reg (&call_fusage, reg);
1796 	size += GET_MODE_SIZE (mode);
1797       }
1798 
1799   /* Restore the structure value address unless this is passed as an
1800      "invisible" first argument.  */
1801   size = GET_MODE_SIZE (Pmode);
1802   if (struct_value)
1803     {
1804       rtx value = gen_reg_rtx (Pmode);
1805       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1806       emit_move_insn (struct_value, value);
1807       if (REG_P (struct_value))
1808 	use_reg (&call_fusage, struct_value);
1809       size += GET_MODE_SIZE (Pmode);
1810     }
1811 
1812   /* All arguments and registers used for the call are set up by now!  */
1813   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1814 
1815   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
1816      work is needed; nor do we load it into a register as an optimization,
1817      since prepare_call_address already did that if appropriate.  */
1818   if (GET_CODE (function) != SYMBOL_REF)
1819     function = memory_address (FUNCTION_MODE, function);
1820 
1821   /* Generate the actual call instruction and save the return value.  */
1822   if (targetm.have_untyped_call ())
1823     {
1824       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1825       emit_call_insn (targetm.gen_untyped_call (mem, result,
1826 						result_vector (1, result)));
1827     }
1828   else if (targetm.have_call_value ())
1829     {
1830       rtx valreg = 0;
1831 
1832       /* Locate the unique return register.  It is not possible to
1833 	 express a call that sets more than one return register using
1834 	 call_value; use untyped_call for that.  In fact, untyped_call
1835 	 only needs to save the return registers in the given block.  */
1836       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1838 	  {
1839 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1840 
1841 	    valreg = gen_rtx_REG (mode, regno);
1842 	  }
1843 
1844       emit_insn (targetm.gen_call_value (valreg,
1845 					 gen_rtx_MEM (FUNCTION_MODE, function),
1846 					 const0_rtx, NULL_RTX, const0_rtx));
1847 
1848       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1849     }
1850   else
1851     gcc_unreachable ();
1852 
1853   /* Find the CALL insn we just emitted, and attach the register usage
1854      information.  */
1855   call_insn = last_call_insn ();
1856   add_function_usage_to (call_insn, call_fusage);
1857 
1858   /* Restore the stack.  */
1859   if (targetm.have_save_stack_nonlocal ())
1860     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1861   else
1862     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1863   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1864 
1865   OK_DEFER_POP;
1866 
1867   /* Return the address of the result block.  */
1868   result = copy_addr_to_reg (XEXP (result, 0));
1869   return convert_memory_address (ptr_mode, result);
1870 }
1871 
1872 /* Perform an untyped return.  */
1873 
1874 static void
1875 expand_builtin_return (rtx result)
1876 {
1877   int size, align, regno;
1878   fixed_size_mode mode;
1879   rtx reg;
1880   rtx_insn *call_fusage = 0;
1881 
1882   result = convert_memory_address (Pmode, result);
1883 
1884   apply_result_size ();
1885   result = gen_rtx_MEM (BLKmode, result);
1886 
1887   if (targetm.have_untyped_return ())
1888     {
1889       rtx vector = result_vector (0, result);
1890       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1891       emit_barrier ();
1892       return;
1893     }
1894 
1895   /* Restore the return value and note that each value is used.  */
1896   size = 0;
1897   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898     if ((mode = apply_result_mode[regno]) != VOIDmode)
1899       {
1900 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1901 	if (size % align != 0)
1902 	  size = CEIL (size, align) * align;
1903 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1904 	emit_move_insn (reg, adjust_address (result, mode, size));
1905 
1906 	push_to_sequence (call_fusage);
1907 	emit_use (reg);
1908 	call_fusage = get_insns ();
1909 	end_sequence ();
1910 	size += GET_MODE_SIZE (mode);
1911       }
1912 
1913   /* Put the USE insns before the return.  */
1914   emit_insn (call_fusage);
1915 
1916   /* Return whatever values were restored by jumping directly to the end
1917      of the function.  */
1918   expand_naked_return ();
1919 }
1920 
1921 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1922 
1923 static enum type_class
1924 type_to_class (tree type)
1925 {
1926   switch (TREE_CODE (type))
1927     {
1928     case VOID_TYPE:	   return void_type_class;
1929     case INTEGER_TYPE:	   return integer_type_class;
1930     case ENUMERAL_TYPE:	   return enumeral_type_class;
1931     case BOOLEAN_TYPE:	   return boolean_type_class;
1932     case POINTER_TYPE:	   return pointer_type_class;
1933     case REFERENCE_TYPE:   return reference_type_class;
1934     case OFFSET_TYPE:	   return offset_type_class;
1935     case REAL_TYPE:	   return real_type_class;
1936     case COMPLEX_TYPE:	   return complex_type_class;
1937     case FUNCTION_TYPE:	   return function_type_class;
1938     case METHOD_TYPE:	   return method_type_class;
1939     case RECORD_TYPE:	   return record_type_class;
1940     case UNION_TYPE:
1941     case QUAL_UNION_TYPE:  return union_type_class;
1942     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1943 				   ? string_type_class : array_type_class);
1944     case LANG_TYPE:	   return lang_type_class;
1945     default:		   return no_type_class;
1946     }
1947 }
1948 
1949 /* Expand a call EXP to __builtin_classify_type.  */
1950 
1951 static rtx
1952 expand_builtin_classify_type (tree exp)
1953 {
1954   if (call_expr_nargs (exp))
1955     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1956   return GEN_INT (no_type_class);
1957 }
1958 
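/* Editorial examples of the classification above:

     int i;  double d;  char *p;  struct s { int x; } v;

     __builtin_classify_type (i)   -- integer_type_class
     __builtin_classify_type (d)   -- real_type_class
     __builtin_classify_type (p)   -- pointer_type_class
     __builtin_classify_type (v)   -- record_type_class

   A call without arguments expands to no_type_class, as the code
   above shows.  */
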
1959 /* This helper macro, meant to be used in mathfn_built_in below, determines
1960    which among a set of builtin math functions is appropriate for a given type
1961    mode.  The `F' (float) and `L' (long double) are automatically generated
1962    from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
1963    types, there are additional types that are considered with 'F32', 'F64',
1964    'F128', etc. suffixes.  */
1965 #define CASE_MATHFN(MATHFN) \
1966   CASE_CFN_##MATHFN: \
1967   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1968   fcodel = BUILT_IN_##MATHFN##L ; break;
1969 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1970    types.  */
1971 #define CASE_MATHFN_FLOATN(MATHFN) \
1972   CASE_CFN_##MATHFN: \
1973   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1974   fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1975   fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1976   fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1977   fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1978   break;
1979 /* Similar to above, but appends _R after any F/L suffix.  */
1980 #define CASE_MATHFN_REENT(MATHFN) \
1981   case CFN_BUILT_IN_##MATHFN##_R: \
1982   case CFN_BUILT_IN_##MATHFN##F_R: \
1983   case CFN_BUILT_IN_##MATHFN##L_R: \
1984   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1985   fcodel = BUILT_IN_##MATHFN##L_R ; break;
1986 
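/* Editorial illustration: CASE_MATHFN (ACOS) expands to roughly

     case CFN_ACOS: case CFN_BUILT_IN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   since the generated CASE_CFN_ACOS macro covers both the internal
   function and built-in forms of the combined code.  */
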
1987 /* Return a function equivalent to FN but operating on floating-point
1988    values of type TYPE, or END_BUILTINS if no such function exists.
1989    This is purely an operation on function codes; it does not guarantee
1990    that the target actually has an implementation of the function.  */
1991 
1992 static built_in_function
1993 mathfn_built_in_2 (tree type, combined_fn fn)
1994 {
1995   tree mtype;
1996   built_in_function fcode, fcodef, fcodel;
1997   built_in_function fcodef16 = END_BUILTINS;
1998   built_in_function fcodef32 = END_BUILTINS;
1999   built_in_function fcodef64 = END_BUILTINS;
2000   built_in_function fcodef128 = END_BUILTINS;
2001   built_in_function fcodef32x = END_BUILTINS;
2002   built_in_function fcodef64x = END_BUILTINS;
2003   built_in_function fcodef128x = END_BUILTINS;
2004 
2005   switch (fn)
2006     {
2007     CASE_MATHFN (ACOS)
2008     CASE_MATHFN (ACOSH)
2009     CASE_MATHFN (ASIN)
2010     CASE_MATHFN (ASINH)
2011     CASE_MATHFN (ATAN)
2012     CASE_MATHFN (ATAN2)
2013     CASE_MATHFN (ATANH)
2014     CASE_MATHFN (CBRT)
2015     CASE_MATHFN_FLOATN (CEIL)
2016     CASE_MATHFN (CEXPI)
2017     CASE_MATHFN_FLOATN (COPYSIGN)
2018     CASE_MATHFN (COS)
2019     CASE_MATHFN (COSH)
2020     CASE_MATHFN (DREM)
2021     CASE_MATHFN (ERF)
2022     CASE_MATHFN (ERFC)
2023     CASE_MATHFN (EXP)
2024     CASE_MATHFN (EXP10)
2025     CASE_MATHFN (EXP2)
2026     CASE_MATHFN (EXPM1)
2027     CASE_MATHFN (FABS)
2028     CASE_MATHFN (FDIM)
2029     CASE_MATHFN_FLOATN (FLOOR)
2030     CASE_MATHFN_FLOATN (FMA)
2031     CASE_MATHFN_FLOATN (FMAX)
2032     CASE_MATHFN_FLOATN (FMIN)
2033     CASE_MATHFN (FMOD)
2034     CASE_MATHFN (FREXP)
2035     CASE_MATHFN (GAMMA)
2036     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2037     CASE_MATHFN (HUGE_VAL)
2038     CASE_MATHFN (HYPOT)
2039     CASE_MATHFN (ILOGB)
2040     CASE_MATHFN (ICEIL)
2041     CASE_MATHFN (IFLOOR)
2042     CASE_MATHFN (INF)
2043     CASE_MATHFN (IRINT)
2044     CASE_MATHFN (IROUND)
2045     CASE_MATHFN (ISINF)
2046     CASE_MATHFN (J0)
2047     CASE_MATHFN (J1)
2048     CASE_MATHFN (JN)
2049     CASE_MATHFN (LCEIL)
2050     CASE_MATHFN (LDEXP)
2051     CASE_MATHFN (LFLOOR)
2052     CASE_MATHFN (LGAMMA)
2053     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2054     CASE_MATHFN (LLCEIL)
2055     CASE_MATHFN (LLFLOOR)
2056     CASE_MATHFN (LLRINT)
2057     CASE_MATHFN (LLROUND)
2058     CASE_MATHFN (LOG)
2059     CASE_MATHFN (LOG10)
2060     CASE_MATHFN (LOG1P)
2061     CASE_MATHFN (LOG2)
2062     CASE_MATHFN (LOGB)
2063     CASE_MATHFN (LRINT)
2064     CASE_MATHFN (LROUND)
2065     CASE_MATHFN (MODF)
2066     CASE_MATHFN (NAN)
2067     CASE_MATHFN (NANS)
2068     CASE_MATHFN_FLOATN (NEARBYINT)
2069     CASE_MATHFN (NEXTAFTER)
2070     CASE_MATHFN (NEXTTOWARD)
2071     CASE_MATHFN (POW)
2072     CASE_MATHFN (POWI)
2073     CASE_MATHFN (POW10)
2074     CASE_MATHFN (REMAINDER)
2075     CASE_MATHFN (REMQUO)
2076     CASE_MATHFN_FLOATN (RINT)
2077     CASE_MATHFN_FLOATN (ROUND)
2078     CASE_MATHFN (SCALB)
2079     CASE_MATHFN (SCALBLN)
2080     CASE_MATHFN (SCALBN)
2081     CASE_MATHFN (SIGNBIT)
2082     CASE_MATHFN (SIGNIFICAND)
2083     CASE_MATHFN (SIN)
2084     CASE_MATHFN (SINCOS)
2085     CASE_MATHFN (SINH)
2086     CASE_MATHFN_FLOATN (SQRT)
2087     CASE_MATHFN (TAN)
2088     CASE_MATHFN (TANH)
2089     CASE_MATHFN (TGAMMA)
2090     CASE_MATHFN_FLOATN (TRUNC)
2091     CASE_MATHFN (Y0)
2092     CASE_MATHFN (Y1)
2093     CASE_MATHFN (YN)
2094 
2095     default:
2096       return END_BUILTINS;
2097     }
2098 
2099   mtype = TYPE_MAIN_VARIANT (type);
2100   if (mtype == double_type_node)
2101     return fcode;
2102   else if (mtype == float_type_node)
2103     return fcodef;
2104   else if (mtype == long_double_type_node)
2105     return fcodel;
2106   else if (mtype == float16_type_node)
2107     return fcodef16;
2108   else if (mtype == float32_type_node)
2109     return fcodef32;
2110   else if (mtype == float64_type_node)
2111     return fcodef64;
2112   else if (mtype == float128_type_node)
2113     return fcodef128;
2114   else if (mtype == float32x_type_node)
2115     return fcodef32x;
2116   else if (mtype == float64x_type_node)
2117     return fcodef64x;
2118   else if (mtype == float128x_type_node)
2119     return fcodef128x;
2120   else
2121     return END_BUILTINS;
2122 }
2123 
2124 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2125    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
2126    otherwise use the explicit declaration.  If we can't do the conversion,
2127    return null.  */
2128 
2129 static tree
2130 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2131 {
2132   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2133   if (fcode2 == END_BUILTINS)
2134     return NULL_TREE;
2135 
2136   if (implicit_p && !builtin_decl_implicit_p (fcode2))
2137     return NULL_TREE;
2138 
2139   return builtin_decl_explicit (fcode2);
2140 }
2141 
2142 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2143 
2144 tree
2145 mathfn_built_in (tree type, combined_fn fn)
2146 {
2147   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2148 }
2149 
2150 /* Like mathfn_built_in_1, but take a built_in_function and
2151    always use the implicit builtin declarations.  */
2152 
2153 tree
2154 mathfn_built_in (tree type, enum built_in_function fn)
2155 {
2156   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2157 }
2158 
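/* Editorial usage sketch: callers map a generic combined function to
   the decl for one specific type, e.g.

     tree sqrtf_decl = mathfn_built_in (float_type_node, CFN_SQRT);

   which yields the decl of BUILT_IN_SQRTF, or NULL_TREE when that
   builtin is not implicitly available.  */
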
2159 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2160    return its code, otherwise return IFN_LAST.  Note that this function
2161    only tests whether the function is defined in internal-fn.def, not whether
2162    it is actually available on the target.  */
2163 
2164 internal_fn
2165 associated_internal_fn (tree fndecl)
2166 {
2167   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2168   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2169   switch (DECL_FUNCTION_CODE (fndecl))
2170     {
2171 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2172     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2174     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2175     CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2176 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2177     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2178 #include "internal-fn.def"
2179 
2180     CASE_FLT_FN (BUILT_IN_POW10):
2181       return IFN_EXP10;
2182 
2183     CASE_FLT_FN (BUILT_IN_DREM):
2184       return IFN_REMAINDER;
2185 
2186     CASE_FLT_FN (BUILT_IN_SCALBN):
2187     CASE_FLT_FN (BUILT_IN_SCALBLN):
2188       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2189 	return IFN_LDEXP;
2190       return IFN_LAST;
2191 
2192     default:
2193       return IFN_LAST;
2194     }
2195 }
2196 
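/* Editorial examples of the mapping above, with hypothetical decl
   variables standing for the named BUILT_IN_NORMAL functions:

     associated_internal_fn (sqrtf_decl)    => IFN_SQRT
     associated_internal_fn (pow10_decl)    => IFN_EXP10
     associated_internal_fn (scalbn_decl)   => IFN_LDEXP  (radix 2 only)

   Whether the internal function is usable on the target is a separate
   question, answered by replacement_internal_fn below.  */
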
2197 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2198    on the current target by a call to an internal function, return the
2199    code of that internal function, otherwise return IFN_LAST.  The caller
2200    is responsible for ensuring that any side-effects of the built-in
2201    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2202    must decide that the errno result isn't needed or make it available
2203    in some other way.  */
2204 
2205 internal_fn
2206 replacement_internal_fn (gcall *call)
2207 {
2208   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2209     {
2210       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2211       if (ifn != IFN_LAST)
2212 	{
2213 	  tree_pair types = direct_internal_fn_types (ifn, call);
2214 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2215 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2216 	    return ifn;
2217 	}
2218     }
2219   return IFN_LAST;
2220 }
2221 
2222 /* Expand a call to the builtin ternary math functions (fma).
2223    Return NULL_RTX if a normal call should be emitted rather than expanding the
2224    function in-line.  EXP is the expression that is a call to the builtin
2225    function; if convenient, the result should be placed in TARGET.
2226    SUBTARGET may be used as the target for computing one of EXP's
2227    operands.  */
2228 
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 {
2232   optab builtin_optab;
2233   rtx op0, op1, op2, result;
2234   rtx_insn *insns;
2235   tree fndecl = get_callee_fndecl (exp);
2236   tree arg0, arg1, arg2;
2237   machine_mode mode;
2238 
2239   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240     return NULL_RTX;
2241 
2242   arg0 = CALL_EXPR_ARG (exp, 0);
2243   arg1 = CALL_EXPR_ARG (exp, 1);
2244   arg2 = CALL_EXPR_ARG (exp, 2);
2245 
2246   switch (DECL_FUNCTION_CODE (fndecl))
2247     {
2248     CASE_FLT_FN (BUILT_IN_FMA):
2249     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2250       builtin_optab = fma_optab; break;
2251     default:
2252       gcc_unreachable ();
2253     }
2254 
2255   /* Make a suitable register to place result in.  */
2256   mode = TYPE_MODE (TREE_TYPE (exp));
2257 
2258   /* Before working hard, check whether the instruction is available.  */
2259   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260     return NULL_RTX;
2261 
2262   result = gen_reg_rtx (mode);
2263 
2264   /* Always stabilize the argument list.  */
2265   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2268 
2269   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270   op1 = expand_normal (arg1);
2271   op2 = expand_normal (arg2);
2272 
2273   start_sequence ();
2274 
2275   /* Compute into RESULT.
2276      Set RESULT to wherever the result comes back.  */
2277   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 			      result, 0);
2279 
2280   /* If we were unable to expand via the builtin, stop the sequence
2281      (without outputting the insns) and call the library function
2282      with the stabilized argument list.  */
2283   if (result == 0)
2284     {
2285       end_sequence ();
2286       return expand_call (exp, target, target == const0_rtx);
2287     }
2288 
2289   /* Output the entire sequence.  */
2290   insns = get_insns ();
2291   end_sequence ();
2292   emit_insn (insns);
2293 
2294   return result;
2295 }
2296 
2297 /* Expand a call to the builtin sin and cos math functions.
2298    Return NULL_RTX if a normal call should be emitted rather than expanding the
2299    function in-line.  EXP is the expression that is a call to the builtin
2300    function; if convenient, the result should be placed in TARGET.
2301    SUBTARGET may be used as the target for computing one of EXP's
2302    operands.  */
2303 
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2306 {
2307   optab builtin_optab;
2308   rtx op0;
2309   rtx_insn *insns;
2310   tree fndecl = get_callee_fndecl (exp);
2311   machine_mode mode;
2312   tree arg;
2313 
2314   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315     return NULL_RTX;
2316 
2317   arg = CALL_EXPR_ARG (exp, 0);
2318 
2319   switch (DECL_FUNCTION_CODE (fndecl))
2320     {
2321     CASE_FLT_FN (BUILT_IN_SIN):
2322     CASE_FLT_FN (BUILT_IN_COS):
2323       builtin_optab = sincos_optab; break;
2324     default:
2325       gcc_unreachable ();
2326     }
2327 
2328   /* Make a suitable register to place result in.  */
2329   mode = TYPE_MODE (TREE_TYPE (exp));
2330 
2331   /* Check if the sincos insn is available, otherwise fall back
2332      to sin or cos insn.  */
2333   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334     switch (DECL_FUNCTION_CODE (fndecl))
2335       {
2336       CASE_FLT_FN (BUILT_IN_SIN):
2337 	builtin_optab = sin_optab; break;
2338       CASE_FLT_FN (BUILT_IN_COS):
2339 	builtin_optab = cos_optab; break;
2340       default:
2341 	gcc_unreachable ();
2342       }
2343 
2344   /* Before working hard, check whether the instruction is available.  */
2345   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2346     {
2347       rtx result = gen_reg_rtx (mode);
2348 
2349       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 	 need to expand the argument again.  This way, we will not perform
2351 	 side-effects more than once.  */
2352       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2353 
2354       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2355 
2356       start_sequence ();
2357 
2358       /* Compute into RESULT.
2359 	 Set RESULT to wherever the result comes back.  */
2360       if (builtin_optab == sincos_optab)
2361 	{
2362 	  int ok;
2363 
2364 	  switch (DECL_FUNCTION_CODE (fndecl))
2365 	    {
2366 	    CASE_FLT_FN (BUILT_IN_SIN):
2367 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 	      break;
2369 	    CASE_FLT_FN (BUILT_IN_COS):
2370 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 	      break;
2372 	    default:
2373 	      gcc_unreachable ();
2374 	    }
2375 	  gcc_assert (ok);
2376 	}
2377       else
2378 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2379 
2380       if (result != 0)
2381 	{
2382 	  /* Output the entire sequence.  */
2383 	  insns = get_insns ();
2384 	  end_sequence ();
2385 	  emit_insn (insns);
2386 	  return result;
2387 	}
2388 
2389       /* If we were unable to expand via the builtin, stop the sequence
2390 	 (without outputting the insns) and call the library function
2391 	 with the stabilized argument list.  */
2392       end_sequence ();
2393     }
2394 
2395   return expand_call (exp, target, target == const0_rtx);
2396 }
2397 
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG
2399    return an RTL instruction code that implements the functionality.
2400    If that isn't possible or available return CODE_FOR_nothing.  */
2401 
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2404 {
2405   bool errno_set = false;
2406   optab builtin_optab = unknown_optab;
2407   machine_mode mode;
2408 
2409   switch (DECL_FUNCTION_CODE (fndecl))
2410     {
2411     CASE_FLT_FN (BUILT_IN_ILOGB):
2412       errno_set = true; builtin_optab = ilogb_optab; break;
2413     CASE_FLT_FN (BUILT_IN_ISINF):
2414       builtin_optab = isinf_optab; break;
2415     case BUILT_IN_ISNORMAL:
2416     case BUILT_IN_ISFINITE:
2417     CASE_FLT_FN (BUILT_IN_FINITE):
2418     case BUILT_IN_FINITED32:
2419     case BUILT_IN_FINITED64:
2420     case BUILT_IN_FINITED128:
2421     case BUILT_IN_ISINFD32:
2422     case BUILT_IN_ISINFD64:
2423     case BUILT_IN_ISINFD128:
2424       /* These builtins have no optabs (yet).  */
2425       break;
2426     default:
2427       gcc_unreachable ();
2428     }
2429 
2430   /* There's no easy way to detect the case we need to set EDOM.  */
2431   if (flag_errno_math && errno_set)
2432     return CODE_FOR_nothing;
2433 
2434   /* Optab mode depends on the mode of the input argument.  */
2435   mode = TYPE_MODE (TREE_TYPE (arg));
2436 
2437   if (builtin_optab)
2438     return optab_handler (builtin_optab, mode);
2439   return CODE_FOR_nothing;
2440 }
2441 
2442 /* Expand a call to one of the builtin math functions that operate on
2443    a floating point argument and output an integer result (ilogb, isinf,
2444    isnan, etc).
2445    Return 0 if a normal call should be emitted rather than expanding the
2446    function in-line.  EXP is the expression that is a call to the builtin
2447    function; if convenient, the result should be placed in TARGET.  */
2448 
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2451 {
2452   enum insn_code icode = CODE_FOR_nothing;
2453   rtx op0;
2454   tree fndecl = get_callee_fndecl (exp);
2455   machine_mode mode;
2456   tree arg;
2457 
2458   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459     return NULL_RTX;
2460 
2461   arg = CALL_EXPR_ARG (exp, 0);
2462   icode = interclass_mathfn_icode (arg, fndecl);
2463   mode = TYPE_MODE (TREE_TYPE (arg));
2464 
2465   if (icode != CODE_FOR_nothing)
2466     {
2467       struct expand_operand ops[1];
2468       rtx_insn *last = get_last_insn ();
2469       tree orig_arg = arg;
2470 
2471       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 	 need to expand the argument again.  This way, we will not perform
2473 	 side-effects more than once.  */
2474       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2475 
2476       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2477 
2478       if (mode != GET_MODE (op0))
2479 	op0 = convert_to_mode (mode, op0, 0);
2480 
2481       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482       if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 	return ops[0].value;
2485 
2486       delete_insns_since (last);
2487       CALL_EXPR_ARG (exp, 0) = orig_arg;
2488     }
2489 
2490   return NULL_RTX;
2491 }
2492 
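/* Editorial example: on a target providing an isinf optab for DFmode,

     int r = __builtin_isinf (d);

   expands through the path above into a single classification insn;
   without the optab the expander returns NULL_RTX and a normal
   library call is emitted instead.  */
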
2493 /* Expand a call to the builtin sincos math function.
2494    Return NULL_RTX if a normal call should be emitted rather than expanding the
2495    function in-line.  EXP is the expression that is a call to the builtin
2496    function.  */
2497 
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2500 {
2501   rtx op0, op1, op2, target1, target2;
2502   machine_mode mode;
2503   tree arg, sinp, cosp;
2504   int result;
2505   location_t loc = EXPR_LOCATION (exp);
2506   tree alias_type, alias_off;
2507 
2508   if (!validate_arglist (exp, REAL_TYPE,
2509  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510     return NULL_RTX;
2511 
2512   arg = CALL_EXPR_ARG (exp, 0);
2513   sinp = CALL_EXPR_ARG (exp, 1);
2514   cosp = CALL_EXPR_ARG (exp, 2);
2515 
2516   /* Make a suitable register to place result in.  */
2517   mode = TYPE_MODE (TREE_TYPE (arg));
2518 
2519   /* Check if sincos insn is available, otherwise emit the call.  */
2520   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521     return NULL_RTX;
2522 
2523   target1 = gen_reg_rtx (mode);
2524   target2 = gen_reg_rtx (mode);
2525 
2526   op0 = expand_normal (arg);
2527   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528   alias_off = build_int_cst (alias_type, 0);
2529   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 					sinp, alias_off));
2531   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 					cosp, alias_off));
2533 
2534   /* Compute into target1 and target2.
2535      Set TARGET to wherever the result comes back.  */
2536   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537   gcc_assert (result);
2538 
2539   /* Move target1 and target2 to the memory locations indicated
2540      by op1 and op2.  */
2541   emit_move_insn (op1, target1);
2542   emit_move_insn (op2, target2);
2543 
2544   return const0_rtx;
2545 }
2546 
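/* Editorial sketch: on a target with a sincos<mode> insn, a call

     sincos (x, &s, &c);

   expands via expand_twoval_unop above into one instruction computing
   both values, followed by stores through the two pointer arguments.  */
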
2547 /* Expand a call to the internal cexpi builtin via the sincos math function.
2548    EXP is the expression that is a call to the builtin function; if convenient,
2549    the result should be placed in TARGET.  */
2550 
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2553 {
2554   tree fndecl = get_callee_fndecl (exp);
2555   tree arg, type;
2556   machine_mode mode;
2557   rtx op0, op1, op2;
2558   location_t loc = EXPR_LOCATION (exp);
2559 
2560   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561     return NULL_RTX;
2562 
2563   arg = CALL_EXPR_ARG (exp, 0);
2564   type = TREE_TYPE (arg);
2565   mode = TYPE_MODE (TREE_TYPE (arg));
2566 
2567   /* Try expanding via a sincos optab, fall back to emitting a libcall
2568      to sincos or cexp.  One of them must exist, since cexpi is only
2569      generated from sincos or cexp, or when either is available.  */
2570   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2571     {
2572       op1 = gen_reg_rtx (mode);
2573       op2 = gen_reg_rtx (mode);
2574 
2575       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2576 
2577       /* Compute into op1 and op2.  */
2578       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2579     }
2580   else if (targetm.libc_has_function (function_sincos))
2581     {
2582       tree call, fn = NULL_TREE;
2583       tree top1, top2;
2584       rtx op1a, op2a;
2585 
2586       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592       else
2593 	gcc_unreachable ();
2594 
2595       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597       op1a = copy_addr_to_reg (XEXP (op1, 0));
2598       op2a = copy_addr_to_reg (XEXP (op2, 0));
2599       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2601 
2602       /* Make sure not to fold the sincos call again.  */
2603       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 				      call, 3, arg, top1, top2));
2606     }
2607   else
2608     {
2609       tree call, fn = NULL_TREE, narg;
2610       tree ctype = build_complex_type (type);
2611 
2612       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618       else
2619 	gcc_unreachable ();
2620 
2621       /* If we don't have a decl for cexp create one.  This is the
2622 	 friendliest fallback if the user calls __builtin_cexpi
2623 	 on a target without full C99 function support.  */
2624       if (fn == NULL_TREE)
2625 	{
2626 	  tree fntype;
2627 	  const char *name = NULL;
2628 
2629 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 	    name = "cexpf";
2631 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 	    name = "cexp";
2633 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 	    name = "cexpl";
2635 
2636 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 	  fn = build_fn_decl (name, fntype);
2638 	}
2639 
2640       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 			  build_real (type, dconst0), arg);
2642 
2643       /* Make sure not to fold the cexp call again.  */
2644       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645       return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 			  target, VOIDmode, EXPAND_NORMAL);
2647     }
2648 
2649   /* Now build the proper return type.  */
2650   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 			      make_tree (TREE_TYPE (arg), op2),
2652 			      make_tree (TREE_TYPE (arg), op1)),
2653 		      target, VOIDmode, EXPAND_NORMAL);
2654 }
2655 
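/* Editorial note: __builtin_cexpi (x) computes cexp (I * x), i.e.
   cos (x) + I * sin (x), so

     _Complex double z = __builtin_cexpi (t);

   has the value cexp (I * t).  The three strategies above are, in
   order: a direct sincos insn, a libcall to sincos, and a libcall to
   cexp with the argument built as the complex value 0 + t * I.  */
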
2656 /* Conveniently construct a function call expression.  FNDECL names the
2657    function to be called, N is the number of arguments, and the "..."
2658    parameters are the argument expressions.  Unlike build_call_exr
2659    parameters are the argument expressions.  Unlike build_call_expr
2660 
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2663 {
2664   va_list ap;
2665   tree fntype = TREE_TYPE (fndecl);
2666   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2667 
2668   va_start (ap, n);
2669   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670   va_end (ap);
2671   SET_EXPR_LOCATION (fn, loc);
2672   return fn;
2673 }
2674 
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676    as an extension (lfloor and lceil).  As these are gcc extensions we
2677    do not need to worry about setting errno to EDOM.
2678    If expanding via optab fails, lower expression to (int)(floor(x)).
2679    EXP is the expression that is a call to the builtin function;
2680    if convenient, the result should be placed in TARGET.  */
2681 
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2684 {
2685   convert_optab builtin_optab;
2686   rtx op0, tmp;
2687   rtx_insn *insns;
2688   tree fndecl = get_callee_fndecl (exp);
2689   enum built_in_function fallback_fn;
2690   tree fallback_fndecl;
2691   machine_mode mode;
2692   tree arg;
2693 
2694   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695     return NULL_RTX;
2696 
2697   arg = CALL_EXPR_ARG (exp, 0);
2698 
2699   switch (DECL_FUNCTION_CODE (fndecl))
2700     {
2701     CASE_FLT_FN (BUILT_IN_ICEIL):
2702     CASE_FLT_FN (BUILT_IN_LCEIL):
2703     CASE_FLT_FN (BUILT_IN_LLCEIL):
2704       builtin_optab = lceil_optab;
2705       fallback_fn = BUILT_IN_CEIL;
2706       break;
2707 
2708     CASE_FLT_FN (BUILT_IN_IFLOOR):
2709     CASE_FLT_FN (BUILT_IN_LFLOOR):
2710     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711       builtin_optab = lfloor_optab;
2712       fallback_fn = BUILT_IN_FLOOR;
2713       break;
2714 
2715     default:
2716       gcc_unreachable ();
2717     }
2718 
2719   /* Make a suitable register to place result in.  */
2720   mode = TYPE_MODE (TREE_TYPE (exp));
2721 
2722   target = gen_reg_rtx (mode);
2723 
2724   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725      need to expand the argument again.  This way, we will not perform
2726      side-effects more than once.  */
2727   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2728 
2729   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2730 
2731   start_sequence ();
2732 
2733   /* Compute into TARGET.  */
2734   if (expand_sfix_optab (target, op0, builtin_optab))
2735     {
2736       /* Output the entire sequence.  */
2737       insns = get_insns ();
2738       end_sequence ();
2739       emit_insn (insns);
2740       return target;
2741     }
2742 
2743   /* If we were unable to expand via the builtin, stop the sequence
2744      (without outputting the insns).  */
2745   end_sequence ();
2746 
2747   /* Fall back to the floating point rounding function.  */
2748   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2749 
2750   /* For non-C99 targets we may end up without a fallback fndecl here
2751      if the user called __builtin_lfloor directly.  In this case emit
2752      a call to the floor/ceil variants nevertheless.  This should result
2753      in the best user experience on targets lacking full C99 support.  */
2754   if (fallback_fndecl == NULL_TREE)
2755     {
2756       tree fntype;
2757       const char *name = NULL;
2758 
2759       switch (DECL_FUNCTION_CODE (fndecl))
2760 	{
2761 	case BUILT_IN_ICEIL:
2762 	case BUILT_IN_LCEIL:
2763 	case BUILT_IN_LLCEIL:
2764 	  name = "ceil";
2765 	  break;
2766 	case BUILT_IN_ICEILF:
2767 	case BUILT_IN_LCEILF:
2768 	case BUILT_IN_LLCEILF:
2769 	  name = "ceilf";
2770 	  break;
2771 	case BUILT_IN_ICEILL:
2772 	case BUILT_IN_LCEILL:
2773 	case BUILT_IN_LLCEILL:
2774 	  name = "ceill";
2775 	  break;
2776 	case BUILT_IN_IFLOOR:
2777 	case BUILT_IN_LFLOOR:
2778 	case BUILT_IN_LLFLOOR:
2779 	  name = "floor";
2780 	  break;
2781 	case BUILT_IN_IFLOORF:
2782 	case BUILT_IN_LFLOORF:
2783 	case BUILT_IN_LLFLOORF:
2784 	  name = "floorf";
2785 	  break;
2786 	case BUILT_IN_IFLOORL:
2787 	case BUILT_IN_LFLOORL:
2788 	case BUILT_IN_LLFLOORL:
2789 	  name = "floorl";
2790 	  break;
2791 	default:
2792 	  gcc_unreachable ();
2793 	}
2794 
2795       fntype = build_function_type_list (TREE_TYPE (arg),
2796 					 TREE_TYPE (arg), NULL_TREE);
2797       fallback_fndecl = build_fn_decl (name, fntype);
2798     }
2799 
2800   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2801 
2802   tmp = expand_normal (exp);
2803   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2804 
2805   /* Truncate the result of floating point optab to integer
2806      via expand_fix ().  */
2807   target = gen_reg_rtx (mode);
2808   expand_fix (target, tmp, 0);
2809 
2810   return target;
2811 }
2812 
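/* Editorial example: __builtin_lfloor (x) behaves like
   (long) floor (x), e.g.

     long l = __builtin_lfloor (3.7);   -- l == 3

   When the lfloor optab is missing, the fallback above literally
   emits the floor call and converts the result with expand_fix.  */
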
2813 /* Expand a call to one of the builtin math functions doing integer
2814    conversion (lrint).
2815    Return 0 if a normal call should be emitted rather than expanding the
2816    function in-line.  EXP is the expression that is a call to the builtin
2817    function; if convenient, the result should be placed in TARGET.  */
2818 
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2821 {
2822   convert_optab builtin_optab;
2823   rtx op0;
2824   rtx_insn *insns;
2825   tree fndecl = get_callee_fndecl (exp);
2826   tree arg;
2827   machine_mode mode;
2828   enum built_in_function fallback_fn = BUILT_IN_NONE;
2829 
2830   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831     return NULL_RTX;
2832 
2833   arg = CALL_EXPR_ARG (exp, 0);
2834 
2835   switch (DECL_FUNCTION_CODE (fndecl))
2836     {
2837     CASE_FLT_FN (BUILT_IN_IRINT):
2838       fallback_fn = BUILT_IN_LRINT;
2839       gcc_fallthrough ();
2840     CASE_FLT_FN (BUILT_IN_LRINT):
2841     CASE_FLT_FN (BUILT_IN_LLRINT):
2842       builtin_optab = lrint_optab;
2843       break;
2844 
2845     CASE_FLT_FN (BUILT_IN_IROUND):
2846       fallback_fn = BUILT_IN_LROUND;
2847       gcc_fallthrough ();
2848     CASE_FLT_FN (BUILT_IN_LROUND):
2849     CASE_FLT_FN (BUILT_IN_LLROUND):
2850       builtin_optab = lround_optab;
2851       break;
2852 
2853     default:
2854       gcc_unreachable ();
2855     }
2856 
2857   /* There's no easy way to detect the case we need to set EDOM.  */
2858   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859     return NULL_RTX;
2860 
2861   /* Make a suitable register to place result in.  */
2862   mode = TYPE_MODE (TREE_TYPE (exp));
2863 
2864   /* Expand inline only when we need not worry about setting errno.  */
2865   if (!flag_errno_math)
2866     {
2867       rtx result = gen_reg_rtx (mode);
2868 
2869       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 	 need to expand the argument again.  This way, we will not perform
2871 	 side-effects more than once.  */
2872       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2873 
2874       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2875 
2876       start_sequence ();
2877 
2878       if (expand_sfix_optab (result, op0, builtin_optab))
2879 	{
2880 	  /* Output the entire sequence.  */
2881 	  insns = get_insns ();
2882 	  end_sequence ();
2883 	  emit_insn (insns);
2884 	  return result;
2885 	}
2886 
2887       /* If we were unable to expand via the builtin, stop the sequence
2888 	 (without outputting the insns) and call the library function
2889 	 with the stabilized argument list.  */
2890       end_sequence ();
2891     }
2892 
2893   if (fallback_fn != BUILT_IN_NONE)
2894     {
2895       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2896 	 targets, (int) round (x) should never be transformed into
2897 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 	 a call to lround in the hope that the target provides at least some
2899 	 C99 functions.  This should result in the best user experience for
2900 	 targets lacking full C99 support.  */
2901       tree fallback_fndecl = mathfn_built_in_1
2902 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2903 
2904       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 				   fallback_fndecl, 1, arg);
2906 
2907       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909       return convert_to_mode (mode, target, 0);
2910     }
2911 
2912   return expand_call (exp, target, target == const0_rtx);
2913 }
2914 
2915 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2916    a normal call should be emitted rather than expanding the function
2917    in-line.  EXP is the expression that is a call to the builtin
2918    function; if convenient, the result should be placed in TARGET.  */
2919 
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2922 {
2923   tree arg0, arg1;
2924   rtx op0, op1;
2925   machine_mode mode;
2926   machine_mode mode2;
2927 
2928   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929     return NULL_RTX;
2930 
2931   arg0 = CALL_EXPR_ARG (exp, 0);
2932   arg1 = CALL_EXPR_ARG (exp, 1);
2933   mode = TYPE_MODE (TREE_TYPE (exp));
2934 
2935   /* Emit a libcall to libgcc.  */
2936 
2937   /* Mode of the 2nd argument must match that of an int.  */
2938   mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2939 
2940   if (target == NULL_RTX)
2941     target = gen_reg_rtx (mode);
2942 
2943   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944   if (GET_MODE (op0) != mode)
2945     op0 = convert_to_mode (mode, op0, 0);
2946   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947   if (GET_MODE (op1) != mode2)
2948     op1 = convert_to_mode (mode2, op1, 0);
2949 
2950   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 				    target, LCT_CONST, mode,
2952 				    op0, mode, op1, mode2);
2953 
2954   return target;
2955 }
2956 
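/* Editorial note: the libcall emitted above resolves to libgcc's
   __powi* family (e.g. __powidf2 for double on typical targets), so

     double r = __builtin_powi (x, n);

   becomes one call computing x**n for an integer exponent; the
   exponent is forced to the mode of int, as required above.  */
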
2957 /* Expand expression EXP which is a call to the strlen builtin.  Return
2958    NULL_RTX if we failed and the caller should emit a normal call, otherwise
2959    try to get the result in TARGET, if convenient.  */
2960 
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 		       machine_mode target_mode)
2964 {
2965   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966     return NULL_RTX;
2967 
2968   struct expand_operand ops[4];
2969   rtx pat;
2970   tree len;
2971   tree src = CALL_EXPR_ARG (exp, 0);
2972   rtx src_reg;
2973   rtx_insn *before_strlen;
2974   machine_mode insn_mode;
2975   enum insn_code icode = CODE_FOR_nothing;
2976   unsigned int align;
2977 
2978   /* If the length can be computed at compile-time, return it.  */
2979   len = c_strlen (src, 0);
2980   if (len)
2981     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2982 
2983   /* If the length can be computed at compile-time and is a constant
2984      integer, but there are side-effects in src, evaluate
2985      src for side-effects, then return len.
2986      E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987      can be optimized into: i++; x = 3;  */
2988   len = c_strlen (src, 1);
2989   if (len && TREE_CODE (len) == INTEGER_CST)
2990     {
2991       expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2993     }
2994 
2995   align = get_pointer_alignment (src) / BITS_PER_UNIT;
2996 
2997   /* If SRC is not a pointer type, don't do this operation inline.  */
2998   if (align == 0)
2999     return NULL_RTX;
3000 
3001   /* Bail out if we can't compute strlen in the right mode.  */
3002   FOR_EACH_MODE_FROM (insn_mode, target_mode)
3003     {
3004       icode = optab_handler (strlen_optab, insn_mode);
3005       if (icode != CODE_FOR_nothing)
3006 	break;
3007     }
3008   if (insn_mode == VOIDmode)
3009     return NULL_RTX;
3010 
3011   /* Make a place to hold the source address.  We will not expand
3012      the actual source until we are sure that the expansion will
3013      not fail -- there are trees that cannot be expanded twice.  */
3014   src_reg = gen_reg_rtx (Pmode);
3015 
3016   /* Mark the beginning of the strlen sequence so we can emit the
3017      source operand later.  */
3018   before_strlen = get_last_insn ();
3019 
3020   create_output_operand (&ops[0], target, insn_mode);
3021   create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022   create_integer_operand (&ops[2], 0);
3023   create_integer_operand (&ops[3], align);
3024   if (!maybe_expand_insn (icode, 4, ops))
3025     return NULL_RTX;
3026 
3027   /* Check to see if the argument was declared attribute nonstring
3028      and if so, issue a warning since at this point it's not known
3029      to be nul-terminated.  */
3030   maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3031 
3032   /* Now that we are assured of success, expand the source.  */
3033   start_sequence ();
3034   pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3035   if (pat != src_reg)
3036     {
3037 #ifdef POINTERS_EXTEND_UNSIGNED
3038       if (GET_MODE (pat) != Pmode)
3039 	pat = convert_to_mode (Pmode, pat,
3040 			       POINTERS_EXTEND_UNSIGNED);
3041 #endif
3042       emit_move_insn (src_reg, pat);
3043     }
3044   pat = get_insns ();
3045   end_sequence ();
3046 
3047   if (before_strlen)
3048     emit_insn_after (pat, before_strlen);
3049   else
3050     emit_insn_before (pat, get_insns ());
3051 
3052   /* Return the value in the proper mode for this function.  */
3053   if (GET_MODE (ops[0].value) == target_mode)
3054     target = ops[0].value;
3055   else if (target != 0)
3056     convert_move (target, ops[0].value, 0);
3057   else
3058     target = convert_to_mode (target_mode, ops[0].value, 0);
3059 
3060   return target;
3061 }
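
/* For illustration only, the three paths above roughly correspond to
   calls like the following, where I is an int variable and P is a
   pointer with no compile-time-known string length (both names are
   assumptions for the example):

     size_t a = strlen ("hello");              => folded to 5
     size_t b = strlen (i++ ? "abc" : "xyz");  => both arms have length
                                                  3, so this becomes
                                                  i++; b = 3;
     size_t c = strlen (p);                    => expanded via the
                                                  target's strlen pattern
                                                  if one exists, else a
                                                  normal libcall  */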
3062 
3063 /* Expand call EXP to the strnlen built-in, returning the result
3064    in TARGET if convenient.  Return NULL_RTX on failure.  */
3065 
3066 static rtx
3067 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3068 {
3069   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3070     return NULL_RTX;
3071 
3072   tree src = CALL_EXPR_ARG (exp, 0);
3073   tree bound = CALL_EXPR_ARG (exp, 1);
3074 
3075   if (!bound)
3076     return NULL_RTX;
3077 
3078   location_t loc = UNKNOWN_LOCATION;
3079   if (EXPR_HAS_LOCATION (exp))
3080     loc = EXPR_LOCATION (exp);
3081 
3082   tree maxobjsize = max_object_size ();
3083   tree func = get_callee_fndecl (exp);
3084 
3085   /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3086      so these conversions aren't necessary.  */
3087   c_strlen_data lendata = { };
3088   tree len = c_strlen (src, 0, &lendata, 1);
3089   if (len)
3090     len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3091 
3092   if (TREE_CODE (bound) == INTEGER_CST)
3093     {
3094       if (!TREE_NO_WARNING (exp)
3095 	  && tree_int_cst_lt (maxobjsize, bound)
3096 	  && warning_at (loc, OPT_Wstringop_overflow_,
3097 			 "%K%qD specified bound %E "
3098 			 "exceeds maximum object size %E",
3099 			 exp, func, bound, maxobjsize))
3100 	TREE_NO_WARNING (exp) = true;
3101 
3102       bool exact = true;
3103       if (!len || TREE_CODE (len) != INTEGER_CST)
3104 	{
3105 	  /* Clear EXACT if LEN may be less than SRC suggests,
3106 	     such as in
3107 	       strnlen (&a[i], sizeof a)
3108 	     where the value of i is unknown.  Unless i's value is
3109 	     zero, the call is unsafe because the bound is greater. */
3110 	  lendata.decl = unterminated_array (src, &len, &exact);
3111 	  if (!lendata.decl)
3112 	    return NULL_RTX;
3113 	}
3114 
3115       if (lendata.decl
3116 	  && !TREE_NO_WARNING (exp)
3117 	  && ((tree_int_cst_lt (len, bound))
3118 	      || !exact))
3119 	{
3120 	  location_t warnloc
3121 	    = expansion_point_location_if_in_system_header (loc);
3122 
3123 	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
3124 			  exact
3125 			  ? G_("%K%qD specified bound %E exceeds the size %E "
3126 			       "of unterminated array")
3127 			  : G_("%K%qD specified bound %E may exceed the size "
3128 			       "of at most %E of unterminated array"),
3129 			  exp, func, bound, len))
3130 	    {
3131 	      inform (DECL_SOURCE_LOCATION (lendata.decl),
3132 		      "referenced argument declared here");
3133 	      TREE_NO_WARNING (exp) = true;
3134 	      return NULL_RTX;
3135 	    }
3136 	}
3137 
3138       if (!len)
3139 	return NULL_RTX;
3140 
3141       len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3142       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3143     }
3144 
3145   if (TREE_CODE (bound) != SSA_NAME)
3146     return NULL_RTX;
3147 
3148   wide_int min, max;
3149   enum value_range_kind rng = get_range_info (bound, &min, &max);
3150   if (rng != VR_RANGE)
3151     return NULL_RTX;
3152 
3153   if (!TREE_NO_WARNING (exp)
3154       && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3155       && warning_at (loc, OPT_Wstringop_overflow_,
3156 		     "%K%qD specified bound [%wu, %wu] "
3157 		     "exceeds maximum object size %E",
3158 		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3159     TREE_NO_WARNING (exp) = true;
3160 
3161   bool exact = true;
3162   if (!len || TREE_CODE (len) != INTEGER_CST)
3163     {
3164       lendata.decl = unterminated_array (src, &len, &exact);
3165       if (!lendata.decl)
3166 	return NULL_RTX;
3167     }
3168 
3169   if (lendata.decl
3170       && !TREE_NO_WARNING (exp)
3171       && (wi::ltu_p (wi::to_wide (len), min)
3172 	  || !exact))
3173     {
3174       location_t warnloc
3175 	= expansion_point_location_if_in_system_header (loc);
3176 
3177       if (warning_at (warnloc, OPT_Wstringop_overflow_,
3178 		      exact
3179 		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3180 			   "the size %E of unterminated array")
3181 		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
3182 			   "the size of at most %E of unterminated array"),
3183 		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
3184 	{
3185 	  inform (DECL_SOURCE_LOCATION (lendata.decl),
3186 		  "referenced argument declared here");
3187 	  TREE_NO_WARNING (exp) = true;
3188 	}
3189     }
3190 
3191   if (lendata.decl)
3192     return NULL_RTX;
3193 
3194   if (wi::gtu_p (min, wi::to_wide (len)))
3195     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3196 
3197   len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3198   return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3199 }
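
/* For illustration, assuming an array A with no terminating nul and a
   pointer S whose string length folds to the constant 5 (A, S and N
   are assumptions for the example):

     char a[4] = "abcd";
     strnlen (a, 8);   => warning: bound 8 exceeds the size 4 of the
                          unterminated array; expanded as a libcall
     strnlen (s, n);   => with N's range known to be [16, 32], the
                          minimum bound exceeds the length, so the
                          result folds to 5  */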
3200 
3201 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3202    bytes from the bytes at DATA + OFFSET and return it reinterpreted as
3203    a target constant.  */
3204 
3205 static rtx
3206 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3207 			 scalar_int_mode mode)
3208 {
3209   /* The REPresentation pointed to by DATA need not be a nul-terminated
3210      string but the caller guarantees it's large enough for MODE.  */
3211   const char *rep = (const char *) data;
3212 
3213   return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3214 }
3215 
3216 /* LEN specifies the length of the block for a memcpy/memset operation.
3217    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3218    In some cases we can make a very likely guess at the maximum size,
3219    which we then store in PROBABLE_MAX_SIZE.  */
3220 
3221 static void
3222 determine_block_size (tree len, rtx len_rtx,
3223 		      unsigned HOST_WIDE_INT *min_size,
3224 		      unsigned HOST_WIDE_INT *max_size,
3225 		      unsigned HOST_WIDE_INT *probable_max_size)
3226 {
3227   if (CONST_INT_P (len_rtx))
3228     {
3229       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3230       return;
3231     }
3232   else
3233     {
3234       wide_int min, max;
3235       enum value_range_kind range_type = VR_UNDEFINED;
3236 
3237       /* Determine bounds from the type.  */
3238       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3239 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3240       else
3241 	*min_size = 0;
3242       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3243 	*probable_max_size = *max_size
3244 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3245       else
3246 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3247 
3248       if (TREE_CODE (len) == SSA_NAME)
3249 	range_type = get_range_info (len, &min, &max);
3250       if (range_type == VR_RANGE)
3251 	{
3252 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3253 	    *min_size = min.to_uhwi ();
3254 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3255 	    *probable_max_size = *max_size = max.to_uhwi ();
3256 	}
3257       else if (range_type == VR_ANTI_RANGE)
3258 	{
3259 	  /* An anti-range 0...N lets us determine the minimum size as N+1.  */
3260 	  if (min == 0)
3261 	    {
3262 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3263 		*min_size = max.to_uhwi () + 1;
3264 	    }
3265 	  /* Code like
3266 
3267 	     int n;
3268 	     if (n < 100)
3269 	       memcpy (a, b, n)
3270 
3271 	     produces an anti-range allowing negative values of N.  We can
3272 	     still use that information and guess that N is not negative.
3273 	     */
3274 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3275 	    *probable_max_size = min.to_uhwi () - 1;
3276 	}
3277     }
3278   gcc_checking_assert (*max_size <=
3279 		       (unsigned HOST_WIDE_INT)
3280 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3281 }
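
/* A worked example of the cases above, assuming LEN is an SSA name
   with value range information attached (N is an assumption):

     n in range [16, 32]       => *min_size = 16, *max_size = 32,
                                  *probable_max_size = 32
     n in anti-range ~[0, 0]   => zero is excluded, so *min_size = 1
     int n; if (n < 100) ...   => the converted anti-range admits
                                  negative values; *max_size stays at
                                  the mode mask while
                                  *probable_max_size becomes 99  */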
3282 
3283 /* Try to verify that the sizes and lengths of the arguments to a string
3284    manipulation function given by EXP are within valid bounds and that
3285    the operation does not lead to buffer overflow or read past the end.
3286    Arguments other than EXP may be null.  When non-null, the arguments
3287    have the following meaning:
3288    DST is the destination of a copy call or NULL otherwise.
3289    SRC is the source of a copy call or NULL otherwise.
3290    DSTWRITE is the number of bytes written into the destination obtained
3291    from the user-supplied size argument to the function (such as in
3292    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3293    MAXREAD is the user-supplied bound on the length of the source sequence
3294    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3295    of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
3296    SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3297    expression EXP is a string function call (as opposed to a memory call
3298    like memcpy).  As an exception, SRCSTR can also be an integer denoting
3299    the precomputed size of the source string or object (for functions like
3300    memcpy).
3301    DSTSIZE is the size of the destination object specified by the last
3302    argument to the _chk builtins, typically resulting from the expansion
3303    of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3304    DSTSIZE).
3305 
3306    When DSTWRITE is null, the number of bytes to write is checked to
3307    verify that it doesn't exceed SIZE_MAX.
3308 
3309    If the call is successfully verified as safe, return true; otherwise
3310    return false.  */
3311 
3312 static bool
3313 check_access (tree exp, tree, tree, tree dstwrite,
3314 	      tree maxread, tree srcstr, tree dstsize)
3315 {
3316   int opt = OPT_Wstringop_overflow_;
3317 
3318   /* The size of the largest object is half the address space, or
3319      PTRDIFF_MAX.  (This is way too permissive.)  */
3320   tree maxobjsize = max_object_size ();
3321 
3322   /* Either the length of the source string for string functions or
3323      the size of the source object for raw memory functions.  */
3324   tree slen = NULL_TREE;
3325 
3326   tree range[2] = { NULL_TREE, NULL_TREE };
3327 
3328   /* Set to true when the exact number of bytes written by a string
3329      function like strcpy is not known and the only thing that is
3330      known is that it must be at least one (for the terminating nul).  */
3331   bool at_least_one = false;
3332   if (srcstr)
3333     {
3334       /* SRCSTR is normally a pointer to string but as a special case
3335 	 it can be an integer denoting the length of a string.  */
3336       if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3337 	{
3338 	  /* Try to determine the range of lengths the source string
3339 	     refers to.  If it can be determined and is less than
3340 	     the upper bound given by MAXREAD add one to it for
3341 	     the terminating nul.  Otherwise, set it to one for
3342 	     the same reason, or to MAXREAD as appropriate.  */
3343 	  c_strlen_data lendata = { };
3344 	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3345 	  range[0] = lendata.minlen;
3346 	  range[1] = lendata.maxbound;
3347 	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3348 	    {
3349 	      if (maxread && tree_int_cst_le (maxread, range[0]))
3350 		range[0] = range[1] = maxread;
3351 	      else
3352 		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3353 					range[0], size_one_node);
3354 
3355 	      if (maxread && tree_int_cst_le (maxread, range[1]))
3356 		range[1] = maxread;
3357 	      else if (!integer_all_onesp (range[1]))
3358 		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3359 					range[1], size_one_node);
3360 
3361 	      slen = range[0];
3362 	    }
3363 	  else
3364 	    {
3365 	      at_least_one = true;
3366 	      slen = size_one_node;
3367 	    }
3368 	}
3369       else
3370 	slen = srcstr;
3371     }
3372 
3373   if (!dstwrite && !maxread)
3374     {
3375       /* When the only available piece of data is the object size
3376 	 there is nothing to do.  */
3377       if (!slen)
3378 	return true;
3379 
3380       /* Otherwise, when the length of the source sequence is known
3381 	 (as with strlen), set DSTWRITE to it.  */
3382       if (!range[0])
3383 	dstwrite = slen;
3384     }
3385 
3386   if (!dstsize)
3387     dstsize = maxobjsize;
3388 
3389   if (dstwrite)
3390     get_size_range (dstwrite, range);
3391 
3392   tree func = get_callee_fndecl (exp);
3393 
3394   /* First check the number of bytes to be written against the maximum
3395      object size.  */
3396   if (range[0]
3397       && TREE_CODE (range[0]) == INTEGER_CST
3398       && tree_int_cst_lt (maxobjsize, range[0]))
3399     {
3400       if (TREE_NO_WARNING (exp))
3401 	return false;
3402 
3403       location_t loc = tree_nonartificial_location (exp);
3404       loc = expansion_point_location_if_in_system_header (loc);
3405 
3406       bool warned;
3407       if (range[0] == range[1])
3408 	warned = warning_at (loc, opt,
3409 			     "%K%qD specified size %E "
3410 			     "exceeds maximum object size %E",
3411 			     exp, func, range[0], maxobjsize);
3412       else
3413 	warned = warning_at (loc, opt,
3414 			     "%K%qD specified size between %E and %E "
3415 			     "exceeds maximum object size %E",
3416 			     exp, func,
3417 			     range[0], range[1], maxobjsize);
3418       if (warned)
3419 	TREE_NO_WARNING (exp) = true;
3420 
3421       return false;
3422     }
3423 
3424   /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3425      constant, and in range of unsigned HOST_WIDE_INT.  */
3426   bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3427 
3428   /* Next check the number of bytes to be written against the destination
3429      object size.  */
3430   if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3431     {
3432       if (range[0]
3433 	  && TREE_CODE (range[0]) == INTEGER_CST
3434 	  && ((tree_fits_uhwi_p (dstsize)
3435 	       && tree_int_cst_lt (dstsize, range[0]))
3436 	      || (dstwrite
3437 		  && tree_fits_uhwi_p (dstwrite)
3438 		  && tree_int_cst_lt (dstwrite, range[0]))))
3439 	{
3440 	  if (TREE_NO_WARNING (exp))
3441 	    return false;
3442 
3443 	  location_t loc = tree_nonartificial_location (exp);
3444 	  loc = expansion_point_location_if_in_system_header (loc);
3445 
3446 	  if (dstwrite == slen && at_least_one)
3447 	    {
3448 	      /* This is a call to strcpy with a destination of 0 size
3449 		 and a source of unknown length.  The call will write
3450 		 at least one byte past the end of the destination.  */
3451 	      warning_at (loc, opt,
3452 			  "%K%qD writing %E or more bytes into a region "
3453 			  "of size %E overflows the destination",
3454 			  exp, func, range[0], dstsize);
3455 	    }
3456 	  else if (tree_int_cst_equal (range[0], range[1]))
3457 	    warning_n (loc, opt, tree_to_uhwi (range[0]),
3458 		       "%K%qD writing %E byte into a region "
3459 		       "of size %E overflows the destination",
3460 		       "%K%qD writing %E bytes into a region "
3461 		       "of size %E overflows the destination",
3462 		       exp, func, range[0], dstsize);
3463 	  else if (tree_int_cst_sign_bit (range[1]))
3464 	    {
3465 	      /* Avoid printing the upper bound if it's invalid.  */
3466 	      warning_at (loc, opt,
3467 			  "%K%qD writing %E or more bytes into a region "
3468 			  "of size %E overflows the destination",
3469 			  exp, func, range[0], dstsize);
3470 	    }
3471 	  else
3472 	    warning_at (loc, opt,
3473 			"%K%qD writing between %E and %E bytes into "
3474 			"a region of size %E overflows the destination",
3475 			exp, func, range[0], range[1],
3476 			dstsize);
3477 
3478 	  /* Return error when an overflow has been detected.  */
3479 	  return false;
3480 	}
3481     }
3482 
3483   /* Check the maximum length of the source sequence against the size
3484      of the destination object if known, or against the maximum size
3485      of an object.  */
3486   if (maxread)
3487     {
3488       get_size_range (maxread, range);
3489 
3490       /* Use the lower end for MAXREAD from now on.  */
3491       if (range[0])
3492 	maxread = range[0];
3493 
3494       if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3495 	{
3496 	  location_t loc = tree_nonartificial_location (exp);
3497 	  loc = expansion_point_location_if_in_system_header (loc);
3498 
3499 	  if (tree_int_cst_lt (maxobjsize, range[0]))
3500 	    {
3501 	      if (TREE_NO_WARNING (exp))
3502 		return false;
3503 
3504 	      /* Warn about crazy big sizes first since that's more
3505 		 likely to be meaningful than saying that the bound
3506 		 is greater than the object size if both are big.  */
3507 	      if (range[0] == range[1])
3508 		warning_at (loc, opt,
3509 			    "%K%qD specified bound %E "
3510 			    "exceeds maximum object size %E",
3511 			    exp, func,
3512 			    range[0], maxobjsize);
3513 	      else
3514 		warning_at (loc, opt,
3515 			    "%K%qD specified bound between %E and %E "
3516 			    "exceeds maximum object size %E",
3517 			    exp, func,
3518 			    range[0], range[1], maxobjsize);
3519 
3520 	      return false;
3521 	    }
3522 
3523 	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3524 	    {
3525 	      if (TREE_NO_WARNING (exp))
3526 		return false;
3527 
3528 	      if (tree_int_cst_equal (range[0], range[1]))
3529 		warning_at (loc, opt,
3530 			    "%K%qD specified bound %E "
3531 			    "exceeds destination size %E",
3532 			    exp, func,
3533 			    range[0], dstsize);
3534 	      else
3535 		warning_at (loc, opt,
3536 			    "%K%qD specified bound between %E and %E "
3537 			    "exceeds destination size %E",
3538 			    exp, func,
3539 			    range[0], range[1], dstsize);
3540 	      return false;
3541 	    }
3542 	}
3543     }
3544 
3545   /* Check for reading past the end of SRC.  */
3546   if (slen
3547       && slen == srcstr
3548       && dstwrite && range[0]
3549       && tree_int_cst_lt (slen, range[0]))
3550     {
3551       if (TREE_NO_WARNING (exp))
3552 	return false;
3553 
3554       location_t loc = tree_nonartificial_location (exp);
3555 
3556       if (tree_int_cst_equal (range[0], range[1]))
3557 	warning_n (loc, opt, tree_to_uhwi (range[0]),
3558 		   "%K%qD reading %E byte from a region of size %E",
3559 		   "%K%qD reading %E bytes from a region of size %E",
3560 		    exp, func, range[0], slen);
3561       else if (tree_int_cst_sign_bit (range[1]))
3562 	{
3563 	  /* Avoid printing the upper bound if it's invalid.  */
3564 	  warning_at (loc, opt,
3565 		      "%K%qD reading %E or more bytes from a region "
3566 		      "of size %E",
3567 		      exp, func, range[0], slen);
3568 	}
3569       else
3570 	warning_at (loc, opt,
3571 		    "%K%qD reading between %E and %E bytes from a region "
3572 		    "of size %E",
3573 		    exp, func, range[0], range[1], slen);
3574       return false;
3575     }
3576 
3577   return true;
3578 }
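
/* For illustration, assuming a 3-byte destination D, an unknown
   source S and a length N with range info (all assumptions):

     char d[3];
     memcpy (d, s, 5);    => writing 5 bytes into a region of size 3
     strcpy (d, "abcd");  => writing 5 bytes (the string plus its nul)
                             into a region of size 3
     memcpy (d, s, n);    => with N in [4, 8]: writing between 4 and 8
                             bytes into a region of size 3  */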
3579 
3580 /* Helper to compute the size of the object referenced by the DEST
3581    expression which must have pointer type, using Object Size type
3582    OSTYPE (only the least significant 2 bits are used).  Return
3583    an estimate of the size of the object if successful or NULL when
3584    the size cannot be determined.  When the referenced object involves
3585    a non-constant offset in some range the returned value represents
3586    the largest size given the smallest non-negative offset in the
3587    range.  The function is intended for diagnostics and should not
3588    be used to influence code generation or optimization.  */
3589 
3590 tree
3591 compute_objsize (tree dest, int ostype)
3592 {
3593   unsigned HOST_WIDE_INT size;
3594 
3595   /* Only the two least significant bits are meaningful.  */
3596   ostype &= 3;
3597 
3598   if (compute_builtin_object_size (dest, ostype, &size))
3599     return build_int_cst (sizetype, size);
3600 
3601   if (TREE_CODE (dest) == SSA_NAME)
3602     {
3603       gimple *stmt = SSA_NAME_DEF_STMT (dest);
3604       if (!is_gimple_assign (stmt))
3605 	return NULL_TREE;
3606 
3607       dest = gimple_assign_rhs1 (stmt);
3608 
3609       tree_code code = gimple_assign_rhs_code (stmt);
3610       if (code == POINTER_PLUS_EXPR)
3611 	{
3612 	  /* compute_builtin_object_size fails for addresses with
3613 	     non-constant offsets.  Try to determine the range of
3614 	     such an offset here and use it to adjust the constant
3615 	     size.  */
3616 	  tree off = gimple_assign_rhs2 (stmt);
3617 	  if (TREE_CODE (off) == INTEGER_CST)
3618 	    {
3619 	      if (tree size = compute_objsize (dest, ostype))
3620 		{
3621 		  wide_int wioff = wi::to_wide (off);
3622 		  wide_int wisiz = wi::to_wide (size);
3623 
3624 		  /* Ignore negative offsets for now.  For others,
3625 		     use the lower bound as the most optimistic
3626 		     estimate of the (remaining) size.  */
3627 		  if (wi::sign_mask (wioff))
3628 		    ;
3629 		  else if (wi::ltu_p (wioff, wisiz))
3630 		    return wide_int_to_tree (TREE_TYPE (size),
3631 					     wi::sub (wisiz, wioff));
3632 		  else
3633 		    return size_zero_node;
3634 		}
3635 	    }
3636 	  else if (TREE_CODE (off) == SSA_NAME
3637 	      && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3638 	    {
3639 	      wide_int min, max;
3640 	      enum value_range_kind rng = get_range_info (off, &min, &max);
3641 
3642 	      if (rng == VR_RANGE)
3643 		{
3644 		  if (tree size = compute_objsize (dest, ostype))
3645 		    {
3646 		      wide_int wisiz = wi::to_wide (size);
3647 
3648 		      /* Ignore negative offsets for now.  For others,
3649 			 use the lower bound as the most optimistic
3650 			 estimate of the (remaining) size.  */
3651 		      if (wi::sign_mask (min)
3652 			  || wi::sign_mask (max))
3653 			;
3654 		      else if (wi::ltu_p (min, wisiz))
3655 			return wide_int_to_tree (TREE_TYPE (size),
3656 						 wi::sub (wisiz, min));
3657 		      else
3658 			return size_zero_node;
3659 		    }
3660 		}
3661 	    }
3662 	}
3663       else if (code != ADDR_EXPR)
3664 	return NULL_TREE;
3665     }
3666 
3667   /* Unless computing the largest size (for memcpy and other raw memory
3668      functions), try to determine the size of the object from its type.  */
3669   if (!ostype)
3670     return NULL_TREE;
3671 
3672   if (TREE_CODE (dest) != ADDR_EXPR)
3673     return NULL_TREE;
3674 
3675   tree type = TREE_TYPE (dest);
3676   if (TREE_CODE (type) == POINTER_TYPE)
3677     type = TREE_TYPE (type);
3678 
3679   type = TYPE_MAIN_VARIANT (type);
3680 
3681   if (TREE_CODE (type) == ARRAY_TYPE
3682       && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3683     {
3684       /* Return the constant size unless it's zero (that's a zero-length
3685 	 array likely at the end of a struct).  */
3686       tree size = TYPE_SIZE_UNIT (type);
3687       if (size && TREE_CODE (size) == INTEGER_CST
3688 	  && !integer_zerop (size))
3689 	return size;
3690     }
3691 
3692   return NULL_TREE;
3693 }
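
/* A rough worked example of the offset adjustment above, assuming a
   pointer P derived from an 8-byte array A (names are assumptions):

     char a[8];
     p = a + 2;   compute_objsize (p, 1) => 6 (8 less constant offset 2)
     p = a + i;   with I's range [2, 5], compute_objsize (p, 1) => 6,
                  the most optimistic (smallest-offset) estimate
     p = a + 9;   compute_objsize (p, 1) => 0 (offset past the end)  */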
3694 
3695 /* Helper to determine and check the sizes of the source and the destination
3696    of calls to the __builtin_{bzero,memcpy,mempcpy,memset} built-ins.  EXP is
3697    the call expression, DEST is the destination argument, SRC is the source
3698    argument or null, and SIZE is the number of bytes.  Use Object Size type-0
3699    regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
3700    (no overflow or invalid sizes), false otherwise.  */
3701 
3702 static bool
3703 check_memop_access (tree exp, tree dest, tree src, tree size)
3704 {
3705   /* For functions like memset and memcpy that operate on raw memory
3706      try to determine the size of the largest source and destination
3707      object using type-0 Object Size regardless of the object size
3708      type specified by the option.  */
3709   tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3710   tree dstsize = compute_objsize (dest, 0);
3711 
3712   return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3713 		       srcsize, dstsize);
3714 }
3715 
3716 /* Validate memchr arguments without performing any expansion.
3717    Return NULL_RTX.  */
3718 
3719 static rtx
3720 expand_builtin_memchr (tree exp, rtx)
3721 {
3722   if (!validate_arglist (exp,
3723  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724     return NULL_RTX;
3725 
3726   tree arg1 = CALL_EXPR_ARG (exp, 0);
3727   tree len = CALL_EXPR_ARG (exp, 2);
3728 
3729   /* Diagnose calls where the specified length exceeds the size
3730      of the object.  */
3731   if (warn_stringop_overflow)
3732     {
3733       tree size = compute_objsize (arg1, 0);
3734       check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3735 		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3736     }
3737 
3738   return NULL_RTX;
3739 }
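
/* E.g., assuming an 8-byte array A (an assumption for the example):

     char a[8];
     memchr (a, c, 16);  => diagnosed for reading 16 bytes from a region
                            of size 8; the call is still expanded as a
                            normal libcall  */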
3740 
3741 /* Expand a call EXP to the memcpy builtin.
3742    Return NULL_RTX if we failed; the caller should emit a normal call.
3743    Otherwise try to get the result in TARGET, if convenient (and in
3744    mode MODE if that's convenient).  */
3745 
3746 static rtx
3747 expand_builtin_memcpy (tree exp, rtx target)
3748 {
3749   if (!validate_arglist (exp,
3750  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3751     return NULL_RTX;
3752 
3753   tree dest = CALL_EXPR_ARG (exp, 0);
3754   tree src = CALL_EXPR_ARG (exp, 1);
3755   tree len = CALL_EXPR_ARG (exp, 2);
3756 
3757   check_memop_access (exp, dest, src, len);
3758 
3759   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3760 					  /*retmode=*/ RETURN_BEGIN);
3761 }
3762 
3763 /* Check a call EXP to the memmove built-in for validity.
3764    Return NULL_RTX on both success and failure.  */
3765 
3766 static rtx
3767 expand_builtin_memmove (tree exp, rtx)
3768 {
3769   if (!validate_arglist (exp,
3770  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3771     return NULL_RTX;
3772 
3773   tree dest = CALL_EXPR_ARG (exp, 0);
3774   tree src = CALL_EXPR_ARG (exp, 1);
3775   tree len = CALL_EXPR_ARG (exp, 2);
3776 
3777   check_memop_access (exp, dest, src, len);
3778 
3779   return NULL_RTX;
3780 }
3781 
3782 /* Expand a call EXP to the mempcpy builtin.
3783    Return NULL_RTX if we failed; the caller should emit a normal call,
3784    otherwise try to get the result in TARGET, if convenient (and in
3785    mode MODE if that's convenient).  */
3786 
3787 static rtx
3788 expand_builtin_mempcpy (tree exp, rtx target)
3789 {
3790   if (!validate_arglist (exp,
3791  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792     return NULL_RTX;
3793 
3794   tree dest = CALL_EXPR_ARG (exp, 0);
3795   tree src = CALL_EXPR_ARG (exp, 1);
3796   tree len = CALL_EXPR_ARG (exp, 2);
3797 
3798   /* Policy does not generally allow using compute_objsize (which
3799      is used internally by check_memop_access) to change code generation
3800      or drive optimization decisions.
3801 
3802      In this instance it is safe because the code we generate has
3803      the same semantics regardless of the return value of
3804      check_memop_access.  Exactly the same amount of data is copied
3805      and the return value is exactly the same in both cases.
3806 
3807      Furthermore, check_memop_access always uses mode 0 for the call to
3808      compute_objsize, so the imprecise nature of compute_objsize is
3809      avoided.  */
3810 
3811   /* Avoid expanding mempcpy into memcpy when the call is determined
3812      to overflow the buffer.  This also prevents the same overflow
3813      from being diagnosed again when expanding memcpy.  */
3814   if (!check_memop_access (exp, dest, src, len))
3815     return NULL_RTX;
3816 
3817   return expand_builtin_mempcpy_args (dest, src, len,
3818 				      target, exp, /*retmode=*/ RETURN_END);
3819 }
3820 
3821 /* Helper function to do the actual work of expanding the memory copy
3822    family of functions (memcpy, mempcpy, stpcpy).  The expansion should
3823    copy LEN bytes of memory from SRC to DEST and assign to TARGET if
3824    convenient.  The return value is based on the RETMODE argument.  */
3825 
3826 static rtx
3827 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3828 				 rtx target, tree exp, memop_ret retmode)
3829 {
3830   unsigned int src_align = get_pointer_alignment (src);
3831   unsigned int dest_align = get_pointer_alignment (dest);
3832   rtx dest_mem, src_mem, dest_addr, len_rtx;
3833   HOST_WIDE_INT expected_size = -1;
3834   unsigned int expected_align = 0;
3835   unsigned HOST_WIDE_INT min_size;
3836   unsigned HOST_WIDE_INT max_size;
3837   unsigned HOST_WIDE_INT probable_max_size;
3838 
3839   /* If DEST is not a pointer type, call the normal function.  */
3840   if (dest_align == 0)
3841     return NULL_RTX;
3842 
3843   /* If either SRC is not a pointer type, don't do this
3844      operation in-line.  */
3845   if (src_align == 0)
3846     return NULL_RTX;
3847 
3848   if (currently_expanding_gimple_stmt)
3849     stringop_block_profile (currently_expanding_gimple_stmt,
3850 			    &expected_align, &expected_size);
3851 
3852   if (expected_align < dest_align)
3853     expected_align = dest_align;
3854   dest_mem = get_memory_rtx (dest, len);
3855   set_mem_align (dest_mem, dest_align);
3856   len_rtx = expand_normal (len);
3857   determine_block_size (len, len_rtx, &min_size, &max_size,
3858 			&probable_max_size);
3859 
3860   /* Try to get the byte representation of the constant SRC points to,
3861      with its byte size in NBYTES.  */
3862   unsigned HOST_WIDE_INT nbytes;
3863   const char *rep = c_getstr (src, &nbytes);
3864 
3865   /* If the function's constant bound LEN_RTX is less than or equal
3866      to the byte size of the representation of the constant argument,
3867      and if block move would be done by pieces, we can avoid loading
3868      the bytes from memory and only store the computed constant.
3869      This works in the overlap (memmove) case as well because
3870      store_by_pieces just generates a series of stores of constants
3871      from the representation returned by c_getstr().  */
3872   if (rep
3873       && CONST_INT_P (len_rtx)
3874       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3875       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3876 			      CONST_CAST (char *, rep),
3877 			      dest_align, false))
3878     {
3879       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3880 				  builtin_memcpy_read_str,
3881 				  CONST_CAST (char *, rep),
3882 				  dest_align, false, retmode);
3883       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3884       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3885       return dest_mem;
3886     }
3887 
3888   src_mem = get_memory_rtx (src, len);
3889   set_mem_align (src_mem, src_align);
3890 
3891   /* Copy word part most expediently.  */
3892   enum block_op_methods method = BLOCK_OP_NORMAL;
3893   if (CALL_EXPR_TAILCALL (exp)
3894       && (retmode == RETURN_BEGIN || target == const0_rtx))
3895     method = BLOCK_OP_TAILCALL;
3896   if (retmode == RETURN_END && target != const0_rtx)
3897     method = BLOCK_OP_NO_LIBCALL_RET;
3898   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3899 				     expected_align, expected_size,
3900 				     min_size, max_size, probable_max_size);
3901   if (dest_addr == pc_rtx)
3902     return NULL_RTX;
3901 
3902   if (dest_addr == 0)
3903     {
3904       dest_addr = force_operand (XEXP (dest_mem, 0), target);
3905       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3906     }
3907 
3908   if (retmode != RETURN_BEGIN && target != const0_rtx)
3909     {
3910       dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3911       /* stpcpy returns a pointer to the last byte.  */
3912       if (retmode == RETURN_END_MINUS_ONE)
3913 	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3914     }
3915 
3916   return dest_addr;
3917 }
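
/* For illustration, the constant-source shortcut above turns a copy
   from a string literal into direct stores of constants when the
   stores can be done by pieces (BUF is an assumption):

     char buf[8];
     memcpy (buf, "abc", 4);  => e.g. a single 4-byte store of
                                 0x00636261 on a little-endian target,
                                 instead of a load from the literal
                                 followed by a store  */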
3918 
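/* Helper function to do the actual work for expand_builtin_mempcpy.
   The arguments DEST, SRC, and LEN are broken out so that this can
   also be called without constructing an actual CALL_EXPR.  The other
   arguments and return value are the same as for
   expand_builtin_mempcpy.  */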
3919 static rtx
3920 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3921 			     rtx target, tree orig_exp, memop_ret retmode)
3922 {
3923   return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3924 					  retmode);
3925 }
3926 
3927 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3928    we failed; the caller should emit a normal call.  Otherwise try to
3929    get the result in TARGET, if convenient.
3930    The return value is based on the RETMODE argument.  */
3931 
3932 static rtx
3933 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3934 {
3935   struct expand_operand ops[3];
3936   rtx dest_mem;
3937   rtx src_mem;
3938 
3939   if (!targetm.have_movstr ())
3940     return NULL_RTX;
3941 
3942   dest_mem = get_memory_rtx (dest, NULL);
3943   src_mem = get_memory_rtx (src, NULL);
3944   if (retmode == RETURN_BEGIN)
3945     {
3946       target = force_reg (Pmode, XEXP (dest_mem, 0));
3947       dest_mem = replace_equiv_address (dest_mem, target);
3948     }
3949 
3950   create_output_operand (&ops[0],
3951 			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3952   create_fixed_operand (&ops[1], dest_mem);
3953   create_fixed_operand (&ops[2], src_mem);
3954   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3955     return NULL_RTX;
3956 
3957   if (retmode != RETURN_BEGIN && target != const0_rtx)
3958     {
3959       target = ops[0].value;
3960       /* movstr is supposed to set end to the address of the NUL
3961 	 terminator.  If the caller requested a mempcpy-like return value,
3962 	 adjust it.  */
3963       if (retmode == RETURN_END)
3964 	{
3965 	  rtx tem = plus_constant (GET_MODE (target),
3966 				   gen_lowpart (GET_MODE (target), target), 1);
3967 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3968 	}
3969     }
3970   return target;
3971 }
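
/* Illustration of the adjustment above: movstr leaves its result
   pointing at the nul terminator, which is exactly the stpcpy result
   (RETURN_END_MINUS_ONE).  A mempcpy-style caller (RETURN_END) expects
   one past the last byte written, hence the plus_constant of 1 (D is
   an assumption):

     char *e = stpcpy (d, "ab");   e == d + 2, points at the nul
     mempcpy (d, "ab", 3)          => returns d + 3 == e + 1  */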
3972 
3973 /* Do some very basic size validation of a call to the strcat builtin
3974    given by EXP.  Return NULL_RTX to have the built-in expand to a call
3975    to the library function.  */
3976 
3977 static rtx
3978 expand_builtin_strcat (tree exp, rtx)
3979 {
3980   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3981       || !warn_stringop_overflow)
3982     return NULL_RTX;
3983 
3984   tree dest = CALL_EXPR_ARG (exp, 0);
3985   tree src = CALL_EXPR_ARG (exp, 1);
3986 
3987   /* There is no way here to determine the length of the string in
3988      the destination to which the SRC string is being appended, so
3989      just diagnose cases where the source string is longer than
3990      the destination object.  */
3991 
3992   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3993 
3994   check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3995 		destsize);
3996 
3997   return NULL_RTX;
3998 }
3999 
4000 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
4001    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4002    try to get the result in TARGET, if convenient (and in mode MODE if that's
4003    convenient).  */
4004 
4005 static rtx
4006 expand_builtin_strcpy (tree exp, rtx target)
4007 {
4008   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4009     return NULL_RTX;
4010 
4011   tree dest = CALL_EXPR_ARG (exp, 0);
4012   tree src = CALL_EXPR_ARG (exp, 1);
4013 
4014   if (warn_stringop_overflow)
4015     {
4016       tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4017       check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4018 		    src, destsize);
4019     }
4020 
4021   if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4022     {
4023       /* Check to see if the argument was declared attribute nonstring
4024 	 and if so, issue a warning since at this point it's not known
4025 	 to be nul-terminated.  */
4026       tree fndecl = get_callee_fndecl (exp);
4027       maybe_warn_nonstring_arg (fndecl, exp);
4028       return ret;
4029     }
4030 
4031   return NULL_RTX;
4032 }
4033 
4034 /* Helper function to do the actual work for expand_builtin_strcpy.  The
4035    arguments to the builtin_strcpy call DEST and SRC are broken out
4036    so that this can also be called without constructing an actual CALL_EXPR.
4037    The other arguments and return value are the same as for
4038    expand_builtin_strcpy.  */
4039 
4040 static rtx
4041 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4042 {
4043   /* Detect strcpy calls with unterminated arrays.  */
4044   if (tree nonstr = unterminated_array (src))
4045     {
4046       /* NONSTR refers to the non-nul terminated constant array.  */
4047       if (!TREE_NO_WARNING (exp))
4048 	warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4049       return NULL_RTX;
4050     }
4051 
4052   return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4053 }
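
/* E.g. (A and D are assumptions for the example):

     char a[3] = "abc";  => no terminating nul fits in A
     strcpy (d, a);      => diagnosed via warn_string_no_nul and
                            expanded as a normal libcall  */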
4054 
4055 /* Expand a call EXP to the stpcpy builtin.
4056    Return NULL_RTX if we failed; the caller should emit a normal call.
4057    Otherwise try to get the result in TARGET, if convenient (and in
4058    mode MODE if that's convenient).  */
4059 
4060 static rtx
4061 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4062 {
4063   tree dst, src;
4064   location_t loc = EXPR_LOCATION (exp);
4065 
4066   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4067     return NULL_RTX;
4068 
4069   dst = CALL_EXPR_ARG (exp, 0);
4070   src = CALL_EXPR_ARG (exp, 1);
4071 
4072   if (warn_stringop_overflow)
4073     {
4074       tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4075       check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4076 		    src, destsize);
4077     }
4078 
4079   /* If return value is ignored, transform stpcpy into strcpy.  */
4080   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4081     {
4082       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4083       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4084       return expand_expr (result, target, mode, EXPAND_NORMAL);
4085     }
4086   else
4087     {
4088       tree len, lenp1;
4089       rtx ret;
4090 
4091       /* Ensure we get an actual string whose length can be evaluated at
4092 	 compile-time, not an expression containing a string.  This is
4093 	 because the latter will potentially produce pessimized code
4094 	 when used to produce the return value.  */
4095       c_strlen_data lendata = { };
4096       if (!c_getstr (src, NULL)
4097 	  || !(len = c_strlen (src, 0, &lendata, 1)))
4098 	return expand_movstr (dst, src, target,
4099 			      /*retmode=*/ RETURN_END_MINUS_ONE);
4100 
4101       if (lendata.decl && !TREE_NO_WARNING (exp))
4102 	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4103 
4104       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4105       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4106 					 target, exp,
4107 					 /*retmode=*/ RETURN_END_MINUS_ONE);
4108 
4109       if (ret)
4110 	return ret;
4111 
4112       if (TREE_CODE (len) == INTEGER_CST)
4113 	{
4114 	  rtx len_rtx = expand_normal (len);
4115 
4116 	  if (CONST_INT_P (len_rtx))
4117 	    {
4118 	      ret = expand_builtin_strcpy_args (exp, dst, src, target);
4119 
4120 	      if (ret)
4121 		{
4122 		  if (! target)
4123 		    {
4124 		      if (mode != VOIDmode)
4125 			target = gen_reg_rtx (mode);
4126 		      else
4127 			target = gen_reg_rtx (GET_MODE (ret));
4128 		    }
4129 		  if (GET_MODE (target) != GET_MODE (ret))
4130 		    ret = gen_lowpart (GET_MODE (target), ret);
4131 
4132 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4133 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4134 		  gcc_assert (ret);
4135 
4136 		  return target;
4137 		}
4138 	    }
4139 	}
4140 
4141       return expand_movstr (dst, src, target,
4142 			    /*retmode=*/ RETURN_END_MINUS_ONE);
4143     }
4144 }
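
/* A sketch of the transformations above (D and P are assumptions):

     stpcpy (d, s);         => result unused: expanded as strcpy (d, s)
     p = stpcpy (d, "ab");  => the length folds to 2, so this expands
                               as mempcpy (d, "ab", 3) with the result
                               adjusted back by 1 so that P points at
                               the nul  */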
4145 
4146 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4147    arguments while being careful to avoid duplicate warnings (which could
4148    be issued if the expander were to expand the call, resulting in it
4149    being emitted in expand_call()).  */
4150 
4151 static rtx
4152 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4153 {
4154   if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4155     {
4156       /* The call has been successfully expanded.  Check for nonstring
4157 	 arguments and issue warnings as appropriate.  */
4158       maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4159       return ret;
4160     }
4161 
4162   return NULL_RTX;
4163 }
4164 
4165 /* Check a call EXP to the stpncpy built-in for validity.
4166    Return NULL_RTX on both success and failure.  */
4167 
4168 static rtx
4169 expand_builtin_stpncpy (tree exp, rtx)
4170 {
4171   if (!validate_arglist (exp,
4172 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4173       || !warn_stringop_overflow)
4174     return NULL_RTX;
4175 
4176   /* The source and destination of the call.  */
4177   tree dest = CALL_EXPR_ARG (exp, 0);
4178   tree src = CALL_EXPR_ARG (exp, 1);
4179 
4180   /* The exact number of bytes to write (not the maximum).  */
4181   tree len = CALL_EXPR_ARG (exp, 2);
4182 
4183   /* The size of the destination object.  */
4184   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4185 
4186   check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4187 
4188   return NULL_RTX;
4189 }
4190 
4191 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
4192    bytes from constant string DATA + OFFSET and return it as a target
4193    constant.  */
4194 
4195 rtx
4196 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4197 			  scalar_int_mode mode)
4198 {
4199   const char *str = (const char *) data;
4200 
4201   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4202     return const0_rtx;
4203 
4204   return c_readstr (str + offset, mode);
4205 }
4206 
4207 /* Helper to check the sizes of sequences and the destination of calls
4208    to __builtin_strncat and __builtin___strncat_chk.  Returns true on
4209    success (no overflow or invalid sizes), false otherwise.  */
4210 
4211 static bool
4212 check_strncat_sizes (tree exp, tree objsize)
4213 {
4214   tree dest = CALL_EXPR_ARG (exp, 0);
4215   tree src = CALL_EXPR_ARG (exp, 1);
4216   tree maxread = CALL_EXPR_ARG (exp, 2);
4217 
4218   /* Try to determine the range of lengths that the source expression
4219      refers to.  */
4220   c_strlen_data lendata = { };
4221   get_range_strlen (src, &lendata, /* eltsize = */ 1);
4222 
4223   /* Try to verify that the destination is big enough for the shortest
4224      string.  */
4225 
4226   if (!objsize && warn_stringop_overflow)
4227     {
4228       /* If it hasn't been provided by __strncat_chk, try to determine
4229 	 the size of the destination object into which the source is
4230 	 being copied.  */
4231       objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4232     }
4233 
4234   /* Add one for the terminating nul.  */
4235   tree srclen = (lendata.minlen
4236 		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4237 				size_one_node)
4238 		 : NULL_TREE);
4239 
4240   /* The strncat function copies at most MAXREAD bytes and always appends
4241      the terminating nul so the specified upper bound should never be equal
4242      to (or greater than) the size of the destination.  */
4243   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4244       && tree_int_cst_equal (objsize, maxread))
4245     {
4246       location_t loc = tree_nonartificial_location (exp);
4247       loc = expansion_point_location_if_in_system_header (loc);
4248 
4249       warning_at (loc, OPT_Wstringop_overflow_,
4250 		  "%K%qD specified bound %E equals destination size",
4251 		  exp, get_callee_fndecl (exp), maxread);
4252 
4253       return false;
4254     }
4255 
4256   if (!srclen
4257       || (maxread && tree_fits_uhwi_p (maxread)
4258 	  && tree_fits_uhwi_p (srclen)
4259 	  && tree_int_cst_lt (maxread, srclen)))
4260     srclen = maxread;
4261 
4262   /* The number of bytes to write is LEN but check_access will also
4263      check SRCLEN if LEN's value isn't known.  */
4264   return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4265 		       objsize);
4266 }
4267 
4268 /* Similar to expand_builtin_strcat, do some very basic size validation
4269    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
4270    the built-in expand to a call to the library function.  */
4271 
4272 static rtx
4273 expand_builtin_strncat (tree exp, rtx)
4274 {
4275   if (!validate_arglist (exp,
4276 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4277       || !warn_stringop_overflow)
4278     return NULL_RTX;
4279 
4280   tree dest = CALL_EXPR_ARG (exp, 0);
4281   tree src = CALL_EXPR_ARG (exp, 1);
4282   /* The upper bound on the number of bytes to write.  */
4283   tree maxread = CALL_EXPR_ARG (exp, 2);
4284   /* The length of the source sequence.  */
4285   tree slen = c_strlen (src, 1);
4286 
4287   /* Try to determine the range of lengths that the source expression
4288      refers to.  Since the lengths are only used for warning and not
4289      for code generation disable strict mode below.  */
4290   tree maxlen = slen;
4291   if (!maxlen)
4292     {
4293       c_strlen_data lendata = { };
4294       get_range_strlen (src, &lendata, /* eltsize = */ 1);
4295       maxlen = lendata.maxbound;
4296     }
4297 
4298   /* Try to verify that the destination is big enough for the shortest
4299      string.  First try to determine the size of the destination object
4300      into which the source is being copied.  */
4301   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4302 
4303   /* Add one for the terminating nul.  */
4304   tree srclen = (maxlen
4305 		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4306 				size_one_node)
4307 		 : NULL_TREE);
4308 
4309   /* The strncat function copies at most MAXREAD bytes and always appends
4310      the terminating nul so the specified upper bound should never be equal
4311      to (or greater than) the size of the destination.  */
4312   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4313       && tree_int_cst_equal (destsize, maxread))
4314     {
4315       location_t loc = tree_nonartificial_location (exp);
4316       loc = expansion_point_location_if_in_system_header (loc);
4317 
4318       warning_at (loc, OPT_Wstringop_overflow_,
4319 		  "%K%qD specified bound %E equals destination size",
4320 		  exp, get_callee_fndecl (exp), maxread);
4321 
4322       return NULL_RTX;
4323     }
4324 
4325   if (!srclen
4326       || (maxread && tree_fits_uhwi_p (maxread)
4327 	  && tree_fits_uhwi_p (srclen)
4328 	  && tree_int_cst_lt (maxread, srclen)))
4329     srclen = maxread;
4330 
4331   /* The number of bytes to write is SRCLEN.  */
4332   check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4333 
4334   return NULL_RTX;
4335 }
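
/* E.g. (D is an assumption for the example):

     char d[8];
     strncat (d, s, sizeof d);      => diagnosed: bound 8 equals the
                                       destination size, leaving no
                                       room for the terminating nul
     strncat (d, s, sizeof d - 1);  => not diagnosed by this check  */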
4336 
4337 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
4338    NULL_RTX if we failed; the caller should emit a normal call.  */
4339 
4340 static rtx
4341 expand_builtin_strncpy (tree exp, rtx target)
4342 {
4343   location_t loc = EXPR_LOCATION (exp);
4344 
4345   if (validate_arglist (exp,
4346  			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4347     {
4348       tree dest = CALL_EXPR_ARG (exp, 0);
4349       tree src = CALL_EXPR_ARG (exp, 1);
4350       /* The number of bytes to write (not the maximum).  */
4351       tree len = CALL_EXPR_ARG (exp, 2);
4352       /* The length of the source sequence.  */
4353       tree slen = c_strlen (src, 1);
4354 
4355       if (warn_stringop_overflow)
4356 	{
4357 	  tree destsize = compute_objsize (dest,
4358 					   warn_stringop_overflow - 1);
4359 
4360 	  /* The number of bytes to write is LEN but check_access will also
4361 	     check SLEN if LEN's value isn't known.  */
4362 	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4363 			destsize);
4364 	}
4365 
4366       /* We must be passed a constant len and src parameter.  */
4367       if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4368 	return NULL_RTX;
4369 
4370       slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4371 
4372       /* We're required to pad with trailing zeros if the requested
4373 	 len is greater than strlen(s2)+1.  In that case try to
4374 	 use store_by_pieces, if it fails, punt.  */
4375       if (tree_int_cst_lt (slen, len))
4376 	{
4377 	  unsigned int dest_align = get_pointer_alignment (dest);
4378 	  const char *p = c_getstr (src);
4379 	  rtx dest_mem;
4380 
4381 	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4382 	      || !can_store_by_pieces (tree_to_uhwi (len),
4383 				       builtin_strncpy_read_str,
4384 				       CONST_CAST (char *, p),
4385 				       dest_align, false))
4386 	    return NULL_RTX;
4387 
4388 	  dest_mem = get_memory_rtx (dest, len);
4389 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4390 			   builtin_strncpy_read_str,
4391 			   CONST_CAST (char *, p), dest_align, false,
4392 			   RETURN_BEGIN);
4393 	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
4394 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
4395 	  return dest_mem;
4396 	}
4397     }
4398   return NULL_RTX;
4399 }
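
/* For illustration, the padding path above (D is an assumption):

     char d[8];
     strncpy (d, "ab", 8);  => strlen ("ab") + 1 == 3 < 8, so the
                               expansion stores 'a', 'b' and six
                               trailing nuls directly;
                               builtin_strncpy_read_str supplies the
                               zeros past the end of the source  */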
4400 
4401 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
4402    bytes from constant string DATA + OFFSET and return it as a target
4403    constant.  */
4404 
4405 rtx
4406 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4407 			 scalar_int_mode mode)
4408 {
4409   const char *c = (const char *) data;
4410   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4411 
4412   memset (p, *c, GET_MODE_SIZE (mode));
4413 
4414   return c_readstr (p, mode);
4415 }
4416 
4417 /* Callback routine for store_by_pieces.  Return the RTL of a register
4418    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4419    char value given in the RTL register data.  For example, if mode is
4420    4 bytes wide, return the RTL for 0x01010101*data.  */
4421 
4422 static rtx
4423 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4424 			scalar_int_mode mode)
4425 {
4426   rtx target, coeff;
4427   size_t size;
4428   char *p;
4429 
4430   size = GET_MODE_SIZE (mode);
4431   if (size == 1)
4432     return (rtx) data;
4433 
4434   p = XALLOCAVEC (char, size);
4435   memset (p, 1, size);
4436   coeff = c_readstr (p, mode);
4437 
4438   target = convert_to_mode (mode, (rtx) data, 1);
4439   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4440   return force_reg (mode, target);
4441 }
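
/* A worked example for a 4-byte MODE: the register value V (an
   assumption) is multiplied by the constant 0x01010101, so v == 0xab
   yields 0xabababab, letting a variable memset value be stored one
   word at a time.  */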
4442 
4443 /* Expand expression EXP, which is a call to the memset builtin.  Return
4444    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4445    try to get the result in TARGET, if convenient (and in mode MODE if that's
4446    convenient).  */
4447 
4448 static rtx
4449 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4450 {
4451   if (!validate_arglist (exp,
4452  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4453     return NULL_RTX;
4454 
4455   tree dest = CALL_EXPR_ARG (exp, 0);
4456   tree val = CALL_EXPR_ARG (exp, 1);
4457   tree len = CALL_EXPR_ARG (exp, 2);
4458 
4459   check_memop_access (exp, dest, NULL_TREE, len);
4460 
4461   return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4462 }
4463 
4464 /* Helper function to do the actual work for expand_builtin_memset.  The
4465    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4466    so that this can also be called without constructing an actual CALL_EXPR.
4467    The other arguments and return value are the same as for
4468    expand_builtin_memset.  */
4469 
4470 static rtx
4471 expand_builtin_memset_args (tree dest, tree val, tree len,
4472 			    rtx target, machine_mode mode, tree orig_exp)
4473 {
4474   tree fndecl, fn;
4475   enum built_in_function fcode;
4476   machine_mode val_mode;
4477   char c;
4478   unsigned int dest_align;
4479   rtx dest_mem, dest_addr, len_rtx;
4480   HOST_WIDE_INT expected_size = -1;
4481   unsigned int expected_align = 0;
4482   unsigned HOST_WIDE_INT min_size;
4483   unsigned HOST_WIDE_INT max_size;
4484   unsigned HOST_WIDE_INT probable_max_size;
4485 
4486   dest_align = get_pointer_alignment (dest);
4487 
4488   /* If DEST is not a pointer type, don't do this operation in-line.  */
4489   if (dest_align == 0)
4490     return NULL_RTX;
4491 
4492   if (currently_expanding_gimple_stmt)
4493     stringop_block_profile (currently_expanding_gimple_stmt,
4494 			    &expected_align, &expected_size);
4495 
4496   if (expected_align < dest_align)
4497     expected_align = dest_align;
4498 
4499   /* If the LEN parameter is zero, return DEST.  */
4500   if (integer_zerop (len))
4501     {
4502       /* Evaluate and ignore VAL in case it has side-effects.  */
4503       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4504       return expand_expr (dest, target, mode, EXPAND_NORMAL);
4505     }
4506 
4507   /* Stabilize the arguments in case we fail.  */
4508   dest = builtin_save_expr (dest);
4509   val = builtin_save_expr (val);
4510   len = builtin_save_expr (len);
4511 
4512   len_rtx = expand_normal (len);
4513   determine_block_size (len, len_rtx, &min_size, &max_size,
4514 			&probable_max_size);
4515   dest_mem = get_memory_rtx (dest, len);
4516   val_mode = TYPE_MODE (unsigned_char_type_node);
4517 
4518   if (TREE_CODE (val) != INTEGER_CST)
4519     {
4520       rtx val_rtx;
4521 
4522       val_rtx = expand_normal (val);
4523       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4524 
4525       /* Assume that we can memset by pieces if we can store
4526 	 the coefficients by pieces (in the required modes).
4527 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4528       c = 1;
4529       if (tree_fits_uhwi_p (len)
4530 	  && can_store_by_pieces (tree_to_uhwi (len),
4531 				  builtin_memset_read_str, &c, dest_align,
4532 				  true))
4533 	{
4534 	  val_rtx = force_reg (val_mode, val_rtx);
4535 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4536 			   builtin_memset_gen_str, val_rtx, dest_align,
4537 			   true, RETURN_BEGIN);
4538 	}
4539       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4540 					dest_align, expected_align,
4541 					expected_size, min_size, max_size,
4542 					probable_max_size))
4543 	goto do_libcall;
4544 
4545       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4546       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4547       return dest_mem;
4548     }
4549 
4550   if (target_char_cast (val, &c))
4551     goto do_libcall;
4552 
4553   if (c)
4554     {
4555       if (tree_fits_uhwi_p (len)
4556 	  && can_store_by_pieces (tree_to_uhwi (len),
4557 				  builtin_memset_read_str, &c, dest_align,
4558 				  true))
4559 	store_by_pieces (dest_mem, tree_to_uhwi (len),
4560 			 builtin_memset_read_str, &c, dest_align, true,
4561 			 RETURN_BEGIN);
4562       else if (!set_storage_via_setmem (dest_mem, len_rtx,
4563 					gen_int_mode (c, val_mode),
4564 					dest_align, expected_align,
4565 					expected_size, min_size, max_size,
4566 					probable_max_size))
4567 	goto do_libcall;
4568 
4569       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4570       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4571       return dest_mem;
4572     }
4573 
4574   set_mem_align (dest_mem, dest_align);
4575   dest_addr = clear_storage_hints (dest_mem, len_rtx,
4576 				   CALL_EXPR_TAILCALL (orig_exp)
4577 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4578 				   expected_align, expected_size,
4579 				   min_size, max_size,
4580 				   probable_max_size);
4581 
4582   if (dest_addr == 0)
4583     {
4584       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4585       dest_addr = convert_memory_address (ptr_mode, dest_addr);
4586     }
4587 
4588   return dest_addr;
4589 
4590  do_libcall:
4591   fndecl = get_callee_fndecl (orig_exp);
4592   fcode = DECL_FUNCTION_CODE (fndecl);
4593   if (fcode == BUILT_IN_MEMSET)
4594     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4595 				dest, val, len);
4596   else if (fcode == BUILT_IN_BZERO)
4597     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4598 				dest, len);
4599   else
4600     gcc_unreachable ();
4601   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4602   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4603   return expand_call (fn, target, target == const0_rtx);
4604 }
4605 
4606 /* Expand expression EXP, which is a call to the bzero builtin.  Return
4607    NULL_RTX if we failed and the caller should emit a normal call.  */
4608 
4609 static rtx
4610 expand_builtin_bzero (tree exp)
4611 {
4612   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4613     return NULL_RTX;
4614 
4615   tree dest = CALL_EXPR_ARG (exp, 0);
4616   tree size = CALL_EXPR_ARG (exp, 1);
4617 
4618   check_memop_access (exp, dest, NULL_TREE, size);
4619 
4620   /* Build a new argument list transforming bzero(ptr x, int y) to
4621      memset(ptr x, int 0, size_t y).  This is done this way
4622      so that if it isn't expanded inline, we fall back to
4623      calling bzero instead of memset.  */
4624 
4625   location_t loc = EXPR_LOCATION (exp);
4626 
4627   return expand_builtin_memset_args (dest, integer_zero_node,
4628 				     fold_convert_loc (loc,
4629 						       size_type_node, size),
4630 				     const0_rtx, VOIDmode, exp);
4631 }
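
/* For illustration, a call such as

     bzero (buf, n);

   is expanded above as the equivalent

     memset (buf, 0, (size_t) n);

   while the original bzero decl is kept around, so a failed inline
   expansion still falls back to a bzero library call.  */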
4632 
4633 /* Try to expand cmpstr operation ICODE with the given operands.
4634    Return the result rtx on success, otherwise return null.  */
4635    Return the result rtx on success, otherwise return NULL_RTX.  */
4636 static rtx
4637 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4638 	       HOST_WIDE_INT align)
4639 {
4640   machine_mode insn_mode = insn_data[icode].operand[0].mode;
4641 
4642   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4643     target = NULL_RTX;
4644 
4645   struct expand_operand ops[4];
4646   create_output_operand (&ops[0], target, insn_mode);
4647   create_fixed_operand (&ops[1], arg1_rtx);
4648   create_fixed_operand (&ops[2], arg2_rtx);
4649   create_integer_operand (&ops[3], align);
4650   if (maybe_expand_insn (icode, 4, ops))
4651     return ops[0].value;
4652   return NULL_RTX;
4653 }
4654 
4655 /* Expand expression EXP, which is a call to the memcmp built-in function.
4656    Return NULL_RTX if we failed and the caller should emit a normal call,
4657    otherwise try to get the result in TARGET, if convenient.
4658    RESULT_EQ is true if we can relax the returned value to be either zero
4659    or nonzero, without caring about the sign.  */
4660 
4661 static rtx
4662 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4663 {
4664   if (!validate_arglist (exp,
4665  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4666     return NULL_RTX;
4667 
4668   tree arg1 = CALL_EXPR_ARG (exp, 0);
4669   tree arg2 = CALL_EXPR_ARG (exp, 1);
4670   tree len = CALL_EXPR_ARG (exp, 2);
4671   enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4672   bool no_overflow = true;
4673 
4674   /* Diagnose calls where the specified length exceeds the size of either
4675      object.  */
4676   tree size = compute_objsize (arg1, 0);
4677   no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4678 			      len, /*maxread=*/NULL_TREE, size,
4679 			      /*objsize=*/NULL_TREE);
4680   if (no_overflow)
4681     {
4682       size = compute_objsize (arg2, 0);
4683       no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4684 				  len,  /*maxread=*/NULL_TREE, size,
4685 				  /*objsize=*/NULL_TREE);
4686     }
4687 
4688   /* If the specified length exceeds the size of either object,
4689      call the function.  */
4690   if (!no_overflow)
4691     return NULL_RTX;
4692 
4693   /* Due to the performance benefit, always inline the calls first
4694      when result_eq is false.  */
4695   rtx result = NULL_RTX;
4696 
4697   if (!result_eq && fcode != BUILT_IN_BCMP)
4698     {
4699       result = inline_expand_builtin_bytecmp (exp, target);
4700       if (result)
4701 	return result;
4702     }
4703 
4704   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4705   location_t loc = EXPR_LOCATION (exp);
4706 
4707   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4708   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4709 
4710   /* If we don't have POINTER_TYPE (no known alignment), call the function.  */
4711   if (arg1_align == 0 || arg2_align == 0)
4712     return NULL_RTX;
4713 
4714   rtx arg1_rtx = get_memory_rtx (arg1, len);
4715   rtx arg2_rtx = get_memory_rtx (arg2, len);
4716   rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4717 
4718   /* Set MEM_SIZE as appropriate.  */
4719   if (CONST_INT_P (len_rtx))
4720     {
4721       set_mem_size (arg1_rtx, INTVAL (len_rtx));
4722       set_mem_size (arg2_rtx, INTVAL (len_rtx));
4723     }
4724 
4725   by_pieces_constfn constfn = NULL;
4726 
4727   /* Try to get the byte representation of the constant that ARG2 (or,
4728      when the function's result is used only for equality to zero, ARG1)
4729      points to, with its byte size in NBYTES.  */
4730   unsigned HOST_WIDE_INT nbytes;
4731   const char *rep = c_getstr (arg2, &nbytes);
4732   if (result_eq && rep == NULL)
4733     {
4734       /* For equality to zero the arguments are interchangeable.  */
4735       rep = c_getstr (arg1, &nbytes);
4736       if (rep != NULL)
4737 	std::swap (arg1_rtx, arg2_rtx);
4738     }
4739 
4740   /* If the function's constant bound LEN_RTX is less than or equal
4741      to the byte size of the representation of the constant argument,
4742      and if block move would be done by pieces, we can avoid loading
4743      the bytes from memory and only store the computed constant result.  */
4744   if (rep
4745       && CONST_INT_P (len_rtx)
4746       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4747     constfn = builtin_memcpy_read_str;
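
  /* For example, given

       memcmp (buf, "abcd", 4)

     the four constant bytes of "abcd" can be fed to the block comparison
     directly instead of being loaded from memory first.  */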
4748 
4749   result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4750 				 TREE_TYPE (len), target,
4751 				 result_eq, constfn,
4752 				 CONST_CAST (char *, rep));
4753 
4754   if (result)
4755     {
4756       /* Return the value in the proper mode for this function.  */
4757       if (GET_MODE (result) == mode)
4758 	return result;
4759 
4760       if (target != 0)
4761 	{
4762 	  convert_move (target, result, 0);
4763 	  return target;
4764 	}
4765 
4766       return convert_to_mode (mode, result, 0);
4767     }
4768 
4769   return NULL_RTX;
4770 }
4771 
4772 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4773    NULL_RTX if we failed and the caller should emit a normal call;
4774    otherwise try to get the result in TARGET, if convenient.  */
4775 
4776 static rtx
4777 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4778 {
4779   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4780     return NULL_RTX;
4781 
4782   /* Due to the performance benefit, always inline the calls first.  */
4783   rtx result = NULL_RTX;
4784   result = inline_expand_builtin_bytecmp (exp, target);
4785   if (result)
4786     return result;
4787 
4788   insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4789   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4790   if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4791     return NULL_RTX;
4792 
4793   tree arg1 = CALL_EXPR_ARG (exp, 0);
4794   tree arg2 = CALL_EXPR_ARG (exp, 1);
4795 
4796   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4797   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4798 
4799   /* If we don't have POINTER_TYPE (no known alignment), call the function.  */
4800   if (arg1_align == 0 || arg2_align == 0)
4801     return NULL_RTX;
4802 
4803   /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
4804   arg1 = builtin_save_expr (arg1);
4805   arg2 = builtin_save_expr (arg2);
4806 
4807   rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4808   rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4809 
4810   /* Try to call cmpstrsi.  */
4811   if (cmpstr_icode != CODE_FOR_nothing)
4812     result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4813 			    MIN (arg1_align, arg2_align));
4814 
4815   /* Try to determine at least one length and call cmpstrnsi.  */
4816   if (!result && cmpstrn_icode != CODE_FOR_nothing)
4817     {
4818       tree len;
4819       rtx arg3_rtx;
4820 
4821       tree len1 = c_strlen (arg1, 1);
4822       tree len2 = c_strlen (arg2, 1);
4823 
4824       if (len1)
4825 	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4826       if (len2)
4827 	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4828 
4829       /* If we don't have a constant length for the first, use the length
4830 	 of the second, if we know it.  We don't require a constant for
4831 	 this case; some cost analysis could be done if both are available
4832 	 but neither is constant.  For now, assume they're equally cheap,
4833 	 unless one has side effects.  If both strings have constant lengths,
4834 	 use the smaller.  */
4835 
4836       if (!len1)
4837 	len = len2;
4838       else if (!len2)
4839 	len = len1;
4840       else if (TREE_SIDE_EFFECTS (len1))
4841 	len = len2;
4842       else if (TREE_SIDE_EFFECTS (len2))
4843 	len = len1;
4844       else if (TREE_CODE (len1) != INTEGER_CST)
4845 	len = len2;
4846       else if (TREE_CODE (len2) != INTEGER_CST)
4847 	len = len1;
4848       else if (tree_int_cst_lt (len1, len2))
4849 	len = len1;
4850       else
4851 	len = len2;
4852 
4853       /* If both arguments have side effects, we cannot optimize.  */
4854       if (len && !TREE_SIDE_EFFECTS (len))
4855 	{
4856 	  arg3_rtx = expand_normal (len);
4857 	  result = expand_cmpstrn_or_cmpmem
4858 	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4859 	     arg3_rtx, MIN (arg1_align, arg2_align));
4860 	}
4861     }
4862 
4863   tree fndecl = get_callee_fndecl (exp);
4864   if (result)
4865     {
4866       /* Check to see if the argument was declared attribute nonstring
4867 	 and if so, issue a warning since at this point it's not known
4868 	 to be nul-terminated.  */
4869       maybe_warn_nonstring_arg (fndecl, exp);
4870 
4871       /* Return the value in the proper mode for this function.  */
4872       machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4873       if (GET_MODE (result) == mode)
4874 	return result;
4875       if (target == 0)
4876 	return convert_to_mode (mode, result, 0);
4877       convert_move (target, result, 0);
4878       return target;
4879     }
4880 
4881   /* Expand the library call ourselves using a stabilized argument
4882      list to avoid evaluating the function's arguments twice.  */
4883   tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4884   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4885   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4886   return expand_call (fn, target, target == const0_rtx);
4887 }
4888 
4889 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4890    NULL_RTX if we failed and the caller should emit a normal call;
4891    otherwise try to get the result in TARGET, if convenient.  */
4892 
4893 static rtx
4894 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4895 			ATTRIBUTE_UNUSED machine_mode mode)
4896 {
4897   if (!validate_arglist (exp,
4898  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4899     return NULL_RTX;
4900 
4901   /* Due to the performance benefit, always inline the calls first.  */
4902   rtx result = NULL_RTX;
4903   result = inline_expand_builtin_bytecmp (exp, target);
4904   if (result)
4905     return result;
4906 
4907   /* If c_strlen can determine an expression for one of the string
4908      lengths, and it doesn't have side effects, then emit cmpstrnsi
4909      using length MIN(strlen(string)+1, arg3).  */
4910   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4911   if (cmpstrn_icode == CODE_FOR_nothing)
4912     return NULL_RTX;
4913 
4914   tree len;
4915 
4916   tree arg1 = CALL_EXPR_ARG (exp, 0);
4917   tree arg2 = CALL_EXPR_ARG (exp, 1);
4918   tree arg3 = CALL_EXPR_ARG (exp, 2);
4919 
4920   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4921   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4922 
4923   tree len1 = c_strlen (arg1, 1);
4924   tree len2 = c_strlen (arg2, 1);
4925 
4926   location_t loc = EXPR_LOCATION (exp);
4927 
4928   if (len1)
4929     len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4930   if (len2)
4931     len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4932 
4933   tree len3 = fold_convert_loc (loc, sizetype, arg3);
4934 
4935   /* If we don't have a constant length for the first, use the length
4936      of the second, if we know it.  If neither string is constant length,
4937      use the given length argument.  We don't require a constant for
4938      this case; some cost analysis could be done if both are available
4939      but neither is constant.  For now, assume they're equally cheap,
4940      unless one has side effects.  If both strings have constant lengths,
4941      use the smaller.  */
4942 
4943   if (!len1 && !len2)
4944     len = len3;
4945   else if (!len1)
4946     len = len2;
4947   else if (!len2)
4948     len = len1;
4949   else if (TREE_SIDE_EFFECTS (len1))
4950     len = len2;
4951   else if (TREE_SIDE_EFFECTS (len2))
4952     len = len1;
4953   else if (TREE_CODE (len1) != INTEGER_CST)
4954     len = len2;
4955   else if (TREE_CODE (len2) != INTEGER_CST)
4956     len = len1;
4957   else if (tree_int_cst_lt (len1, len2))
4958     len = len1;
4959   else
4960     len = len2;
4961 
4962   /* If we are not using the given length, we must incorporate it here.
4963      The actual new length parameter will be MIN(len,arg3) in this case.  */
4964   if (len != len3)
4965     {
4966       len = fold_convert_loc (loc, sizetype, len);
4967       len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4968     }
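
  /* For example, for strncmp (s, "hello", 32) we have len2 == 6
     (strlen ("hello") + 1), so the comparison below is emitted with
     length MIN (6, 32) == 6.  */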
4969   rtx arg1_rtx = get_memory_rtx (arg1, len);
4970   rtx arg2_rtx = get_memory_rtx (arg2, len);
4971   rtx arg3_rtx = expand_normal (len);
4972   result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4973 				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
4974 				     MIN (arg1_align, arg2_align));
4975 
4976   tree fndecl = get_callee_fndecl (exp);
4977   if (result)
4978     {
4979       /* Check to see if the argument was declared attribute nonstring
4980 	 and if so, issue a warning since at this point it's not known
4981 	 to be nul-terminated.  */
4982       maybe_warn_nonstring_arg (fndecl, exp);
4983 
4984       /* Return the value in the proper mode for this function.  */
4985       mode = TYPE_MODE (TREE_TYPE (exp));
4986       if (GET_MODE (result) == mode)
4987 	return result;
4988       if (target == 0)
4989 	return convert_to_mode (mode, result, 0);
4990       convert_move (target, result, 0);
4991       return target;
4992     }
4993 
4994   /* Expand the library call ourselves using a stabilized argument
4995      list to avoid evaluating the function's arguments twice.  */
4996   tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4997   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4998   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4999   return expand_call (fn, target, target == const0_rtx);
5000 }
5001 
5002 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5003    if that's convenient.  */
5004 
5005 rtx
5006 expand_builtin_saveregs (void)
5007 {
5008   rtx val;
5009   rtx_insn *seq;
5010 
5011   /* Don't do __builtin_saveregs more than once in a function.
5012      Save the result of the first call and reuse it.  */
5013   if (saveregs_value != 0)
5014     return saveregs_value;
5015 
5016   /* When this function is called, it means that registers must be
5017      saved on entry to this function.  So we migrate the call to the
5018      first insn of this function.  */
5019 
5020   start_sequence ();
5021 
5022   /* Do whatever the machine needs done in this case.  */
5023   val = targetm.calls.expand_builtin_saveregs ();
5024 
5025   seq = get_insns ();
5026   end_sequence ();
5027 
5028   saveregs_value = val;
5029 
5030   /* Put the insns after the NOTE that starts the function.  If this
5031      is inside a start_sequence, make the outer-level insn chain current, so
5032      the code is placed at the start of the function.  */
5033   push_topmost_sequence ();
5034   emit_insn_after (seq, entry_of_function ());
5035   pop_topmost_sequence ();
5036 
5037   return val;
5038 }
5039 
5040 /* Expand a call to __builtin_next_arg.  */
5041 
5042 static rtx
5043 expand_builtin_next_arg (void)
5044 {
5045   /* Checking arguments is already done in fold_builtin_next_arg
5046      that must be called before this function.  */
5047   return expand_binop (ptr_mode, add_optab,
5048 		       crtl->args.internal_arg_pointer,
5049 		       crtl->args.arg_offset_rtx,
5050 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
5051 }
5052 
5053 /* Make it easier for the backends by protecting the valist argument
5054    from multiple evaluations.  */
5055 
5056 static tree
5057 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5058 {
5059   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5060 
5061   /* The current way of determining the type of valist is completely
5062      bogus.  We should have the information on the va builtin instead.  */
5063   if (!vatype)
5064     vatype = targetm.fn_abi_va_list (cfun->decl);
5065 
5066   if (TREE_CODE (vatype) == ARRAY_TYPE)
5067     {
5068       if (TREE_SIDE_EFFECTS (valist))
5069 	valist = save_expr (valist);
5070 
5071       /* For this case, the backends will be expecting a pointer to
5072 	 vatype, but it's possible we've actually been given an array
5073 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5074 	 So fix it.  */
5075       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5076 	{
5077 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
5078 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5079 	}
5080     }
5081   else
5082     {
5083       tree pt = build_pointer_type (vatype);
5084 
5085       if (! needs_lvalue)
5086 	{
5087 	  if (! TREE_SIDE_EFFECTS (valist))
5088 	    return valist;
5089 
5090 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5091 	  TREE_SIDE_EFFECTS (valist) = 1;
5092 	}
5093 
5094       if (TREE_SIDE_EFFECTS (valist))
5095 	valist = save_expr (valist);
5096       valist = fold_build2_loc (loc, MEM_REF,
5097 				vatype, valist, build_int_cst (pt, 0));
5098     }
5099 
5100   return valist;
5101 }
5102 
5103 /* The "standard" definition of va_list is void*.  */
5104 
5105 tree
5106 std_build_builtin_va_list (void)
5107 {
5108   return ptr_type_node;
5109 }
5110 
5111 /* The "standard" abi va_list is va_list_type_node.  */
5112 
5113 tree
5114 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5115 {
5116   return va_list_type_node;
5117 }
5118 
5119 /* The "standard" type of va_list is va_list_type_node.  */
5120 
5121 tree
5122 std_canonical_va_list_type (tree type)
5123 {
5124   tree wtype, htype;
5125 
5126   wtype = va_list_type_node;
5127   htype = type;
5128 
5129   if (TREE_CODE (wtype) == ARRAY_TYPE)
5130     {
5131       /* If va_list is an array type, the argument may have decayed
5132 	 to a pointer type, e.g. by being passed to another function.
5133 	 In that case, unwrap both types so that we can compare the
5134 	 underlying records.  */
5135       if (TREE_CODE (htype) == ARRAY_TYPE
5136 	  || POINTER_TYPE_P (htype))
5137 	{
5138 	  wtype = TREE_TYPE (wtype);
5139 	  htype = TREE_TYPE (htype);
5140 	}
5141     }
5142   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5143     return va_list_type_node;
5144 
5145   return NULL_TREE;
5146 }
5147 
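
/* For example, on targets where va_list is an array type, along the
   lines of

     typedef struct __va_list_tag va_list[1];

   a va_list function parameter decays to struct __va_list_tag *, and the
   unwrapping above still matches it against the canonical type.  */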
5148 /* The "standard" implementation of va_start: just assign `nextarg' to
5149    the variable.  */
5150 
5151 void
5152 std_expand_builtin_va_start (tree valist, rtx nextarg)
5153 {
5154   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5155   convert_move (va_r, nextarg, 0);
5156 }
5157 
5158 /* Expand EXP, a call to __builtin_va_start.  */
5159 
5160 static rtx
5161 expand_builtin_va_start (tree exp)
5162 {
5163   rtx nextarg;
5164   tree valist;
5165   location_t loc = EXPR_LOCATION (exp);
5166 
5167   if (call_expr_nargs (exp) < 2)
5168     {
5169       error_at (loc, "too few arguments to function %<va_start%>");
5170       return const0_rtx;
5171     }
5172 
5173   if (fold_builtin_next_arg (exp, true))
5174     return const0_rtx;
5175 
5176   nextarg = expand_builtin_next_arg ();
5177   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5178 
5179   if (targetm.expand_builtin_va_start)
5180     targetm.expand_builtin_va_start (valist, nextarg);
5181   else
5182     std_expand_builtin_va_start (valist, nextarg);
5183 
5184   return const0_rtx;
5185 }
5186 
5187 /* Expand EXP, a call to __builtin_va_end.  */
5188 
5189 static rtx
5190 expand_builtin_va_end (tree exp)
5191 {
5192   tree valist = CALL_EXPR_ARG (exp, 0);
5193 
5194   /* Evaluate for side effects, if needed.  I hate macros that don't
5195      do that.  */
5196   if (TREE_SIDE_EFFECTS (valist))
5197     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5198 
5199   return const0_rtx;
5200 }
5201 
5202 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
5203    builtin rather than just as an assignment in stdarg.h because of the
5204    nastiness of array-type va_list types.  */
5205 
5206 static rtx
5207 expand_builtin_va_copy (tree exp)
5208 {
5209   tree dst, src, t;
5210   location_t loc = EXPR_LOCATION (exp);
5211 
5212   dst = CALL_EXPR_ARG (exp, 0);
5213   src = CALL_EXPR_ARG (exp, 1);
5214 
5215   dst = stabilize_va_list_loc (loc, dst, 1);
5216   src = stabilize_va_list_loc (loc, src, 0);
5217 
5218   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5219 
5220   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5221     {
5222       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5223       TREE_SIDE_EFFECTS (t) = 1;
5224       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5225     }
5226   else
5227     {
5228       rtx dstb, srcb, size;
5229 
5230       /* Evaluate to pointers.  */
5231       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5232       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5233       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5234       		  NULL_RTX, VOIDmode, EXPAND_NORMAL);
5235 
5236       dstb = convert_memory_address (Pmode, dstb);
5237       srcb = convert_memory_address (Pmode, srcb);
5238 
5239       /* "Dereference" to BLKmode memories.  */
5240       dstb = gen_rtx_MEM (BLKmode, dstb);
5241       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5242       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5243       srcb = gen_rtx_MEM (BLKmode, srcb);
5244       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5245       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5246 
5247       /* Copy.  */
5248       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5249     }
5250 
5251   return const0_rtx;
5252 }
5253 
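
/* For example, on a target whose ABI va_list is an array of one
   structure, the va_copy above must block-copy the whole structure; a
   plain pointer assignment would leave both lists aliasing the same
   underlying state.  */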
5254 /* Expand a call to one of the builtin functions __builtin_frame_address or
5255    __builtin_return_address.  */
5256 
5257 static rtx
5258 expand_builtin_frame_address (tree fndecl, tree exp)
5259 {
5260   /* The argument must be a nonnegative integer constant.
5261      It counts the number of frames to scan up the stack.
5262      The value is either the frame pointer value or the return
5263      address saved in that frame.  */
5264   if (call_expr_nargs (exp) == 0)
5265     /* Warning about missing arg was already issued.  */
5266     return const0_rtx;
5267   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5268     {
5269       error ("invalid argument to %qD", fndecl);
5270       return const0_rtx;
5271     }
5272   else
5273     {
5274       /* Number of frames to scan up the stack.  */
5275       unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5276 
5277       rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5278 
5279       /* Some ports cannot access arbitrary stack frames.  */
5280       if (tem == NULL)
5281 	{
5282 	  warning (0, "unsupported argument to %qD", fndecl);
5283 	  return const0_rtx;
5284 	}
5285 
5286       if (count)
5287 	{
5288 	  /* Warn since no effort is made to ensure that any frame
5289 	     beyond the current one exists or can be safely reached.  */
5290 	  warning (OPT_Wframe_address, "calling %qD with "
5291 		   "a nonzero argument is unsafe", fndecl);
5292 	}
5293 
5294       /* For __builtin_frame_address, return what we've got.  */
5295       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5296 	return tem;
5297 
5298       if (!REG_P (tem)
5299 	  && ! CONSTANT_P (tem))
5300 	tem = copy_addr_to_reg (tem);
5301       return tem;
5302     }
5303 }
5304 
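
/* For example, __builtin_return_address (0) yields the return address of
   the current function, while any nonzero count walks up the frame chain
   and triggers the -Wframe-address warning above.  */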
5305 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
5306    failed and the caller should emit a normal call.  */
5307 
5308 static rtx
5309 expand_builtin_alloca (tree exp)
5310 {
5311   rtx op0;
5312   rtx result;
5313   unsigned int align;
5314   tree fndecl = get_callee_fndecl (exp);
5315   HOST_WIDE_INT max_size;
5316   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5317   bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5318   bool valid_arglist
5319     = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5320        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5321 			   VOID_TYPE)
5322        : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5323 	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5324 	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5325 
5326   if (!valid_arglist)
5327     return NULL_RTX;
5328 
5329   if ((alloca_for_var
5330        && warn_vla_limit >= HOST_WIDE_INT_MAX
5331        && warn_alloc_size_limit < warn_vla_limit)
5332       || (!alloca_for_var
5333 	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
5334 	  && warn_alloc_size_limit < warn_alloca_limit
5335 	  ))
5336     {
5337       /* -Walloca-larger-than and -Wvla-larger-than settings of
5338 	 less than HOST_WIDE_INT_MAX override the more general
5339 	 -Walloc-size-larger-than so unless either of the former
5340 	 options is smaller than the last one (which would imply
5341 	 that the call was already checked), check the alloca
5342 	 arguments for overflow.  */
5343       tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5344       int idx[] = { 0, -1 };
5345       maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5346     }
5347 
5348   /* Compute the argument.  */
5349   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5350 
5351   /* Compute the alignment.  */
5352   align = (fcode == BUILT_IN_ALLOCA
5353 	   ? BIGGEST_ALIGNMENT
5354 	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5355 
5356   /* Compute the maximum size.  */
5357   max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5358               ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5359               : -1);
5360 
5361   /* Allocate the desired space.  If the allocation stems from the declaration
5362      of a variable-sized object, it cannot accumulate.  */
5363   result
5364     = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5365   result = convert_memory_address (ptr_mode, result);
5366 
5367   return result;
5368 }
5369 
5370 /* Emit a library call for EXP, a call to __asan_allocas_unpoison.  Add
5371    virtual_stack_dynamic_rtx - stack_pointer_rtx, the STACK_DYNAMIC_OFFSET
5372    value, to the second argument of the call.  See the motivation for this
5373    in the comment for the handle_builtin_stack_restore function.  */
5374 
5375 static rtx
5376 expand_asan_emit_allocas_unpoison (tree exp)
5377 {
5378   tree arg0 = CALL_EXPR_ARG (exp, 0);
5379   tree arg1 = CALL_EXPR_ARG (exp, 1);
5380   rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5381   rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5382   rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5383 				 stack_pointer_rtx, NULL_RTX, 0,
5384 				 OPTAB_LIB_WIDEN);
5385   off = convert_modes (ptr_mode, Pmode, off, 0);
5386   bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5387 			     OPTAB_LIB_WIDEN);
5388   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5389   ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5390 				 top, ptr_mode, bot, ptr_mode);
5391   return ret;
5392 }
5393 
5394 /* Expand a call to bswap builtin in EXP.
5395    Return NULL_RTX if a normal call should be emitted rather than expanding the
5396    function in-line.  If convenient, the result should be placed in TARGET.
5397    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5398 
5399 static rtx
5400 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5401 		      rtx subtarget)
5402 {
5403   tree arg;
5404   rtx op0;
5405 
5406   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5407     return NULL_RTX;
5408 
5409   arg = CALL_EXPR_ARG (exp, 0);
5410   op0 = expand_expr (arg,
5411 		     subtarget && GET_MODE (subtarget) == target_mode
5412 		     ? subtarget : NULL_RTX,
5413 		     target_mode, EXPAND_NORMAL);
5414   if (GET_MODE (op0) != target_mode)
5415     op0 = convert_to_mode (target_mode, op0, 1);
5416 
5417   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5418 
5419   gcc_assert (target);
5420 
5421   return convert_to_mode (target_mode, target, 1);
5422 }
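
/* For example, on a target providing a bswapsi2 pattern this expands
   __builtin_bswap32 (0x12345678) into an insn yielding 0x78563412.  */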
5423 
5424 /* Expand a call to a unary builtin in EXP.
5425    Return NULL_RTX if a normal call should be emitted rather than expanding the
5426    function in-line.  If convenient, the result should be placed in TARGET.
5427    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5428 
5429 static rtx
5430 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5431 		     rtx subtarget, optab op_optab)
5432 {
5433   rtx op0;
5434 
5435   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5436     return NULL_RTX;
5437 
5438   /* Compute the argument.  */
5439   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5440 		     (subtarget
5441 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5442 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5443 		     VOIDmode, EXPAND_NORMAL);
5444   /* Compute op, into TARGET if possible.
5445      Set TARGET to wherever the result comes back.  */
5446   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5447 			op_optab, op0, target, op_optab != clrsb_optab);
5448   gcc_assert (target);
5449 
5450   return convert_to_mode (target_mode, target, 0);
5451 }
5452 
5453 /* Expand a call to __builtin_expect.  We just return our argument
5454    as the builtin_expect semantics should have already been executed
5455    by the tree branch prediction pass.  */
5456 
5457 static rtx
5458 expand_builtin_expect (tree exp, rtx target)
5459 {
5460   tree arg;
5461 
5462   if (call_expr_nargs (exp) < 2)
5463     return const0_rtx;
5464   arg = CALL_EXPR_ARG (exp, 0);
5465 
5466   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5467   /* When guessing was done, the hints should be already stripped away.  */
5468   gcc_assert (!flag_guess_branch_prob
5469 	      || optimize == 0 || seen_error ());
5470   return target;
5471 }
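
/* For example, in

     if (__builtin_expect (ptr != NULL, 1))
       ...

   the hint was consumed during branch prediction, so by this point the
   call simply evaluates to its first argument, ptr != NULL.  */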
5472 
5473 /* Expand a call to __builtin_expect_with_probability.  We just return our
5474    argument as the builtin_expect semantics should have already been
5475    executed by the tree branch prediction pass.  */
5476 
5477 static rtx
5478 expand_builtin_expect_with_probability (tree exp, rtx target)
5479 {
5480   tree arg;
5481 
5482   if (call_expr_nargs (exp) < 3)
5483     return const0_rtx;
5484   arg = CALL_EXPR_ARG (exp, 0);
5485 
5486   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5487   /* When guessing was done, the hints should be already stripped away.  */
5488   gcc_assert (!flag_guess_branch_prob
5489 	      || optimize == 0 || seen_error ());
5490   return target;
5491 }
5492 
5493 
5494 /* Expand a call to __builtin_assume_aligned.  We just return our first
5495    argument as the builtin_assume_aligned semantic should've been already
5496    executed by CCP.  */
5497 
5498 static rtx
5499 expand_builtin_assume_aligned (tree exp, rtx target)
5500 {
5501   if (call_expr_nargs (exp) < 2)
5502     return const0_rtx;
5503   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5504 			EXPAND_NORMAL);
5505   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5506 	      && (call_expr_nargs (exp) < 3
5507 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5508   return target;
5509 }
5510 
5511 void
5512 expand_builtin_trap (void)
5513 {
5514   if (targetm.have_trap ())
5515     {
5516       rtx_insn *insn = emit_insn (targetm.gen_trap ());
5517       /* For trap insns, when not accumulating outgoing args, force a
5518 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
5519 	 different arg sizes.  */
5520       if (!ACCUMULATE_OUTGOING_ARGS)
5521 	add_args_size_note (insn, stack_pointer_delta);
5522     }
5523   else
5524     {
5525       tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5526       tree call_expr = build_call_expr (fn, 0);
5527       expand_call (call_expr, NULL_RTX, false);
5528     }
5529 
5530   emit_barrier ();
5531 }
5532 
5533 /* Expand a call to __builtin_unreachable.  We do nothing except emit
5534    a barrier saying that control flow will not pass here.
5535 
5536    It is the responsibility of the program being compiled to ensure
5537    that control flow never reaches __builtin_unreachable.  */
5538 static void
5539 expand_builtin_unreachable (void)
5540 {
5541   emit_barrier ();
5542 }
5543 
5544 /* Expand EXP, a call to fabs, fabsf or fabsl.
5545    Return NULL_RTX if a normal call should be emitted rather than expanding
5546    the function inline.  If convenient, the result should be placed
5547    in TARGET.  SUBTARGET may be used as the target for computing
5548    the operand.  */
5549 
5550 static rtx
5551 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5552 {
5553   machine_mode mode;
5554   tree arg;
5555   rtx op0;
5556 
5557   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5558     return NULL_RTX;
5559 
5560   arg = CALL_EXPR_ARG (exp, 0);
5561   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5562   mode = TYPE_MODE (TREE_TYPE (arg));
5563   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5564   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5565 }
5566 
5567 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5568    Return NULL_RTX if a normal call should be emitted rather than expanding the
5569    function inline.  If convenient, the result should be placed in TARGET.
5570    SUBTARGET may be used as the target for computing the operand.  */
5571 
5572 static rtx
5573 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5574 {
5575   rtx op0, op1;
5576   tree arg;
5577 
5578   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5579     return NULL_RTX;
5580 
5581   arg = CALL_EXPR_ARG (exp, 0);
5582   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5583 
5584   arg = CALL_EXPR_ARG (exp, 1);
5585   op1 = expand_normal (arg);
5586 
5587   return expand_copysign (op0, op1, target);
5588 }
5589 
5590 /* Expand a call to __builtin___clear_cache.  */
5591 
5592 static rtx
5593 expand_builtin___clear_cache (tree exp)
5594 {
5595   if (!targetm.code_for_clear_cache)
5596     {
5597 #ifdef CLEAR_INSN_CACHE
5598       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5599 	 does something.  Just do the default expansion to a call to
5600 	 __clear_cache().  */
5601       return NULL_RTX;
5602 #else
5603       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5604 	 does nothing.  There is no need to call it.  Do nothing.  */
5605       return const0_rtx;
5606 #endif /* CLEAR_INSN_CACHE */
5607     }
5608 
5609   /* We have a "clear_cache" insn, and it will handle everything.  */
5610   tree begin, end;
5611   rtx begin_rtx, end_rtx;
5612 
5613   /* We must not expand to a library call.  If we did, any
5614      fallback library function in libgcc that might contain a call to
5615      __builtin___clear_cache() would recurse infinitely.  */
5616   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5617     {
5618       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5619       return const0_rtx;
5620     }
5621 
5622   if (targetm.have_clear_cache ())
5623     {
5624       struct expand_operand ops[2];
5625 
5626       begin = CALL_EXPR_ARG (exp, 0);
5627       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5628 
5629       end = CALL_EXPR_ARG (exp, 1);
5630       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5631 
5632       create_address_operand (&ops[0], begin_rtx);
5633       create_address_operand (&ops[1], end_rtx);
5634       if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5635 	return const0_rtx;
5636     }
5637   return const0_rtx;
5638 }
5639 
5640 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5641 
5642 static rtx
5643 round_trampoline_addr (rtx tramp)
5644 {
5645   rtx temp, addend, mask;
5646 
5647   /* If we don't need too much alignment, we'll have been guaranteed
5648      proper alignment by get_trampoline_type.  */
5649   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5650     return tramp;
5651 
5652   /* Round address up to desired boundary.  */
5653   temp = gen_reg_rtx (Pmode);
5654   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5655   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5656 
5657   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5658 			       temp, 0, OPTAB_LIB_WIDEN);
5659   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5660 			       temp, 0, OPTAB_LIB_WIDEN);
5661 
5662   return tramp;
5663 }
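
/* For example, with a TRAMPOLINE_ALIGNMENT of 64 bits the address is
   rounded up to an 8-byte boundary:

     0x1003  ->  (0x1003 + 7) & ~7  ==  0x1008  */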
5664 
5665 static rtx
5666 expand_builtin_init_trampoline (tree exp, bool onstack)
5667 {
5668   tree t_tramp, t_func, t_chain;
5669   rtx m_tramp, r_tramp, r_chain, tmp;
5670 
5671   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5672 			 POINTER_TYPE, VOID_TYPE))
5673     return NULL_RTX;
5674 
5675   t_tramp = CALL_EXPR_ARG (exp, 0);
5676   t_func = CALL_EXPR_ARG (exp, 1);
5677   t_chain = CALL_EXPR_ARG (exp, 2);
5678 
5679   r_tramp = expand_normal (t_tramp);
5680   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5681   MEM_NOTRAP_P (m_tramp) = 1;
5682 
5683   /* If ONSTACK, the TRAMP argument should be the address of a field
5684      within the local function's FRAME decl.  Either way, let's see if
5685      we can fill in the MEM_ATTRs for this memory.  */
5686   if (TREE_CODE (t_tramp) == ADDR_EXPR)
5687     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5688 
5689   /* Creator of a heap trampoline is responsible for making sure the
5690      address is aligned to at least STACK_BOUNDARY.  Normally malloc
5691      will ensure this anyhow.  */
5692   tmp = round_trampoline_addr (r_tramp);
5693   if (tmp != r_tramp)
5694     {
5695       m_tramp = change_address (m_tramp, BLKmode, tmp);
5696       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5697       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5698     }
5699 
5700   /* The FUNC argument should be the address of the nested function.
5701      Extract the actual function decl to pass to the hook.  */
5702   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5703   t_func = TREE_OPERAND (t_func, 0);
5704   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5705 
5706   r_chain = expand_normal (t_chain);
5707 
5708   /* Generate insns to initialize the trampoline.  */
5709   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5710 
5711   if (onstack)
5712     {
5713       trampolines_created = 1;
5714 
5715       if (targetm.calls.custom_function_descriptors != 0)
5716 	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5717 		    "trampoline generated for nested function %qD", t_func);
5718     }
5719 
5720   return const0_rtx;
5721 }
5722 
5723 static rtx
5724 expand_builtin_adjust_trampoline (tree exp)
5725 {
5726   rtx tramp;
5727 
5728   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5729     return NULL_RTX;
5730 
5731   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5732   tramp = round_trampoline_addr (tramp);
5733   if (targetm.calls.trampoline_adjust_address)
5734     tramp = targetm.calls.trampoline_adjust_address (tramp);
5735 
5736   return tramp;
5737 }
5738 
5739 /* Expand a call to the builtin descriptor initialization routine.
5740    A descriptor is made up of a couple of pointers to the static
5741    chain and the code entry in this order.  */
5742 
5743 static rtx
5744 expand_builtin_init_descriptor (tree exp)
5745 {
5746   tree t_descr, t_func, t_chain;
5747   rtx m_descr, r_descr, r_func, r_chain;
5748 
5749   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5750 			 VOID_TYPE))
5751     return NULL_RTX;
5752 
5753   t_descr = CALL_EXPR_ARG (exp, 0);
5754   t_func = CALL_EXPR_ARG (exp, 1);
5755   t_chain = CALL_EXPR_ARG (exp, 2);
5756 
5757   r_descr = expand_normal (t_descr);
5758   m_descr = gen_rtx_MEM (BLKmode, r_descr);
5759   MEM_NOTRAP_P (m_descr) = 1;
5760 
5761   r_func = expand_normal (t_func);
5762   r_chain = expand_normal (t_chain);
5763 
5764   /* Generate insns to initialize the descriptor.  */
5765   emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5766   emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5767 				     POINTER_SIZE / BITS_PER_UNIT), r_func);
5768 
5769   return const0_rtx;
5770 }
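
/* For illustration, on a target with 64-bit pointers the descriptor
   initialized above is laid out as

     offset 0:  static chain value
     offset 8:  code entry point

   matching the two emit_move_insn calls.  */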
5771 
5772 /* Expand a call to the builtin descriptor adjustment routine.  */
5773 
5774 static rtx
5775 expand_builtin_adjust_descriptor (tree exp)
5776 {
5777   rtx tramp;
5778 
5779   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5780     return NULL_RTX;
5781 
5782   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5783 
5784   /* Unalign the descriptor to allow runtime identification.  */
5785   tramp = plus_constant (ptr_mode, tramp,
5786 			 targetm.calls.custom_function_descriptors);
5787 
5788   return force_operand (tramp, NULL_RTX);
5789 }
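
/* E.g. if the target reserves one low bit for this purpose (a hook value
   of 1), the value returned above is DESCR + 1; the set bit is what lets
   an indirect call distinguish a descriptor from an ordinary, properly
   aligned code address.  */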
5790 
5791 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5792    function.  The function first checks whether the back end provides
5793    an insn to implement signbit for the respective mode.  If not, it
5794    checks whether the floating point format of the value is such that
5795    the sign bit can be extracted.  If that is not the case, error out.
5796    EXP is the expression that is a call to the builtin function; if
5797    convenient, the result should be placed in TARGET.  */
5798 static rtx
5799 expand_builtin_signbit (tree exp, rtx target)
5800 {
5801   const struct real_format *fmt;
5802   scalar_float_mode fmode;
5803   scalar_int_mode rmode, imode;
5804   tree arg;
5805   int word, bitpos;
5806   enum insn_code icode;
5807   rtx temp;
5808   location_t loc = EXPR_LOCATION (exp);
5809 
5810   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5811     return NULL_RTX;
5812 
5813   arg = CALL_EXPR_ARG (exp, 0);
5814   fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5815   rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5816   fmt = REAL_MODE_FORMAT (fmode);
5817 
5818   arg = builtin_save_expr (arg);
5819 
5820   /* Expand the argument yielding an RTX expression.  */
5821   temp = expand_normal (arg);
5822 
5823   /* Check if the back end provides an insn that handles signbit for the
5824      argument's mode. */
5825      argument's mode.  */
5826   if (icode != CODE_FOR_nothing)
5827     {
5828       rtx_insn *last = get_last_insn ();
5829       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5830       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5831 	return target;
5832       delete_insns_since (last);
5833     }
5834 
5835   /* For floating point formats without a sign bit, implement signbit
5836      as "ARG < 0.0".  */
5837   bitpos = fmt->signbit_ro;
5838   if (bitpos < 0)
5839   {
5840     /* But we can't do this if the format supports signed zero.  */
5841     gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5842 
5843     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5844 		       build_real (TREE_TYPE (arg), dconst0));
5845     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5846   }
5847 
5848   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5849     {
5850       imode = int_mode_for_mode (fmode).require ();
5851       temp = gen_lowpart (imode, temp);
5852     }
5853   else
5854     {
5855       imode = word_mode;
5856       /* Handle targets with different FP word orders.  */
5857       if (FLOAT_WORDS_BIG_ENDIAN)
5858 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5859       else
5860 	word = bitpos / BITS_PER_WORD;
5861       temp = operand_subword_force (temp, word, fmode);
5862       bitpos = bitpos % BITS_PER_WORD;
5863     }
5864 
5865   /* Force the intermediate word_mode (or narrower) result into a
5866      register.  This avoids attempting to create paradoxical SUBREGs
5867      of floating point modes below.  */
5868   temp = force_reg (imode, temp);
5869 
5870   /* If the bitpos is within the "result mode" lowpart, the operation
5871      can be implemented with a single bitwise AND.  Otherwise, we need
5872      a right shift and an AND.  */
5873 
5874   if (bitpos < GET_MODE_BITSIZE (rmode))
5875     {
5876       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5877 
5878       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5879 	temp = gen_lowpart (rmode, temp);
5880       temp = expand_binop (rmode, and_optab, temp,
5881 			   immed_wide_int_const (mask, rmode),
5882 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5883     }
5884   else
5885     {
5886       /* Perform a logical right shift to place the signbit in the least
5887 	 significant bit, then truncate the result to the desired mode
5888 	 and mask just this bit.  */
5889       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5890       temp = gen_lowpart (rmode, temp);
5891       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5892 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5893     }
5894 
5895   return temp;
5896 }
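
/* As a concrete example, for an IEEE double the sign is bit 63.  With a
   32-bit result mode on a 64-bit target the bit lies outside the
   lowpart, so the operand is shifted right by 63 and masked with 1; on a
   32-bit target the sign lands in bit 31 of the upper word, and a single
   AND of that word suffices.  */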
5897 
5898 /* Expand fork or exec calls.  TARGET is the desired target of the
5899    call.  EXP is the call.  FN is the
5900    identifier of the actual function.  IGNORE is nonzero if the
5901    value is to be ignored.  */
5902 
5903 static rtx
5904 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5905 {
5906   tree id, decl;
5907   tree call;
5908 
5909   /* If we are not profiling, just call the function.  */
5910   if (!profile_arc_flag)
5911     return NULL_RTX;
5912 
5913   /* Otherwise call the wrapper.  This should be equivalent for the rest
5914      of the compiler, so the code does not diverge, and the wrapper can
5915      run the code necessary for keeping the profiling sane.  */
5916 
5917   switch (DECL_FUNCTION_CODE (fn))
5918     {
5919     case BUILT_IN_FORK:
5920       id = get_identifier ("__gcov_fork");
5921       break;
5922 
5923     case BUILT_IN_EXECL:
5924       id = get_identifier ("__gcov_execl");
5925       break;
5926 
5927     case BUILT_IN_EXECV:
5928       id = get_identifier ("__gcov_execv");
5929       break;
5930 
5931     case BUILT_IN_EXECLP:
5932       id = get_identifier ("__gcov_execlp");
5933       break;
5934 
5935     case BUILT_IN_EXECLE:
5936       id = get_identifier ("__gcov_execle");
5937       break;
5938 
5939     case BUILT_IN_EXECVP:
5940       id = get_identifier ("__gcov_execvp");
5941       break;
5942 
5943     case BUILT_IN_EXECVE:
5944       id = get_identifier ("__gcov_execve");
5945       break;
5946 
5947     default:
5948       gcc_unreachable ();
5949     }
5950 
5951   decl = build_decl (DECL_SOURCE_LOCATION (fn),
5952 		     FUNCTION_DECL, id, TREE_TYPE (fn));
5953   DECL_EXTERNAL (decl) = 1;
5954   TREE_PUBLIC (decl) = 1;
5955   DECL_ARTIFICIAL (decl) = 1;
5956   TREE_NOTHROW (decl) = 1;
5957   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5958   DECL_VISIBILITY_SPECIFIED (decl) = 1;
5959   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5960   return expand_call (call, target, ignore);
5961  }
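
/* For example, when compiling with -fprofile-arcs a call to fork () is
   rewritten here into a call to __gcov_fork (), which takes care of the
   profiling state before delegating to fork itself.  */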
5962 
5963 
5964 
5965 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5966    the pointer in these functions is void*, the tree optimizers may remove
5967    casts.  The mode computed in expand_builtin isn't reliable either, due
5968    to __sync_bool_compare_and_swap.
5969 
5970    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5971    group of builtins.  This gives us log2 of the mode size.  */
5972 
5973 static inline machine_mode
5974 get_builtin_sync_mode (int fcode_diff)
5975 {
5976   /* The size is not negotiable, so ask not to get BLKmode in return
5977      if the target indicates that a smaller size would be better.  */
5978   return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5979 }
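
/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 has an FCODE_DIFF of 2
   relative to the _1 variant, giving 8 << 2 == 32 bits, i.e. SImode on
   typical targets.  */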
5980 
5981 /* Expand the memory expression LOC and return the appropriate memory operand
5982    for the builtin_sync operations.  */
5983 
5984 static rtx
5985 get_builtin_sync_mem (tree loc, machine_mode mode)
5986 {
5987   rtx addr, mem;
5988   int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5989 				    ? TREE_TYPE (TREE_TYPE (loc))
5990 				    : TREE_TYPE (loc));
5991   scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5992 
5993   addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5994   addr = convert_memory_address (addr_mode, addr);
5995 
5996   /* Note that we explicitly do not want any alias information for this
5997      memory, so that we kill all other live memories.  Otherwise we don't
5998      satisfy the full barrier semantics of the intrinsic.  */
5999   mem = gen_rtx_MEM (mode, addr);
6000 
6001   set_mem_addr_space (mem, addr_space);
6002 
6003   mem = validize_mem (mem);
6004 
6005   /* The alignment needs to be at least according to that of the mode.  */
6006   /* The alignment needs to be at least that of the mode.  */
6007 			   get_pointer_alignment (loc)));
6008   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6009   MEM_VOLATILE_P (mem) = 1;
6010 
6011   return mem;
6012 }
6013 
6014 /* Make sure an argument is in the right mode.
6015    EXP is the tree argument.
6016    MODE is the mode it should be in.  */
6017 
6018 static rtx
6019 expand_expr_force_mode (tree exp, machine_mode mode)
6020 {
6021   rtx val;
6022   machine_mode old_mode;
6023 
6024   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6025   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
6026      of CONST_INTs, where we know the old_mode only from the call argument.  */
6027 
6028   old_mode = GET_MODE (val);
6029   if (old_mode == VOIDmode)
6030     old_mode = TYPE_MODE (TREE_TYPE (exp));
6031   val = convert_modes (mode, old_mode, val, 1);
6032   return val;
6033 }
6034 
6035 
6036 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6037    EXP is the CALL_EXPR.  CODE is the rtx code
6038    that corresponds to the arithmetic or logical operation from the name;
6039    an exception here is that NOT actually means NAND.  TARGET is an optional
6040    place for us to store the results; AFTER is true if this is the
6041    fetch_and_xxx form.  */
6042 
6043 static rtx
6044 expand_builtin_sync_operation (machine_mode mode, tree exp,
6045 			       enum rtx_code code, bool after,
6046 			       rtx target)
6047 {
6048   rtx val, mem;
6049   location_t loc = EXPR_LOCATION (exp);
6050 
6051   if (code == NOT && warn_sync_nand)
6052     {
6053       tree fndecl = get_callee_fndecl (exp);
6054       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6055 
6056       static bool warned_f_a_n, warned_n_a_f;
6057 
6058       switch (fcode)
6059 	{
6060 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6061 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6062 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6063 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6064 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6065 	  if (warned_f_a_n)
6066 	    break;
6067 
6068 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6069 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6070 	  warned_f_a_n = true;
6071 	  break;
6072 
6073 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6074 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6075 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6076 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6077 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6078 	  if (warned_n_a_f)
6079 	    break;
6080 
6081 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6082 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6083 	  warned_n_a_f = true;
6084 	  break;
6085 
6086 	default:
6087 	  gcc_unreachable ();
6088 	}
6089     }
6090 
6091   /* Expand the operands.  */
6092   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6093   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6094 
6095   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6096 				 after);
6097 }
6098 
6099 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6100    intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
6101    true if this is the boolean form.  TARGET is a place for us to store the
6102    results; this is NOT optional if IS_BOOL is true.  */
6103 
6104 static rtx
6105 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6106 				 bool is_bool, rtx target)
6107 {
6108   rtx old_val, new_val, mem;
6109   rtx *pbool, *poval;
6110 
6111   /* Expand the operands.  */
6112   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6113   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6114   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6115 
6116   pbool = poval = NULL;
6117   if (target != const0_rtx)
6118     {
6119       if (is_bool)
6120 	pbool = &target;
6121       else
6122 	poval = &target;
6123     }
6124   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6125 				       false, MEMMODEL_SYNC_SEQ_CST,
6126 				       MEMMODEL_SYNC_SEQ_CST))
6127     return NULL_RTX;
6128 
6129   return target;
6130 }
6131 
6132 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
6133    general form is actually an atomic exchange, and some targets only
6134    support a reduced form with the second argument being a constant 1.
6135    EXP is the CALL_EXPR; TARGET is an optional place for us to store
6136    the results.  */
6137 
6138 static rtx
6139 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6140 				       rtx target)
6141 {
6142   rtx val, mem;
6143 
6144   /* Expand the operands.  */
6145   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6146   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6147 
6148   return expand_sync_lock_test_and_set (target, mem, val);
6149 }
6150 
6151 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
6152 
6153 static void
6154 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6155 {
6156   rtx mem;
6157 
6158   /* Expand the operands.  */
6159   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160 
6161   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6162 }
6163 
6164 /* Given an integer representing an ``enum memmodel'', verify its
6165    correctness and return the memory model enum.  */
6166 
6167 static enum memmodel
6168 get_memmodel (tree exp)
6169 {
6170   rtx op;
6171   unsigned HOST_WIDE_INT val;
6172   location_t loc
6173     = expansion_point_location_if_in_system_header (input_location);
6174 
6175   /* If the parameter is not a constant, it's a run-time value, so we'll
6176      just convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
6177   if (TREE_CODE (exp) != INTEGER_CST)
6178     return MEMMODEL_SEQ_CST;
6179 
6180   op = expand_normal (exp);
6181 
6182   val = INTVAL (op);
6183   if (targetm.memmodel_check)
6184     val = targetm.memmodel_check (val);
6185   else if (val & ~MEMMODEL_MASK)
6186     {
6187       warning_at (loc, OPT_Winvalid_memory_model,
6188 		  "unknown architecture specifier in memory model to builtin");
6189       return MEMMODEL_SEQ_CST;
6190     }
6191 
6192   /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
6193   if (memmodel_base (val) >= MEMMODEL_LAST)
6194     {
6195       warning_at (loc, OPT_Winvalid_memory_model,
6196 		  "invalid memory model argument to builtin");
6197       return MEMMODEL_SEQ_CST;
6198     }
6199 
6200   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6201      be conservative and promote consume to acquire.  */
6202   if (val == MEMMODEL_CONSUME)
6203     val = MEMMODEL_ACQUIRE;
6204 
6205   return (enum memmodel) val;
6206 }
6207 
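/* For illustration, a sketch of what get_memmodel accepts.  The argument
   normally comes from the __ATOMIC_* constants (0 == RELAXED through
   5 == SEQ_CST); a target's memmodel_check hook may accept extra bits
   outside MEMMODEL_MASK:

     extern int m;                               // not a compile-time constant
     int x;
     __atomic_load_n (&x, __ATOMIC_ACQUIRE);     // used as given
     __atomic_load_n (&x, __ATOMIC_CONSUME);     // promoted to ACQUIRE (PR 59448)
     __atomic_load_n (&x, m);                    // treated as SEQ_CST above
*/
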
6208 /* Expand the __atomic_exchange intrinsic:
6209    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6210    EXP is the CALL_EXPR.
6211    TARGET is an optional place for us to store the results.  */
6212 
6213 static rtx
6214 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6215 {
6216   rtx val, mem;
6217   enum memmodel model;
6218 
6219   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6220 
6221   if (!flag_inline_atomics)
6222     return NULL_RTX;
6223 
6224   /* Expand the operands.  */
6225   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6226   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6227 
6228   return expand_atomic_exchange (target, mem, val, model);
6229 }
6230 
6231 /* Expand the __atomic_compare_exchange intrinsic:
6232    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6233 					TYPE desired, BOOL weak,
6234 					enum memmodel success,
6235 					enum memmodel failure)
6236    EXP is the CALL_EXPR.
6237    TARGET is an optional place for us to store the results.  */
6238 
6239 static rtx
6240 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6241 					rtx target)
6242 {
6243   rtx expect, desired, mem, oldval;
6244   rtx_code_label *label;
6245   enum memmodel success, failure;
6246   tree weak;
6247   bool is_weak;
6248   location_t loc
6249     = expansion_point_location_if_in_system_header (input_location);
6250 
6251   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6252   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6253 
6254   if (failure > success)
6255     {
6256       warning_at (loc, OPT_Winvalid_memory_model,
6257 		  "failure memory model cannot be stronger than success "
6258 		  "memory model for %<__atomic_compare_exchange%>");
6259       success = MEMMODEL_SEQ_CST;
6260     }
6261 
6262   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6263     {
6264       warning_at (loc, OPT_Winvalid_memory_model,
6265 		  "invalid failure memory model for "
6266 		  "%<__atomic_compare_exchange%>");
6267       failure = MEMMODEL_SEQ_CST;
6268       success = MEMMODEL_SEQ_CST;
6269     }
6270 
6271 
6272   if (!flag_inline_atomics)
6273     return NULL_RTX;
6274 
6275   /* Expand the operands.  */
6276   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6277 
6278   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6279   expect = convert_memory_address (Pmode, expect);
6280   expect = gen_rtx_MEM (mode, expect);
6281   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6282 
6283   weak = CALL_EXPR_ARG (exp, 3);
6284   is_weak = false;
6285   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6286     is_weak = true;
6287 
6288   if (target == const0_rtx)
6289     target = NULL;
6290 
6291   /* Lest the rtl backend create a race condition with an improper store
6292      to memory, always create a new pseudo for OLDVAL.  */
6293   oldval = NULL;
6294 
6295   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6296 				       is_weak, success, failure))
6297     return NULL_RTX;
6298 
6299   /* Conditionally store back to EXPECT, lest we create a race condition
6300      with an improper store to memory.  */
6301   /* ??? With a rearrangement of atomics at the gimple level, we can handle
6302      the normal case where EXPECT is totally private, i.e. a register.  At
6303      which point the store can be unconditional.  */
6304   label = gen_label_rtx ();
6305   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6306 			   GET_MODE (target), 1, label);
6307   emit_move_insn (expect, oldval);
6308   emit_label (label);
6309 
6310   return target;
6311 }
6312 
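/* For illustration, the conditional store-back above is what gives the
   builtin its C11 semantics: on failure the current value of the object is
   written through EXPECT, so a retry loop never reloads it by hand:

     int v;                                      // shared with other threads
     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          1,     // weak; spurious failure OK
                                          __ATOMIC_ACQ_REL, __ATOMIC_RELAXED))
       ;   // expected was refreshed from v by the failed exchange
*/
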
6313 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6314    internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6315    call.  The weak parameter must be dropped to match the expected parameter
6316    list and the expected argument changed from value to pointer to memory
6317    slot.  */
6318 
6319 static void
6320 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6321 {
6322   unsigned int z;
6323   vec<tree, va_gc> *vec;
6324 
6325   vec_alloc (vec, 5);
6326   vec->quick_push (gimple_call_arg (call, 0));
6327   tree expected = gimple_call_arg (call, 1);
6328   rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6329 				      TREE_TYPE (expected));
6330   rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6331   if (expd != x)
6332     emit_move_insn (x, expd);
6333   tree v = make_tree (TREE_TYPE (expected), x);
6334   vec->quick_push (build1 (ADDR_EXPR,
6335 			   build_pointer_type (TREE_TYPE (expected)), v));
6336   vec->quick_push (gimple_call_arg (call, 2));
6337   /* Skip the boolean weak parameter.  */
6338   for (z = 4; z < 6; z++)
6339     vec->quick_push (gimple_call_arg (call, z));
6340   /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
6341   unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6342   gcc_assert (bytes_log2 < 5);
6343   built_in_function fncode
6344     = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6345 			   + bytes_log2);
6346   tree fndecl = builtin_decl_explicit (fncode);
6347   tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6348 		    fndecl);
6349   tree exp = build_call_vec (boolean_type_node, fn, vec);
6350   tree lhs = gimple_call_lhs (call);
6351   rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6352   if (lhs)
6353     {
6354       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6355       if (GET_MODE (boolret) != mode)
6356 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6357       x = force_reg (mode, x);
6358       write_complex_part (target, boolret, true);
6359       write_complex_part (target, x, false);
6360     }
6361 }
6362 
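/* For illustration, a sketch of the rewriting done above for a strong
   4-byte exchange.  The internal call

     lhs = .ATOMIC_COMPARE_EXCHANGE (p, expected, desired, 4, succ, fail);

   is re-issued, with EXPECTED spilled to a stack slot X, as

     bool ok = __atomic_compare_exchange_4 (p, &X, desired, succ, fail);

   after which LHS, a complex value, receives X in its real part and OK in
   its imaginary part.  */
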
6363 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */
6364 
6365 void
6366 expand_ifn_atomic_compare_exchange (gcall *call)
6367 {
6368   int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6369   gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6370   machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6371   rtx expect, desired, mem, oldval, boolret;
6372   enum memmodel success, failure;
6373   tree lhs;
6374   bool is_weak;
6375   location_t loc
6376     = expansion_point_location_if_in_system_header (gimple_location (call));
6377 
6378   success = get_memmodel (gimple_call_arg (call, 4));
6379   failure = get_memmodel (gimple_call_arg (call, 5));
6380 
6381   if (failure > success)
6382     {
6383       warning_at (loc, OPT_Winvalid_memory_model,
6384 		  "failure memory model cannot be stronger than success "
6385 		  "memory model for %<__atomic_compare_exchange%>");
6386       success = MEMMODEL_SEQ_CST;
6387     }
6388 
6389   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6390     {
6391       warning_at (loc, OPT_Winvalid_memory_model,
6392 		  "invalid failure memory model for "
6393 		  "%<__atomic_compare_exchange%>");
6394       failure = MEMMODEL_SEQ_CST;
6395       success = MEMMODEL_SEQ_CST;
6396     }
6397 
6398   if (!flag_inline_atomics)
6399     {
6400       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6401       return;
6402     }
6403 
6404   /* Expand the operands.  */
6405   mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6406 
6407   expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6408   desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6409 
6410   is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6411 
6412   boolret = NULL;
6413   oldval = NULL;
6414 
6415   if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6416 				       is_weak, success, failure))
6417     {
6418       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6419       return;
6420     }
6421 
6422   lhs = gimple_call_lhs (call);
6423   if (lhs)
6424     {
6425       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6426       if (GET_MODE (boolret) != mode)
6427 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6428       write_complex_part (target, boolret, true);
6429       write_complex_part (target, oldval, false);
6430     }
6431 }
6432 
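/* For illustration: argument 3 of the internal function packs the access
   size into the low byte and the weak flag into bit 8, matching the
   "& 255" and "& 256" decoding above.  A strong 4-byte exchange therefore
   passes 4, a weak one 4 | 256 == 260.  */
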
6433 /* Expand the __atomic_load intrinsic:
6434    	TYPE __atomic_load (TYPE *object, enum memmodel)
6435    EXP is the CALL_EXPR.
6436    TARGET is an optional place for us to store the results.  */
6437 
6438 static rtx
6439 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6440 {
6441   rtx mem;
6442   enum memmodel model;
6443 
6444   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6445   if (is_mm_release (model) || is_mm_acq_rel (model))
6446     {
6447       location_t loc
6448 	= expansion_point_location_if_in_system_header (input_location);
6449       warning_at (loc, OPT_Winvalid_memory_model,
6450 		  "invalid memory model for %<__atomic_load%>");
6451       model = MEMMODEL_SEQ_CST;
6452     }
6453 
6454   if (!flag_inline_atomics)
6455     return NULL_RTX;
6456 
6457   /* Expand the operand.  */
6458   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6459 
6460   return expand_atomic_load (target, mem, model);
6461 }
6462 
6463 
6464 /* Expand the __atomic_store intrinsic:
6465    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6466    EXP is the CALL_EXPR.
6467    TARGET is an optional place for us to store the results.  */
6468 
6469 static rtx
6470 expand_builtin_atomic_store (machine_mode mode, tree exp)
6471 {
6472   rtx mem, val;
6473   enum memmodel model;
6474 
6475   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6476   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6477 	|| is_mm_release (model)))
6478     {
6479       location_t loc
6480 	= expansion_point_location_if_in_system_header (input_location);
6481       warning_at (loc, OPT_Winvalid_memory_model,
6482 		  "invalid memory model for %<__atomic_store%>");
6483       model = MEMMODEL_SEQ_CST;
6484     }
6485 
6486   if (!flag_inline_atomics)
6487     return NULL_RTX;
6488 
6489   /* Expand the operands.  */
6490   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6491   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6492 
6493   return expand_atomic_store (mem, val, model, false);
6494 }
6495 
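/* For illustration: per the C/C++ memory model only relaxed, release and
   seq_cst orderings are meaningful for a plain store, which is what the
   check above enforces:

     int x;
     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);   // accepted
     __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);   // warns; SEQ_CST is used
*/
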
6496 /* Expand the __atomic_fetch_XXX intrinsic:
6497    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6498    EXP is the CALL_EXPR.
6499    TARGET is an optional place for us to store the results.
6500    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
6501    FETCH_AFTER is true if returning the result of the operation.
6502    FETCH_AFTER is false if returning the value before the operation.
6503    IGNORE is true if the result is not used.
6504    EXT_CALL is the correct builtin for an external call if this cannot be
6505    resolved to an instruction sequence.  */
6506 
6507 static rtx
6508 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6509 				enum rtx_code code, bool fetch_after,
6510 				bool ignore, enum built_in_function ext_call)
6511 {
6512   rtx val, mem, ret;
6513   enum memmodel model;
6514   tree fndecl;
6515   tree addr;
6516 
6517   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6518 
6519   /* Expand the operands.  */
6520   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6521   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6522 
6523   /* Only try generating instructions if inlining is turned on.  */
6524   if (flag_inline_atomics)
6525     {
6526       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6527       if (ret)
6528 	return ret;
6529     }
6530 
6531   /* If no different routine is needed for the library call, just return.  */
6532   if (ext_call == BUILT_IN_NONE)
6533     return NULL_RTX;
6534 
6535   /* Change the call to the specified function.  */
6536   fndecl = get_callee_fndecl (exp);
6537   addr = CALL_EXPR_FN (exp);
6538   STRIP_NOPS (addr);
6539 
6540   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6541   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6542 
6543   /* If we will emit code after the call, the call cannot be a tail call.
6544      If it is emitted as a tail call, a barrier is emitted after it, and
6545      then all trailing code is removed.  */
6546   if (!ignore)
6547     CALL_EXPR_TAILCALL (exp) = 0;
6548 
6549   /* Expand the call here so we can emit trailing code.  */
6550   ret = expand_call (exp, target, ignore);
6551 
6552   /* Replace the original function just in case it matters.  */
6553   TREE_OPERAND (addr, 0) = fndecl;
6554 
6555   /* Then issue the arithmetic correction to return the right result.  */
6556   if (!ignore)
6557     {
6558       if (code == NOT)
6559 	{
6560 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6561 				     OPTAB_LIB_WIDEN);
6562 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
6563 	}
6564       else
6565 	ret = expand_simple_binop (mode, code, ret, val, target, true,
6566 				   OPTAB_LIB_WIDEN);
6567     }
6568   return ret;
6569 }
6570 
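/* For illustration, the arithmetic correction above for the nand case: the
   EXT_CALL fallback (__atomic_fetch_nand_N) returns the value before the
   operation, so the value after it is recomputed as ret = ~(ret & val).
   E.g. for a 32-bit type with *mem == 0xF0 and val == 0x3C the call
   returns 0xF0 and the corrected result is ~(0xF0 & 0x3C) == ~0x30
   == 0xFFFFFFCF.  */
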
6571 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */
6572 
6573 void
6574 expand_ifn_atomic_bit_test_and (gcall *call)
6575 {
6576   tree ptr = gimple_call_arg (call, 0);
6577   tree bit = gimple_call_arg (call, 1);
6578   tree flag = gimple_call_arg (call, 2);
6579   tree lhs = gimple_call_lhs (call);
6580   enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6581   machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6582   enum rtx_code code;
6583   optab optab;
6584   struct expand_operand ops[5];
6585 
6586   gcc_assert (flag_inline_atomics);
6587 
6588   if (gimple_call_num_args (call) == 4)
6589     model = get_memmodel (gimple_call_arg (call, 3));
6590 
6591   rtx mem = get_builtin_sync_mem (ptr, mode);
6592   rtx val = expand_expr_force_mode (bit, mode);
6593 
6594   switch (gimple_call_internal_fn (call))
6595     {
6596     case IFN_ATOMIC_BIT_TEST_AND_SET:
6597       code = IOR;
6598       optab = atomic_bit_test_and_set_optab;
6599       break;
6600     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6601       code = XOR;
6602       optab = atomic_bit_test_and_complement_optab;
6603       break;
6604     case IFN_ATOMIC_BIT_TEST_AND_RESET:
6605       code = AND;
6606       optab = atomic_bit_test_and_reset_optab;
6607       break;
6608     default:
6609       gcc_unreachable ();
6610     }
6611 
6612   if (lhs == NULL_TREE)
6613     {
6614       val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6615 				 val, NULL_RTX, true, OPTAB_DIRECT);
6616       if (code == AND)
6617 	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6618       expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6619       return;
6620     }
6621 
6622   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6623   enum insn_code icode = direct_optab_handler (optab, mode);
6624   gcc_assert (icode != CODE_FOR_nothing);
6625   create_output_operand (&ops[0], target, mode);
6626   create_fixed_operand (&ops[1], mem);
6627   create_convert_operand_to (&ops[2], val, mode, true);
6628   create_integer_operand (&ops[3], model);
6629   create_integer_operand (&ops[4], integer_onep (flag));
6630   if (maybe_expand_insn (icode, 5, ops))
6631     return;
6632 
6633   rtx bitval = val;
6634   val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6635 			     val, NULL_RTX, true, OPTAB_DIRECT);
6636   rtx maskval = val;
6637   if (code == AND)
6638     val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6639   rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6640 				       code, model, false);
6641   if (integer_onep (flag))
6642     {
6643       result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6644 				    NULL_RTX, true, OPTAB_DIRECT);
6645       result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6646 				    true, OPTAB_DIRECT);
6647     }
6648   else
6649     result = expand_simple_binop (mode, AND, result, maskval, target, true,
6650 				  OPTAB_DIRECT);
6651   if (result != target)
6652     emit_move_insn (target, result);
6653 }
6654 
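/* For illustration, the kind of source idiom that gimple folding turns
   into IFN_ATOMIC_BIT_TEST_AND_SET (the complement and reset variants are
   analogous, with xor and and-not; BIT is any 0-based bit index):

     extern unsigned int word;
     int was_set = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
                    >> bit) & 1;

   With a direct optab this becomes a single atomic bit-test instruction
   (e.g. lock bts on x86); otherwise the fallback above redoes the shift
   and mask on the result of a plain atomic fetch_op.  */
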
6655 /* Expand an atomic clear operation.
6656 	void __atomic_clear (BOOL *obj, enum memmodel)
6657    EXP is the call expression.  */
6658 
6659 static rtx
6660 expand_builtin_atomic_clear (tree exp)
6661 {
6662   machine_mode mode;
6663   rtx mem, ret;
6664   enum memmodel model;
6665 
6666   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6667   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6668   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6669 
6670   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6671     {
6672       location_t loc
6673 	= expansion_point_location_if_in_system_header (input_location);
6674       warning_at (loc, OPT_Winvalid_memory_model,
6675 		  "invalid memory model for %<__atomic_store%>");
6676       model = MEMMODEL_SEQ_CST;
6677     }
6678 
6679   /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release;
6680      failing both, a plain store is emitted.  The only way this can happen is
6681      if the bool type is larger than a word size.  Unlikely, but handle it
6682      anyway for completeness.  Assume a single threaded model since there is
6683      no atomic support in this case, and no barriers are required.  */
6684   ret = expand_atomic_store (mem, const0_rtx, model, true);
6685   if (!ret)
6686     emit_move_insn (mem, const0_rtx);
6687   return const0_rtx;
6688 }
6689 
6690 /* Expand an atomic test_and_set operation.
6691 	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6692    EXP is the call expression.  */
6693 
6694 static rtx
6695 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6696 {
6697   rtx mem;
6698   enum memmodel model;
6699   machine_mode mode;
6700 
6701   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6702   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6703   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6704 
6705   return expand_atomic_test_and_set (target, mem, model);
6706 }
6707 
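/* For illustration, a sketch pairing the two byte-sized builtins above,
   which is essentially how C11 atomic_flag is implemented:

     static unsigned char flag;
     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;   // spin until the flag was previously clear
     // ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/
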
6708 
6709 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6710    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
6711 
6712 static tree
6713 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6714 {
6715   int size;
6716   machine_mode mode;
6717   unsigned int mode_align, type_align;
6718 
6719   if (TREE_CODE (arg0) != INTEGER_CST)
6720     return NULL_TREE;
6721 
6722   /* We need a corresponding integer mode for the access to be lock-free.  */
6723   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6724   if (!int_mode_for_size (size, 0).exists (&mode))
6725     return boolean_false_node;
6726 
6727   mode_align = GET_MODE_ALIGNMENT (mode);
6728 
6729   if (TREE_CODE (arg1) == INTEGER_CST)
6730     {
6731       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6732 
6733       /* Either this argument is null, or it's a fake pointer encoding
6734          the alignment of the object.  */
6735       val = least_bit_hwi (val);
6736       val *= BITS_PER_UNIT;
6737 
6738       if (val == 0 || mode_align < val)
6739         type_align = mode_align;
6740       else
6741         type_align = val;
6742     }
6743   else
6744     {
6745       tree ttype = TREE_TYPE (arg1);
6746 
6747       /* This function is usually invoked and folded immediately by the front
6748 	 end before anything else has a chance to look at it.  The pointer
6749 	 parameter at this point is usually cast to a void *, so check for that
6750 	 and look past the cast.  */
6751       if (CONVERT_EXPR_P (arg1)
6752 	  && POINTER_TYPE_P (ttype)
6753 	  && VOID_TYPE_P (TREE_TYPE (ttype))
6754 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6755 	arg1 = TREE_OPERAND (arg1, 0);
6756 
6757       ttype = TREE_TYPE (arg1);
6758       gcc_assert (POINTER_TYPE_P (ttype));
6759 
6760       /* Get the underlying type of the object.  */
6761       ttype = TREE_TYPE (ttype);
6762       type_align = TYPE_ALIGN (ttype);
6763     }
6764 
6765   /* If the object has smaller alignment, the lock free routines cannot
6766      be used.  */
6767   if (type_align < mode_align)
6768     return boolean_false_node;
6769 
6770   /* Check if a compare_and_swap pattern exists for the mode which represents
6771      the required size.  The pattern is not allowed to fail, so the existence
6772      of the pattern indicates support is present.  Also require that an
6773      atomic load exists for the required size.  */
6774   if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6775     return boolean_true_node;
6776   else
6777     return boolean_false_node;
6778 }
6779 
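/* For illustration, the three shapes of ARG1 the folder above accepts:

     __atomic_always_lock_free (4, 0);             // NULL: typical alignment
     __atomic_always_lock_free (sizeof (x), &x);   // alignment of x's type
     __atomic_always_lock_free (4, (void *) 8);    // integer "fake pointer";
                                                   // least set bit == alignment

   (x stands for any object; all three fold to 1 or 0 at compile time when
   the answer is known.)  */
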
6780 /* Expand __atomic_always_lock_free for call EXP, returning const1_rtx if
6781    the object will always use lock free instructions and const0_rtx
6782    otherwise.  The first argument is the size of the object, and the second
6783    is a pointer to the object itself.  If NULL is passed for the object,
6784    then the result is based on typical alignment for an object of the
6785    specified size.  */
6786 
6787 static rtx
6788 expand_builtin_atomic_always_lock_free (tree exp)
6789 {
6790   tree size;
6791   tree arg0 = CALL_EXPR_ARG (exp, 0);
6792   tree arg1 = CALL_EXPR_ARG (exp, 1);
6793 
6794   if (TREE_CODE (arg0) != INTEGER_CST)
6795     {
6796       error ("non-constant argument 1 to __atomic_always_lock_free");
6797       return const0_rtx;
6798     }
6799 
6800   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6801   if (size == boolean_true_node)
6802     return const1_rtx;
6803   return const0_rtx;
6804 }
6805 
6806 /* Return boolean_true_node if it can be determined that the object ARG1 of
6807    size ARG0 is lock free on this architecture, otherwise NULL_TREE.  */
6808 
6809 static tree
6810 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6811 {
6812   if (!flag_inline_atomics)
6813     return NULL_TREE;
6814 
6815   /* If it isn't always lock free, don't generate a result.  */
6816   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6817     return boolean_true_node;
6818 
6819   return NULL_TREE;
6820 }
6821 
6822 /* Expand __atomic_is_lock_free for call EXP.  The first argument is the
6823    size of the object, and the second is a pointer to the object itself.
6824    If NULL is passed for the object, then the result is based on typical
6825    alignment for an object of the specified size.  Return const1_rtx if
6826    the object is known at compile time to be lock free, otherwise return
6827    NULL_RTX and let the call fall back to the library routine.  */
6828 
6829 static rtx
6830 expand_builtin_atomic_is_lock_free (tree exp)
6831 {
6832   tree size;
6833   tree arg0 = CALL_EXPR_ARG (exp, 0);
6834   tree arg1 = CALL_EXPR_ARG (exp, 1);
6835 
6836   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6837     {
6838       error ("non-integer argument 1 to __atomic_is_lock_free");
6839       return NULL_RTX;
6840     }
6841 
6842   if (!flag_inline_atomics)
6843     return NULL_RTX;
6844 
6845   /* If the value is known at compile time, return the RTX for it.  */
6846   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6847   if (size == boolean_true_node)
6848     return const1_rtx;
6849 
6850   return NULL_RTX;
6851 }
6852 
6853 /* Expand the __atomic_thread_fence intrinsic:
6854    	void __atomic_thread_fence (enum memmodel)
6855    EXP is the CALL_EXPR.  */
6856 
6857 static void
6858 expand_builtin_atomic_thread_fence (tree exp)
6859 {
6860   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6861   expand_mem_thread_fence (model);
6862 }
6863 
6864 /* Expand the __atomic_signal_fence intrinsic:
6865    	void __atomic_signal_fence (enum memmodel)
6866    EXP is the CALL_EXPR.  */
6867 
6868 static void
6869 expand_builtin_atomic_signal_fence (tree exp)
6870 {
6871   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6872   expand_mem_signal_fence (model);
6873 }
6874 
6875 /* Expand the __sync_synchronize intrinsic.  */
6876 
6877 static void
6878 expand_builtin_sync_synchronize (void)
6879 {
6880   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6881 }
6882 
6883 static rtx
6884 expand_builtin_thread_pointer (tree exp, rtx target)
6885 {
6886   enum insn_code icode;
6887   if (!validate_arglist (exp, VOID_TYPE))
6888     return const0_rtx;
6889   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6890   if (icode != CODE_FOR_nothing)
6891     {
6892       struct expand_operand op;
6893       /* If the target is not suitable then create a new target.  */
6894       if (target == NULL_RTX
6895 	  || !REG_P (target)
6896 	  || GET_MODE (target) != Pmode)
6897 	target = gen_reg_rtx (Pmode);
6898       create_output_operand (&op, target, Pmode);
6899       expand_insn (icode, 1, &op);
6900       return target;
6901     }
6902   error ("%<__builtin_thread_pointer%> is not supported on this target");
6903   return const0_rtx;
6904 }
6905 
6906 static void
6907 expand_builtin_set_thread_pointer (tree exp)
6908 {
6909   enum insn_code icode;
6910   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6911     return;
6912   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6913   if (icode != CODE_FOR_nothing)
6914     {
6915       struct expand_operand op;
6916       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6917 			     Pmode, EXPAND_NORMAL);
6918       create_input_operand (&op, val, Pmode);
6919       expand_insn (icode, 1, &op);
6920       return;
6921     }
6922   error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6923 }
6924 
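/* For illustration: where the target provides the optabs, the two functions
   above expand to direct accesses to the thread register (on AArch64, for
   instance, TPIDR_EL0).  A sketch:

     void *tp = __builtin_thread_pointer ();   // base of the TLS block
     __builtin_set_thread_pointer (tp);        // where the target allows it

   On targets without the optabs the errors above are emitted instead.  */
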
6925 
6926 /* Emit code to restore the current value of stack.  */
6927 
6928 static void
6929 expand_stack_restore (tree var)
6930 {
6931   rtx_insn *prev;
6932   rtx sa = expand_normal (var);
6933 
6934   sa = convert_memory_address (Pmode, sa);
6935 
6936   prev = get_last_insn ();
6937   emit_stack_restore (SAVE_BLOCK, sa);
6938 
6939   record_new_stack_level ();
6940 
6941   fixup_args_size_notes (prev, get_last_insn (), 0);
6942 }
6943 
6944 /* Emit code to save the current value of stack.  */
6945 
6946 static rtx
6947 expand_stack_save (void)
6948 {
6949   rtx ret = NULL_RTX;
6950 
6951   emit_stack_save (SAVE_BLOCK, &ret);
6952   return ret;
6953 }
6954 
6955 /* Emit code to get the openacc gang, worker or vector id or size.  */
6956 
6957 static rtx
6958 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6959 {
6960   const char *name;
6961   rtx fallback_retval;
6962   rtx_insn *(*gen_fn) (rtx, rtx);
6963   switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6964     {
6965     case BUILT_IN_GOACC_PARLEVEL_ID:
6966       name = "__builtin_goacc_parlevel_id";
6967       fallback_retval = const0_rtx;
6968       gen_fn = targetm.gen_oacc_dim_pos;
6969       break;
6970     case BUILT_IN_GOACC_PARLEVEL_SIZE:
6971       name = "__builtin_goacc_parlevel_size";
6972       fallback_retval = const1_rtx;
6973       gen_fn = targetm.gen_oacc_dim_size;
6974       break;
6975     default:
6976       gcc_unreachable ();
6977     }
6978 
6979   if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6980     {
6981       error ("%qs only supported in OpenACC code", name);
6982       return const0_rtx;
6983     }
6984 
6985   tree arg = CALL_EXPR_ARG (exp, 0);
6986   if (TREE_CODE (arg) != INTEGER_CST)
6987     {
6988       error ("non-constant argument 0 to %qs", name);
6989       return const0_rtx;
6990     }
6991 
6992   int dim = TREE_INT_CST_LOW (arg);
6993   switch (dim)
6994     {
6995     case GOMP_DIM_GANG:
6996     case GOMP_DIM_WORKER:
6997     case GOMP_DIM_VECTOR:
6998       break;
6999     default:
7000       error ("illegal argument 0 to %qs", name);
7001       return const0_rtx;
7002     }
7003 
7004   if (ignore)
7005     return target;
7006 
7007   if (target == NULL_RTX)
7008     target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7009 
7010   if (!targetm.have_oacc_dim_size ())
7011     {
7012       emit_move_insn (target, fallback_retval);
7013       return target;
7014     }
7015 
7016   rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7017   emit_insn (gen_fn (reg, GEN_INT (dim)));
7018   if (reg != target)
7019     emit_move_insn (target, reg);
7020 
7021   return target;
7022 }
7023 
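/* For illustration, a sketch of how OpenACC-lowered code queries its
   position and partition size, using the GOMP_DIM_* constants from
   gomp-constants.h:

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
     int vlen = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   Outside OpenACC code these are diagnosed; on targets without
   oacc_dim_pos/oacc_dim_size insns they expand to the fallback constants
   (0 and 1) above.  */
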
7024 /* Expand a string compare operation using a sequence of char comparison
7025    to get rid of the calling overhead, with result going to TARGET if
7026    that's convenient.
7027 
7028    VAR_STR is the variable string source;
7029    CONST_STR is the constant string source;
7030    LENGTH is the number of chars to compare;
7031    CONST_STR_N indicates which source string is the constant string;
7032    MODE is the mode of the result.
7033 
7034    The call is expanded to (assume const_str_n is 2, i.e., arg2 is a constant string):
7035 
7036    target = (int) (unsigned char) var_str[0]
7037 	    - (int) (unsigned char) const_str[0];
7038    if (target != 0)
7039      goto ne_label;
7040      ...
7041    target = (int) (unsigned char) var_str[length - 2]
7042 	    - (int) (unsigned char) const_str[length - 2];
7043    if (target != 0)
7044      goto ne_label;
7045    target = (int) (unsigned char) var_str[length - 1]
7046 	    - (int) (unsigned char) const_str[length - 1];
7047    ne_label:
7048   */
7049 
7050 static rtx
7051 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7052 		   unsigned HOST_WIDE_INT length,
7053 		   int const_str_n, machine_mode mode)
7054 {
7055   HOST_WIDE_INT offset = 0;
7056   rtx var_rtx_array
7057     = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7058   rtx var_rtx = NULL_RTX;
7059   rtx const_rtx = NULL_RTX;
7060   rtx result = target ? target : gen_reg_rtx (mode);
7061   rtx_code_label *ne_label = gen_label_rtx ();
7062   tree unit_type_node = unsigned_char_type_node;
7063   scalar_int_mode unit_mode
7064     = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7065 
7066   start_sequence ();
7067 
7068   for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7069     {
7070       var_rtx
7071 	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7072       const_rtx = c_readstr (const_str + offset, unit_mode);
7073       rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7074       rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7075 
7076       op0 = convert_modes (mode, unit_mode, op0, 1);
7077       op1 = convert_modes (mode, unit_mode, op1, 1);
7078       result = expand_simple_binop (mode, MINUS, op0, op1,
7079 				    result, 1, OPTAB_WIDEN);
7080       if (i < length - 1)
7081 	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7082 	    			 mode, true, ne_label);
7083       offset += GET_MODE_SIZE (unit_mode);
7084     }
7085 
7086   emit_label (ne_label);
7087   rtx_insn *insns = get_insns ();
7088   end_sequence ();
7089   emit_insn (insns);
7090 
7091   return result;
7092 }
7093 
7094 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7095    to TARGET if that's convenient.
7096    If the call cannot be inlined, return NULL_RTX.  */
7097 
7098 static rtx
7099 inline_expand_builtin_bytecmp (tree exp, rtx target)
7100 {
7101   tree fndecl = get_callee_fndecl (exp);
7102   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7103   bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7104 
7105   /* Do NOT apply this inlining expansion when optimizing for size or
7106      optimization level below 2.  */
7107   if (optimize < 2 || optimize_insn_for_size_p ())
7108     return NULL_RTX;
7109 
7110   gcc_checking_assert (fcode == BUILT_IN_STRCMP
7111 		       || fcode == BUILT_IN_STRNCMP
7112 		       || fcode == BUILT_IN_MEMCMP);
7113 
7114   /* On a target where the type of the call (int) has the same or narrower
7115      precision than unsigned char, give up the inlining expansion.  */
7116   if (TYPE_PRECISION (unsigned_char_type_node)
7117       >= TYPE_PRECISION (TREE_TYPE (exp)))
7118     return NULL_RTX;
7119 
7120   tree arg1 = CALL_EXPR_ARG (exp, 0);
7121   tree arg2 = CALL_EXPR_ARG (exp, 1);
7122   tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7123 
7124   unsigned HOST_WIDE_INT len1 = 0;
7125   unsigned HOST_WIDE_INT len2 = 0;
7126   unsigned HOST_WIDE_INT len3 = 0;
7127 
7128   /* Get the object representation of the initializers of ARG1 and ARG2
7129      as strings, provided they refer to constant objects, with their byte
7130      sizes in LEN1 and LEN2, respectively.  */
7131   const char *bytes1 = c_getstr (arg1, &len1);
7132   const char *bytes2 = c_getstr (arg2, &len2);
7133 
7134   /* Fail if neither argument refers to an initialized constant.  */
7135   if (!bytes1 && !bytes2)
7136     return NULL_RTX;
7137 
7138   if (is_ncmp)
7139     {
7140       /* Fail if the memcmp/strncmp bound is not a constant.  */
7141       if (!tree_fits_uhwi_p (len3_tree))
7142 	return NULL_RTX;
7143 
7144       len3 = tree_to_uhwi (len3_tree);
7145 
7146       if (fcode == BUILT_IN_MEMCMP)
7147 	{
7148 	  /* Fail if the memcmp bound is greater than the size of either
7149 	     of the two constant objects.  */
7150 	  if ((bytes1 && len1 < len3)
7151 	      || (bytes2 && len2 < len3))
7152 	    return NULL_RTX;
7153 	}
7154     }
7155 
7156   if (fcode != BUILT_IN_MEMCMP)
7157     {
7158       /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7159 	 and LEN2 to the length of the nul-terminated string stored
7160 	 in each.  */
7161       if (bytes1 != NULL)
7162 	len1 = strnlen (bytes1, len1) + 1;
7163       if (bytes2 != NULL)
7164 	len2 = strnlen (bytes2, len2) + 1;
7165     }
7166 
7167   /* See inline_string_cmp.  */
7168   int const_str_n;
7169   if (!len1)
7170     const_str_n = 2;
7171   else if (!len2)
7172     const_str_n = 1;
7173   else if (len2 > len1)
7174     const_str_n = 1;
7175   else
7176     const_str_n = 2;
7177 
7178   /* For strncmp only, compute the new bound as the smallest of
7179      the lengths of the two strings (plus 1) and the bound provided
7180      to the function.  */
7181   unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7182   if (is_ncmp && len3 < bound)
7183     bound = len3;
7184 
7185   /* If the bound of the comparison is larger than the threshold,
7186      do nothing.  */
7187   if (bound > (unsigned HOST_WIDE_INT)
7188 	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7189     return NULL_RTX;
7190 
7191   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7192 
7193   /* Now start the inline expansion of the call.  */
7194   return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7195 			    (const_str_n == 1) ? bytes1 : bytes2, bound,
7196 			    const_str_n, mode);
7197 }
7198 
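/* For illustration, assuming -O2 and the default value (3) of
   --param builtin-string-cmp-inline-length, with NAME not constant:

     strcmp (name, "ab");          // constant side is 3 bytes including
                                   // the NUL, 3 <= 3: expanded inline
     strcmp (name, "abcd");        // 5 bytes > 3: library call
     strncmp (name, "abcd", 2);    // bound min (5, 2) == 2 <= 3: inlined
*/
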
7199 /* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
7200    represents the size of the first argument to that call, or VOIDmode
7201    if the argument is a pointer.  IGNORE will be true if the result
7202    isn't used.  */
7203 static rtx
7204 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7205 			       bool ignore)
7206 {
7207   rtx val, failsafe;
7208   unsigned nargs = call_expr_nargs (exp);
7209 
7210   tree arg0 = CALL_EXPR_ARG (exp, 0);
7211 
7212   if (mode == VOIDmode)
7213     {
7214       mode = TYPE_MODE (TREE_TYPE (arg0));
7215       gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7216     }
7217 
7218   val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7219 
7220   /* An optional second argument can be used as a failsafe value on
7221      some machines.  If it isn't present, then the failsafe value is
7222      assumed to be 0.  */
7223   if (nargs > 1)
7224     {
7225       tree arg1 = CALL_EXPR_ARG (exp, 1);
7226       failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7227     }
7228   else
7229     failsafe = const0_rtx;
7230 
7231   /* If the result isn't used, the behavior is undefined.  It would be
7232      nice to emit a warning here, but path splitting means this might
7233      happen with legitimate code.  So simply drop the builtin
7234      expansion in that case; we've handled any side-effects above.  */
7235   if (ignore)
7236     return const0_rtx;
7237 
7238   /* If we don't have a suitable target, create one to hold the result.  */
7239   if (target == NULL || GET_MODE (target) != mode)
7240     target = gen_reg_rtx (mode);
7241 
7242   if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7243     val = convert_modes (mode, VOIDmode, val, false);
7244 
7245   return targetm.speculation_safe_value (mode, target, val, failsafe);
7246 }
7247 
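/* For illustration, the documented use of the builtin expanded above: it
   inhibits Spectre-v1 style value speculation past a bounds check,

     if (i < bound)
       val = array[__builtin_speculation_safe_value (i)];

   so that on targets implementing targetm.speculation_safe_value, I is
   forced to the failsafe value (0 unless a second argument is given) on a
   mis-speculated path.  */
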
7248 /* Expand an expression EXP that calls a built-in function,
7249    with result going to TARGET if that's convenient
7250    (and in mode MODE if that's convenient).
7251    SUBTARGET may be used as the target for computing one of EXP's operands.
7252    IGNORE is nonzero if the value is to be ignored.  */
7253 
7254 rtx
7255 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7256 		int ignore)
7257 {
7258   tree fndecl = get_callee_fndecl (exp);
7259   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7260   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7261   int flags;
7262 
7263   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7264     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7265 
7266   /* When ASan is enabled, we don't want to expand some memory/string
7267      builtins and rely on libsanitizer's hooks.  This allows us to avoid
7268      redundant checks and be sure, that possible overflow will be detected
7269      by ASan.  */
7270 
7271   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7272     return expand_call (exp, target, ignore);
7273 
7274   /* When not optimizing, generate calls to library functions for a certain
7275      set of builtins.  */
7276   if (!optimize
7277       && !called_as_built_in (fndecl)
7278       && fcode != BUILT_IN_FORK
7279       && fcode != BUILT_IN_EXECL
7280       && fcode != BUILT_IN_EXECV
7281       && fcode != BUILT_IN_EXECLP
7282       && fcode != BUILT_IN_EXECLE
7283       && fcode != BUILT_IN_EXECVP
7284       && fcode != BUILT_IN_EXECVE
7285       && !ALLOCA_FUNCTION_CODE_P (fcode)
7286       && fcode != BUILT_IN_FREE)
7287     return expand_call (exp, target, ignore);
7288 
7289   /* The built-in function expanders test for target == const0_rtx
7290      to determine whether the function's result will be ignored.  */
7291   if (ignore)
7292     target = const0_rtx;
7293 
7294   /* If the result of a pure or const built-in function is ignored, and
7295      none of its arguments are volatile, we can avoid expanding the
7296      built-in call and just evaluate the arguments for side-effects.  */
7297   if (target == const0_rtx
7298       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7299       && !(flags & ECF_LOOPING_CONST_OR_PURE))
7300     {
7301       bool volatilep = false;
7302       tree arg;
7303       call_expr_arg_iterator iter;
7304 
7305       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7306 	if (TREE_THIS_VOLATILE (arg))
7307 	  {
7308 	    volatilep = true;
7309 	    break;
7310 	  }
7311 
7312       if (! volatilep)
7313 	{
7314 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7315 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7316 	  return const0_rtx;
7317 	}
7318     }
7319 
7320   switch (fcode)
7321     {
7322     CASE_FLT_FN (BUILT_IN_FABS):
7323     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7324     case BUILT_IN_FABSD32:
7325     case BUILT_IN_FABSD64:
7326     case BUILT_IN_FABSD128:
7327       target = expand_builtin_fabs (exp, target, subtarget);
7328       if (target)
7329 	return target;
7330       break;
7331 
7332     CASE_FLT_FN (BUILT_IN_COPYSIGN):
7333     CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7334       target = expand_builtin_copysign (exp, target, subtarget);
7335       if (target)
7336 	return target;
7337       break;
7338 
7339       /* Just do a normal library call if we were unable to fold
7340 	 the values.  */
7341     CASE_FLT_FN (BUILT_IN_CABS):
7342       break;
7343 
7344     CASE_FLT_FN (BUILT_IN_FMA):
7345     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7346       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7347       if (target)
7348 	return target;
7349       break;
7350 
7351     CASE_FLT_FN (BUILT_IN_ILOGB):
7352       if (! flag_unsafe_math_optimizations)
7353 	break;
7354       gcc_fallthrough ();
7355     CASE_FLT_FN (BUILT_IN_ISINF):
7356     CASE_FLT_FN (BUILT_IN_FINITE):
7357     case BUILT_IN_ISFINITE:
7358     case BUILT_IN_ISNORMAL:
7359       target = expand_builtin_interclass_mathfn (exp, target);
7360       if (target)
7361 	return target;
7362       break;
7363 
7364     CASE_FLT_FN (BUILT_IN_ICEIL):
7365     CASE_FLT_FN (BUILT_IN_LCEIL):
7366     CASE_FLT_FN (BUILT_IN_LLCEIL):
7367     CASE_FLT_FN (BUILT_IN_LFLOOR):
7368     CASE_FLT_FN (BUILT_IN_IFLOOR):
7369     CASE_FLT_FN (BUILT_IN_LLFLOOR):
7370       target = expand_builtin_int_roundingfn (exp, target);
7371       if (target)
7372 	return target;
7373       break;
7374 
7375     CASE_FLT_FN (BUILT_IN_IRINT):
7376     CASE_FLT_FN (BUILT_IN_LRINT):
7377     CASE_FLT_FN (BUILT_IN_LLRINT):
7378     CASE_FLT_FN (BUILT_IN_IROUND):
7379     CASE_FLT_FN (BUILT_IN_LROUND):
7380     CASE_FLT_FN (BUILT_IN_LLROUND):
7381       target = expand_builtin_int_roundingfn_2 (exp, target);
7382       if (target)
7383 	return target;
7384       break;
7385 
7386     CASE_FLT_FN (BUILT_IN_POWI):
7387       target = expand_builtin_powi (exp, target);
7388       if (target)
7389 	return target;
7390       break;
7391 
7392     CASE_FLT_FN (BUILT_IN_CEXPI):
7393       target = expand_builtin_cexpi (exp, target);
7394       gcc_assert (target);
7395       return target;
7396 
7397     CASE_FLT_FN (BUILT_IN_SIN):
7398     CASE_FLT_FN (BUILT_IN_COS):
7399       if (! flag_unsafe_math_optimizations)
7400 	break;
7401       target = expand_builtin_mathfn_3 (exp, target, subtarget);
7402       if (target)
7403 	return target;
7404       break;
7405 
7406     CASE_FLT_FN (BUILT_IN_SINCOS):
7407       if (! flag_unsafe_math_optimizations)
7408 	break;
7409       target = expand_builtin_sincos (exp);
7410       if (target)
7411 	return target;
7412       break;
7413 
7414     case BUILT_IN_APPLY_ARGS:
7415       return expand_builtin_apply_args ();
7416 
7417       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7418 	 FUNCTION with a copy of the parameters described by
7419 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
7420 	 allocated on the stack into which is stored all the registers
7421 	 that might possibly be used for returning the result of a
7422 	 function.  ARGUMENTS is the value returned by
7423 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
7424 	 arguments that must be copied.  ??? How should this value be
7425 	 computed?  We'll also need a safe worst case value for varargs
7426 	 functions.  */
7427     case BUILT_IN_APPLY:
7428       if (!validate_arglist (exp, POINTER_TYPE,
7429 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7430 	  && !validate_arglist (exp, REFERENCE_TYPE,
7431 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7432 	return const0_rtx;
7433       else
7434 	{
7435 	  rtx ops[3];
7436 
7437 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7438 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7439 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7440 
7441 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
7442 	}
7443 
7444       /* __builtin_return (RESULT) causes the function to return the
7445 	 value described by RESULT.  RESULT is address of the block of
7446 	 memory returned by __builtin_apply.  */
7447     case BUILT_IN_RETURN:
7448       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7449 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7450       return const0_rtx;
7451 
7452     case BUILT_IN_SAVEREGS:
7453       return expand_builtin_saveregs ();
7454 
7455     case BUILT_IN_VA_ARG_PACK:
7456       /* All valid uses of __builtin_va_arg_pack () are removed during
7457 	 inlining.  */
7458       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7459       return const0_rtx;
7460 
7461     case BUILT_IN_VA_ARG_PACK_LEN:
7462       /* All valid uses of __builtin_va_arg_pack_len () are removed during
7463 	 inlining.  */
7464       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7465       return const0_rtx;
7466 
7467       /* Return the address of the first anonymous stack arg.  */
7468     case BUILT_IN_NEXT_ARG:
7469       if (fold_builtin_next_arg (exp, false))
7470 	return const0_rtx;
7471       return expand_builtin_next_arg ();
7472 
7473     case BUILT_IN_CLEAR_CACHE:
7474       target = expand_builtin___clear_cache (exp);
7475       if (target)
7476         return target;
7477       break;
7478 
7479     case BUILT_IN_CLASSIFY_TYPE:
7480       return expand_builtin_classify_type (exp);
7481 
7482     case BUILT_IN_CONSTANT_P:
7483       return const0_rtx;
7484 
7485     case BUILT_IN_FRAME_ADDRESS:
7486     case BUILT_IN_RETURN_ADDRESS:
7487       return expand_builtin_frame_address (fndecl, exp);
7488 
7489     /* Returns the address of the area where the structure is returned.
7490        0 otherwise.  */
7491     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7492       if (call_expr_nargs (exp) != 0
7493 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7494 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7495 	return const0_rtx;
7496       else
7497 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7498 
7499     CASE_BUILT_IN_ALLOCA:
7500       target = expand_builtin_alloca (exp);
7501       if (target)
7502 	return target;
7503       break;
7504 
7505     case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7506       return expand_asan_emit_allocas_unpoison (exp);
7507 
7508     case BUILT_IN_STACK_SAVE:
7509       return expand_stack_save ();
7510 
7511     case BUILT_IN_STACK_RESTORE:
7512       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7513       return const0_rtx;
7514 
7515     case BUILT_IN_BSWAP16:
7516     case BUILT_IN_BSWAP32:
7517     case BUILT_IN_BSWAP64:
7518       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7519       if (target)
7520 	return target;
7521       break;
7522 
7523     CASE_INT_FN (BUILT_IN_FFS):
7524       target = expand_builtin_unop (target_mode, exp, target,
7525 				    subtarget, ffs_optab);
7526       if (target)
7527 	return target;
7528       break;
7529 
7530     CASE_INT_FN (BUILT_IN_CLZ):
7531       target = expand_builtin_unop (target_mode, exp, target,
7532 				    subtarget, clz_optab);
7533       if (target)
7534 	return target;
7535       break;
7536 
7537     CASE_INT_FN (BUILT_IN_CTZ):
7538       target = expand_builtin_unop (target_mode, exp, target,
7539 				    subtarget, ctz_optab);
7540       if (target)
7541 	return target;
7542       break;
7543 
7544     CASE_INT_FN (BUILT_IN_CLRSB):
7545       target = expand_builtin_unop (target_mode, exp, target,
7546 				    subtarget, clrsb_optab);
7547       if (target)
7548 	return target;
7549       break;
7550 
7551     CASE_INT_FN (BUILT_IN_POPCOUNT):
7552       target = expand_builtin_unop (target_mode, exp, target,
7553 				    subtarget, popcount_optab);
7554       if (target)
7555 	return target;
7556       break;
7557 
7558     CASE_INT_FN (BUILT_IN_PARITY):
7559       target = expand_builtin_unop (target_mode, exp, target,
7560 				    subtarget, parity_optab);
7561       if (target)
7562 	return target;
7563       break;
7564 
7565     case BUILT_IN_STRLEN:
7566       target = expand_builtin_strlen (exp, target, target_mode);
7567       if (target)
7568 	return target;
7569       break;
7570 
7571     case BUILT_IN_STRNLEN:
7572       target = expand_builtin_strnlen (exp, target, target_mode);
7573       if (target)
7574 	return target;
7575       break;
7576 
7577     case BUILT_IN_STRCAT:
7578       target = expand_builtin_strcat (exp, target);
7579       if (target)
7580 	return target;
7581       break;
7582 
7583     case BUILT_IN_STRCPY:
7584       target = expand_builtin_strcpy (exp, target);
7585       if (target)
7586 	return target;
7587       break;
7588 
7589     case BUILT_IN_STRNCAT:
7590       target = expand_builtin_strncat (exp, target);
7591       if (target)
7592 	return target;
7593       break;
7594 
7595     case BUILT_IN_STRNCPY:
7596       target = expand_builtin_strncpy (exp, target);
7597       if (target)
7598 	return target;
7599       break;
7600 
7601     case BUILT_IN_STPCPY:
7602       target = expand_builtin_stpcpy (exp, target, mode);
7603       if (target)
7604 	return target;
7605       break;
7606 
7607     case BUILT_IN_STPNCPY:
7608       target = expand_builtin_stpncpy (exp, target);
7609       if (target)
7610 	return target;
7611       break;
7612 
7613     case BUILT_IN_MEMCHR:
7614       target = expand_builtin_memchr (exp, target);
7615       if (target)
7616 	return target;
7617       break;
7618 
7619     case BUILT_IN_MEMCPY:
7620       target = expand_builtin_memcpy (exp, target);
7621       if (target)
7622 	return target;
7623       break;
7624 
7625     case BUILT_IN_MEMMOVE:
7626       target = expand_builtin_memmove (exp, target);
7627       if (target)
7628 	return target;
7629       break;
7630 
7631     case BUILT_IN_MEMPCPY:
7632       target = expand_builtin_mempcpy (exp, target);
7633       if (target)
7634 	return target;
7635       break;
7636 
7637     case BUILT_IN_MEMSET:
7638       target = expand_builtin_memset (exp, target, mode);
7639       if (target)
7640 	return target;
7641       break;
7642 
7643     case BUILT_IN_BZERO:
7644       target = expand_builtin_bzero (exp);
7645       if (target)
7646 	return target;
7647       break;
7648 
7649     /* Expand it as BUILT_IN_MEMCMP_EQ first.  If not successful, change it
7650        back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
7651        when changing it to a strcmp call.  */
7652     case BUILT_IN_STRCMP_EQ:
7653       target = expand_builtin_memcmp (exp, target, true);
7654       if (target)
7655 	return target;
7656 
7657       /* Change this call back to a BUILT_IN_STRCMP.  */
7658       TREE_OPERAND (exp, 1)
7659 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7660 
7661       /* Delete the last parameter.  */
7662       unsigned int i;
7663       vec<tree, va_gc> *arg_vec;
7664       vec_alloc (arg_vec, 2);
7665       for (i = 0; i < 2; i++)
7666 	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7667       exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7668       /* FALLTHROUGH */
7669 
7670     case BUILT_IN_STRCMP:
7671       target = expand_builtin_strcmp (exp, target);
7672       if (target)
7673 	return target;
7674       break;
7675 
7676     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7677        back to a BUILT_IN_STRNCMP.  */
7678     case BUILT_IN_STRNCMP_EQ:
7679       target = expand_builtin_memcmp (exp, target, true);
7680       if (target)
7681 	return target;
7682 
7683       /* Change it back to a BUILT_IN_STRNCMP.  */
7684       TREE_OPERAND (exp, 1)
7685 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7686       /* FALLTHROUGH */
7687 
7688     case BUILT_IN_STRNCMP:
7689       target = expand_builtin_strncmp (exp, target, mode);
7690       if (target)
7691 	return target;
7692       break;
7693 
7694     case BUILT_IN_BCMP:
7695     case BUILT_IN_MEMCMP:
7696     case BUILT_IN_MEMCMP_EQ:
7697       target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7698       if (target)
7699 	return target;
7700       if (fcode == BUILT_IN_MEMCMP_EQ)
7701 	{
7702 	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7703 	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7704 	}
7705       break;
7706 
7707     case BUILT_IN_SETJMP:
7708       /* This should have been lowered to the builtins below.  */
7709       gcc_unreachable ();
7710 
7711     case BUILT_IN_SETJMP_SETUP:
7712       /* __builtin_setjmp_setup is passed a pointer to an array of five words
7713           and the receiver label.  */
7714       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7715 	{
7716 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7717 				      VOIDmode, EXPAND_NORMAL);
7718 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7719 	  rtx_insn *label_r = label_rtx (label);
7720 
7721 	  /* This is copied from the handling of non-local gotos.  */
7722 	  expand_builtin_setjmp_setup (buf_addr, label_r);
7723 	  nonlocal_goto_handler_labels
7724 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
7725 				 nonlocal_goto_handler_labels);
7726 	  /* ??? Do not let expand_label treat us as such since we would
7727 	     not want to be both on the list of non-local labels and on
7728 	     the list of forced labels.  */
7729 	  FORCED_LABEL (label) = 0;
7730 	  return const0_rtx;
7731 	}
7732       break;
7733 
7734     case BUILT_IN_SETJMP_RECEIVER:
7735        /* __builtin_setjmp_receiver is passed the receiver label.  */
7736       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7737 	{
7738 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7739 	  rtx_insn *label_r = label_rtx (label);
7740 
7741 	  expand_builtin_setjmp_receiver (label_r);
7742 	  return const0_rtx;
7743 	}
7744       break;
7745 
7746       /* __builtin_longjmp is passed a pointer to an array of five words.
7747 	 It's similar to the C library longjmp function but works with
7748 	 __builtin_setjmp above.  */
7749     case BUILT_IN_LONGJMP:
7750       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7751 	{
7752 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7753 				      VOIDmode, EXPAND_NORMAL);
7754 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7755 
7756 	  if (value != const1_rtx)
7757 	    {
7758 	      error ("%<__builtin_longjmp%> second argument must be 1");
7759 	      return const0_rtx;
7760 	    }
7761 
7762 	  expand_builtin_longjmp (buf_addr, value);
7763 	  return const0_rtx;
7764 	}
7765       break;
7766 
7767     case BUILT_IN_NONLOCAL_GOTO:
7768       target = expand_builtin_nonlocal_goto (exp);
7769       if (target)
7770 	return target;
7771       break;
7772 
7773       /* This updates the setjmp buffer that is its argument with the value
7774 	 of the current stack pointer.  */
7775     case BUILT_IN_UPDATE_SETJMP_BUF:
7776       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7777 	{
7778 	  rtx buf_addr
7779 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
7780 
7781 	  expand_builtin_update_setjmp_buf (buf_addr);
7782 	  return const0_rtx;
7783 	}
7784       break;
7785 
7786     case BUILT_IN_TRAP:
7787       expand_builtin_trap ();
7788       return const0_rtx;
7789 
7790     case BUILT_IN_UNREACHABLE:
7791       expand_builtin_unreachable ();
7792       return const0_rtx;
7793 
7794     CASE_FLT_FN (BUILT_IN_SIGNBIT):
7795     case BUILT_IN_SIGNBITD32:
7796     case BUILT_IN_SIGNBITD64:
7797     case BUILT_IN_SIGNBITD128:
7798       target = expand_builtin_signbit (exp, target);
7799       if (target)
7800 	return target;
7801       break;
7802 
7803       /* Various hooks for the DWARF 2 __throw routine.  */
7804     case BUILT_IN_UNWIND_INIT:
7805       expand_builtin_unwind_init ();
7806       return const0_rtx;
7807     case BUILT_IN_DWARF_CFA:
7808       return virtual_cfa_rtx;
7809 #ifdef DWARF2_UNWIND_INFO
7810     case BUILT_IN_DWARF_SP_COLUMN:
7811       return expand_builtin_dwarf_sp_column ();
7812     case BUILT_IN_INIT_DWARF_REG_SIZES:
7813       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7814       return const0_rtx;
7815 #endif
7816     case BUILT_IN_FROB_RETURN_ADDR:
7817       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7818     case BUILT_IN_EXTRACT_RETURN_ADDR:
7819       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7820     case BUILT_IN_EH_RETURN:
7821       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7822 				CALL_EXPR_ARG (exp, 1));
7823       return const0_rtx;
7824     case BUILT_IN_EH_RETURN_DATA_REGNO:
7825       return expand_builtin_eh_return_data_regno (exp);
7826     case BUILT_IN_EXTEND_POINTER:
7827       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7828     case BUILT_IN_EH_POINTER:
7829       return expand_builtin_eh_pointer (exp);
7830     case BUILT_IN_EH_FILTER:
7831       return expand_builtin_eh_filter (exp);
7832     case BUILT_IN_EH_COPY_VALUES:
7833       return expand_builtin_eh_copy_values (exp);
7834 
7835     case BUILT_IN_VA_START:
7836       return expand_builtin_va_start (exp);
7837     case BUILT_IN_VA_END:
7838       return expand_builtin_va_end (exp);
7839     case BUILT_IN_VA_COPY:
7840       return expand_builtin_va_copy (exp);
7841     case BUILT_IN_EXPECT:
7842       return expand_builtin_expect (exp, target);
7843     case BUILT_IN_EXPECT_WITH_PROBABILITY:
7844       return expand_builtin_expect_with_probability (exp, target);
7845     case BUILT_IN_ASSUME_ALIGNED:
7846       return expand_builtin_assume_aligned (exp, target);
7847     case BUILT_IN_PREFETCH:
7848       expand_builtin_prefetch (exp);
7849       return const0_rtx;
7850 
7851     case BUILT_IN_INIT_TRAMPOLINE:
7852       return expand_builtin_init_trampoline (exp, true);
7853     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7854       return expand_builtin_init_trampoline (exp, false);
7855     case BUILT_IN_ADJUST_TRAMPOLINE:
7856       return expand_builtin_adjust_trampoline (exp);
7857 
7858     case BUILT_IN_INIT_DESCRIPTOR:
7859       return expand_builtin_init_descriptor (exp);
7860     case BUILT_IN_ADJUST_DESCRIPTOR:
7861       return expand_builtin_adjust_descriptor (exp);
7862 
7863     case BUILT_IN_FORK:
7864     case BUILT_IN_EXECL:
7865     case BUILT_IN_EXECV:
7866     case BUILT_IN_EXECLP:
7867     case BUILT_IN_EXECLE:
7868     case BUILT_IN_EXECVP:
7869     case BUILT_IN_EXECVE:
7870       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7871       if (target)
7872 	return target;
7873       break;
7874 
7875     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7876     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7877     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7878     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7879     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7880       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7881       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7882       if (target)
7883 	return target;
7884       break;
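      /* Sketch of the mode selection above (illustrative, assuming the
         usual 8-bit target byte): the _1/_2/_4/_8/_16 variants have
         consecutive function codes, so FCODE minus the _1 code is log2 of
         the operand size in bytes:

           diff 0 -> QImode (1 byte)    diff 1 -> HImode (2 bytes)
           diff 2 -> SImode (4 bytes)   diff 3 -> DImode (8 bytes)
           diff 4 -> TImode (16 bytes)

         Every sized case below uses the same indexing trick.  */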
7885 
7886     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7887     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7888     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7889     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7890     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7891       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7892       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7893       if (target)
7894 	return target;
7895       break;
7896 
7897     case BUILT_IN_SYNC_FETCH_AND_OR_1:
7898     case BUILT_IN_SYNC_FETCH_AND_OR_2:
7899     case BUILT_IN_SYNC_FETCH_AND_OR_4:
7900     case BUILT_IN_SYNC_FETCH_AND_OR_8:
7901     case BUILT_IN_SYNC_FETCH_AND_OR_16:
7902       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7903       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7904       if (target)
7905 	return target;
7906       break;
7907 
7908     case BUILT_IN_SYNC_FETCH_AND_AND_1:
7909     case BUILT_IN_SYNC_FETCH_AND_AND_2:
7910     case BUILT_IN_SYNC_FETCH_AND_AND_4:
7911     case BUILT_IN_SYNC_FETCH_AND_AND_8:
7912     case BUILT_IN_SYNC_FETCH_AND_AND_16:
7913       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7914       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7915       if (target)
7916 	return target;
7917       break;
7918 
7919     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7920     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7921     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7922     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7923     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7924       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7925       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7926       if (target)
7927 	return target;
7928       break;
7929 
7930     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7931     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7932     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7933     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7934     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7935       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7936       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7937       if (target)
7938 	return target;
7939       break;
7940 
7941     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7942     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7943     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7944     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7945     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7946       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7947       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7948       if (target)
7949 	return target;
7950       break;
7951 
7952     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7953     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7954     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7955     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7956     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7957       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7958       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7959       if (target)
7960 	return target;
7961       break;
7962 
7963     case BUILT_IN_SYNC_OR_AND_FETCH_1:
7964     case BUILT_IN_SYNC_OR_AND_FETCH_2:
7965     case BUILT_IN_SYNC_OR_AND_FETCH_4:
7966     case BUILT_IN_SYNC_OR_AND_FETCH_8:
7967     case BUILT_IN_SYNC_OR_AND_FETCH_16:
7968       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7969       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7970       if (target)
7971 	return target;
7972       break;
7973 
7974     case BUILT_IN_SYNC_AND_AND_FETCH_1:
7975     case BUILT_IN_SYNC_AND_AND_FETCH_2:
7976     case BUILT_IN_SYNC_AND_AND_FETCH_4:
7977     case BUILT_IN_SYNC_AND_AND_FETCH_8:
7978     case BUILT_IN_SYNC_AND_AND_FETCH_16:
7979       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7980       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7981       if (target)
7982 	return target;
7983       break;
7984 
7985     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7986     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7987     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7988     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7989     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7990       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7991       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7992       if (target)
7993 	return target;
7994       break;
7995 
7996     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7997     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7998     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7999     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8000     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8001       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8002       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8003       if (target)
8004 	return target;
8005       break;
8006 
8007     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8008     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8009     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8010     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8011     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8012       if (mode == VOIDmode)
8013 	mode = TYPE_MODE (boolean_type_node);
8014       if (!target || !register_operand (target, mode))
8015 	target = gen_reg_rtx (mode);
8016 
8017       mode = get_builtin_sync_mode
8018 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8019       target = expand_builtin_compare_and_swap (mode, exp, true, target);
8020       if (target)
8021 	return target;
8022       break;
8023 
8024     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8025     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8026     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8027     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8028     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8029       mode = get_builtin_sync_mode
8030 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8031       target = expand_builtin_compare_and_swap (mode, exp, false, target);
8032       if (target)
8033 	return target;
8034       break;
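      /* Illustrative difference between the two compare-and-swap forms
         (not part of GCC):

           bool ok  = __sync_bool_compare_and_swap (&v, expected, desired);
           int  old = __sync_val_compare_and_swap (&v, expected, desired);

         The bool form returns whether the swap happened, which is why the
         case above forces TARGET into a register of boolean mode; the val
         form returns the prior contents of the location.  */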
8035 
8036     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8037     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8038     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8039     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8040     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8041       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8042       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8043       if (target)
8044 	return target;
8045       break;
8046 
8047     case BUILT_IN_SYNC_LOCK_RELEASE_1:
8048     case BUILT_IN_SYNC_LOCK_RELEASE_2:
8049     case BUILT_IN_SYNC_LOCK_RELEASE_4:
8050     case BUILT_IN_SYNC_LOCK_RELEASE_8:
8051     case BUILT_IN_SYNC_LOCK_RELEASE_16:
8052       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8053       expand_builtin_sync_lock_release (mode, exp);
8054       return const0_rtx;
8055 
8056     case BUILT_IN_SYNC_SYNCHRONIZE:
8057       expand_builtin_sync_synchronize ();
8058       return const0_rtx;
8059 
8060     case BUILT_IN_ATOMIC_EXCHANGE_1:
8061     case BUILT_IN_ATOMIC_EXCHANGE_2:
8062     case BUILT_IN_ATOMIC_EXCHANGE_4:
8063     case BUILT_IN_ATOMIC_EXCHANGE_8:
8064     case BUILT_IN_ATOMIC_EXCHANGE_16:
8065       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8066       target = expand_builtin_atomic_exchange (mode, exp, target);
8067       if (target)
8068 	return target;
8069       break;
8070 
8071     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8072     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8073     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8074     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8075     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8076       {
8077 	unsigned int nargs, z;
8078 	vec<tree, va_gc> *vec;
8079 
8080 	mode =
8081 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8082 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8083 	if (target)
8084 	  return target;
8085 
8086 	/* If this is turned into an external library call, the weak parameter
8087 	   must be dropped to match the expected parameter list.  */
8088 	nargs = call_expr_nargs (exp);
8089 	vec_alloc (vec, nargs - 1);
8090 	for (z = 0; z < 3; z++)
8091 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8092 	/* Skip the boolean weak parameter.  */
8093 	for (z = 4; z < 6; z++)
8094 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8095 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8096 	break;
8097       }
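      /* Sketch of the argument surgery above (illustrative): the builtin

           __atomic_compare_exchange_N (ptr, expected, desired,
                                        weak, success_order, failure_order)

         takes six arguments, but the external library routine used when no
         instruction pattern exists takes only five, so the CALL_EXPR is
         rebuilt from arguments 0-2 and 4-5, dropping the boolean WEAK
         argument at index 3 to match the library prototype.  */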
8098 
8099     case BUILT_IN_ATOMIC_LOAD_1:
8100     case BUILT_IN_ATOMIC_LOAD_2:
8101     case BUILT_IN_ATOMIC_LOAD_4:
8102     case BUILT_IN_ATOMIC_LOAD_8:
8103     case BUILT_IN_ATOMIC_LOAD_16:
8104       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8105       target = expand_builtin_atomic_load (mode, exp, target);
8106       if (target)
8107 	return target;
8108       break;
8109 
8110     case BUILT_IN_ATOMIC_STORE_1:
8111     case BUILT_IN_ATOMIC_STORE_2:
8112     case BUILT_IN_ATOMIC_STORE_4:
8113     case BUILT_IN_ATOMIC_STORE_8:
8114     case BUILT_IN_ATOMIC_STORE_16:
8115       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8116       target = expand_builtin_atomic_store (mode, exp);
8117       if (target)
8118 	return const0_rtx;
8119       break;
8120 
8121     case BUILT_IN_ATOMIC_ADD_FETCH_1:
8122     case BUILT_IN_ATOMIC_ADD_FETCH_2:
8123     case BUILT_IN_ATOMIC_ADD_FETCH_4:
8124     case BUILT_IN_ATOMIC_ADD_FETCH_8:
8125     case BUILT_IN_ATOMIC_ADD_FETCH_16:
8126       {
8127 	enum built_in_function lib;
8128 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8129 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8130 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8131 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8132 						 ignore, lib);
8133 	if (target)
8134 	  return target;
8135 	break;
8136       }
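      /* Illustrative identity behind the LIB fallback above (not part of
         GCC):

           __atomic_add_fetch (p, v, mo) == __atomic_fetch_add (p, v, mo) + v

         so when no add_fetch pattern exists, expand_builtin_atomic_fetch_op
         can fall back on the corresponding BUILT_IN_ATOMIC_FETCH_ADD_* code
         and re-apply the operation to its result.  The SUB, AND, NAND, XOR
         and OR cases below follow the same scheme.  */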
8137     case BUILT_IN_ATOMIC_SUB_FETCH_1:
8138     case BUILT_IN_ATOMIC_SUB_FETCH_2:
8139     case BUILT_IN_ATOMIC_SUB_FETCH_4:
8140     case BUILT_IN_ATOMIC_SUB_FETCH_8:
8141     case BUILT_IN_ATOMIC_SUB_FETCH_16:
8142       {
8143 	enum built_in_function lib;
8144 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8145 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8146 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8147 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8148 						 ignore, lib);
8149 	if (target)
8150 	  return target;
8151 	break;
8152       }
8153     case BUILT_IN_ATOMIC_AND_FETCH_1:
8154     case BUILT_IN_ATOMIC_AND_FETCH_2:
8155     case BUILT_IN_ATOMIC_AND_FETCH_4:
8156     case BUILT_IN_ATOMIC_AND_FETCH_8:
8157     case BUILT_IN_ATOMIC_AND_FETCH_16:
8158       {
8159 	enum built_in_function lib;
8160 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8161 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8162 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8163 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8164 						 ignore, lib);
8165 	if (target)
8166 	  return target;
8167 	break;
8168       }
8169     case BUILT_IN_ATOMIC_NAND_FETCH_1:
8170     case BUILT_IN_ATOMIC_NAND_FETCH_2:
8171     case BUILT_IN_ATOMIC_NAND_FETCH_4:
8172     case BUILT_IN_ATOMIC_NAND_FETCH_8:
8173     case BUILT_IN_ATOMIC_NAND_FETCH_16:
8174       {
8175 	enum built_in_function lib;
8176 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8177 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8178 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8179 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8180 						 ignore, lib);
8181 	if (target)
8182 	  return target;
8183 	break;
8184       }
8185     case BUILT_IN_ATOMIC_XOR_FETCH_1:
8186     case BUILT_IN_ATOMIC_XOR_FETCH_2:
8187     case BUILT_IN_ATOMIC_XOR_FETCH_4:
8188     case BUILT_IN_ATOMIC_XOR_FETCH_8:
8189     case BUILT_IN_ATOMIC_XOR_FETCH_16:
8190       {
8191 	enum built_in_function lib;
8192 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8193 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8194 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8195 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8196 						 ignore, lib);
8197 	if (target)
8198 	  return target;
8199 	break;
8200       }
8201     case BUILT_IN_ATOMIC_OR_FETCH_1:
8202     case BUILT_IN_ATOMIC_OR_FETCH_2:
8203     case BUILT_IN_ATOMIC_OR_FETCH_4:
8204     case BUILT_IN_ATOMIC_OR_FETCH_8:
8205     case BUILT_IN_ATOMIC_OR_FETCH_16:
8206       {
8207 	enum built_in_function lib;
8208 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8209 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8210 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8211 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8212 						 ignore, lib);
8213 	if (target)
8214 	  return target;
8215 	break;
8216       }
8217     case BUILT_IN_ATOMIC_FETCH_ADD_1:
8218     case BUILT_IN_ATOMIC_FETCH_ADD_2:
8219     case BUILT_IN_ATOMIC_FETCH_ADD_4:
8220     case BUILT_IN_ATOMIC_FETCH_ADD_8:
8221     case BUILT_IN_ATOMIC_FETCH_ADD_16:
8222       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8223       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8224 					       ignore, BUILT_IN_NONE);
8225       if (target)
8226 	return target;
8227       break;
8228 
8229     case BUILT_IN_ATOMIC_FETCH_SUB_1:
8230     case BUILT_IN_ATOMIC_FETCH_SUB_2:
8231     case BUILT_IN_ATOMIC_FETCH_SUB_4:
8232     case BUILT_IN_ATOMIC_FETCH_SUB_8:
8233     case BUILT_IN_ATOMIC_FETCH_SUB_16:
8234       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8235       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8236 					       ignore, BUILT_IN_NONE);
8237       if (target)
8238 	return target;
8239       break;
8240 
8241     case BUILT_IN_ATOMIC_FETCH_AND_1:
8242     case BUILT_IN_ATOMIC_FETCH_AND_2:
8243     case BUILT_IN_ATOMIC_FETCH_AND_4:
8244     case BUILT_IN_ATOMIC_FETCH_AND_8:
8245     case BUILT_IN_ATOMIC_FETCH_AND_16:
8246       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8247       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8248 					       ignore, BUILT_IN_NONE);
8249       if (target)
8250 	return target;
8251       break;
8252 
8253     case BUILT_IN_ATOMIC_FETCH_NAND_1:
8254     case BUILT_IN_ATOMIC_FETCH_NAND_2:
8255     case BUILT_IN_ATOMIC_FETCH_NAND_4:
8256     case BUILT_IN_ATOMIC_FETCH_NAND_8:
8257     case BUILT_IN_ATOMIC_FETCH_NAND_16:
8258       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8259       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8260 					       ignore, BUILT_IN_NONE);
8261       if (target)
8262 	return target;
8263       break;
8264 
8265     case BUILT_IN_ATOMIC_FETCH_XOR_1:
8266     case BUILT_IN_ATOMIC_FETCH_XOR_2:
8267     case BUILT_IN_ATOMIC_FETCH_XOR_4:
8268     case BUILT_IN_ATOMIC_FETCH_XOR_8:
8269     case BUILT_IN_ATOMIC_FETCH_XOR_16:
8270       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8271       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8272 					       ignore, BUILT_IN_NONE);
8273       if (target)
8274 	return target;
8275       break;
8276 
8277     case BUILT_IN_ATOMIC_FETCH_OR_1:
8278     case BUILT_IN_ATOMIC_FETCH_OR_2:
8279     case BUILT_IN_ATOMIC_FETCH_OR_4:
8280     case BUILT_IN_ATOMIC_FETCH_OR_8:
8281     case BUILT_IN_ATOMIC_FETCH_OR_16:
8282       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8283       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8284 					       ignore, BUILT_IN_NONE);
8285       if (target)
8286 	return target;
8287       break;
8288 
8289     case BUILT_IN_ATOMIC_TEST_AND_SET:
8290       return expand_builtin_atomic_test_and_set (exp, target);
8291 
8292     case BUILT_IN_ATOMIC_CLEAR:
8293       return expand_builtin_atomic_clear (exp);
8294 
8295     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8296       return expand_builtin_atomic_always_lock_free (exp);
8297 
8298     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8299       target = expand_builtin_atomic_is_lock_free (exp);
8300       if (target)
8301         return target;
8302       break;
8303 
8304     case BUILT_IN_ATOMIC_THREAD_FENCE:
8305       expand_builtin_atomic_thread_fence (exp);
8306       return const0_rtx;
8307 
8308     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8309       expand_builtin_atomic_signal_fence (exp);
8310       return const0_rtx;
8311 
8312     case BUILT_IN_OBJECT_SIZE:
8313       return expand_builtin_object_size (exp);
8314 
8315     case BUILT_IN_MEMCPY_CHK:
8316     case BUILT_IN_MEMPCPY_CHK:
8317     case BUILT_IN_MEMMOVE_CHK:
8318     case BUILT_IN_MEMSET_CHK:
8319       target = expand_builtin_memory_chk (exp, target, mode, fcode);
8320       if (target)
8321 	return target;
8322       break;
8323 
8324     case BUILT_IN_STRCPY_CHK:
8325     case BUILT_IN_STPCPY_CHK:
8326     case BUILT_IN_STRNCPY_CHK:
8327     case BUILT_IN_STPNCPY_CHK:
8328     case BUILT_IN_STRCAT_CHK:
8329     case BUILT_IN_STRNCAT_CHK:
8330     case BUILT_IN_SNPRINTF_CHK:
8331     case BUILT_IN_VSNPRINTF_CHK:
8332       maybe_emit_chk_warning (exp, fcode);
8333       break;
8334 
8335     case BUILT_IN_SPRINTF_CHK:
8336     case BUILT_IN_VSPRINTF_CHK:
8337       maybe_emit_sprintf_chk_warning (exp, fcode);
8338       break;
8339 
8340     case BUILT_IN_FREE:
8341       if (warn_free_nonheap_object)
8342 	maybe_emit_free_warning (exp);
8343       break;
8344 
8345     case BUILT_IN_THREAD_POINTER:
8346       return expand_builtin_thread_pointer (exp, target);
8347 
8348     case BUILT_IN_SET_THREAD_POINTER:
8349       expand_builtin_set_thread_pointer (exp);
8350       return const0_rtx;
8351 
8352     case BUILT_IN_ACC_ON_DEVICE:
8353       /* Do a library call if we failed to expand the builtin when
8354 	 folding.  */
8355       break;
8356 
8357     case BUILT_IN_GOACC_PARLEVEL_ID:
8358     case BUILT_IN_GOACC_PARLEVEL_SIZE:
8359       return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8360 
8361     case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8362       return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8363 
8364     case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8365     case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8366     case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8367     case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8368     case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8369       mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8370       return expand_speculation_safe_value (mode, exp, target, ignore);
8371 
8372     default:	/* Just do a library call if the builtin is unknown.  */
8373       break;
8374     }
8375 
8376   /* The switch statement above can drop through to cause the function
8377      to be called normally.  */
8378   return expand_call (exp, target, ignore);
8379 }
8380 
8381 /* Determine whether a tree node represents a call to a built-in
8382    function.  If the tree T is a call to a built-in function with
8383    the right number of arguments of the appropriate types, return
8384    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8385    Otherwise the return value is END_BUILTINS.  */
8386 
8387 enum built_in_function
8388 builtin_mathfn_code (const_tree t)
8389 {
8390   const_tree fndecl, arg, parmlist;
8391   const_tree argtype, parmtype;
8392   const_call_expr_arg_iterator iter;
8393 
8394   if (TREE_CODE (t) != CALL_EXPR)
8395     return END_BUILTINS;
8396 
8397   fndecl = get_callee_fndecl (t);
8398   if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8399     return END_BUILTINS;
8400 
8401   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8402   init_const_call_expr_arg_iterator (t, &iter);
8403   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8404     {
8405       /* If a function doesn't take a variable number of arguments,
8406 	 the last element in the list will have type `void'.  */
8407       parmtype = TREE_VALUE (parmlist);
8408       if (VOID_TYPE_P (parmtype))
8409 	{
8410 	  if (more_const_call_expr_args_p (&iter))
8411 	    return END_BUILTINS;
8412 	  return DECL_FUNCTION_CODE (fndecl);
8413 	}
8414 
8415       if (! more_const_call_expr_args_p (&iter))
8416 	return END_BUILTINS;
8417 
8418       arg = next_const_call_expr_arg (&iter);
8419       argtype = TREE_TYPE (arg);
8420 
8421       if (SCALAR_FLOAT_TYPE_P (parmtype))
8422 	{
8423 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
8424 	    return END_BUILTINS;
8425 	}
8426       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8427 	{
8428 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
8429 	    return END_BUILTINS;
8430 	}
8431       else if (POINTER_TYPE_P (parmtype))
8432 	{
8433 	  if (! POINTER_TYPE_P (argtype))
8434 	    return END_BUILTINS;
8435 	}
8436       else if (INTEGRAL_TYPE_P (parmtype))
8437 	{
8438 	  if (! INTEGRAL_TYPE_P (argtype))
8439 	    return END_BUILTINS;
8440 	}
8441       else
8442 	return END_BUILTINS;
8443     }
8444 
8445   /* Variable-length argument list.  */
8446   return DECL_FUNCTION_CODE (fndecl);
8447 }
8448 
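/* Illustrative use of builtin_mathfn_code (hypothetical caller, not part
   of GCC): for a CALL_EXPR T representing sqrt (x) with a double operand,

     if (builtin_mathfn_code (t) == BUILT_IN_SQRT)
       ...

   matches, whereas a call whose argument types disagree with the declared
   parameter types (say, a pointer where a double is expected) yields
   END_BUILTINS.  */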
8449 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8450    evaluate to a constant.  */
8451 
8452 static tree
8453 fold_builtin_constant_p (tree arg)
8454 {
8455   /* We return 1 for a numeric type that's known to be a constant
8456      value at compile-time or for an aggregate type that's a
8457      literal constant.  */
8458   STRIP_NOPS (arg);
8459 
8460   /* If we know this is a constant, return the constant one.  */
8461   if (CONSTANT_CLASS_P (arg)
8462       || (TREE_CODE (arg) == CONSTRUCTOR
8463 	  && TREE_CONSTANT (arg)))
8464     return integer_one_node;
8465   if (TREE_CODE (arg) == ADDR_EXPR)
8466     {
8467        tree op = TREE_OPERAND (arg, 0);
8468        if (TREE_CODE (op) == STRING_CST
8469 	   || (TREE_CODE (op) == ARRAY_REF
8470 	       && integer_zerop (TREE_OPERAND (op, 1))
8471 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8472 	 return integer_one_node;
8473     }
8474 
8475   /* If this expression has side effects, show we don't know it to be a
8476      constant.  Likewise if it's a pointer or aggregate type, since in
8477      those cases we only want literals, which are only optimized
8478      when generating RTL, not later.
8479      And finally, if we are compiling an initializer, not code, we
8480      need to return a definite result now; there's not going to be any
8481      more optimization done.  */
8482   if (TREE_SIDE_EFFECTS (arg)
8483       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8484       || POINTER_TYPE_P (TREE_TYPE (arg))
8485       || cfun == 0
8486       || folding_initializer
8487       || force_folding_builtin_constant_p)
8488     return integer_zero_node;
8489 
8490   return NULL_TREE;
8491 }
8492 
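/* Worked examples for the folding above (illustrative, not part of GCC):

     __builtin_constant_p (42)     ->  1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  ->  1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    ->  0  (side effects)
     __builtin_constant_p (x)      ->  NULL_TREE, i.e. deferred, since a
                                        later pass may still prove X
                                        constant; folded to 0 once no
                                        further folding will happen
                                        (e.g. in initializers).  */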
8493 /* Create builtin_expect or builtin_expect_with_probability
8494    with PRED and EXPECTED as its arguments and return it as a truthvalue.
8495    The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8496    argument; builtin_expect_with_probability instead uses the third argument
8497    as a PROBABILITY value.  */
8498 
8499 static tree
8500 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8501 				tree predictor, tree probability)
8502 {
8503   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8504 
8505   fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8506 			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
8507   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8508   ret_type = TREE_TYPE (TREE_TYPE (fn));
8509   pred_type = TREE_VALUE (arg_types);
8510   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8511 
8512   pred = fold_convert_loc (loc, pred_type, pred);
8513   expected = fold_convert_loc (loc, expected_type, expected);
8514 
8515   if (probability)
8516     call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8517   else
8518     call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8519 				     predictor);
8520 
8521   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8522 		 build_int_cst (ret_type, 0));
8523 }
8524 
8525 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
8526    NULL_TREE if no simplification is possible.  */
8527 
8528 tree
8529 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8530 		     tree arg3)
8531 {
8532   tree inner, fndecl, inner_arg0;
8533   enum tree_code code;
8534 
8535   /* Distribute the expected value over short-circuiting operators.
8536      See through the cast from truthvalue_type_node to long.  */
8537   inner_arg0 = arg0;
8538   while (CONVERT_EXPR_P (inner_arg0)
8539 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8540 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8541     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8542 
8543   /* If this is a builtin_expect within a builtin_expect, keep the
8544      inner one.  See through a comparison against a constant.  It
8545      might have been added to create a truthvalue.  */
8546   inner = inner_arg0;
8547 
8548   if (COMPARISON_CLASS_P (inner)
8549       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8550     inner = TREE_OPERAND (inner, 0);
8551 
8552   if (TREE_CODE (inner) == CALL_EXPR
8553       && (fndecl = get_callee_fndecl (inner))
8554       && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8555 	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8556     return arg0;
8557 
8558   inner = inner_arg0;
8559   code = TREE_CODE (inner);
8560   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8561     {
8562       tree op0 = TREE_OPERAND (inner, 0);
8563       tree op1 = TREE_OPERAND (inner, 1);
8564       arg1 = save_expr (arg1);
8565 
8566       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8567       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8568       inner = build2 (code, TREE_TYPE (inner), op0, op1);
8569 
8570       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8571     }
8572 
8573   /* If the argument isn't invariant then there's nothing else we can do.  */
8574   if (!TREE_CONSTANT (inner_arg0))
8575     return NULL_TREE;
8576 
8577   /* If we expect that a comparison against the argument will fold to
8578      a constant, return the constant.  In practice, this means a true
8579      constant or the address of a non-weak symbol.  */
8580   inner = inner_arg0;
8581   STRIP_NOPS (inner);
8582   if (TREE_CODE (inner) == ADDR_EXPR)
8583     {
8584       do
8585 	{
8586 	  inner = TREE_OPERAND (inner, 0);
8587 	}
8588       while (TREE_CODE (inner) == COMPONENT_REF
8589 	     || TREE_CODE (inner) == ARRAY_REF);
8590       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8591 	return NULL_TREE;
8592     }
8593 
8594   /* Otherwise, ARG0 already has the proper type for the return value.  */
8595   return arg0;
8596 }
8597 
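/* Illustrative effect of the TRUTH_ANDIF/TRUTH_ORIF distribution above
   (not part of GCC):

     __builtin_expect (a && b, 1)
       -> (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   Each short-circuited operand carries the expectation, and ARG1 is
   wrapped in a SAVE_EXPR because it is now used twice.  */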
8598 /* Fold a call to __builtin_classify_type with argument ARG.  */
8599 
8600 static tree
8601 fold_builtin_classify_type (tree arg)
8602 {
8603   if (arg == 0)
8604     return build_int_cst (integer_type_node, no_type_class);
8605 
8606   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8607 }
8608 
8609 /* Fold a call to __builtin_strlen with argument ARG.  */
8610 
8611 static tree
8612 fold_builtin_strlen (location_t loc, tree type, tree arg)
8613 {
8614   if (!validate_arg (arg, POINTER_TYPE))
8615     return NULL_TREE;
8616   else
8617     {
8618       c_strlen_data lendata = { };
8619       tree len = c_strlen (arg, 0, &lendata);
8620 
8621       if (len)
8622 	return fold_convert_loc (loc, type, len);
8623 
8624       if (!lendata.decl)
8625 	c_strlen (arg, 1, &lendata);
8626 
8627       if (lendata.decl)
8628 	{
8629 	  if (EXPR_HAS_LOCATION (arg))
8630 	    loc = EXPR_LOCATION (arg);
8631 	  else if (loc == UNKNOWN_LOCATION)
8632 	    loc = input_location;
8633 	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8634 	}
8635 
8636       return NULL_TREE;
8637     }
8638 }
8639 
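/* Illustrative foldings for the helper above (not part of GCC):

     __builtin_strlen ("hello")  ->  5, converted to TYPE
     __builtin_strlen (p)        ->  NULL_TREE for an unknown string

   For a constant argument with no terminating NUL in the underlying
   object nothing is folded, but warn_string_no_nul diagnoses it.  */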
8640 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
8641 
8642 static tree
8643 fold_builtin_inf (location_t loc, tree type, int warn)
8644 {
8645   REAL_VALUE_TYPE real;
8646 
8647   /* __builtin_inff is intended to be usable to define INFINITY on all
8648      targets.  If an infinity is not available, INFINITY expands "to a
8649      positive constant of type float that overflows at translation
8650      time", footnote "In this case, using INFINITY will violate the
8651      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8652      Thus we pedwarn to ensure this constraint violation is
8653      diagnosed.  */
8654   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8655     pedwarn (loc, 0, "target format does not support infinity");
8656 
8657   real_inf (&real);
8658   return build_real (type, real);
8659 }
8660 
8661 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
8662    NULL_TREE if no simplification can be made.  */
8663 
8664 static tree
8665 fold_builtin_sincos (location_t loc,
8666 		     tree arg0, tree arg1, tree arg2)
8667 {
8668   tree type;
8669   tree fndecl, call = NULL_TREE;
8670 
8671   if (!validate_arg (arg0, REAL_TYPE)
8672       || !validate_arg (arg1, POINTER_TYPE)
8673       || !validate_arg (arg2, POINTER_TYPE))
8674     return NULL_TREE;
8675 
8676   type = TREE_TYPE (arg0);
8677 
8678   /* Calculate the result when the argument is a constant.  */
8679   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8680   if (fn == END_BUILTINS)
8681     return NULL_TREE;
8682 
8683   /* Canonicalize sincos to cexpi.  */
8684   if (TREE_CODE (arg0) == REAL_CST)
8685     {
8686       tree complex_type = build_complex_type (type);
8687       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8688     }
8689   if (!call)
8690     {
8691       if (!targetm.libc_has_function (function_c99_math_complex)
8692 	  || !builtin_decl_implicit_p (fn))
8693 	return NULL_TREE;
8694       fndecl = builtin_decl_explicit (fn);
8695       call = build_call_expr_loc (loc, fndecl, 1, arg0);
8696       call = builtin_save_expr (call);
8697     }
8698 
8699   tree ptype = build_pointer_type (type);
8700   arg1 = fold_convert (ptype, arg1);
8701   arg2 = fold_convert (ptype, arg2);
8702   return build2 (COMPOUND_EXPR, void_type_node,
8703 		 build2 (MODIFY_EXPR, void_type_node,
8704 			 build_fold_indirect_ref_loc (loc, arg1),
8705 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8706 		 build2 (MODIFY_EXPR, void_type_node,
8707 			 build_fold_indirect_ref_loc (loc, arg2),
8708 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8709 }
8710 
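/* Sketch of the canonicalization above (not part of GCC): cexpi (x)
   computes cos (x) + i*sin (x), so

     sincos (x, &s, &c)  ->  tmp = cexpi (x);
                             *(&s) = IMAGPART (tmp);
                             *(&c) = REALPART (tmp);

   which is the COMPOUND_EXPR of the two MODIFY_EXPRs built at the end of
   the function.  */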
8711 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8712    Return NULL_TREE if no simplification can be made.  */
8713 
8714 static tree
8715 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8716 {
8717   if (!validate_arg (arg1, POINTER_TYPE)
8718       || !validate_arg (arg2, POINTER_TYPE)
8719       || !validate_arg (len, INTEGER_TYPE))
8720     return NULL_TREE;
8721 
8722   /* If the LEN parameter is zero, return zero.  */
8723   if (integer_zerop (len))
8724     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8725 			      arg1, arg2);
8726 
8727   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8728   if (operand_equal_p (arg1, arg2, 0))
8729     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8730 
8731   /* If the LEN parameter is one, return an expression corresponding to
8732      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8733   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8734     {
8735       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8736       tree cst_uchar_ptr_node
8737 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8738 
8739       tree ind1
8740 	= fold_convert_loc (loc, integer_type_node,
8741 			    build1 (INDIRECT_REF, cst_uchar_node,
8742 				    fold_convert_loc (loc,
8743 						      cst_uchar_ptr_node,
8744 						      arg1)));
8745       tree ind2
8746 	= fold_convert_loc (loc, integer_type_node,
8747 			    build1 (INDIRECT_REF, cst_uchar_node,
8748 				    fold_convert_loc (loc,
8749 						      cst_uchar_ptr_node,
8750 						      arg2)));
8751       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8752     }
8753 
8754   return NULL_TREE;
8755 }
8756 
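/* Worked example for the LEN == 1 case above (illustrative):

     memcmp (p, q, 1)
       ->  (int) *(const unsigned char *) p
           - (int) *(const unsigned char *) q

   so with *p == 'a' (0x61) and *q == 'b' (0x62) the call folds to -1
   without any library call.  */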
8757 /* Fold a call to builtin isascii with argument ARG.  */
8758 
8759 static tree
8760 fold_builtin_isascii (location_t loc, tree arg)
8761 {
8762   if (!validate_arg (arg, INTEGER_TYPE))
8763     return NULL_TREE;
8764   else
8765     {
8766       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8767       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8768 			 build_int_cst (integer_type_node,
8769 					~ (unsigned HOST_WIDE_INT) 0x7f));
8770       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8771 			      arg, integer_zero_node);
8772     }
8773 }
8774 
8775 /* Fold a call to builtin toascii with argument ARG.  */
8776 
8777 static tree
8778 fold_builtin_toascii (location_t loc, tree arg)
8779 {
8780   if (!validate_arg (arg, INTEGER_TYPE))
8781     return NULL_TREE;
8782 
8783   /* Transform toascii(c) -> (c & 0x7f).  */
8784   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8785 			  build_int_cst (integer_type_node, 0x7f));
8786 }
8787 
8788 /* Fold a call to builtin isdigit with argument ARG.  */
8789 
8790 static tree
8791 fold_builtin_isdigit (location_t loc, tree arg)
8792 {
8793   if (!validate_arg (arg, INTEGER_TYPE))
8794     return NULL_TREE;
8795   else
8796     {
8797       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8798       /* According to the C standard, isdigit is unaffected by locale.
8799 	 However, it definitely is affected by the target character set.  */
8800       unsigned HOST_WIDE_INT target_digit0
8801 	= lang_hooks.to_target_charset ('0');
8802 
8803       if (target_digit0 == 0)
8804 	return NULL_TREE;
8805 
8806       arg = fold_convert_loc (loc, unsigned_type_node, arg);
8807       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8808 			 build_int_cst (unsigned_type_node, target_digit0));
8809       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8810 			  build_int_cst (unsigned_type_node, 9));
8811     }
8812 }
8813 
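/* Worked examples for the three character transforms above (illustrative,
   assuming an ASCII target character set):

     isascii ('7')   ->  ((0x37 & ~0x7f) == 0)          ->  1
     toascii (0xb7)  ->  (0xb7 & 0x7f)                  ->  0x37
     isdigit ('7')   ->  ((unsigned) 0x37 - 0x30 <= 9)  ->  1

   The isdigit form relies on unsigned wraparound: values below '0'
   become huge and fail the <= 9 test, so one comparison suffices.  */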
8814 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
8815 
8816 static tree
8817 fold_builtin_fabs (location_t loc, tree arg, tree type)
8818 {
8819   if (!validate_arg (arg, REAL_TYPE))
8820     return NULL_TREE;
8821 
8822   arg = fold_convert_loc (loc, type, arg);
8823   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8824 }
8825 
8826 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
8827 
8828 static tree
8829 fold_builtin_abs (location_t loc, tree arg, tree type)
8830 {
8831   if (!validate_arg (arg, INTEGER_TYPE))
8832     return NULL_TREE;
8833 
8834   arg = fold_convert_loc (loc, type, arg);
8835   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8836 }
8837 
8838 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
8839 
8840 static tree
8841 fold_builtin_carg (location_t loc, tree arg, tree type)
8842 {
8843   if (validate_arg (arg, COMPLEX_TYPE)
8844       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8845     {
8846       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8847 
8848       if (atan2_fn)
8849         {
8850 	  tree new_arg = builtin_save_expr (arg);
8851 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8852 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8853 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8854 	}
8855     }
8856 
8857   return NULL_TREE;
8858 }
8859 
8860 /* Fold a call to builtin frexp, we can assume the base is 2.  */
8861 
8862 static tree
8863 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8864 {
8865   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8866     return NULL_TREE;
8867 
8868   STRIP_NOPS (arg0);
8869 
8870   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8871     return NULL_TREE;
8872 
8873   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8874 
8875   /* Proceed if a valid pointer type was passed in.  */
8876   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8877     {
8878       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8879       tree frac, exp;
8880 
8881       switch (value->cl)
8882       {
8883       case rvc_zero:
8884 	/* For +-0, return (*exp = 0, +-0).  */
8885 	exp = integer_zero_node;
8886 	frac = arg0;
8887 	break;
8888       case rvc_nan:
8889       case rvc_inf:
8890 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
8891 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
8892       case rvc_normal:
8893 	{
8894 	  /* Since the frexp function always expects base 2, and in
8895 	     GCC normalized significands are already in the range
8896 	     [0.5, 1.0), we have exactly what frexp wants.  */
8897 	  REAL_VALUE_TYPE frac_rvt = *value;
8898 	  SET_REAL_EXP (&frac_rvt, 0);
8899 	  frac = build_real (rettype, frac_rvt);
8900 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
8901 	}
8902 	break;
8903       default:
8904 	gcc_unreachable ();
8905       }
8906 
8907       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8908       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8909       TREE_SIDE_EFFECTS (arg1) = 1;
8910       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8911     }
8912 
8913   return NULL_TREE;
8914 }
8915 
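/* Worked example for the constant folding above (illustrative):

     frexp (6.0, &e)  ->  (*e = 3, 0.75)

   since 6.0 == 0.75 * 2**3 and GCC already keeps normalized significands
   in [0.5, 1.0), so zeroing REAL_EXP yields the fraction directly.  */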
8916 /* Fold a call to builtin modf.  */
8917 
8918 static tree
8919 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8920 {
8921   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8922     return NULL_TREE;
8923 
8924   STRIP_NOPS (arg0);
8925 
8926   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8927     return NULL_TREE;
8928 
8929   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8930 
8931   /* Proceed if a valid pointer type was passed in.  */
8932   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8933     {
8934       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8935       REAL_VALUE_TYPE trunc, frac;
8936 
8937       switch (value->cl)
8938       {
8939       case rvc_nan:
8940       case rvc_zero:
8941 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
8942 	trunc = frac = *value;
8943 	break;
8944       case rvc_inf:
8945 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
8946 	frac = dconst0;
8947 	frac.sign = value->sign;
8948 	trunc = *value;
8949 	break;
8950       case rvc_normal:
8951 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
8952 	real_trunc (&trunc, VOIDmode, value);
8953 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8954 	/* If the original number was negative and already
8955 	   integral, then the fractional part is -0.0.  */
8956 	if (value->sign && frac.cl == rvc_zero)
8957 	  frac.sign = value->sign;
8958 	break;
8959       }
8960 
8961       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
8962       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8963 			  build_real (rettype, trunc));
8964       TREE_SIDE_EFFECTS (arg1) = 1;
8965       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8966 			  build_real (rettype, frac));
8967     }
8968 
8969   return NULL_TREE;
8970 }
8971 
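/* Worked examples for the constant folding above (illustrative):

     modf (2.5, &ip)   ->  (*ip = 2.0, 0.5)
     modf (-3.0, &ip)  ->  (*ip = -3.0, -0.0)

   The second case shows the sign fixup: a negative input that is already
   integral must yield a fractional part of -0.0.  */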
8972 /* Given a location LOC, an interclass builtin function decl FNDECL
8973    and its single argument ARG, return a folded expression computing
8974    the same, or NULL_TREE if we either couldn't or didn't want to fold
8975    (the latter happens if there's an RTL instruction available).  */
8976 
8977 static tree
8978 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8979 {
8980   machine_mode mode;
8981 
8982   if (!validate_arg (arg, REAL_TYPE))
8983     return NULL_TREE;
8984 
8985   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8986     return NULL_TREE;
8987 
8988   mode = TYPE_MODE (TREE_TYPE (arg));
8989 
8990   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8991 
8992   /* If there is no optab, try generic code.  */
8993   switch (DECL_FUNCTION_CODE (fndecl))
8994     {
8995       tree result;
8996 
8997     CASE_FLT_FN (BUILT_IN_ISINF):
8998       {
8999 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
9000 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9001 	tree type = TREE_TYPE (arg);
9002 	REAL_VALUE_TYPE r;
9003 	char buf[128];
9004 
9005 	if (is_ibm_extended)
9006 	  {
9007 	    /* NaN and Inf are encoded in the high-order double value
9008 	       only.  The low-order value is not significant.  */
9009 	    type = double_type_node;
9010 	    mode = DFmode;
9011 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9012 	  }
9013 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9014 	real_from_string (&r, buf);
9015 	result = build_call_expr (isgr_fn, 2,
9016 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9017 				  build_real (type, r));
9018 	return result;
9019       }
9020     CASE_FLT_FN (BUILT_IN_FINITE):
9021     case BUILT_IN_ISFINITE:
9022       {
9023 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
9024 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9025 	tree type = TREE_TYPE (arg);
9026 	REAL_VALUE_TYPE r;
9027 	char buf[128];
9028 
9029 	if (is_ibm_extended)
9030 	  {
9031 	    /* NaN and Inf are encoded in the high-order double value
9032 	       only.  The low-order value is not significant.  */
9033 	    type = double_type_node;
9034 	    mode = DFmode;
9035 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9036 	  }
9037 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9038 	real_from_string (&r, buf);
9039 	result = build_call_expr (isle_fn, 2,
9040 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9041 				  build_real (type, r));
9042 	/*result = fold_build2_loc (loc, UNGT_EXPR,
9043 				  TREE_TYPE (TREE_TYPE (fndecl)),
9044 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9045 				  build_real (type, r));
9046 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9047 				  TREE_TYPE (TREE_TYPE (fndecl)),
9048 				  result);*/
9049 	return result;
9050       }
9051     case BUILT_IN_ISNORMAL:
9052       {
9053 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9054 	   islessequal(fabs(x),DBL_MAX).  */
9055 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9056 	tree type = TREE_TYPE (arg);
9057 	tree orig_arg, max_exp, min_exp;
9058 	machine_mode orig_mode = mode;
9059 	REAL_VALUE_TYPE rmax, rmin;
9060 	char buf[128];
9061 
9062 	orig_arg = arg = builtin_save_expr (arg);
9063 	if (is_ibm_extended)
9064 	  {
9065 	    /* Use double to test the normal range of IBM extended
9066 	       precision.  Emin for IBM extended precision is
9067 	       different to emin for IEEE double, being 53 higher
9068 	       since the low double exponent is at least 53 lower
9069 	       than the high double exponent.  */
9070 	    type = double_type_node;
9071 	    mode = DFmode;
9072 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9073 	  }
9074 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9075 
9076 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9077 	real_from_string (&rmax, buf);
9078 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9079 	real_from_string (&rmin, buf);
9080 	max_exp = build_real (type, rmax);
9081 	min_exp = build_real (type, rmin);
9082 
9083 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9084 	if (is_ibm_extended)
9085 	  {
9086 	    /* Testing the high end of the range is done just using
9087 	       the high double, using the same test as isfinite().
9088 	       For the subnormal end of the range we first test the
9089 	       high double, then if its magnitude is equal to the
9090 	       limit of 0x1p-969, we test whether the low double is
9091 	       non-zero and opposite sign to the high double.  */
9092 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9093 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9094 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9095 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9096 				       arg, min_exp);
9097 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
9098 				      complex_double_type_node, orig_arg);
9099 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9100 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9101 	    tree zero = build_real (type, dconst0);
9102 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9103 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9104 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9105 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9106 				      fold_build3 (COND_EXPR,
9107 						   integer_type_node,
9108 						   hilt, logt, lolt));
9109 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9110 				  eq_min, ok_lo);
9111 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9112 				   gt_min, eq_min);
9113 	  }
9114 	else
9115 	  {
9116 	    tree const isge_fn
9117 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9118 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9119 	  }
9120 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9121 			      max_exp, min_exp);
9122 	return result;
9123       }
9124     default:
9125       break;
9126     }
9127 
9128   return NULL_TREE;
9129 }
9130 
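/* Illustrative expansions produced above when no instruction is available
   (not part of GCC; DBL_MAX and DBL_MIN stand for the largest finite and
   smallest normal values of the argument's mode):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)

   with the IBM extended-precision cases first narrowing to the high-order
   double, where NaN and Inf are encoded.  */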
9131 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9132    ARG is the argument for the call.  */
9133 
9134 static tree
9135 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9136 {
9137   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9138 
9139   if (!validate_arg (arg, REAL_TYPE))
9140     return NULL_TREE;
9141 
9142   switch (builtin_index)
9143     {
9144     case BUILT_IN_ISINF:
9145       if (!HONOR_INFINITIES (arg))
9146 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9147 
9148       return NULL_TREE;
9149 
9150     case BUILT_IN_ISINF_SIGN:
9151       {
9152 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9153 	/* In a boolean context, GCC will fold the inner COND_EXPR to
9154 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
9155 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9156 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9157 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9158 	tree tmp = NULL_TREE;
9159 
9160 	arg = builtin_save_expr (arg);
9161 
9162 	if (signbit_fn && isinf_fn)
9163 	  {
9164 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9165 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9166 
9167 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9168 					signbit_call, integer_zero_node);
9169 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9170 				      isinf_call, integer_zero_node);
9171 
9172 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9173 			       integer_minus_one_node, integer_one_node);
9174 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9175 			       isinf_call, tmp,
9176 			       integer_zero_node);
9177 	  }
9178 
9179 	return tmp;
9180       }
9181 
9182     case BUILT_IN_ISFINITE:
9183       if (!HONOR_NANS (arg)
9184 	  && !HONOR_INFINITIES (arg))
9185 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
9186 
9187       return NULL_TREE;
9188 
9189     case BUILT_IN_ISNAN:
9190       if (!HONOR_NANS (arg))
9191 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9192 
9193       {
9194 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9195 	if (is_ibm_extended)
9196 	  {
9197 	    /* NaN and Inf are encoded in the high-order double value
9198 	       only.  The low-order value is not significant.  */
9199 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9200 	  }
9201       }
9202       arg = builtin_save_expr (arg);
9203       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9204 
9205     default:
9206       gcc_unreachable ();
9207     }
9208 }
9209 
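/* Illustrative fold for the ISNAN case above (not part of GCC): a NaN is
   the only value that compares unordered with itself, so

     isnan (x)  ->  x UNORDERED x

   and when the mode honors no NaNs at all the call folds to the constant
   0 while still evaluating X for side effects.  */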
9210 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9211    This builtin will generate code to return the appropriate floating
9212    point classification depending on the value of the floating point
9213    number passed in.  The possible return values must be supplied as
9214    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9215    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9216    one floating point argument, which is "type generic".  */
9217 
9218 static tree
9219 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9220 {
9221   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9222     arg, type, res, tmp;
9223   machine_mode mode;
9224   REAL_VALUE_TYPE r;
9225   char buf[128];
9226 
9227   /* Verify the required arguments in the original call.  */
9228   if (nargs != 6
9229       || !validate_arg (args[0], INTEGER_TYPE)
9230       || !validate_arg (args[1], INTEGER_TYPE)
9231       || !validate_arg (args[2], INTEGER_TYPE)
9232       || !validate_arg (args[3], INTEGER_TYPE)
9233       || !validate_arg (args[4], INTEGER_TYPE)
9234       || !validate_arg (args[5], REAL_TYPE))
9235     return NULL_TREE;
9236 
9237   fp_nan = args[0];
9238   fp_infinite = args[1];
9239   fp_normal = args[2];
9240   fp_subnormal = args[3];
9241   fp_zero = args[4];
9242   arg = args[5];
9243   type = TREE_TYPE (arg);
9244   mode = TYPE_MODE (type);
9245   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9246 
9247   /* fpclassify(x) ->
9248        isnan(x) ? FP_NAN :
9249          (fabs(x) == Inf ? FP_INFINITE :
9250 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
9251 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
9252 
9253   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9254 		     build_real (type, dconst0));
9255   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9256 		     tmp, fp_zero, fp_subnormal);
9257 
9258   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9259   real_from_string (&r, buf);
9260   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9261 		     arg, build_real (type, r));
9262   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9263 
9264   if (HONOR_INFINITIES (mode))
9265     {
9266       real_inf (&r);
9267       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9268 			 build_real (type, r));
9269       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9270 			 fp_infinite, res);
9271     }
9272 
9273   if (HONOR_NANS (mode))
9274     {
9275       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9276       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9277     }
9278 
9279   return res;
9280 }
9281 
9282 /* Fold a call to an unordered comparison function such as
9283    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9284    being called and ARG0 and ARG1 are the arguments for the call.
9285    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9286    the opposite of the desired result.  UNORDERED_CODE is used
9287    for modes that can hold NaNs and ORDERED_CODE is used for
9288    the rest.  */
9289 
9290 static tree
9291 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9292 			    enum tree_code unordered_code,
9293 			    enum tree_code ordered_code)
9294 {
9295   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9296   enum tree_code code;
9297   tree type0, type1;
9298   enum tree_code code0, code1;
9299   tree cmp_type = NULL_TREE;
9300 
9301   type0 = TREE_TYPE (arg0);
9302   type1 = TREE_TYPE (arg1);
9303 
9304   code0 = TREE_CODE (type0);
9305   code1 = TREE_CODE (type1);
9306 
9307   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9308     /* Choose the wider of two real types.  */
9309     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9310       ? type0 : type1;
9311   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9312     cmp_type = type0;
9313   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9314     cmp_type = type1;
9315 
9316   arg0 = fold_convert_loc (loc, cmp_type, arg0);
9317   arg1 = fold_convert_loc (loc, cmp_type, arg1);
9318 
9319   if (unordered_code == UNORDERED_EXPR)
9320     {
9321       if (!HONOR_NANS (arg0))
9322 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9323       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9324     }
9325 
9326   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9327   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9328 		      fold_build2_loc (loc, code, type, arg0, arg1));
9329 }
9330 
9331 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9332    arithmetic if it can never overflow, or into internal functions that
9333    return both the result of the arithmetic and an overflow flag in
9334    a complex integer result, or some other check for overflow.
9335    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9336    checking part of that.  */
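/* A sketch of the two shapes produced below (illustrative only): with
   non-constant operands, __builtin_add_overflow (a, b, &r) becomes
   roughly

     c = .ADD_OVERFLOW (a, b);   // complex-int internal function call
     *&r = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>;

   while __builtin_add_overflow_p (1, 2, (int) 0) folds straight to the
   constant false, since 1 + 2 cannot overflow int.  */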
9337 
9338 static tree
9339 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9340 			     tree arg0, tree arg1, tree arg2)
9341 {
9342   enum internal_fn ifn = IFN_LAST;
9343   /* The code of the expression corresponding to the built-in.  */
9344   enum tree_code opcode = ERROR_MARK;
9345   bool ovf_only = false;
9346 
9347   switch (fcode)
9348     {
9349     case BUILT_IN_ADD_OVERFLOW_P:
9350       ovf_only = true;
9351       /* FALLTHRU */
9352     case BUILT_IN_ADD_OVERFLOW:
9353     case BUILT_IN_SADD_OVERFLOW:
9354     case BUILT_IN_SADDL_OVERFLOW:
9355     case BUILT_IN_SADDLL_OVERFLOW:
9356     case BUILT_IN_UADD_OVERFLOW:
9357     case BUILT_IN_UADDL_OVERFLOW:
9358     case BUILT_IN_UADDLL_OVERFLOW:
9359       opcode = PLUS_EXPR;
9360       ifn = IFN_ADD_OVERFLOW;
9361       break;
9362     case BUILT_IN_SUB_OVERFLOW_P:
9363       ovf_only = true;
9364       /* FALLTHRU */
9365     case BUILT_IN_SUB_OVERFLOW:
9366     case BUILT_IN_SSUB_OVERFLOW:
9367     case BUILT_IN_SSUBL_OVERFLOW:
9368     case BUILT_IN_SSUBLL_OVERFLOW:
9369     case BUILT_IN_USUB_OVERFLOW:
9370     case BUILT_IN_USUBL_OVERFLOW:
9371     case BUILT_IN_USUBLL_OVERFLOW:
9372       opcode = MINUS_EXPR;
9373       ifn = IFN_SUB_OVERFLOW;
9374       break;
9375     case BUILT_IN_MUL_OVERFLOW_P:
9376       ovf_only = true;
9377       /* FALLTHRU */
9378     case BUILT_IN_MUL_OVERFLOW:
9379     case BUILT_IN_SMUL_OVERFLOW:
9380     case BUILT_IN_SMULL_OVERFLOW:
9381     case BUILT_IN_SMULLL_OVERFLOW:
9382     case BUILT_IN_UMUL_OVERFLOW:
9383     case BUILT_IN_UMULL_OVERFLOW:
9384     case BUILT_IN_UMULLL_OVERFLOW:
9385       opcode = MULT_EXPR;
9386       ifn = IFN_MUL_OVERFLOW;
9387       break;
9388     default:
9389       gcc_unreachable ();
9390     }
9391 
9392   /* For the "generic" overloads, the first two arguments can have different
9393      types and the last argument determines the target type to use to check
9394      for overflow.  The arguments of the other overloads all have the same
9395      type.  */
9396   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9397 
9398   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9399      arguments are constant, attempt to fold the built-in call into a constant
9400      expression indicating whether or not it detected an overflow.  */
9401   if (ovf_only
9402       && TREE_CODE (arg0) == INTEGER_CST
9403       && TREE_CODE (arg1) == INTEGER_CST)
9404     /* Perform the computation in the target type and check for overflow.  */
9405     return omit_one_operand_loc (loc, boolean_type_node,
9406 				 arith_overflowed_p (opcode, type, arg0, arg1)
9407 				 ? boolean_true_node : boolean_false_node,
9408 				 arg2);
9409 
9410   tree intres, ovfres;
9411   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9412     {
9413       intres = fold_binary_loc (loc, opcode, type,
9414 				fold_convert_loc (loc, type, arg0),
9415 				fold_convert_loc (loc, type, arg1));
9416       if (TREE_OVERFLOW (intres))
9417 	intres = drop_tree_overflow (intres);
9418       ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9419 		? boolean_true_node : boolean_false_node);
9420     }
9421   else
9422     {
9423       tree ctype = build_complex_type (type);
9424       tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9425 						arg0, arg1);
9426       tree tgt = save_expr (call);
9427       intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9428       ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9429       ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9430     }
9431 
9432   if (ovf_only)
9433     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9434 
9435   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9436   tree store
9437     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9438   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9439 }
9440 
9441 /* Fold a call to __builtin_FILE to a constant string.  */
9442 
9443 static inline tree
9444 fold_builtin_FILE (location_t loc)
9445 {
9446   if (const char *fname = LOCATION_FILE (loc))
9447     {
9448       /* The documentation says this builtin is equivalent to the preprocessor
9449 	 __FILE__ macro so it appears appropriate to use the same file prefix
9450 	 mappings.  */
9451       fname = remap_macro_filename (fname);
9452       return build_string_literal (strlen (fname) + 1, fname);
9453     }
9454 
9455   return build_string_literal (1, "");
9456 }
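
/* Usage note (an illustrative assumption about the driver, not taken
   from this file): thanks to the remap_macro_filename call above,
   compiling /src/a.c with -fmacro-prefix-map=/src=. should make
   __builtin_FILE () fold to "./a.c" instead of the absolute path,
   matching the behavior of __FILE__.  */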
9457 
9458 /* Fold a call to __builtin_FUNCTION to a constant string.  */
9459 
9460 static inline tree
9461 fold_builtin_FUNCTION ()
9462 {
9463   const char *name = "";
9464 
9465   if (current_function_decl)
9466     name = lang_hooks.decl_printable_name (current_function_decl, 0);
9467 
9468   return build_string_literal (strlen (name) + 1, name);
9469 }
9470 
9471 /* Fold a call to __builtin_LINE to an integer constant.  */
9472 
9473 static inline tree
9474 fold_builtin_LINE (location_t loc, tree type)
9475 {
9476   return build_int_cst (type, LOCATION_LINE (loc));
9477 }
9478 
9479 /* Fold a call to built-in function FNDECL with 0 arguments.
9480    This function returns NULL_TREE if no simplification was possible.  */
9481 
9482 static tree
9483 fold_builtin_0 (location_t loc, tree fndecl)
9484 {
9485   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9486   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9487   switch (fcode)
9488     {
9489     case BUILT_IN_FILE:
9490       return fold_builtin_FILE (loc);
9491 
9492     case BUILT_IN_FUNCTION:
9493       return fold_builtin_FUNCTION ();
9494 
9495     case BUILT_IN_LINE:
9496       return fold_builtin_LINE (loc, type);
9497 
9498     CASE_FLT_FN (BUILT_IN_INF):
9499     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9500     case BUILT_IN_INFD32:
9501     case BUILT_IN_INFD64:
9502     case BUILT_IN_INFD128:
9503       return fold_builtin_inf (loc, type, true);
9504 
9505     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9506     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9507       return fold_builtin_inf (loc, type, false);
9508 
9509     case BUILT_IN_CLASSIFY_TYPE:
9510       return fold_builtin_classify_type (NULL_TREE);
9511 
9512     default:
9513       break;
9514     }
9515   return NULL_TREE;
9516 }
9517 
9518 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9519    This function returns NULL_TREE if no simplification was possible.  */
9520 
9521 static tree
9522 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9523 {
9524   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9525   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9526 
9527   if (TREE_CODE (arg0) == ERROR_MARK)
9528     return NULL_TREE;
9529 
9530   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9531     return ret;
9532 
9533   switch (fcode)
9534     {
9535     case BUILT_IN_CONSTANT_P:
9536       {
9537 	tree val = fold_builtin_constant_p (arg0);
9538 
9539 	/* Gimplification will pull the CALL_EXPR for the builtin out of
9540 	   an if condition.  When not optimizing, we'll not CSE it back.
9541 	   To avoid regressions such as link errors, return false now.  */
9542 	if (!val && !optimize)
9543 	  val = integer_zero_node;
9544 
9545 	return val;
9546       }
9547 
9548     case BUILT_IN_CLASSIFY_TYPE:
9549       return fold_builtin_classify_type (arg0);
9550 
9551     case BUILT_IN_STRLEN:
9552       return fold_builtin_strlen (loc, type, arg0);
9553 
9554     CASE_FLT_FN (BUILT_IN_FABS):
9555     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9556     case BUILT_IN_FABSD32:
9557     case BUILT_IN_FABSD64:
9558     case BUILT_IN_FABSD128:
9559       return fold_builtin_fabs (loc, arg0, type);
9560 
9561     case BUILT_IN_ABS:
9562     case BUILT_IN_LABS:
9563     case BUILT_IN_LLABS:
9564     case BUILT_IN_IMAXABS:
9565       return fold_builtin_abs (loc, arg0, type);
9566 
9567     CASE_FLT_FN (BUILT_IN_CONJ):
9568       if (validate_arg (arg0, COMPLEX_TYPE)
9569 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9570 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9571       break;
9572 
9573     CASE_FLT_FN (BUILT_IN_CREAL):
9574       if (validate_arg (arg0, COMPLEX_TYPE)
9575 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9576 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9577       break;
9578 
9579     CASE_FLT_FN (BUILT_IN_CIMAG):
9580       if (validate_arg (arg0, COMPLEX_TYPE)
9581 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9582 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9583       break;
9584 
9585     CASE_FLT_FN (BUILT_IN_CARG):
9586       return fold_builtin_carg (loc, arg0, type);
9587 
9588     case BUILT_IN_ISASCII:
9589       return fold_builtin_isascii (loc, arg0);
9590 
9591     case BUILT_IN_TOASCII:
9592       return fold_builtin_toascii (loc, arg0);
9593 
9594     case BUILT_IN_ISDIGIT:
9595       return fold_builtin_isdigit (loc, arg0);
9596 
9597     CASE_FLT_FN (BUILT_IN_FINITE):
9598     case BUILT_IN_FINITED32:
9599     case BUILT_IN_FINITED64:
9600     case BUILT_IN_FINITED128:
9601     case BUILT_IN_ISFINITE:
9602       {
9603 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9604 	if (ret)
9605 	  return ret;
9606 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9607       }
9608 
9609     CASE_FLT_FN (BUILT_IN_ISINF):
9610     case BUILT_IN_ISINFD32:
9611     case BUILT_IN_ISINFD64:
9612     case BUILT_IN_ISINFD128:
9613       {
9614 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9615 	if (ret)
9616 	  return ret;
9617 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9618       }
9619 
9620     case BUILT_IN_ISNORMAL:
9621       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9622 
9623     case BUILT_IN_ISINF_SIGN:
9624       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9625 
9626     CASE_FLT_FN (BUILT_IN_ISNAN):
9627     case BUILT_IN_ISNAND32:
9628     case BUILT_IN_ISNAND64:
9629     case BUILT_IN_ISNAND128:
9630       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9631 
9632     case BUILT_IN_FREE:
9633       if (integer_zerop (arg0))
9634 	return build_empty_stmt (loc);
9635       break;
9636 
9637     default:
9638       break;
9639     }
9640 
9641   return NULL_TREE;
9642 
9643 }
9644 
9645 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9646    This function returns NULL_TREE if no simplification was possible.  */
9647 
9648 static tree
9649 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9650 {
9651   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9653 
9654   if (TREE_CODE (arg0) == ERROR_MARK
9655       || TREE_CODE (arg1) == ERROR_MARK)
9656     return NULL_TREE;
9657 
9658   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9659     return ret;
9660 
9661   switch (fcode)
9662     {
9663     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9664     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9665       if (validate_arg (arg0, REAL_TYPE)
9666 	  && validate_arg (arg1, POINTER_TYPE))
9667 	return do_mpfr_lgamma_r (arg0, arg1, type);
9668       break;
9669 
9670     CASE_FLT_FN (BUILT_IN_FREXP):
9671       return fold_builtin_frexp (loc, arg0, arg1, type);
9672 
9673     CASE_FLT_FN (BUILT_IN_MODF):
9674       return fold_builtin_modf (loc, arg0, arg1, type);
9675 
9676     case BUILT_IN_STRSPN:
9677       return fold_builtin_strspn (loc, arg0, arg1);
9678 
9679     case BUILT_IN_STRCSPN:
9680       return fold_builtin_strcspn (loc, arg0, arg1);
9681 
9682     case BUILT_IN_STRPBRK:
9683       return fold_builtin_strpbrk (loc, arg0, arg1, type);
9684 
9685     case BUILT_IN_EXPECT:
9686       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9687 
9688     case BUILT_IN_ISGREATER:
9689       return fold_builtin_unordered_cmp (loc, fndecl,
9690 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
9691     case BUILT_IN_ISGREATEREQUAL:
9692       return fold_builtin_unordered_cmp (loc, fndecl,
9693 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
9694     case BUILT_IN_ISLESS:
9695       return fold_builtin_unordered_cmp (loc, fndecl,
9696 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
9697     case BUILT_IN_ISLESSEQUAL:
9698       return fold_builtin_unordered_cmp (loc, fndecl,
9699 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
9700     case BUILT_IN_ISLESSGREATER:
9701       return fold_builtin_unordered_cmp (loc, fndecl,
9702 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9703     case BUILT_IN_ISUNORDERED:
9704       return fold_builtin_unordered_cmp (loc, fndecl,
9705 					 arg0, arg1, UNORDERED_EXPR,
9706 					 NOP_EXPR);
9707 
9708       /* We do the folding for va_start in the expander.  */
9709     case BUILT_IN_VA_START:
9710       break;
9711 
9712     case BUILT_IN_OBJECT_SIZE:
9713       return fold_builtin_object_size (arg0, arg1);
9714 
9715     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9716       return fold_builtin_atomic_always_lock_free (arg0, arg1);
9717 
9718     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9719       return fold_builtin_atomic_is_lock_free (arg0, arg1);
9720 
9721     default:
9722       break;
9723     }
9724   return NULL_TREE;
9725 }
9726 
9727 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9728    and ARG2.
9729    This function returns NULL_TREE if no simplification was possible.  */
9730 
9731 static tree
9732 fold_builtin_3 (location_t loc, tree fndecl,
9733 		tree arg0, tree arg1, tree arg2)
9734 {
9735   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9736   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9737 
9738   if (TREE_CODE (arg0) == ERROR_MARK
9739       || TREE_CODE (arg1) == ERROR_MARK
9740       || TREE_CODE (arg2) == ERROR_MARK)
9741     return NULL_TREE;
9742 
9743   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9744 				  arg0, arg1, arg2))
9745     return ret;
9746 
9747   switch (fcode)
9748     {
9749 
9750     CASE_FLT_FN (BUILT_IN_SINCOS):
9751       return fold_builtin_sincos (loc, arg0, arg1, arg2);
9752 
9753     CASE_FLT_FN (BUILT_IN_REMQUO):
9754       if (validate_arg (arg0, REAL_TYPE)
9755 	  && validate_arg (arg1, REAL_TYPE)
9756 	  && validate_arg (arg2, POINTER_TYPE))
9757 	return do_mpfr_remquo (arg0, arg1, arg2);
9758       break;
9759 
9760     case BUILT_IN_MEMCMP:
9761       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9762 
9763     case BUILT_IN_EXPECT:
9764       return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9765 
9766     case BUILT_IN_EXPECT_WITH_PROBABILITY:
9767       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9768 
9769     case BUILT_IN_ADD_OVERFLOW:
9770     case BUILT_IN_SUB_OVERFLOW:
9771     case BUILT_IN_MUL_OVERFLOW:
9772     case BUILT_IN_ADD_OVERFLOW_P:
9773     case BUILT_IN_SUB_OVERFLOW_P:
9774     case BUILT_IN_MUL_OVERFLOW_P:
9775     case BUILT_IN_SADD_OVERFLOW:
9776     case BUILT_IN_SADDL_OVERFLOW:
9777     case BUILT_IN_SADDLL_OVERFLOW:
9778     case BUILT_IN_SSUB_OVERFLOW:
9779     case BUILT_IN_SSUBL_OVERFLOW:
9780     case BUILT_IN_SSUBLL_OVERFLOW:
9781     case BUILT_IN_SMUL_OVERFLOW:
9782     case BUILT_IN_SMULL_OVERFLOW:
9783     case BUILT_IN_SMULLL_OVERFLOW:
9784     case BUILT_IN_UADD_OVERFLOW:
9785     case BUILT_IN_UADDL_OVERFLOW:
9786     case BUILT_IN_UADDLL_OVERFLOW:
9787     case BUILT_IN_USUB_OVERFLOW:
9788     case BUILT_IN_USUBL_OVERFLOW:
9789     case BUILT_IN_USUBLL_OVERFLOW:
9790     case BUILT_IN_UMUL_OVERFLOW:
9791     case BUILT_IN_UMULL_OVERFLOW:
9792     case BUILT_IN_UMULLL_OVERFLOW:
9793       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9794 
9795     default:
9796       break;
9797     }
9798   return NULL_TREE;
9799 }
9800 
9801 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
9802    arguments.  The unnamed bool parameter, historically IGNORE (true
9803    if the result of the call is ignored), is no longer used.  This
9804    function returns NULL_TREE if no simplification was possible.  */
9805 
9806 tree
9807 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9808 {
9809   tree ret = NULL_TREE;
9810 
9811   switch (nargs)
9812     {
9813     case 0:
9814       ret = fold_builtin_0 (loc, fndecl);
9815       break;
9816     case 1:
9817       ret = fold_builtin_1 (loc, fndecl, args[0]);
9818       break;
9819     case 2:
9820       ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9821       break;
9822     case 3:
9823       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9824       break;
9825     default:
9826       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9827       break;
9828     }
9829   if (ret)
9830     {
9831       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9832       SET_EXPR_LOCATION (ret, loc);
9833       return ret;
9834     }
9835   return NULL_TREE;
9836 }
9837 
9838 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9839    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
9840    of arguments in ARGS to be omitted.  OLDNARGS is the number of
9841    elements in ARGS.  */
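/* Hedged example (the names are illustrative): a fold that rewrites
   sprintf (dest, "%s", str) into strcpy (dest, str) can be written as

     rewrite_call_expr (loc, exp, 2, strcpy_decl, 1, dest);

   i.e. skip the two old leading arguments (dest and the format string),
   supply one new leading argument (dest again), and keep the tail
   (str).  rewrite_call_expr, defined further below, forwards here
   through a va_list.  */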
9842 
9843 static tree
9844 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9845 			  int skip, tree fndecl, int n, va_list newargs)
9846 {
9847   int nargs = oldnargs - skip + n;
9848   tree *buffer;
9849 
9850   if (n > 0)
9851     {
9852       int i, j;
9853 
9854       buffer = XALLOCAVEC (tree, nargs);
9855       for (i = 0; i < n; i++)
9856 	buffer[i] = va_arg (newargs, tree);
9857       for (j = skip; j < oldnargs; j++, i++)
9858 	buffer[i] = args[j];
9859     }
9860   else
9861     buffer = args + skip;
9862 
9863   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9864 }
9865 
9866 /* Return true if FNDECL shouldn't be folded right now.
9867    If a built-in function has an inline attribute always_inline
9868    wrapper, defer folding it after always_inline functions have
9869    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9870    might not be performed.  */
9871 
9872 bool
9873 avoid_folding_inline_builtin (tree fndecl)
9874 {
9875   return (DECL_DECLARED_INLINE_P (fndecl)
9876 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9877 	  && cfun
9878 	  && !cfun->always_inline_functions_inlined
9879 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9880 }
9881 
9882 /* A wrapper function for builtin folding that prevents warnings for
9883    "statement without effect" and the like, caused by removing the
9884    call node earlier than the warning is generated.  */
9885 
9886 tree
9887 fold_call_expr (location_t loc, tree exp, bool ignore)
9888 {
9889   tree ret = NULL_TREE;
9890   tree fndecl = get_callee_fndecl (exp);
9891   if (fndecl && fndecl_built_in_p (fndecl)
9892       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9893 	 yet.  Defer folding until we see all the arguments
9894 	 (after inlining).  */
9895       && !CALL_EXPR_VA_ARG_PACK (exp))
9896     {
9897       int nargs = call_expr_nargs (exp);
9898 
9899       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9900 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
9901 	 even in that case, until arguments are finalized.  */
9902       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9903 	{
9904 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9905 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9906 	    return NULL_TREE;
9907 	}
9908 
9909       if (avoid_folding_inline_builtin (fndecl))
9910 	return NULL_TREE;
9911 
9912       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9913         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9914 				     CALL_EXPR_ARGP (exp), ignore);
9915       else
9916 	{
9917 	  tree *args = CALL_EXPR_ARGP (exp);
9918 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9919 	  if (ret)
9920 	    return ret;
9921 	}
9922     }
9923   return NULL_TREE;
9924 }
9925 
9926 /* Fold a CALL_EXPR with FN as the function expression; the unnamed
9927    tree parameter is unused.  N arguments are passed in the array
9928    ARGARRAY.  Return a folded expression or NULL_TREE on failure.  */
9929 
9930 tree
9931 fold_builtin_call_array (location_t loc, tree,
9932 			 tree fn,
9933 			 int n,
9934 			 tree *argarray)
9935 {
9936   if (TREE_CODE (fn) != ADDR_EXPR)
9937     return NULL_TREE;
9938 
9939   tree fndecl = TREE_OPERAND (fn, 0);
9940   if (TREE_CODE (fndecl) == FUNCTION_DECL
9941       && fndecl_built_in_p (fndecl))
9942     {
9943       /* If last argument is __builtin_va_arg_pack (), arguments to this
9944 	 function are not finalized yet.  Defer folding until they are.  */
9945       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9946 	{
9947 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9948 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9949 	    return NULL_TREE;
9950 	}
9951       if (avoid_folding_inline_builtin (fndecl))
9952 	return NULL_TREE;
9953       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9954 	return targetm.fold_builtin (fndecl, n, argarray, false);
9955       else
9956 	return fold_builtin_n (loc, fndecl, argarray, n, false);
9957     }
9958 
9959   return NULL_TREE;
9960 }
9961 
9962 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9963    along with N new arguments specified as the "..." parameters.  SKIP
9964    is the number of arguments in EXP to be omitted.  This function is used
9965    to do varargs-to-varargs transformations.  */
9966 
9967 static tree
9968 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9969 {
9970   va_list ap;
9971   tree t;
9972 
9973   va_start (ap, n);
9974   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9975 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9976   va_end (ap);
9977 
9978   return t;
9979 }
9980 
9981 /* Validate a single argument ARG against a tree code CODE representing
9982    a type.  Return true when argument is valid.  */
9983 
9984 static bool
9985 validate_arg (const_tree arg, enum tree_code code)
9986 {
9987   if (!arg)
9988     return false;
9989   else if (code == POINTER_TYPE)
9990     return POINTER_TYPE_P (TREE_TYPE (arg));
9991   else if (code == INTEGER_TYPE)
9992     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9993   return code == TREE_CODE (TREE_TYPE (arg));
9994 }
9995 
9996 /* This function validates the types of a function call argument list
9997    against a specified list of tree_codes.  If the last specifier is a 0,
9998    that represents an ellipsis, otherwise the last specifier must be a
9999    VOID_TYPE.
10000 
10001    This is the GIMPLE version of validate_arglist.  Eventually we want to
10002    completely convert builtins.c to work from GIMPLEs and the tree based
10003    validate_arglist will then be removed.  */
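/* A typical call, shown only as a sketch: a (double, int *) builtin
   such as frexp would be checked with

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE terminates the expected argument list.  */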
10004 
10005 bool
10006 validate_gimple_arglist (const gcall *call, ...)
10007 {
10008   enum tree_code code;
10009   bool res = false;
10010   va_list ap;
10011   const_tree arg;
10012   size_t i;
10013 
10014   va_start (ap, call);
10015   i = 0;
10016 
10017   do
10018     {
10019       code = (enum tree_code) va_arg (ap, int);
10020       switch (code)
10021 	{
10022 	case 0:
10023 	  /* This signifies an ellipsis; any further arguments are all OK.  */
10024 	  res = true;
10025 	  goto end;
10026 	case VOID_TYPE:
10027 	  /* This signifies an endlink; if no arguments remain, return
10028 	     true, otherwise return false.  */
10029 	  res = (i == gimple_call_num_args (call));
10030 	  goto end;
10031 	default:
10032 	  /* If no parameters remain or the parameter's code does not
10033 	     match the specified code, return false.  Otherwise continue
10034 	     checking any remaining arguments.  */
10035 	  arg = gimple_call_arg (call, i++);
10036 	  if (!validate_arg (arg, code))
10037 	    goto end;
10038 	  break;
10039 	}
10040     }
10041   while (1);
10042 
10043   /* We need gotos here so that every exit from the loop above goes
10044      through the single va_end call below.  */
10045  end: ;
10046   va_end (ap);
10047 
10048   return res;
10049 }
10050 
10051 /* Default target-specific builtin expander that does nothing.  */
10052 
10053 rtx
10054 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10055 			rtx target ATTRIBUTE_UNUSED,
10056 			rtx subtarget ATTRIBUTE_UNUSED,
10057 			machine_mode mode ATTRIBUTE_UNUSED,
10058 			int ignore ATTRIBUTE_UNUSED)
10059 {
10060   return NULL_RTX;
10061 }
10062 
10063 /* Returns true if EXP represents data that would potentially reside
10064    in a readonly section.  */
10065 
10066 bool
10067 readonly_data_expr (tree exp)
10068 {
10069   STRIP_NOPS (exp);
10070 
10071   if (TREE_CODE (exp) != ADDR_EXPR)
10072     return false;
10073 
10074   exp = get_base_address (TREE_OPERAND (exp, 0));
10075   if (!exp)
10076     return false;
10077 
10078   /* Make sure we call decl_readonly_section only for trees it
10079      can handle (since it returns true for everything it doesn't
10080      understand).  */
10081   if (TREE_CODE (exp) == STRING_CST
10082       || TREE_CODE (exp) == CONSTRUCTOR
10083       || (VAR_P (exp) && TREE_STATIC (exp)))
10084     return decl_readonly_section (exp, 0);
10085   else
10086     return false;
10087 }
10088 
10089 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
10090    to the call, and TYPE is its return type.
10091 
10092    Return NULL_TREE if no simplification was possible, otherwise return the
10093    simplified form of the call as a tree.
10094 
10095    The simplified form may be a constant or other expression which
10096    computes the same value, but in a more efficient manner (including
10097    calls to other builtin functions).
10098 
10099    The call may contain arguments which need to be evaluated, but
10100    which are not useful to determine the result of the call.  In
10101    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10102    COMPOUND_EXPR will be an argument which must be evaluated.
10103    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10104    COMPOUND_EXPR in the chain will contain the tree for the simplified
10105    form of the builtin function call.  */
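/* Worked examples of the folds below (illustrative):

     strpbrk ("abc", "xb")  -> "abc" + 1         constant result
     strpbrk (s1, "")       -> (char *) 0        s1 kept for side effects
     strpbrk (s1, "c")      -> strchr (s1, 'c')  single-character set  */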
10106 
10107 static tree
10108 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10109 {
10110   if (!validate_arg (s1, POINTER_TYPE)
10111       || !validate_arg (s2, POINTER_TYPE))
10112     return NULL_TREE;
10113   else
10114     {
10115       tree fn;
10116       const char *p1, *p2;
10117 
10118       p2 = c_getstr (s2);
10119       if (p2 == NULL)
10120 	return NULL_TREE;
10121 
10122       p1 = c_getstr (s1);
10123       if (p1 != NULL)
10124 	{
10125 	  const char *r = strpbrk (p1, p2);
10126 	  tree tem;
10127 
10128 	  if (r == NULL)
10129 	    return build_int_cst (TREE_TYPE (s1), 0);
10130 
10131 	  /* Return an offset into the constant string argument.  */
10132 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10133 	  return fold_convert_loc (loc, type, tem);
10134 	}
10135 
10136       if (p2[0] == '\0')
10137 	/* strpbrk(x, "") == NULL.
10138 	   Evaluate and ignore s1 in case it had side-effects.  */
10139 	return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10140 
10141       if (p2[1] != '\0')
10142 	return NULL_TREE;  /* Really call strpbrk.  */
10143 
10144       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10145       if (!fn)
10146 	return NULL_TREE;
10147 
10148       /* New argument list transforming strpbrk(s1, s2) to
10149 	 strchr(s1, s2[0]).  */
10150       return build_call_expr_loc (loc, fn, 2, s1,
10151 				  build_int_cst (integer_type_node, p2[0]));
10152     }
10153 }
10154 
10155 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
10156    to the call.
10157 
10158    Return NULL_TREE if no simplification was possible, otherwise return the
10159    simplified form of the call as a tree.
10160 
10161    The simplified form may be a constant or other expression which
10162    computes the same value, but in a more efficient manner (including
10163    calls to other builtin functions).
10164 
10165    The call may contain arguments which need to be evaluated, but
10166    which are not useful to determine the result of the call.  In
10167    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10168    COMPOUND_EXPR will be an argument which must be evaluated.
10169    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10170    COMPOUND_EXPR in the chain will contain the tree for the simplified
10171    form of the builtin function call.  */
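/* Illustrative folds performed below: both strspn ("", s2) and
   strspn (s1, "") become (size_t) 0, with the arguments kept only for
   their side effects.  */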
10172 
10173 static tree
10174 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10175 {
10176   if (!validate_arg (s1, POINTER_TYPE)
10177       || !validate_arg (s2, POINTER_TYPE))
10178     return NULL_TREE;
10179   else
10180     {
10181       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10182 
10183       /* If either argument is "", return NULL_TREE.  */
10184       if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10185 	/* Evaluate and ignore both arguments in case either one has
10186 	   side-effects.  */
10187 	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10188 				  s1, s2);
10189       return NULL_TREE;
10190     }
10191 }
10192 
10193 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
10194    to the call.
10195 
10196    Return NULL_TREE if no simplification was possible, otherwise return the
10197    simplified form of the call as a tree.
10198 
10199    The simplified form may be a constant or other expression which
10200    computes the same value, but in a more efficient manner (including
10201    calls to other builtin functions).
10202 
10203    The call may contain arguments which need to be evaluated, but
10204    which are not useful to determine the result of the call.  In
10205    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10206    COMPOUND_EXPR will be an argument which must be evaluated.
10207    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10208    COMPOUND_EXPR in the chain will contain the tree for the simplified
10209    form of the builtin function call.  */
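/* Illustrative folds performed below:

     strcspn ("", s2)  -> (size_t) 0     s2 kept for side effects
     strcspn (s1, "")  -> strlen (s1)  */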
10210 
10211 static tree
10212 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10213 {
10214   if (!validate_arg (s1, POINTER_TYPE)
10215       || !validate_arg (s2, POINTER_TYPE))
10216     return NULL_TREE;
10217   else
10218     {
10219       /* If the first argument is "", return NULL_TREE.  */
10220       const char *p1 = c_getstr (s1);
10221       if (p1 && *p1 == '\0')
10222 	{
10223 	  /* Evaluate and ignore argument s2 in case it has
10224 	     side-effects.  */
10225 	  return omit_one_operand_loc (loc, size_type_node,
10226 				   size_zero_node, s2);
10227 	}
10228 
10229       /* If the second argument is "", return __builtin_strlen(s1).  */
10230       const char *p2 = c_getstr (s2);
10231       if (p2 && *p2 == '\0')
10232 	{
10233 	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10234 
10235 	  /* If the replacement _DECL isn't initialized, don't do the
10236 	     transformation.  */
10237 	  if (!fn)
10238 	    return NULL_TREE;
10239 
10240 	  return build_call_expr_loc (loc, fn, 1, s1);
10241 	}
10242       return NULL_TREE;
10243     }
10244 }
10245 
10246 /* Fold the next_arg or va_start call EXP.  Returns true if an error
10247    was produced, false otherwise.  This is done so that we don't output
10248    the error or warning twice or three times.  */
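/* Example of code diagnosed below (illustrative only):

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);  // warning: second parameter of va_start
     }                    //          not last named argument
   */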
10249 
10250 bool
10251 fold_builtin_next_arg (tree exp, bool va_start_p)
10252 {
10253   tree fntype = TREE_TYPE (current_function_decl);
10254   int nargs = call_expr_nargs (exp);
10255   tree arg;
10256   /* There is a good chance the current input_location points inside the
10257      definition of the va_start macro (perhaps on the token for the
10258      builtin) in a system header, so warnings will not be emitted.
10259      Use the location in real source code.  */
10260   location_t current_location =
10261     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10262 					      NULL);
10263 
10264   if (!stdarg_p (fntype))
10265     {
10266       error ("%<va_start%> used in function with fixed args");
10267       return true;
10268     }
10269 
10270   if (va_start_p)
10271     {
10272 	if (nargs != 2)
10273 	{
10274 	  error ("wrong number of arguments to function %<va_start%>");
10275 	  return true;
10276 	}
10277       arg = CALL_EXPR_ARG (exp, 1);
10278     }
10279   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10280      when we checked the arguments and if needed issued a warning.  */
10281   else
10282     {
10283       if (nargs == 0)
10284 	{
10285 	  /* Evidently an out of date version of <stdarg.h>; can't validate
10286 	     va_start's second argument, but can still work as intended.  */
10287 	  warning_at (current_location,
10288 		      OPT_Wvarargs,
10289 		      "%<__builtin_next_arg%> called without an argument");
10290 	  return true;
10291 	}
10292       else if (nargs > 1)
10293 	{
10294 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
10295 	  return true;
10296 	}
10297       arg = CALL_EXPR_ARG (exp, 0);
10298     }
10299 
10300   if (TREE_CODE (arg) == SSA_NAME)
10301     arg = SSA_NAME_VAR (arg);
10302 
10303   /* We destructively modify the call to be __builtin_va_start (ap, 0)
10304      or __builtin_next_arg (0) the first time we see it, after checking
10305      the arguments and if needed issuing a warning.  */
10306   if (!integer_zerop (arg))
10307     {
10308       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10309 
10310       /* Strip off all nops for the sake of the comparison.  This
10311 	 is not quite the same as STRIP_NOPS.  It does more.
10312 	 We must also strip off INDIRECT_EXPR for C++ reference
10313 	 parameters.  */
10314       while (CONVERT_EXPR_P (arg)
10315 	     || TREE_CODE (arg) == INDIRECT_REF)
10316 	arg = TREE_OPERAND (arg, 0);
10317       if (arg != last_parm)
10318 	{
10319 	  /* FIXME: Sometimes the tree optimizers hand us something
10320 	     other than the last argument even though the user did
10321 	     use the last argument.  We just warn and proceed as if
10322 	     it were the last argument, which means we may generate
10323 	     wrong code because of it.  */
10324 	  warning_at (current_location,
10325 		      OPT_Wvarargs,
10326 		      "second parameter of %<va_start%> not last named argument");
10327 	}
10328 
10329       /* Undefined by C99 7.15.1.4p4 (va_start):
10330          "If the parameter parmN is declared with the register storage
10331          class, with a function or array type, or with a type that is
10332          not compatible with the type that results after application of
10333          the default argument promotions, the behavior is undefined."
10334       */
10335       else if (DECL_REGISTER (arg))
10336 	{
10337 	  warning_at (current_location,
10338 		      OPT_Wvarargs,
10339 		      "undefined behavior when second parameter of "
10340 		      "%<va_start%> is declared with %<register%> storage");
10341 	}
10342 
10343       /* We want to verify the second parameter just once before the tree
10344 	 optimizers are run and then avoid keeping it in the tree,
10345 	 as otherwise we could warn even for correct code like:
10346 	 void foo (int i, ...)
10347 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
10348       if (va_start_p)
10349 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10350       else
10351 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10352     }
10353   return false;
10354 }
10355 
10356 
10357 /* Expand a call EXP to __builtin_object_size.  */
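/* A sketch of the fallback implemented below: once expansion is reached
   the size is unknown, so types 0 and 1 expand to (size_t) -1 and types
   2 and 3 expand to 0, the documented defaults for
   __builtin_object_size.  */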
10358 
10359 static rtx
10360 expand_builtin_object_size (tree exp)
10361 {
10362   tree ost;
10363   int object_size_type;
10364   tree fndecl = get_callee_fndecl (exp);
10365 
10366   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10367     {
10368       error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10369 	     exp, fndecl);
10370       expand_builtin_trap ();
10371       return const0_rtx;
10372     }
10373 
10374   ost = CALL_EXPR_ARG (exp, 1);
10375   STRIP_NOPS (ost);
10376 
10377   if (TREE_CODE (ost) != INTEGER_CST
10378       || tree_int_cst_sgn (ost) < 0
10379       || compare_tree_int (ost, 3) > 0)
10380     {
10381       error ("%Klast argument of %qD is not integer constant between 0 and 3",
10382 	     exp, fndecl);
10383       expand_builtin_trap ();
10384       return const0_rtx;
10385     }
10386 
10387   object_size_type = tree_to_shwi (ost);
10388 
10389   return object_size_type < 2 ? constm1_rtx : const0_rtx;
10390 }
10391 
10392 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10393    FCODE is the BUILT_IN_* to use.
10394    Return NULL_RTX if we failed; the caller should emit a normal call,
10395    otherwise try to get the result in TARGET, if convenient (and in
10396    mode MODE if that's convenient).  */
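/* Illustrative transformation (assuming the usual fortify-style
   wrappers): with a known length that fits the destination,

     __memcpy_chk (d, s, 8, 16)  ->  memcpy (d, s, 8)

   while a detected overflow (a length of 8 against an object size of 4,
   say) keeps the checking call so the runtime check still fires.  */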
10397 
10398 static rtx
10399 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10400 			   enum built_in_function fcode)
10401 {
10402   if (!validate_arglist (exp,
10403 			 POINTER_TYPE,
10404 			 fcode == BUILT_IN_MEMSET_CHK
10405 			 ? INTEGER_TYPE : POINTER_TYPE,
10406 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10407     return NULL_RTX;
10408 
10409   tree dest = CALL_EXPR_ARG (exp, 0);
10410   tree src = CALL_EXPR_ARG (exp, 1);
10411   tree len = CALL_EXPR_ARG (exp, 2);
10412   tree size = CALL_EXPR_ARG (exp, 3);
10413 
10414   bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10415 				/*str=*/NULL_TREE, size);
10416 
10417   if (!tree_fits_uhwi_p (size))
10418     return NULL_RTX;
10419 
10420   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10421     {
10422       /* Avoid transforming the checking call to an ordinary one when
10423 	 an overflow has been detected or when the call couldn't be
10424 	 validated because the size is not constant.  */
10425       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10426 	return NULL_RTX;
10427 
10428       tree fn = NULL_TREE;
10429       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10430 	 mem{cpy,pcpy,move,set} is available.  */
10431       switch (fcode)
10432 	{
10433 	case BUILT_IN_MEMCPY_CHK:
10434 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10435 	  break;
10436 	case BUILT_IN_MEMPCPY_CHK:
10437 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10438 	  break;
10439 	case BUILT_IN_MEMMOVE_CHK:
10440 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10441 	  break;
10442 	case BUILT_IN_MEMSET_CHK:
10443 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10444 	  break;
10445 	default:
10446 	  break;
10447 	}
10448 
10449       if (! fn)
10450 	return NULL_RTX;
10451 
10452       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10453       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10454       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10455       return expand_expr (fn, target, mode, EXPAND_NORMAL);
10456     }
10457   else if (fcode == BUILT_IN_MEMSET_CHK)
10458     return NULL_RTX;
10459   else
10460     {
10461       unsigned int dest_align = get_pointer_alignment (dest);
10462 
10463       /* If DEST is not a pointer type, call the normal function.  */
10464       if (dest_align == 0)
10465 	return NULL_RTX;
10466 
10467       /* If SRC and DEST are the same (and not volatile), do nothing.  */
10468       if (operand_equal_p (src, dest, 0))
10469 	{
10470 	  tree expr;
10471 
10472 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
10473 	    {
10474 	      /* Evaluate and ignore LEN in case it has side-effects.  */
10475 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10476 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
10477 	    }
10478 
10479 	  expr = fold_build_pointer_plus (dest, len);
10480 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
10481 	}
10482 
10483       /* __memmove_chk special case.  */
10484       if (fcode == BUILT_IN_MEMMOVE_CHK)
10485 	{
10486 	  unsigned int src_align = get_pointer_alignment (src);
10487 
10488 	  if (src_align == 0)
10489 	    return NULL_RTX;
10490 
10491 	  /* If src is categorized for a readonly section we can use
10492 	     normal __memcpy_chk.  */
10493 	  if (readonly_data_expr (src))
10494 	    {
10495 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10496 	      if (!fn)
10497 		return NULL_RTX;
10498 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10499 					  dest, src, len, size);
10500 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10501 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10502 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
10503 	    }
10504 	}
10505       return NULL_RTX;
10506     }
10507 }
10508 
10509 /* Emit warning if a buffer overflow is detected at compile time.  */
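/* Example diagnosed below (illustrative): with char buf[4],

     __builtin___strcpy_chk (buf, "abcde", 4);

   is known at compile time to write 6 bytes (5 characters plus the
   terminating nul) into a 4-byte object, so check_access warns.  */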
10510 
10511 static void
10512 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10513 {
10514   /* The source string.  */
10515   tree srcstr = NULL_TREE;
10516   /* The size of the destination object.  */
10517   tree objsize = NULL_TREE;
10518   /* The string being appended to (as in __strcat_chk), or null if
10519      the operation is not a concatenation.  */
10520   tree catstr = NULL_TREE;
10521   /* The maximum length of the source sequence in a bounded operation
10522      (such as __strncat_chk) or null if the operation isn't bounded
10523      (such as __strcat_chk).  */
10524   tree maxread = NULL_TREE;
10525   /* The exact size of the access (such as in __strncpy_chk).  */
10526   tree size = NULL_TREE;
10527 
10528   switch (fcode)
10529     {
10530     case BUILT_IN_STRCPY_CHK:
10531     case BUILT_IN_STPCPY_CHK:
10532       srcstr = CALL_EXPR_ARG (exp, 1);
10533       objsize = CALL_EXPR_ARG (exp, 2);
10534       break;
10535 
10536     case BUILT_IN_STRCAT_CHK:
10537       /* For __strcat_chk the warning will be emitted only if overflowing
10538 	 by at least strlen (dest) + 1 bytes.  */
10539       catstr = CALL_EXPR_ARG (exp, 0);
10540       srcstr = CALL_EXPR_ARG (exp, 1);
10541       objsize = CALL_EXPR_ARG (exp, 2);
10542       break;
10543 
10544     case BUILT_IN_STRNCAT_CHK:
10545       catstr = CALL_EXPR_ARG (exp, 0);
10546       srcstr = CALL_EXPR_ARG (exp, 1);
10547       maxread = CALL_EXPR_ARG (exp, 2);
10548       objsize = CALL_EXPR_ARG (exp, 3);
10549       break;
10550 
10551     case BUILT_IN_STRNCPY_CHK:
10552     case BUILT_IN_STPNCPY_CHK:
10553       srcstr = CALL_EXPR_ARG (exp, 1);
10554       size = CALL_EXPR_ARG (exp, 2);
10555       objsize = CALL_EXPR_ARG (exp, 3);
10556       break;
10557 
10558     case BUILT_IN_SNPRINTF_CHK:
10559     case BUILT_IN_VSNPRINTF_CHK:
10560       maxread = CALL_EXPR_ARG (exp, 1);
10561       objsize = CALL_EXPR_ARG (exp, 3);
10562       break;
10563     default:
10564       gcc_unreachable ();
10565     }
10566 
10567   if (catstr && maxread)
10568     {
10569       /* Check __strncat_chk.  There is no way to determine the length
10570 	 of the string to which the source string is being appended, so
10571 	 just warn when the length of the source string is not known.  */
10572       check_strncat_sizes (exp, objsize);
10573       return;
10574     }
10575 
10576   /* The destination argument is the first one for all built-ins above.  */
10577   tree dst = CALL_EXPR_ARG (exp, 0);
10578 
10579   check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10580 }
10581 
10582 /* Emit warning if a buffer overflow is detected at compile time
10583    in __sprintf_chk/__vsprintf_chk calls.  */
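/* Example diagnosed below (illustrative): with char buf[4],

     __builtin___sprintf_chk (buf, 0, 4, "abcde");

   has no '%' in the format, so LEN is computed as 5 plus 1 for the
   terminating nul, which exceeds the object size 4 and triggers the
   warning.  */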
10584 
10585 static void
10586 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10587 {
10588   tree size, len, fmt;
10589   const char *fmt_str;
10590   int nargs = call_expr_nargs (exp);
10591 
10592   /* Verify the required arguments in the original call.  */
10593 
10594   if (nargs < 4)
10595     return;
10596   size = CALL_EXPR_ARG (exp, 2);
10597   fmt = CALL_EXPR_ARG (exp, 3);
10598 
10599   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10600     return;
10601 
10602   /* Check whether the format is a literal string constant.  */
10603   fmt_str = c_getstr (fmt);
10604   if (fmt_str == NULL)
10605     return;
10606 
10607   if (!init_target_chars ())
10608     return;
10609 
10610   /* If the format doesn't contain % args or %%, we know its size.  */
10611   if (strchr (fmt_str, target_percent) == 0)
10612     len = build_int_cstu (size_type_node, strlen (fmt_str));
10613   /* If the format is "%s" and first ... argument is a string literal,
10614      we know it too.  */
10615   else if (fcode == BUILT_IN_SPRINTF_CHK
10616 	   && strcmp (fmt_str, target_percent_s) == 0)
10617     {
10618       tree arg;
10619 
10620       if (nargs < 5)
10621 	return;
10622       arg = CALL_EXPR_ARG (exp, 4);
10623       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10624 	return;
10625 
10626       len = c_strlen (arg, 1);
10627       if (!len || ! tree_fits_uhwi_p (len))
10628 	return;
10629     }
10630   else
10631     return;
10632 
10633   /* Add one for the terminating nul.  */
10634   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10635 
10636   check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10637 		/*maxread=*/NULL_TREE, len, size);
10638 }
10639 
10640 /* Emit a warning if free is called with the address of a variable.  */
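/* Example diagnosed below (illustrative):

     int x;
     free (&x);  // warning: attempt to free a non-heap object 'x'
   */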
10641 
10642 static void
10643 maybe_emit_free_warning (tree exp)
10644 {
10645   if (call_expr_nargs (exp) != 1)
10646     return;
10647 
10648   tree arg = CALL_EXPR_ARG (exp, 0);
10649 
10650   STRIP_NOPS (arg);
10651   if (TREE_CODE (arg) != ADDR_EXPR)
10652     return;
10653 
10654   arg = get_base_address (TREE_OPERAND (arg, 0));
10655   if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10656     return;
10657 
10658   if (SSA_VAR_P (arg))
10659     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10660 		"%Kattempt to free a non-heap object %qD", exp, arg);
10661   else
10662     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10663 		"%Kattempt to free a non-heap object", exp);
10664 }
10665 
10666 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10667    if possible.  */
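/* Worked example (illustrative): given char a[8], the code below folds
   __builtin_object_size (&a[2], 0) to (size_t) 6, the space remaining
   in the enclosing object.  */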
10668 
10669 static tree
10670 fold_builtin_object_size (tree ptr, tree ost)
10671 {
10672   unsigned HOST_WIDE_INT bytes;
10673   int object_size_type;
10674 
10675   if (!validate_arg (ptr, POINTER_TYPE)
10676       || !validate_arg (ost, INTEGER_TYPE))
10677     return NULL_TREE;
10678 
10679   STRIP_NOPS (ost);
10680 
10681   if (TREE_CODE (ost) != INTEGER_CST
10682       || tree_int_cst_sgn (ost) < 0
10683       || compare_tree_int (ost, 3) > 0)
10684     return NULL_TREE;
10685 
10686   object_size_type = tree_to_shwi (ost);
10687 
10688   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10689      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10690      and (size_t) 0 for types 2 and 3.  */
10691   if (TREE_SIDE_EFFECTS (ptr))
10692     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10693 
10694   if (TREE_CODE (ptr) == ADDR_EXPR)
10695     {
10696       compute_builtin_object_size (ptr, object_size_type, &bytes);
10697       if (wi::fits_to_tree_p (bytes, size_type_node))
10698 	return build_int_cstu (size_type_node, bytes);
10699     }
10700   else if (TREE_CODE (ptr) == SSA_NAME)
10701     {
10702       /* If object size is not known yet, delay folding until
10703        later.  Maybe subsequent passes will help determining
10704        it.  */
10705       if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10706 	  && wi::fits_to_tree_p (bytes, size_type_node))
10707 	return build_int_cstu (size_type_node, bytes);
10708     }
10709 
10710   return NULL_TREE;
10711 }
10712 
10713 /* Builtins with folding operations that operate on "..." arguments
10714    need special handling; we need to store the arguments in a convenient
10715    data structure before attempting any folding.  Fortunately there are
10716    only a few builtins that fall into this category.  FNDECL is the
10717    function, EXP is the CALL_EXPR for the call.  */
10718 
10719 static tree
10720 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10721 {
10722   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10723   tree ret = NULL_TREE;
10724 
10725   switch (fcode)
10726     {
10727     case BUILT_IN_FPCLASSIFY:
10728       ret = fold_builtin_fpclassify (loc, args, nargs);
10729       break;
10730 
10731     default:
10732       break;
10733     }
10734   if (ret)
10735     {
10736       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10737       SET_EXPR_LOCATION (ret, loc);
10738       TREE_NO_WARNING (ret) = 1;
10739       return ret;
10740     }
10741   return NULL_TREE;
10742 }
10743 
10744 /* Initialize format string characters in the target charset.  */
10745 
10746 bool
10747 init_target_chars (void)
10748 {
10749   static bool init;
10750   if (!init)
10751     {
10752       target_newline = lang_hooks.to_target_charset ('\n');
10753       target_percent = lang_hooks.to_target_charset ('%');
10754       target_c = lang_hooks.to_target_charset ('c');
10755       target_s = lang_hooks.to_target_charset ('s');
10756       if (target_newline == 0 || target_percent == 0 || target_c == 0
10757 	  || target_s == 0)
10758 	return false;
10759 
10760       target_percent_c[0] = target_percent;
10761       target_percent_c[1] = target_c;
10762       target_percent_c[2] = '\0';
10763 
10764       target_percent_s[0] = target_percent;
10765       target_percent_s[1] = target_s;
10766       target_percent_s[2] = '\0';
10767 
10768       target_percent_s_newline[0] = target_percent;
10769       target_percent_s_newline[1] = target_s;
10770       target_percent_s_newline[2] = target_newline;
10771       target_percent_s_newline[3] = '\0';
10772 
10773       init = true;
10774     }
10775   return true;
10776 }
10777 
10778 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
10779    and no overflow/underflow occurred.  INEXACT is true if M was not
10780    exactly calculated.  TYPE is the tree type for the result.  This
10781    function assumes that you cleared the MPFR flags and then
10782    calculated M to see if anything subsequently set a flag prior to
10783    entering this function.  Return NULL_TREE if any checks fail.  */
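/* The expected calling sequence, restated as a sketch (mpfr_sin stands
   in for any mpfr_* computation):

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);
   */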
10784 
10785 static tree
10786 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10787 {
10788   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10789      overflow/underflow occurred.  If -frounding-math, proceed iff the
10790      result of calling FUNC was exact.  */
10791   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10792       && (!flag_rounding_math || !inexact))
10793     {
10794       REAL_VALUE_TYPE rr;
10795 
10796       real_from_mpfr (&rr, m, type, GMP_RNDN);
10797       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10798 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
10799 	 but the mpfr_t is not, then we underflowed in the
10800 	 conversion.  */
10801       if (real_isfinite (&rr)
10802 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10803         {
10804 	  REAL_VALUE_TYPE rmode;
10805 
10806 	  real_convert (&rmode, TYPE_MODE (type), &rr);
10807 	  /* Proceed iff the specified mode can hold the value.  */
10808 	  if (real_identical (&rmode, &rr))
10809 	    return build_real (type, rmode);
10810 	}
10811     }
10812   return NULL_TREE;
10813 }
10814 
10815 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
10816    number and no overflow/underflow occurred.  INEXACT is true if M
10817    was not exactly calculated.  TYPE is the tree type for the result.
10818    This function assumes that you cleared the MPFR flags and then
10819    calculated M to see if anything subsequently set a flag prior to
10820    entering this function.  Return NULL_TREE if any checks fail;
10821    if FORCE_CONVERT is true, the checks are bypassed.  */
10822 
10823 static tree
10824 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10825 {
10826   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10827      overflow/underflow occurred.  If -frounding-math, proceed iff the
10828      result of calling FUNC was exact.  */
10829   if (force_convert
10830       || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10831 	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
10832 	  && (!flag_rounding_math || !inexact)))
10833     {
10834       REAL_VALUE_TYPE re, im;
10835 
10836       real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10837       real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10838       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10839 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
10840 	 but the mpfr_t is not, then we underflowed in the
10841 	 conversion.  */
10842       if (force_convert
10843 	  || (real_isfinite (&re) && real_isfinite (&im)
10844 	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10845 	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10846         {
10847 	  REAL_VALUE_TYPE re_mode, im_mode;
10848 
10849 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10850 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10851 	  /* Proceed iff the specified mode can hold the value.  */
10852 	  if (force_convert
10853 	      || (real_identical (&re_mode, &re)
10854 		  && real_identical (&im_mode, &im)))
10855 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10856 				  build_real (TREE_TYPE (type), im_mode));
10857 	}
10858     }
10859   return NULL_TREE;
10860 }
10861 
10862 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10863    the pointer *(ARG_QUO) and return the result.  The type is taken
10864    from the type of ARG0 and is used for setting the precision of the
10865    calculation and results.  */
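/* Hedged numeric example: for remquo (5.0, 3.0, &q) the quotient
   rounded to nearest is 2 (5/3 is about 1.67), so the fold below stores
   2 through the pointer and yields the remainder 5.0 - 2*3.0 = -1.0.  */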
10866 
10867 static tree
10868 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10869 {
10870   tree const type = TREE_TYPE (arg0);
10871   tree result = NULL_TREE;
10872 
10873   STRIP_NOPS (arg0);
10874   STRIP_NOPS (arg1);
10875 
10876   /* To proceed, MPFR must exactly represent the target floating point
10877      format, which only happens when the target base equals two.  */
10878   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10879       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10880       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10881     {
10882       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10883       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10884 
10885       if (real_isfinite (ra0) && real_isfinite (ra1))
10886         {
10887 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10888 	  const int prec = fmt->p;
10889 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10890 	  tree result_rem;
10891 	  long integer_quo;
10892 	  mpfr_t m0, m1;
10893 
10894 	  mpfr_inits2 (prec, m0, m1, NULL);
10895 	  mpfr_from_real (m0, ra0, GMP_RNDN);
10896 	  mpfr_from_real (m1, ra1, GMP_RNDN);
10897 	  mpfr_clear_flags ();
10898 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10899 	  /* Remquo is independent of the rounding mode, so pass
10900 	     inexact=0 to do_mpfr_ckconv().  */
10901 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10902 	  mpfr_clears (m0, m1, NULL);
10903 	  if (result_rem)
10904 	    {
10905 	      /* MPFR calculates quo in the host's long so it may
10906 		 return more bits in quo than the target int can hold
10907 		 if sizeof(host long) > sizeof(target int).  This can
10908 		 happen even for native compilers in LP64 mode.  In
10909 		 these cases, reduce quo modulo 2^(INT_TYPE_SIZE - 1) so
10910 		 that the result fits in the target int while leaving
10911 		 one bit for the sign.  */
10912 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10913 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
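	      /* E.g. (illustrative): on an LP64 host targeting a
		 32-bit int, the test above is 64 > 32 and quo is
		 reduced modulo 2^31, so a host value of 0x123456789
		 becomes 0x23456789, which fits in the target int.  */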
10914 
10915 	      /* Dereference the quo pointer argument.  */
10916 	      arg_quo = build_fold_indirect_ref (arg_quo);
10917 	      /* Proceed iff a valid pointer type was passed in.  */
10918 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10919 	        {
10920 		  /* Set the value. */
10921 		  tree result_quo
10922 		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10923 				   build_int_cst (TREE_TYPE (arg_quo),
10924 						  integer_quo));
10925 		  TREE_SIDE_EFFECTS (result_quo) = 1;
10926 		  /* Combine the quo assignment with the rem.  */
10927 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10928 						    result_quo, result_rem));
10929 		}
10930 	    }
10931 	}
10932     }
10933   return result;
10934 }
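
/* For illustration (not part of the original sources): with constant
   arguments, a call such as

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   can be folded here to the equivalent of (q = 2, -1.0), a
   COMPOUND_EXPR combining the quo store with the REAL_CST remainder,
   because the round-to-nearest quotient of 5.0 / 3.0 is 2 and
   5.0 - 2 * 3.0 is -1.0.  */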
10935 
10936 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10937    resulting value as a tree with type TYPE.  The mpfr precision is
10938    set to the precision of TYPE.  We assume that this mpfr function
10939    returns zero if the result could be calculated exactly within the
10940    requested precision.  In addition, the integer pointer represented
10941    by ARG_SG will be dereferenced and set to the appropriate signgam
10942    value (-1 or 1).  */
10943 
10944 static tree
10945 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10946 {
10947   tree result = NULL_TREE;
10948 
10949   STRIP_NOPS (arg);
10950 
10951   /* To proceed, MPFR must exactly represent the target floating point
10952      format, which only happens when the target base equals two.  Also
10953      verify ARG is a constant and that ARG_SG is an int pointer.  */
10954   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10955       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10956       && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10957       && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10958     {
10959       const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10960 
10961       /* In addition to NaN and Inf, the argument cannot be zero or a
10962 	 negative integer.  */
10963       if (real_isfinite (ra)
10964 	  && ra->cl != rvc_zero
10965 	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10966         {
10967 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10968 	  const int prec = fmt->p;
10969 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10970 	  int inexact, sg;
10971 	  mpfr_t m;
10972 	  tree result_lg;
10973 
10974 	  mpfr_init2 (m, prec);
10975 	  mpfr_from_real (m, ra, GMP_RNDN);
10976 	  mpfr_clear_flags ();
10977 	  inexact = mpfr_lgamma (m, &sg, m, rnd);
10978 	  result_lg = do_mpfr_ckconv (m, type, inexact);
10979 	  mpfr_clear (m);
10980 	  if (result_lg)
10981 	    {
10982 	      tree result_sg;
10983 
10984 	      /* Dereference the arg_sg pointer argument.  */
10985 	      arg_sg = build_fold_indirect_ref (arg_sg);
10986 	      /* Assign the signgam value into *arg_sg. */
10987 	      result_sg = fold_build2 (MODIFY_EXPR,
10988 				       TREE_TYPE (arg_sg), arg_sg,
10989 				       build_int_cst (TREE_TYPE (arg_sg), sg));
10990 	      TREE_SIDE_EFFECTS (result_sg) = 1;
10991 	      /* Combine the signgam assignment with the lgamma result.  */
10992 	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10993 						result_sg, result_lg));
10994 	    }
10995 	}
10996     }
10997 
10998   return result;
10999 }
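
/* Worked example (illustrative): folding __builtin_lgamma_r (-0.5, &sg)
   evaluates lgamma at -0.5.  Gamma(-0.5) is -2*sqrt(pi), which is
   negative, so the result is log(2*sqrt(pi)) ~= 1.2655 with sg set to
   -1; the folded tree is the equivalent of (sg = -1, 1.2655...), a
   COMPOUND_EXPR of the signgam store and the REAL_CST result.  */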
11000 
11001 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11002    mpc function FUNC on them and return the resulting value as a tree
11003    with type TYPE.  The mpfr precision is set to the precision of
11004    TYPE.  We assume that function FUNC returns zero if the result
11005    could be calculated exactly within the requested precision.  If
11006    DO_NONFINITE is true, then fold expressions containing Inf or NaN
11007    in the arguments and/or results.  */
11008 
11009 tree
11010 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11011 	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11012 {
11013   tree result = NULL_TREE;
11014 
11015   STRIP_NOPS (arg0);
11016   STRIP_NOPS (arg1);
11017 
11018   /* To proceed, MPFR must exactly represent the target floating point
11019      format, which only happens when the target base equals two.  */
11020   if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11021       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11022       && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11023       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11024       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11025     {
11026       const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11027       const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11028       const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11029       const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11030 
11031       if (do_nonfinite
11032 	  || (real_isfinite (re0) && real_isfinite (im0)
11033 	      && real_isfinite (re1) && real_isfinite (im1)))
11034         {
11035 	  const struct real_format *const fmt =
11036 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11037 	  const int prec = fmt->p;
11038 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11039 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11040 	  int inexact;
11041 	  mpc_t m0, m1;
11042 
11043 	  mpc_init2 (m0, prec);
11044 	  mpc_init2 (m1, prec);
11045 	  mpfr_from_real (mpc_realref (m0), re0, rnd);
11046 	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
11047 	  mpfr_from_real (mpc_realref (m1), re1, rnd);
11048 	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
11049 	  mpfr_clear_flags ();
11050 	  inexact = func (m0, m0, m1, crnd);
11051 	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11052 	  mpc_clear (m0);
11053 	  mpc_clear (m1);
11054 	}
11055     }
11056 
11057   return result;
11058 }
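
/* For illustration: the constant folders can use this helper for the
   two-argument complex builtins by passing the matching mpc entry
   point, e.g. (a sketch, assuming ARG0 and ARG1 are COMPLEX_CSTs of
   the right type):

     tree folded = do_mpc_arg2 (arg0, arg1, type,
				/*do_nonfinite=*/ 0, mpc_pow);

   which evaluates cpow at compile time when both operands are
   constants with binary floating-point parts.  */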
11059 
11060 /* A wrapper function for builtin folding that prevents warnings for
11061    "statement without effect" and the like, caused by removing the
11062    call node before the warning is generated.  */
11063 
11064 tree
11065 fold_call_stmt (gcall *stmt, bool ignore)
11066 {
11067   tree ret = NULL_TREE;
11068   tree fndecl = gimple_call_fndecl (stmt);
11069   location_t loc = gimple_location (stmt);
11070   if (fndecl && fndecl_built_in_p (fndecl)
11071       && !gimple_call_va_arg_pack_p (stmt))
11072     {
11073       int nargs = gimple_call_num_args (stmt);
11074       tree *args = (nargs > 0
11075 		    ? gimple_call_arg_ptr (stmt, 0)
11076 		    : &error_mark_node);
11077 
11078       if (avoid_folding_inline_builtin (fndecl))
11079 	return NULL_TREE;
11080       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11081         {
11082 	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
11083         }
11084       else
11085 	{
11086 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11087 	  if (ret)
11088 	    {
11089 	      /* Propagate location information from original call to
11090 		 expansion of builtin.  Otherwise things like
11091 		 maybe_emit_chk_warning, that operate on the expansion
11092 		 of a builtin, will use the wrong location information.  */
11093 	      if (gimple_has_location (stmt))
11094                 {
11095 		  tree realret = ret;
11096 		  if (TREE_CODE (ret) == NOP_EXPR)
11097 		    realret = TREE_OPERAND (ret, 0);
11098 		  if (CAN_HAVE_LOCATION_P (realret)
11099 		      && !EXPR_HAS_LOCATION (realret))
11100 		    SET_EXPR_LOCATION (realret, loc);
11101                   return realret;
11102                 }
11103 	      return ret;
11104 	    }
11105 	}
11106     }
11107   return NULL_TREE;
11108 }
11109 
11110 /* Look up the function in builtin_decl that corresponds to DECL
11111    and set ASMSPEC as its user assembler name.  DECL must be a
11112    function decl that declares a builtin.  */
11113 
11114 void
11115 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11116 {
11117   gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11118 	      && asmspec != 0);
11119 
11120   tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11121   set_user_assembler_name (builtin, asmspec);
11122 
11123   if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11124       && INT_TYPE_SIZE < BITS_PER_WORD)
11125     {
11126       scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11127       set_user_assembler_libfunc ("ffs", asmspec);
11128       set_optab_libfunc (ffs_optab, mode, "ffs");
11129     }
11130 }
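
/* Illustrative use (hypothetical user code, not from the original
   sources): a redeclaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   renames the builtin's assembler name; on targets where
   INT_TYPE_SIZE < BITS_PER_WORD the "ffs" libfunc is renamed as
   well, since expanding the ffs optab may fall back to a library
   call under that name.  */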
11131 
11132 /* Return true if DECL is a builtin that expands to a constant or similarly
11133    simple code.  */
11134 bool
11135 is_simple_builtin (tree decl)
11136 {
11137   if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11138     switch (DECL_FUNCTION_CODE (decl))
11139       {
11140 	/* Builtins that expand to constants.  */
11141       case BUILT_IN_CONSTANT_P:
11142       case BUILT_IN_EXPECT:
11143       case BUILT_IN_OBJECT_SIZE:
11144       case BUILT_IN_UNREACHABLE:
11145 	/* Simple register moves or loads from stack.  */
11146       case BUILT_IN_ASSUME_ALIGNED:
11147       case BUILT_IN_RETURN_ADDRESS:
11148       case BUILT_IN_EXTRACT_RETURN_ADDR:
11149       case BUILT_IN_FROB_RETURN_ADDR:
11150       case BUILT_IN_RETURN:
11151       case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11152       case BUILT_IN_FRAME_ADDRESS:
11153       case BUILT_IN_VA_END:
11154       case BUILT_IN_STACK_SAVE:
11155       case BUILT_IN_STACK_RESTORE:
11156 	/* Builtins that return exception state or move registers around.  */
11157       case BUILT_IN_EH_FILTER:
11158       case BUILT_IN_EH_POINTER:
11159       case BUILT_IN_EH_COPY_VALUES:
11160 	return true;
11161 
11162       default:
11163 	return false;
11164       }
11165 
11166   return false;
11167 }
11168 
11169 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11170    most probably expanded inline into reasonably simple code.  This is a
11171    superset of is_simple_builtin.  */
11172 bool
11173 is_inexpensive_builtin (tree decl)
11174 {
11175   if (!decl)
11176     return false;
11177   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11178     return true;
11179   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11180     switch (DECL_FUNCTION_CODE (decl))
11181       {
11182       case BUILT_IN_ABS:
11183       CASE_BUILT_IN_ALLOCA:
11184       case BUILT_IN_BSWAP16:
11185       case BUILT_IN_BSWAP32:
11186       case BUILT_IN_BSWAP64:
11187       case BUILT_IN_CLZ:
11188       case BUILT_IN_CLZIMAX:
11189       case BUILT_IN_CLZL:
11190       case BUILT_IN_CLZLL:
11191       case BUILT_IN_CTZ:
11192       case BUILT_IN_CTZIMAX:
11193       case BUILT_IN_CTZL:
11194       case BUILT_IN_CTZLL:
11195       case BUILT_IN_FFS:
11196       case BUILT_IN_FFSIMAX:
11197       case BUILT_IN_FFSL:
11198       case BUILT_IN_FFSLL:
11199       case BUILT_IN_IMAXABS:
11200       case BUILT_IN_FINITE:
11201       case BUILT_IN_FINITEF:
11202       case BUILT_IN_FINITEL:
11203       case BUILT_IN_FINITED32:
11204       case BUILT_IN_FINITED64:
11205       case BUILT_IN_FINITED128:
11206       case BUILT_IN_FPCLASSIFY:
11207       case BUILT_IN_ISFINITE:
11208       case BUILT_IN_ISINF_SIGN:
11209       case BUILT_IN_ISINF:
11210       case BUILT_IN_ISINFF:
11211       case BUILT_IN_ISINFL:
11212       case BUILT_IN_ISINFD32:
11213       case BUILT_IN_ISINFD64:
11214       case BUILT_IN_ISINFD128:
11215       case BUILT_IN_ISNAN:
11216       case BUILT_IN_ISNANF:
11217       case BUILT_IN_ISNANL:
11218       case BUILT_IN_ISNAND32:
11219       case BUILT_IN_ISNAND64:
11220       case BUILT_IN_ISNAND128:
11221       case BUILT_IN_ISNORMAL:
11222       case BUILT_IN_ISGREATER:
11223       case BUILT_IN_ISGREATEREQUAL:
11224       case BUILT_IN_ISLESS:
11225       case BUILT_IN_ISLESSEQUAL:
11226       case BUILT_IN_ISLESSGREATER:
11227       case BUILT_IN_ISUNORDERED:
11228       case BUILT_IN_VA_ARG_PACK:
11229       case BUILT_IN_VA_ARG_PACK_LEN:
11230       case BUILT_IN_VA_COPY:
11231       case BUILT_IN_TRAP:
11232       case BUILT_IN_SAVEREGS:
11233       case BUILT_IN_POPCOUNTL:
11234       case BUILT_IN_POPCOUNTLL:
11235       case BUILT_IN_POPCOUNTIMAX:
11236       case BUILT_IN_POPCOUNT:
11237       case BUILT_IN_PARITYL:
11238       case BUILT_IN_PARITYLL:
11239       case BUILT_IN_PARITYIMAX:
11240       case BUILT_IN_PARITY:
11241       case BUILT_IN_LABS:
11242       case BUILT_IN_LLABS:
11243       case BUILT_IN_PREFETCH:
11244       case BUILT_IN_ACC_ON_DEVICE:
11245 	return true;
11246 
11247       default:
11248 	return is_simple_builtin (decl);
11249       }
11250 
11251   return false;
11252 }
11253 
11254 /* Return true if T is a constant and the value cast to a target char
11255    can be represented by a host char.
11256    Store the cast char constant in *P if so.  */
11257 
11258 bool
11259 target_char_cst_p (tree t, char *p)
11260 {
11261   if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11262     return false;
11263 
11264   *p = (char)tree_to_uhwi (t);
11265   return true;
11266 }
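
/* Usage sketch (illustrative): assuming host and target chars have
   the same width,

     char c;
     if (target_char_cst_p (build_int_cst (char_type_node, 'a'), &c))
       gcc_assert (c == 'a');

   The CHAR_TYPE_SIZE check rejects configurations where the cast to
   the host char could silently drop bits.  */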
11267 
11268 /* Return true if the builtin DECL is implemented in a standard library.
11269    Otherwise return false; this does not guarantee DECL is not in a
11270    library (thus the list of handled builtins below may be incomplete).  */
11271 
11272 bool
11273 builtin_with_linkage_p (tree decl)
11274 {
11275   if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11276     switch (DECL_FUNCTION_CODE (decl))
11277     {
11278       CASE_FLT_FN (BUILT_IN_ACOS):
11279       CASE_FLT_FN (BUILT_IN_ACOSH):
11280       CASE_FLT_FN (BUILT_IN_ASIN):
11281       CASE_FLT_FN (BUILT_IN_ASINH):
11282       CASE_FLT_FN (BUILT_IN_ATAN):
11283       CASE_FLT_FN (BUILT_IN_ATANH):
11284       CASE_FLT_FN (BUILT_IN_ATAN2):
11285       CASE_FLT_FN (BUILT_IN_CBRT):
11286       CASE_FLT_FN (BUILT_IN_CEIL):
11287       CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11288       CASE_FLT_FN (BUILT_IN_COPYSIGN):
11289       CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11290       CASE_FLT_FN (BUILT_IN_COS):
11291       CASE_FLT_FN (BUILT_IN_COSH):
11292       CASE_FLT_FN (BUILT_IN_ERF):
11293       CASE_FLT_FN (BUILT_IN_ERFC):
11294       CASE_FLT_FN (BUILT_IN_EXP):
11295       CASE_FLT_FN (BUILT_IN_EXP2):
11296       CASE_FLT_FN (BUILT_IN_EXPM1):
11297       CASE_FLT_FN (BUILT_IN_FABS):
11298       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11299       CASE_FLT_FN (BUILT_IN_FDIM):
11300       CASE_FLT_FN (BUILT_IN_FLOOR):
11301       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11302       CASE_FLT_FN (BUILT_IN_FMA):
11303       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11304       CASE_FLT_FN (BUILT_IN_FMAX):
11305       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11306       CASE_FLT_FN (BUILT_IN_FMIN):
11307       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11308       CASE_FLT_FN (BUILT_IN_FMOD):
11309       CASE_FLT_FN (BUILT_IN_FREXP):
11310       CASE_FLT_FN (BUILT_IN_HYPOT):
11311       CASE_FLT_FN (BUILT_IN_ILOGB):
11312       CASE_FLT_FN (BUILT_IN_LDEXP):
11313       CASE_FLT_FN (BUILT_IN_LGAMMA):
11314       CASE_FLT_FN (BUILT_IN_LLRINT):
11315       CASE_FLT_FN (BUILT_IN_LLROUND):
11316       CASE_FLT_FN (BUILT_IN_LOG):
11317       CASE_FLT_FN (BUILT_IN_LOG10):
11318       CASE_FLT_FN (BUILT_IN_LOG1P):
11319       CASE_FLT_FN (BUILT_IN_LOG2):
11320       CASE_FLT_FN (BUILT_IN_LOGB):
11321       CASE_FLT_FN (BUILT_IN_LRINT):
11322       CASE_FLT_FN (BUILT_IN_LROUND):
11323       CASE_FLT_FN (BUILT_IN_MODF):
11324       CASE_FLT_FN (BUILT_IN_NAN):
11325       CASE_FLT_FN (BUILT_IN_NEARBYINT):
11326       CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11327       CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11328       CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11329       CASE_FLT_FN (BUILT_IN_POW):
11330       CASE_FLT_FN (BUILT_IN_REMAINDER):
11331       CASE_FLT_FN (BUILT_IN_REMQUO):
11332       CASE_FLT_FN (BUILT_IN_RINT):
11333       CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11334       CASE_FLT_FN (BUILT_IN_ROUND):
11335       CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11336       CASE_FLT_FN (BUILT_IN_SCALBLN):
11337       CASE_FLT_FN (BUILT_IN_SCALBN):
11338       CASE_FLT_FN (BUILT_IN_SIN):
11339       CASE_FLT_FN (BUILT_IN_SINH):
11340       CASE_FLT_FN (BUILT_IN_SINCOS):
11341       CASE_FLT_FN (BUILT_IN_SQRT):
11342       CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11343       CASE_FLT_FN (BUILT_IN_TAN):
11344       CASE_FLT_FN (BUILT_IN_TANH):
11345       CASE_FLT_FN (BUILT_IN_TGAMMA):
11346       CASE_FLT_FN (BUILT_IN_TRUNC):
11347       CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11348 	return true;
11349       default:
11350 	break;
11351     }
11352   return false;
11353 }
11354