xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/builtins.c (revision c38e7cc395b1472a774ff828e46123de44c628e9)
1 /* Expand builtin functions.
2    Copyright (C) 1988-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "varasm.h"
40 #include "tree-object-size.h"
41 #include "realmpfr.h"
42 #include "predict.h"
43 #include "hashtab.h"
44 #include "hard-reg-set.h"
45 #include "function.h"
46 #include "cfgrtl.h"
47 #include "basic-block.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "flags.h"
54 #include "regs.h"
55 #include "except.h"
56 #include "insn-config.h"
57 #include "statistics.h"
58 #include "real.h"
59 #include "fixed-value.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "libfuncs.h"
69 #include "recog.h"
70 #include "output.h"
71 #include "typeclass.h"
72 #include "tm_p.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "tree-ssanames.h"
76 #include "tree-dfa.h"
77 #include "value-prof.h"
78 #include "diagnostic-core.h"
79 #include "builtins.h"
80 #include "asan.h"
81 #include "cilk.h"
82 #include "ipa-ref.h"
83 #include "lto-streamer.h"
84 #include "cgraph.h"
85 #include "tree-chkp.h"
86 #include "rtl-chkp.h"
87 #include "gomp-constants.h"
88 
89 
90 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
91 
92 struct target_builtins default_target_builtins;
93 #if SWITCHABLE_TARGET
94 struct target_builtins *this_target_builtins = &default_target_builtins;
95 #endif
96 
97 /* Define the names of the builtin function types and codes.  */
98 const char *const built_in_class_names[BUILT_IN_LAST]
99   = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
100 
101 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
102 const char * built_in_names[(int) END_BUILTINS] =
103 {
104 #include "builtins.def"
105 };
106 #undef DEF_BUILTIN
107 
108 /* Setup an array of builtin_info_type, make sure each element decl is
109    initialized to NULL_TREE.  */
110 builtin_info_type builtin_info[(int)END_BUILTINS];
111 
112 /* Non-zero if __builtin_constant_p should be folded right away.  */
113 bool force_folding_builtin_constant_p;
114 
115 static rtx c_readstr (const char *, machine_mode);
116 static int target_char_cast (tree, char *);
117 static rtx get_memory_rtx (tree, tree);
118 static int apply_args_size (void);
119 static int apply_result_size (void);
120 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
121 static rtx result_vector (int, rtx);
122 #endif
123 static void expand_builtin_update_setjmp_buf (rtx);
124 static void expand_builtin_prefetch (tree);
125 static rtx expand_builtin_apply_args (void);
126 static rtx expand_builtin_apply_args_1 (void);
127 static rtx expand_builtin_apply (rtx, rtx, rtx);
128 static void expand_builtin_return (rtx);
129 static enum type_class type_to_class (tree);
130 static rtx expand_builtin_classify_type (tree);
131 static void expand_errno_check (tree, rtx);
132 static rtx expand_builtin_mathfn (tree, rtx, rtx);
133 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
134 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
135 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
136 static rtx expand_builtin_interclass_mathfn (tree, rtx);
137 static rtx expand_builtin_sincos (tree);
138 static rtx expand_builtin_cexpi (tree, rtx);
139 static rtx expand_builtin_int_roundingfn (tree, rtx);
140 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
141 static rtx expand_builtin_next_arg (void);
142 static rtx expand_builtin_va_start (tree);
143 static rtx expand_builtin_va_end (tree);
144 static rtx expand_builtin_va_copy (tree);
145 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
146 static rtx expand_builtin_strcmp (tree, rtx);
147 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
148 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
149 static rtx expand_builtin_memcpy (tree, rtx);
150 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
151 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
152 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
153 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
154 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
155 					machine_mode, int, tree);
156 static rtx expand_builtin_strcpy (tree, rtx);
157 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
158 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
159 static rtx expand_builtin_strncpy (tree, rtx);
160 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
161 static rtx expand_builtin_memset (tree, rtx, machine_mode);
162 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
163 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
164 static rtx expand_builtin_bzero (tree);
165 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
166 static rtx expand_builtin_alloca (tree, bool);
167 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
168 static rtx expand_builtin_frame_address (tree, tree);
169 static tree stabilize_va_list_loc (location_t, tree, int);
170 static rtx expand_builtin_expect (tree, rtx);
171 static tree fold_builtin_constant_p (tree);
172 static tree fold_builtin_classify_type (tree);
173 static tree fold_builtin_strlen (location_t, tree, tree);
174 static tree fold_builtin_inf (location_t, tree, int);
175 static tree fold_builtin_nan (tree, tree, int);
176 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
177 static bool validate_arg (const_tree, enum tree_code code);
178 static bool integer_valued_real_p (tree);
179 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
180 static rtx expand_builtin_fabs (tree, rtx, rtx);
181 static rtx expand_builtin_signbit (tree, rtx);
182 static tree fold_builtin_sqrt (location_t, tree, tree);
183 static tree fold_builtin_cbrt (location_t, tree, tree);
184 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
185 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
186 static tree fold_builtin_cos (location_t, tree, tree, tree);
187 static tree fold_builtin_cosh (location_t, tree, tree, tree);
188 static tree fold_builtin_tan (tree, tree);
189 static tree fold_builtin_trunc (location_t, tree, tree);
190 static tree fold_builtin_floor (location_t, tree, tree);
191 static tree fold_builtin_ceil (location_t, tree, tree);
192 static tree fold_builtin_round (location_t, tree, tree);
193 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
194 static tree fold_builtin_bitop (tree, tree);
195 static tree fold_builtin_strchr (location_t, tree, tree, tree);
196 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
197 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
198 static tree fold_builtin_strcmp (location_t, tree, tree);
199 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
200 static tree fold_builtin_signbit (location_t, tree, tree);
201 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_isascii (location_t, tree);
203 static tree fold_builtin_toascii (location_t, tree);
204 static tree fold_builtin_isdigit (location_t, tree);
205 static tree fold_builtin_fabs (location_t, tree, tree);
206 static tree fold_builtin_abs (location_t, tree, tree);
207 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
208 					enum tree_code);
209 static tree fold_builtin_0 (location_t, tree);
210 static tree fold_builtin_1 (location_t, tree, tree);
211 static tree fold_builtin_2 (location_t, tree, tree, tree);
212 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
213 static tree fold_builtin_varargs (location_t, tree, tree*, int);
214 
215 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
216 static tree fold_builtin_strstr (location_t, tree, tree, tree);
217 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
218 static tree fold_builtin_strspn (location_t, tree, tree);
219 static tree fold_builtin_strcspn (location_t, tree, tree);
220 
221 static rtx expand_builtin_object_size (tree);
222 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
223 				      enum built_in_function);
224 static void maybe_emit_chk_warning (tree, enum built_in_function);
225 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
226 static void maybe_emit_free_warning (tree);
227 static tree fold_builtin_object_size (tree, tree);
228 
229 unsigned HOST_WIDE_INT target_newline;
230 unsigned HOST_WIDE_INT target_percent;
231 static unsigned HOST_WIDE_INT target_c;
232 static unsigned HOST_WIDE_INT target_s;
233 char target_percent_c[3];
234 char target_percent_s[3];
235 char target_percent_s_newline[4];
236 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
237 			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_arg2 (tree, tree, tree,
239 			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_arg3 (tree, tree, tree, tree,
241 			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
242 static tree do_mpfr_sincos (tree, tree, tree);
243 static tree do_mpfr_bessel_n (tree, tree, tree,
244 			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
245 			      const REAL_VALUE_TYPE *, bool);
246 static tree do_mpfr_remquo (tree, tree, tree);
247 static tree do_mpfr_lgamma_r (tree, tree, tree);
248 static void expand_builtin_sync_synchronize (void);
249 
250 /* Return true if NAME starts with __builtin_ or __sync_.  */
251 
252 static bool
253 is_builtin_name (const char *name)
254 {
255   if (strncmp (name, "__builtin_", 10) == 0)
256     return true;
257   if (strncmp (name, "__sync_", 7) == 0)
258     return true;
259   if (strncmp (name, "__atomic_", 9) == 0)
260     return true;
261   if (flag_cilkplus
262       && (!strcmp (name, "__cilkrts_detach")
263 	  || !strcmp (name, "__cilkrts_pop_frame")))
264     return true;
265   return false;
266 }
267 
268 
269 /* Return true if DECL is a function symbol representing a built-in.  */
270 
271 bool
272 is_builtin_fn (tree decl)
273 {
274   return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
275 }
276 
277 /* Return true if NODE should be considered for inline expansion regardless
278    of the optimization level.  This means whenever a function is invoked with
279    its "internal" name, which normally contains the prefix "__builtin".  */
280 
281 static bool
282 called_as_built_in (tree node)
283 {
284   /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
285      we want the name used to call the function, not the name it
286      will have. */
287   const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
288   return is_builtin_name (name);
289 }
290 
291 /* Compute values M and N such that M divides (address of EXP - N) and such
292    that N < M.  If these numbers can be determined, store M in alignp and N in
293    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
294    *alignp and any bit-offset to *bitposp.
295 
296    Note that the address (and thus the alignment) computed here is based
297    on the address to which a symbol resolves, whereas DECL_ALIGN is based
298    on the address at which an object is actually located.  These two
299    addresses are not always the same.  For example, on ARM targets,
300    the address &foo of a Thumb function foo() has the lowest bit set,
301    whereas foo() itself starts on an even address.
302 
303    If ADDR_P is true we are taking the address of the memory reference EXP
304    and thus cannot rely on the access taking place.  */
305 
306 static bool
307 get_object_alignment_2 (tree exp, unsigned int *alignp,
308 			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
309 {
310   HOST_WIDE_INT bitsize, bitpos;
311   tree offset;
312   machine_mode mode;
313   int unsignedp, volatilep;
314   unsigned int align = BITS_PER_UNIT;
315   bool known_alignment = false;
316 
317   /* Get the innermost object and the constant (bitpos) and possibly
318      variable (offset) offset of the access.  */
319   exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 			     &mode, &unsignedp, &volatilep, true);
321 
322   /* Extract alignment information from the innermost object and
323      possibly adjust bitpos and offset.  */
324   if (TREE_CODE (exp) == FUNCTION_DECL)
325     {
326       /* Function addresses can encode extra information besides their
327 	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
328 	 allows the low bit to be used as a virtual bit, we know
329 	 that the address itself must be at least 2-byte aligned.  */
330       if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
331 	align = 2 * BITS_PER_UNIT;
332     }
333   else if (TREE_CODE (exp) == LABEL_DECL)
334     ;
335   else if (TREE_CODE (exp) == CONST_DECL)
336     {
337       /* The alignment of a CONST_DECL is determined by its initializer.  */
338       exp = DECL_INITIAL (exp);
339       align = TYPE_ALIGN (TREE_TYPE (exp));
340 #ifdef CONSTANT_ALIGNMENT
341       if (CONSTANT_CLASS_P (exp))
342 	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
343 #endif
344       known_alignment = true;
345     }
346   else if (DECL_P (exp))
347     {
348       align = DECL_ALIGN (exp);
349       known_alignment = true;
350     }
351   else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
352     {
353       align = TYPE_ALIGN (TREE_TYPE (exp));
354     }
355   else if (TREE_CODE (exp) == INDIRECT_REF
356 	   || TREE_CODE (exp) == MEM_REF
357 	   || TREE_CODE (exp) == TARGET_MEM_REF)
358     {
359       tree addr = TREE_OPERAND (exp, 0);
360       unsigned ptr_align;
361       unsigned HOST_WIDE_INT ptr_bitpos;
362       unsigned HOST_WIDE_INT ptr_bitmask = ~0;
363 
364       /* If the address is explicitely aligned, handle that.  */
365       if (TREE_CODE (addr) == BIT_AND_EXPR
366 	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
367 	{
368 	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
369 	  ptr_bitmask *= BITS_PER_UNIT;
370 	  align = ptr_bitmask & -ptr_bitmask;
371 	  addr = TREE_OPERAND (addr, 0);
372 	}
373 
374       known_alignment
375 	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
376       align = MAX (ptr_align, align);
377 
378       /* Re-apply explicit alignment to the bitpos.  */
379       ptr_bitpos &= ptr_bitmask;
380 
381       /* The alignment of the pointer operand in a TARGET_MEM_REF
382 	 has to take the variable offset parts into account.  */
383       if (TREE_CODE (exp) == TARGET_MEM_REF)
384 	{
385 	  if (TMR_INDEX (exp))
386 	    {
387 	      unsigned HOST_WIDE_INT step = 1;
388 	      if (TMR_STEP (exp))
389 		step = TREE_INT_CST_LOW (TMR_STEP (exp));
390 	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
391 	    }
392 	  if (TMR_INDEX2 (exp))
393 	    align = BITS_PER_UNIT;
394 	  known_alignment = false;
395 	}
396 
397       /* When EXP is an actual memory reference then we can use
398 	 TYPE_ALIGN of a pointer indirection to derive alignment.
399 	 Do so only if get_pointer_alignment_1 did not reveal absolute
400 	 alignment knowledge and if using that alignment would
401 	 improve the situation.  */
402       if (!addr_p && !known_alignment
403 	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
404 	align = TYPE_ALIGN (TREE_TYPE (exp));
405       else
406 	{
407 	  /* Else adjust bitpos accordingly.  */
408 	  bitpos += ptr_bitpos;
409 	  if (TREE_CODE (exp) == MEM_REF
410 	      || TREE_CODE (exp) == TARGET_MEM_REF)
411 	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
412 	}
413     }
414   else if (TREE_CODE (exp) == STRING_CST)
415     {
416       /* STRING_CST are the only constant objects we allow to be not
417          wrapped inside a CONST_DECL.  */
418       align = TYPE_ALIGN (TREE_TYPE (exp));
419 #ifdef CONSTANT_ALIGNMENT
420       if (CONSTANT_CLASS_P (exp))
421 	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
422 #endif
423       known_alignment = true;
424     }
425 
426   /* If there is a non-constant offset part extract the maximum
427      alignment that can prevail.  */
428   if (offset)
429     {
430       unsigned int trailing_zeros = tree_ctz (offset);
431       if (trailing_zeros < HOST_BITS_PER_INT)
432 	{
433 	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
434 	  if (inner)
435 	    align = MIN (align, inner);
436 	}
437     }
438 
439   *alignp = align;
440   *bitposp = bitpos & (*alignp - 1);
441   return known_alignment;
442 }
443 
444 /* For a memory reference expression EXP compute values M and N such that M
445    divides (&EXP - N) and such that N < M.  If these numbers can be determined,
446    store M in alignp and N in *BITPOSP and return true.  Otherwise return false
447    and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */
448 
449 bool
450 get_object_alignment_1 (tree exp, unsigned int *alignp,
451 			unsigned HOST_WIDE_INT *bitposp)
452 {
453   return get_object_alignment_2 (exp, alignp, bitposp, false);
454 }
455 
456 /* Return the alignment in bits of EXP, an object.  */
457 
458 unsigned int
459 get_object_alignment (tree exp)
460 {
461   unsigned HOST_WIDE_INT bitpos = 0;
462   unsigned int align;
463 
464   get_object_alignment_1 (exp, &align, &bitpos);
465 
466   /* align and bitpos now specify known low bits of the pointer.
467      ptr & (align - 1) == bitpos.  */
468 
469   if (bitpos != 0)
470     align = (bitpos & -bitpos);
471   return align;
472 }
473 
474 /* For a pointer valued expression EXP compute values M and N such that M
475    divides (EXP - N) and such that N < M.  If these numbers can be determined,
476    store M in alignp and N in *BITPOSP and return true.  Return false if
477    the results are just a conservative approximation.
478 
479    If EXP is not a pointer, false is returned too.  */
480 
481 bool
482 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
483 			 unsigned HOST_WIDE_INT *bitposp)
484 {
485   STRIP_NOPS (exp);
486 
487   if (TREE_CODE (exp) == ADDR_EXPR)
488     return get_object_alignment_2 (TREE_OPERAND (exp, 0),
489 				   alignp, bitposp, true);
490   else if (TREE_CODE (exp) == SSA_NAME
491 	   && POINTER_TYPE_P (TREE_TYPE (exp)))
492     {
493       unsigned int ptr_align, ptr_misalign;
494       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
495 
496       if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
497 	{
498 	  *bitposp = ptr_misalign * BITS_PER_UNIT;
499 	  *alignp = ptr_align * BITS_PER_UNIT;
500 	  /* Make sure to return a sensible alignment when the multiplication
501 	     by BITS_PER_UNIT overflowed.  */
502 	  if (*alignp == 0)
503 	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
504 	  /* We cannot really tell whether this result is an approximation.  */
505 	  return false;
506 	}
507       else
508 	{
509 	  *bitposp = 0;
510 	  *alignp = BITS_PER_UNIT;
511 	  return false;
512 	}
513     }
514   else if (TREE_CODE (exp) == INTEGER_CST)
515     {
516       *alignp = BIGGEST_ALIGNMENT;
517       *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
518 		  & (BIGGEST_ALIGNMENT - 1));
519       return true;
520     }
521 
522   *bitposp = 0;
523   *alignp = BITS_PER_UNIT;
524   return false;
525 }
526 
527 /* Return the alignment in bits of EXP, a pointer valued expression.
528    The alignment returned is, by default, the alignment of the thing that
529    EXP points to.  If it is not a POINTER_TYPE, 0 is returned.
530 
531    Otherwise, look at the expression to see if we can do better, i.e., if the
532    expression is actually pointing at an object whose alignment is tighter.  */
533 
534 unsigned int
535 get_pointer_alignment (tree exp)
536 {
537   unsigned HOST_WIDE_INT bitpos = 0;
538   unsigned int align;
539 
540   get_pointer_alignment_1 (exp, &align, &bitpos);
541 
542   /* align and bitpos now specify known low bits of the pointer.
543      ptr & (align - 1) == bitpos.  */
544 
545   if (bitpos != 0)
546     align = (bitpos & -bitpos);
547 
548   return align;
549 }
550 
551 /* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
552    way, because it could contain a zero byte in the middle.
553    TREE_STRING_LENGTH is the size of the character array, not the string.
554 
555    ONLY_VALUE should be nonzero if the result is not going to be emitted
556    into the instruction stream and zero if it is going to be expanded.
557    E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
558    is returned, otherwise NULL, since
559    len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
560    evaluate the side-effects.
561 
562    If ONLY_VALUE is two then we do not emit warnings about out-of-bound
563    accesses.  Note that this implies the result is not going to be emitted
564    into the instruction stream.
565 
566    The value returned is of type `ssizetype'.
567 
568    Unfortunately, string_constant can't access the values of const char
569    arrays with initializers, so neither can we do so here.  */
570 
571 tree
572 c_strlen (tree src, int only_value)
573 {
574   tree offset_node;
575   HOST_WIDE_INT offset;
576   int max;
577   const char *ptr;
578   location_t loc;
579 
580   STRIP_NOPS (src);
581   if (TREE_CODE (src) == COND_EXPR
582       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
583     {
584       tree len1, len2;
585 
586       len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
587       len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
588       if (tree_int_cst_equal (len1, len2))
589 	return len1;
590     }
591 
592   if (TREE_CODE (src) == COMPOUND_EXPR
593       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
594     return c_strlen (TREE_OPERAND (src, 1), only_value);
595 
596   loc = EXPR_LOC_OR_LOC (src, input_location);
597 
598   src = string_constant (src, &offset_node);
599   if (src == 0)
600     return NULL_TREE;
601 
602   max = TREE_STRING_LENGTH (src) - 1;
603   ptr = TREE_STRING_POINTER (src);
604 
605   if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
606     {
607       /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
608 	 compute the offset to the following null if we don't know where to
609 	 start searching for it.  */
610       int i;
611 
612       for (i = 0; i < max; i++)
613 	if (ptr[i] == 0)
614 	  return NULL_TREE;
615 
616       /* We don't know the starting offset, but we do know that the string
617 	 has no internal zero bytes.  We can assume that the offset falls
618 	 within the bounds of the string; otherwise, the programmer deserves
619 	 what he gets.  Subtract the offset from the length of the string,
620 	 and return that.  This would perhaps not be valid if we were dealing
621 	 with named arrays in addition to literal string constants.  */
622 
623       return size_diffop_loc (loc, size_int (max), offset_node);
624     }
625 
626   /* We have a known offset into the string.  Start searching there for
627      a null character if we can represent it as a single HOST_WIDE_INT.  */
628   if (offset_node == 0)
629     offset = 0;
630   else if (! tree_fits_shwi_p (offset_node))
631     offset = -1;
632   else
633     offset = tree_to_shwi (offset_node);
634 
635   /* If the offset is known to be out of bounds, warn, and call strlen at
636      runtime.  */
637   if (offset < 0 || offset > max)
638     {
639      /* Suppress multiple warnings for propagated constant strings.  */
640       if (only_value != 2
641 	  && !TREE_NO_WARNING (src))
642         {
643           warning_at (loc, 0, "offset outside bounds of constant string");
644           TREE_NO_WARNING (src) = 1;
645         }
646       return NULL_TREE;
647     }
648 
649   /* Use strlen to search for the first zero byte.  Since any strings
650      constructed with build_string will have nulls appended, we win even
651      if we get handed something like (char[4])"abcd".
652 
653      Since OFFSET is our starting index into the string, no further
654      calculation is needed.  */
655   return ssize_int (strlen (ptr + offset));
656 }
657 
658 /* Return a char pointer for a C string if it is a string constant
659    or sum of string constant and integer constant.  */
660 
661 const char *
662 c_getstr (tree src)
663 {
664   tree offset_node;
665 
666   src = string_constant (src, &offset_node);
667   if (src == 0)
668     return 0;
669 
670   if (offset_node == 0)
671     return TREE_STRING_POINTER (src);
672   else if (!tree_fits_uhwi_p (offset_node)
673 	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
674     return 0;
675 
676   return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
677 }
678 
679 /* Return a constant integer corresponding to target reading
680    GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
681 
682 static rtx
683 c_readstr (const char *str, machine_mode mode)
684 {
685   HOST_WIDE_INT ch;
686   unsigned int i, j;
687   HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
688 
689   gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
690   unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
691     / HOST_BITS_PER_WIDE_INT;
692 
693   gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
694   for (i = 0; i < len; i++)
695     tmp[i] = 0;
696 
697   ch = 1;
698   for (i = 0; i < GET_MODE_SIZE (mode); i++)
699     {
700       j = i;
701       if (WORDS_BIG_ENDIAN)
702 	j = GET_MODE_SIZE (mode) - i - 1;
703       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
704 	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
705 	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
706       j *= BITS_PER_UNIT;
707 
708       if (ch)
709 	ch = (unsigned char) str[i];
710       tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
711     }
712 
713   wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
714   return immed_wide_int_const (c, mode);
715 }
716 
717 /* Cast a target constant CST to target CHAR and if that value fits into
718    host char type, return zero and put that value into variable pointed to by
719    P.  */
720 
721 static int
722 target_char_cast (tree cst, char *p)
723 {
724   unsigned HOST_WIDE_INT val, hostval;
725 
726   if (TREE_CODE (cst) != INTEGER_CST
727       || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
728     return 1;
729 
730   /* Do not care if it fits or not right here.  */
731   val = TREE_INT_CST_LOW (cst);
732 
733   if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
734     val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
735 
736   hostval = val;
737   if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
738     hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
739 
740   if (val != hostval)
741     return 1;
742 
743   *p = hostval;
744   return 0;
745 }
746 
747 /* Similar to save_expr, but assumes that arbitrary code is not executed
748    in between the multiple evaluations.  In particular, we assume that a
749    non-addressable local variable will not be modified.  */
750 
751 static tree
752 builtin_save_expr (tree exp)
753 {
754   if (TREE_CODE (exp) == SSA_NAME
755       || (TREE_ADDRESSABLE (exp) == 0
756 	  && (TREE_CODE (exp) == PARM_DECL
757 	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
758     return exp;
759 
760   return save_expr (exp);
761 }
762 
763 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
764    times to get the address of either a higher stack frame, or a return
765    address located within it (depending on FNDECL_CODE).  */
766 
767 static rtx
768 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
769 {
770   int i;
771 
772 #ifdef INITIAL_FRAME_ADDRESS_RTX
773   rtx tem = INITIAL_FRAME_ADDRESS_RTX;
774 #else
775   rtx tem;
776 
777   /* For a zero count with __builtin_return_address, we don't care what
778      frame address we return, because target-specific definitions will
779      override us.  Therefore frame pointer elimination is OK, and using
780      the soft frame pointer is OK.
781 
782      For a nonzero count, or a zero count with __builtin_frame_address,
783      we require a stable offset from the current frame pointer to the
784      previous one, so we must use the hard frame pointer, and
785      we must disable frame pointer elimination.  */
786   if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
787     tem = frame_pointer_rtx;
788   else
789     {
790       tem = hard_frame_pointer_rtx;
791 
792       /* Tell reload not to eliminate the frame pointer.  */
793       crtl->accesses_prior_frames = 1;
794     }
795 #endif
796 
797   /* Some machines need special handling before we can access
798      arbitrary frames.  For example, on the SPARC, we must first flush
799      all register windows to the stack.  */
800 #ifdef SETUP_FRAME_ADDRESSES
801   if (count > 0)
802     SETUP_FRAME_ADDRESSES ();
803 #endif
804 
805   /* On the SPARC, the return address is not in the frame, it is in a
806      register.  There is no way to access it off of the current frame
807      pointer, but it can be accessed off the previous frame pointer by
808      reading the value from the register window save area.  */
809   if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
810     count--;
811 
812   /* Scan back COUNT frames to the specified frame.  */
813   for (i = 0; i < count; i++)
814     {
815       /* Assume the dynamic chain pointer is in the word that the
816 	 frame address points to, unless otherwise specified.  */
817 #ifdef DYNAMIC_CHAIN_ADDRESS
818       tem = DYNAMIC_CHAIN_ADDRESS (tem);
819 #endif
820       tem = memory_address (Pmode, tem);
821       tem = gen_frame_mem (Pmode, tem);
822       tem = copy_to_reg (tem);
823     }
824 
825   /* For __builtin_frame_address, return what we've got.  But, on
826      the SPARC for example, we may have to add a bias.  */
827   if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
828 #ifdef FRAME_ADDR_RTX
829     return FRAME_ADDR_RTX (tem);
830 #else
831     return tem;
832 #endif
833 
834   /* For __builtin_return_address, get the return address from that frame.  */
835 #ifdef RETURN_ADDR_RTX
836   tem = RETURN_ADDR_RTX (count, tem);
837 #else
838   tem = memory_address (Pmode,
839 			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
840   tem = gen_frame_mem (Pmode, tem);
841 #endif
842   return tem;
843 }
844 
845 /* Alias set used for setjmp buffer.  */
846 static alias_set_type setjmp_alias_set = -1;
847 
848 /* Construct the leading half of a __builtin_setjmp call.  Control will
849    return to RECEIVER_LABEL.  This is also called directly by the SJLJ
850    exception handling code.  */
851 
852 void
853 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
854 {
855   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
856   rtx stack_save;
857   rtx mem;
858 
859   if (setjmp_alias_set == -1)
860     setjmp_alias_set = new_alias_set ();
861 
862   buf_addr = convert_memory_address (Pmode, buf_addr);
863 
864   buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
865 
866   /* We store the frame pointer and the address of receiver_label in
867      the buffer and use the rest of it for the stack save area, which
868      is machine-dependent.  */
869 
870   mem = gen_rtx_MEM (Pmode, buf_addr);
871   set_mem_alias_set (mem, setjmp_alias_set);
872   emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
873 
874   mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
875 					   GET_MODE_SIZE (Pmode))),
876   set_mem_alias_set (mem, setjmp_alias_set);
877 
878   emit_move_insn (validize_mem (mem),
879 		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
880 
881   stack_save = gen_rtx_MEM (sa_mode,
882 			    plus_constant (Pmode, buf_addr,
883 					   2 * GET_MODE_SIZE (Pmode)));
884   set_mem_alias_set (stack_save, setjmp_alias_set);
885   emit_stack_save (SAVE_NONLOCAL, &stack_save);
886 
887   /* If there is further processing to do, do it.  */
888 #ifdef HAVE_builtin_setjmp_setup
889   if (HAVE_builtin_setjmp_setup)
890     emit_insn (gen_builtin_setjmp_setup (buf_addr));
891 #endif
892 
893   /* We have a nonlocal label.   */
894   cfun->has_nonlocal_label = 1;
895 }
896 
897 /* Construct the trailing part of a __builtin_setjmp call.  This is
898    also called directly by the SJLJ exception handling code.
899    If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */
900 
901 void
902 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
903 {
904   rtx chain;
905 
906   /* Mark the FP as used when we get here, so we have to make sure it's
907      marked as used by this function.  */
908   emit_use (hard_frame_pointer_rtx);
909 
910   /* Mark the static chain as clobbered here so life information
911      doesn't get messed up for it.  */
912   chain = targetm.calls.static_chain (current_function_decl, true);
913   if (chain && REG_P (chain))
914     emit_clobber (chain);
915 
916   /* Now put in the code to restore the frame pointer, and argument
917      pointer, if needed.  */
918 #ifdef HAVE_nonlocal_goto
919   if (! HAVE_nonlocal_goto)
920 #endif
921     {
922       /* First adjust our frame pointer to its actual value.  It was
923 	 previously set to the start of the virtual area corresponding to
924 	 the stacked variables when we branched here and now needs to be
925 	 adjusted to the actual hardware fp value.
926 
927 	 Assignments to virtual registers are converted by
928 	 instantiate_virtual_regs into the corresponding assignment
929 	 to the underlying register (fp in this case) that makes
930 	 the original assignment true.
931 	 So the following insn will actually be decrementing fp by
932 	 STARTING_FRAME_OFFSET.  */
933       emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
934 
935       /* Restoring the frame pointer also modifies the hard frame pointer.
936 	 Mark it used (so that the previous assignment remains live once
937 	 the frame pointer is eliminated) and clobbered (to represent the
938 	 implicit update from the assignment).  */
939       emit_use (hard_frame_pointer_rtx);
940       emit_clobber (hard_frame_pointer_rtx);
941     }
942 
943 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
944   if (fixed_regs[ARG_POINTER_REGNUM])
945     {
946 #ifdef ELIMINABLE_REGS
947       /* If the argument pointer can be eliminated in favor of the
948 	 frame pointer, we don't need to restore it.  We assume here
949 	 that if such an elimination is present, it can always be used.
950 	 This is the case on all known machines; if we don't make this
951 	 assumption, we do unnecessary saving on many machines.  */
952       size_t i;
953       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
954 
955       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
956 	if (elim_regs[i].from == ARG_POINTER_REGNUM
957 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
958 	  break;
959 
960       if (i == ARRAY_SIZE (elim_regs))
961 #endif
962 	{
963 	  /* Now restore our arg pointer from the address at which it
964 	     was saved in our stack frame.  */
965 	  emit_move_insn (crtl->args.internal_arg_pointer,
966 			  copy_to_reg (get_arg_pointer_save_area ()));
967 	}
968     }
969 #endif
970 
971 #ifdef HAVE_builtin_setjmp_receiver
972   if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
973     emit_insn (gen_builtin_setjmp_receiver (receiver_label));
974   else
975 #endif
976 #ifdef HAVE_nonlocal_goto_receiver
977     if (HAVE_nonlocal_goto_receiver)
978       emit_insn (gen_nonlocal_goto_receiver ());
979     else
980 #endif
981       { /* Nothing */ }
982 
983   /* We must not allow the code we just generated to be reordered by
984      scheduling.  Specifically, the update of the frame pointer must
985      happen immediately, not later.  */
986   emit_insn (gen_blockage ());
987 }
988 
989 /* __builtin_longjmp is passed a pointer to an array of five words (not
990    all will be used on all machines).  It operates similarly to the C
991    library function of the same name, but is more efficient.  Much of
992    the code below is copied from the handling of non-local gotos.  */
993 
994 static void
995 expand_builtin_longjmp (rtx buf_addr, rtx value)
996 {
997   rtx fp, lab, stack;
998   rtx_insn *insn, *last;
999   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1000 
1001   /* DRAP is needed for stack realign if longjmp is expanded to current
1002      function  */
1003   if (SUPPORTS_STACK_ALIGNMENT)
1004     crtl->need_drap = true;
1005 
1006   if (setjmp_alias_set == -1)
1007     setjmp_alias_set = new_alias_set ();
1008 
1009   buf_addr = convert_memory_address (Pmode, buf_addr);
1010 
1011   buf_addr = force_reg (Pmode, buf_addr);
1012 
1013   /* We require that the user must pass a second argument of 1, because
1014      that is what builtin_setjmp will return.  */
1015   gcc_assert (value == const1_rtx);
1016 
1017   last = get_last_insn ();
1018 #ifdef HAVE_builtin_longjmp
1019   if (HAVE_builtin_longjmp)
1020     emit_insn (gen_builtin_longjmp (buf_addr));
1021   else
1022 #endif
1023     {
1024       fp = gen_rtx_MEM (Pmode, buf_addr);
1025       lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1026 					       GET_MODE_SIZE (Pmode)));
1027 
1028       stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1029 						   2 * GET_MODE_SIZE (Pmode)));
1030       set_mem_alias_set (fp, setjmp_alias_set);
1031       set_mem_alias_set (lab, setjmp_alias_set);
1032       set_mem_alias_set (stack, setjmp_alias_set);
1033 
1034       /* Pick up FP, label, and SP from the block and jump.  This code is
1035 	 from expand_goto in stmt.c; see there for detailed comments.  */
1036 #ifdef HAVE_nonlocal_goto
1037       if (HAVE_nonlocal_goto)
1038 	/* We have to pass a value to the nonlocal_goto pattern that will
1039 	   get copied into the static_chain pointer, but it does not matter
1040 	   what that value is, because builtin_setjmp does not use it.  */
1041 	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1042       else
1043 #endif
1044 	{
1045 	  lab = copy_to_reg (lab);
1046 
1047 	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1048 	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1049 
1050 	  emit_move_insn (hard_frame_pointer_rtx, fp);
1051 	  emit_stack_restore (SAVE_NONLOCAL, stack);
1052 
1053 	  emit_use (hard_frame_pointer_rtx);
1054 	  emit_use (stack_pointer_rtx);
1055 	  emit_indirect_jump (lab);
1056 	}
1057     }
1058 
1059   /* Search backwards and mark the jump insn as a non-local goto.
1060      Note that this precludes the use of __builtin_longjmp to a
1061      __builtin_setjmp target in the same function.  However, we've
1062      already cautioned the user that these functions are for
1063      internal exception handling use only.  */
1064   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1065     {
1066       gcc_assert (insn != last);
1067 
1068       if (JUMP_P (insn))
1069 	{
1070 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1071 	  break;
1072 	}
1073       else if (CALL_P (insn))
1074 	break;
1075     }
1076 }
1077 
1078 static inline bool
1079 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1080 {
1081   return (iter->i < iter->n);
1082 }
1083 
1084 /* This function validates the types of a function call argument list
1085    against a specified list of tree_codes.  If the last specifier is a 0,
1086    that represents an ellipses, otherwise the last specifier must be a
1087    VOID_TYPE.  */
1088 
1089 static bool
1090 validate_arglist (const_tree callexpr, ...)
1091 {
1092   enum tree_code code;
1093   bool res = 0;
1094   va_list ap;
1095   const_call_expr_arg_iterator iter;
1096   const_tree arg;
1097 
1098   va_start (ap, callexpr);
1099   init_const_call_expr_arg_iterator (callexpr, &iter);
1100 
1101   do
1102     {
1103       code = (enum tree_code) va_arg (ap, int);
1104       switch (code)
1105 	{
1106 	case 0:
1107 	  /* This signifies an ellipses, any further arguments are all ok.  */
1108 	  res = true;
1109 	  goto end;
1110 	case VOID_TYPE:
1111 	  /* This signifies an endlink, if no arguments remain, return
1112 	     true, otherwise return false.  */
1113 	  res = !more_const_call_expr_args_p (&iter);
1114 	  goto end;
1115 	default:
1116 	  /* If no parameters remain or the parameter's code does not
1117 	     match the specified code, return false.  Otherwise continue
1118 	     checking any remaining arguments.  */
1119 	  arg = next_const_call_expr_arg (&iter);
1120 	  if (!validate_arg (arg, code))
1121 	    goto end;
1122 	  break;
1123 	}
1124     }
1125   while (1);
1126 
1127   /* We need gotos here since we can only have one VA_CLOSE in a
1128      function.  */
1129  end: ;
1130   va_end (ap);
1131 
1132   return res;
1133 }
1134 
1135 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1136    and the address of the save area.  */
1137 
1138 static rtx
1139 expand_builtin_nonlocal_goto (tree exp)
1140 {
1141   tree t_label, t_save_area;
1142   rtx r_label, r_save_area, r_fp, r_sp;
1143   rtx_insn *insn;
1144 
1145   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1146     return NULL_RTX;
1147 
1148   t_label = CALL_EXPR_ARG (exp, 0);
1149   t_save_area = CALL_EXPR_ARG (exp, 1);
1150 
1151   r_label = expand_normal (t_label);
1152   r_label = convert_memory_address (Pmode, r_label);
1153   r_save_area = expand_normal (t_save_area);
1154   r_save_area = convert_memory_address (Pmode, r_save_area);
1155   /* Copy the address of the save location to a register just in case it was
1156      based on the frame pointer.   */
1157   r_save_area = copy_to_reg (r_save_area);
1158   r_fp = gen_rtx_MEM (Pmode, r_save_area);
1159   r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1160 		      plus_constant (Pmode, r_save_area,
1161 				     GET_MODE_SIZE (Pmode)));
1162 
1163   crtl->has_nonlocal_goto = 1;
1164 
1165 #ifdef HAVE_nonlocal_goto
1166   /* ??? We no longer need to pass the static chain value, afaik.  */
1167   if (HAVE_nonlocal_goto)
1168     emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1169   else
1170 #endif
1171     {
1172       r_label = copy_to_reg (r_label);
1173 
1174       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1175       emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1176 
1177       /* Restore frame pointer for containing function.  */
1178       emit_move_insn (hard_frame_pointer_rtx, r_fp);
1179       emit_stack_restore (SAVE_NONLOCAL, r_sp);
1180 
1181       /* USE of hard_frame_pointer_rtx added for consistency;
1182 	 not clear if really needed.  */
1183       emit_use (hard_frame_pointer_rtx);
1184       emit_use (stack_pointer_rtx);
1185 
1186       /* If the architecture is using a GP register, we must
1187 	 conservatively assume that the target function makes use of it.
1188 	 The prologue of functions with nonlocal gotos must therefore
1189 	 initialize the GP register to the appropriate value, and we
1190 	 must then make sure that this value is live at the point
1191 	 of the jump.  (Note that this doesn't necessarily apply
1192 	 to targets with a nonlocal_goto pattern; they are free
1193 	 to implement it in their own way.  Note also that this is
1194 	 a no-op if the GP register is a global invariant.)  */
1195       if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1196 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1197 	emit_use (pic_offset_table_rtx);
1198 
1199       emit_indirect_jump (r_label);
1200     }
1201 
1202   /* Search backwards to the jump insn and mark it as a
1203      non-local goto.  */
1204   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1205     {
1206       if (JUMP_P (insn))
1207 	{
1208 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1209 	  break;
1210 	}
1211       else if (CALL_P (insn))
1212 	break;
1213     }
1214 
1215   return const0_rtx;
1216 }
1217 
1218 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1219    (not all will be used on all machines) that was passed to __builtin_setjmp.
1220    It updates the stack pointer in that block to correspond to the current
1221    stack pointer.  */
1222 
1223 static void
1224 expand_builtin_update_setjmp_buf (rtx buf_addr)
1225 {
1226   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1227   rtx stack_save
1228     = gen_rtx_MEM (sa_mode,
1229 		   memory_address
1230 		   (sa_mode,
1231 		    plus_constant (Pmode, buf_addr,
1232 				   2 * GET_MODE_SIZE (Pmode))));
1233 
1234   emit_stack_save (SAVE_NONLOCAL, &stack_save);
1235 }
1236 
1237 /* Expand a call to __builtin_prefetch.  For a target that does not support
1238    data prefetch, evaluate the memory address argument in case it has side
1239    effects.  */
1240 
1241 static void
1242 expand_builtin_prefetch (tree exp)
1243 {
1244   tree arg0, arg1, arg2;
1245   int nargs;
1246   rtx op0, op1, op2;
1247 
1248   if (!validate_arglist (exp, POINTER_TYPE, 0))
1249     return;
1250 
1251   arg0 = CALL_EXPR_ARG (exp, 0);
1252 
1253   /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1254      zero (read) and argument 2 (locality) defaults to 3 (high degree of
1255      locality).  */
1256   nargs = call_expr_nargs (exp);
1257   if (nargs > 1)
1258     arg1 = CALL_EXPR_ARG (exp, 1);
1259   else
1260     arg1 = integer_zero_node;
1261   if (nargs > 2)
1262     arg2 = CALL_EXPR_ARG (exp, 2);
1263   else
1264     arg2 = integer_three_node;
1265 
1266   /* Argument 0 is an address.  */
1267   op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1268 
1269   /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1270   if (TREE_CODE (arg1) != INTEGER_CST)
1271     {
1272       error ("second argument to %<__builtin_prefetch%> must be a constant");
1273       arg1 = integer_zero_node;
1274     }
1275   op1 = expand_normal (arg1);
1276   /* Argument 1 must be either zero or one.  */
1277   if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1278     {
1279       warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1280 	       " using zero");
1281       op1 = const0_rtx;
1282     }
1283 
1284   /* Argument 2 (locality) must be a compile-time constant int.  */
1285   if (TREE_CODE (arg2) != INTEGER_CST)
1286     {
1287       error ("third argument to %<__builtin_prefetch%> must be a constant");
1288       arg2 = integer_zero_node;
1289     }
1290   op2 = expand_normal (arg2);
1291   /* Argument 2 must be 0, 1, 2, or 3.  */
1292   if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1293     {
1294       warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1295       op2 = const0_rtx;
1296     }
1297 
1298 #ifdef HAVE_prefetch
1299   if (HAVE_prefetch)
1300     {
1301       struct expand_operand ops[3];
1302 
1303       create_address_operand (&ops[0], op0);
1304       create_integer_operand (&ops[1], INTVAL (op1));
1305       create_integer_operand (&ops[2], INTVAL (op2));
1306       if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1307 	return;
1308     }
1309 #endif
1310 
1311   /* Don't do anything with direct references to volatile memory, but
1312      generate code to handle other side effects.  */
1313   if (!MEM_P (op0) && side_effects_p (op0))
1314     emit_insn (op0);
1315 }
1316 
1317 /* Get a MEM rtx for expression EXP which is the address of an operand
1318    to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1319    the maximum length of the block of memory that might be accessed or
1320    NULL if unknown.  */
1321 
1322 static rtx
1323 get_memory_rtx (tree exp, tree len)
1324 {
1325   tree orig_exp = exp;
1326   rtx addr, mem;
1327 
1328   /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1329      from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1330   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1331     exp = TREE_OPERAND (exp, 0);
1332 
1333   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1334   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1335 
1336   /* Get an expression we can use to find the attributes to assign to MEM.
1337      First remove any nops.  */
1338   while (CONVERT_EXPR_P (exp)
1339 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1340     exp = TREE_OPERAND (exp, 0);
1341 
1342   /* Build a MEM_REF representing the whole accessed area as a byte blob,
1343      (as builtin stringops may alias with anything).  */
1344   exp = fold_build2 (MEM_REF,
1345 		     build_array_type (char_type_node,
1346 				       build_range_type (sizetype,
1347 							 size_one_node, len)),
1348 		     exp, build_int_cst (ptr_type_node, 0));
1349 
1350   /* If the MEM_REF has no acceptable address, try to get the base object
1351      from the original address we got, and build an all-aliasing
1352      unknown-sized access to that one.  */
1353   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1354     set_mem_attributes (mem, exp, 0);
1355   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1356 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1357 						     0))))
1358     {
1359       exp = build_fold_addr_expr (exp);
1360       exp = fold_build2 (MEM_REF,
1361 			 build_array_type (char_type_node,
1362 					   build_range_type (sizetype,
1363 							     size_zero_node,
1364 							     NULL)),
1365 			 exp, build_int_cst (ptr_type_node, 0));
1366       set_mem_attributes (mem, exp, 0);
1367     }
1368   set_mem_alias_set (mem, 0);
1369   return mem;
1370 }
1371 
1372 /* Built-in functions to perform an untyped call and return.  */
1373 
1374 #define apply_args_mode \
1375   (this_target_builtins->x_apply_args_mode)
1376 #define apply_result_mode \
1377   (this_target_builtins->x_apply_result_mode)
1378 
1379 /* Return the size required for the block returned by __builtin_apply_args,
1380    and initialize apply_args_mode.  */
1381 
1382 static int
1383 apply_args_size (void)
1384 {
1385   static int size = -1;
1386   int align;
1387   unsigned int regno;
1388   machine_mode mode;
1389 
1390   /* The values computed by this function never change.  */
1391   if (size < 0)
1392     {
1393       /* The first value is the incoming arg-pointer.  */
1394       size = GET_MODE_SIZE (Pmode);
1395 
1396       /* The second value is the structure value address unless this is
1397 	 passed as an "invisible" first argument.  */
1398       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1399 	size += GET_MODE_SIZE (Pmode);
1400 
1401       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 	if (FUNCTION_ARG_REGNO_P (regno))
1403 	  {
1404 	    mode = targetm.calls.get_raw_arg_mode (regno);
1405 
1406 	    gcc_assert (mode != VOIDmode);
1407 
1408 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1409 	    if (size % align != 0)
1410 	      size = CEIL (size, align) * align;
1411 	    size += GET_MODE_SIZE (mode);
1412 	    apply_args_mode[regno] = mode;
1413 	  }
1414 	else
1415 	  {
1416 	    apply_args_mode[regno] = VOIDmode;
1417 	  }
1418     }
1419   return size;
1420 }
1421 
1422 /* Return the size required for the block returned by __builtin_apply,
1423    and initialize apply_result_mode.  */
1424 
1425 static int
1426 apply_result_size (void)
1427 {
1428   static int size = -1;
1429   int align, regno;
1430   machine_mode mode;
1431 
1432   /* The values computed by this function never change.  */
1433   if (size < 0)
1434     {
1435       size = 0;
1436 
1437       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1438 	if (targetm.calls.function_value_regno_p (regno))
1439 	  {
1440 	    mode = targetm.calls.get_raw_result_mode (regno);
1441 
1442 	    gcc_assert (mode != VOIDmode);
1443 
1444 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1445 	    if (size % align != 0)
1446 	      size = CEIL (size, align) * align;
1447 	    size += GET_MODE_SIZE (mode);
1448 	    apply_result_mode[regno] = mode;
1449 	  }
1450 	else
1451 	  apply_result_mode[regno] = VOIDmode;
1452 
1453       /* Allow targets that use untyped_call and untyped_return to override
1454 	 the size so that machine-specific information can be stored here.  */
1455 #ifdef APPLY_RESULT_SIZE
1456       size = APPLY_RESULT_SIZE;
1457 #endif
1458     }
1459   return size;
1460 }
1461 
1462 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1463 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1464    the result block is used to save the values; otherwise it is used to
1465    restore the values.  */
1466 
1467 static rtx
1468 result_vector (int savep, rtx result)
1469 {
1470   int regno, size, align, nelts;
1471   machine_mode mode;
1472   rtx reg, mem;
1473   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1474 
1475   size = nelts = 0;
1476   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1477     if ((mode = apply_result_mode[regno]) != VOIDmode)
1478       {
1479 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1480 	if (size % align != 0)
1481 	  size = CEIL (size, align) * align;
1482 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1483 	mem = adjust_address (result, mode, size);
1484 	savevec[nelts++] = (savep
1485 			    ? gen_rtx_SET (VOIDmode, mem, reg)
1486 			    : gen_rtx_SET (VOIDmode, reg, mem));
1487 	size += GET_MODE_SIZE (mode);
1488       }
1489   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1490 }
1491 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1492 
1493 /* Save the state required to perform an untyped call with the same
1494    arguments as were passed to the current function.  */
1495 
1496 static rtx
1497 expand_builtin_apply_args_1 (void)
1498 {
1499   rtx registers, tem;
1500   int size, align, regno;
1501   machine_mode mode;
1502   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1503 
1504   /* Create a block where the arg-pointer, structure value address,
1505      and argument registers can be saved.  */
1506   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1507 
1508   /* Walk past the arg-pointer and structure value address.  */
1509   size = GET_MODE_SIZE (Pmode);
1510   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1511     size += GET_MODE_SIZE (Pmode);
1512 
1513   /* Save each register used in calling a function to the block.  */
1514   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1515     if ((mode = apply_args_mode[regno]) != VOIDmode)
1516       {
1517 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1518 	if (size % align != 0)
1519 	  size = CEIL (size, align) * align;
1520 
1521 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1522 
1523 	emit_move_insn (adjust_address (registers, mode, size), tem);
1524 	size += GET_MODE_SIZE (mode);
1525       }
1526 
1527   /* Save the arg pointer to the block.  */
1528   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1529 #ifdef STACK_GROWS_DOWNWARD
1530   /* We need the pointer as the caller actually passed them to us, not
1531      as we might have pretended they were passed.  Make sure it's a valid
1532      operand, as emit_move_insn isn't expected to handle a PLUS.  */
1533   tem
1534     = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1535 		     NULL_RTX);
1536 #endif
1537   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1538 
1539   size = GET_MODE_SIZE (Pmode);
1540 
1541   /* Save the structure value address unless this is passed as an
1542      "invisible" first argument.  */
1543   if (struct_incoming_value)
1544     {
1545       emit_move_insn (adjust_address (registers, Pmode, size),
1546 		      copy_to_reg (struct_incoming_value));
1547       size += GET_MODE_SIZE (Pmode);
1548     }
1549 
1550   /* Return the address of the block.  */
1551   return copy_addr_to_reg (XEXP (registers, 0));
1552 }
1553 
1554 /* __builtin_apply_args returns block of memory allocated on
1555    the stack into which is stored the arg pointer, structure
1556    value address, static chain, and all the registers that might
1557    possibly be used in performing a function call.  The code is
1558    moved to the start of the function so the incoming values are
1559    saved.  */
1560 
1561 static rtx
1562 expand_builtin_apply_args (void)
1563 {
1564   /* Don't do __builtin_apply_args more than once in a function.
1565      Save the result of the first call and reuse it.  */
1566   if (apply_args_value != 0)
1567     return apply_args_value;
1568   {
1569     /* When this function is called, it means that registers must be
1570        saved on entry to this function.  So we migrate the
1571        call to the first insn of this function.  */
1572     rtx temp;
1573     rtx seq;
1574 
1575     start_sequence ();
1576     temp = expand_builtin_apply_args_1 ();
1577     seq = get_insns ();
1578     end_sequence ();
1579 
1580     apply_args_value = temp;
1581 
1582     /* Put the insns after the NOTE that starts the function.
1583        If this is inside a start_sequence, make the outer-level insn
1584        chain current, so the code is placed at the start of the
1585        function.  If internal_arg_pointer is a non-virtual pseudo,
1586        it needs to be placed after the function that initializes
1587        that pseudo.  */
1588     push_topmost_sequence ();
1589     if (REG_P (crtl->args.internal_arg_pointer)
1590 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1591       emit_insn_before (seq, parm_birth_insn);
1592     else
1593       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1594     pop_topmost_sequence ();
1595     return temp;
1596   }
1597 }
1598 
1599 /* Perform an untyped call and save the state required to perform an
1600    untyped return of whatever value was returned by the given function.  */
1601 
1602 static rtx
1603 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1604 {
1605   int size, align, regno;
1606   machine_mode mode;
1607   rtx incoming_args, result, reg, dest, src;
1608   rtx_call_insn *call_insn;
1609   rtx old_stack_level = 0;
1610   rtx call_fusage = 0;
1611   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1612 
1613   arguments = convert_memory_address (Pmode, arguments);
1614 
1615   /* Create a block where the return registers can be saved.  */
1616   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1617 
1618   /* Fetch the arg pointer from the ARGUMENTS block.  */
1619   incoming_args = gen_reg_rtx (Pmode);
1620   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1621 #ifndef STACK_GROWS_DOWNWARD
1622   incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1623 				       incoming_args, 0, OPTAB_LIB_WIDEN);
1624 #endif
1625 
1626   /* Push a new argument block and copy the arguments.  Do not allow
1627      the (potential) memcpy call below to interfere with our stack
1628      manipulations.  */
1629   do_pending_stack_adjust ();
1630   NO_DEFER_POP;
1631 
1632   /* Save the stack with nonlocal if available.  */
1633 #ifdef HAVE_save_stack_nonlocal
1634   if (HAVE_save_stack_nonlocal)
1635     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1636   else
1637 #endif
1638     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1639 
1640   /* Allocate a block of memory onto the stack and copy the memory
1641      arguments to the outgoing arguments address.  We can pass TRUE
1642      as the 4th argument because we just saved the stack pointer
1643      and will restore it right after the call.  */
1644   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1645 
1646   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1647      may have already set current_function_calls_alloca to true.
1648      current_function_calls_alloca won't be set if argsize is zero,
1649      so we have to guarantee need_drap is true here.  */
1650   if (SUPPORTS_STACK_ALIGNMENT)
1651     crtl->need_drap = true;
1652 
1653   dest = virtual_outgoing_args_rtx;
1654 #ifndef STACK_GROWS_DOWNWARD
1655   if (CONST_INT_P (argsize))
1656     dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1657   else
1658     dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1659 #endif
1660   dest = gen_rtx_MEM (BLKmode, dest);
1661   set_mem_align (dest, PARM_BOUNDARY);
1662   src = gen_rtx_MEM (BLKmode, incoming_args);
1663   set_mem_align (src, PARM_BOUNDARY);
1664   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1665 
1666   /* Refer to the argument block.  */
1667   apply_args_size ();
1668   arguments = gen_rtx_MEM (BLKmode, arguments);
1669   set_mem_align (arguments, PARM_BOUNDARY);
1670 
1671   /* Walk past the arg-pointer and structure value address.  */
1672   size = GET_MODE_SIZE (Pmode);
1673   if (struct_value)
1674     size += GET_MODE_SIZE (Pmode);
1675 
1676   /* Restore each of the registers previously saved.  Make USE insns
1677      for each of these registers for use in making the call.  */
1678   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1679     if ((mode = apply_args_mode[regno]) != VOIDmode)
1680       {
1681 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1682 	if (size % align != 0)
1683 	  size = CEIL (size, align) * align;
1684 	reg = gen_rtx_REG (mode, regno);
1685 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1686 	use_reg (&call_fusage, reg);
1687 	size += GET_MODE_SIZE (mode);
1688       }
1689 
1690   /* Restore the structure value address unless this is passed as an
1691      "invisible" first argument.  */
1692   size = GET_MODE_SIZE (Pmode);
1693   if (struct_value)
1694     {
1695       rtx value = gen_reg_rtx (Pmode);
1696       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1697       emit_move_insn (struct_value, value);
1698       if (REG_P (struct_value))
1699 	use_reg (&call_fusage, struct_value);
1700       size += GET_MODE_SIZE (Pmode);
1701     }
1702 
1703   /* All arguments and registers used for the call are set up by now!  */
1704   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1705 
1706   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1707      and we don't want to load it into a register as an optimization,
1708      because prepare_call_address already did it if it should be done.  */
1709   if (GET_CODE (function) != SYMBOL_REF)
1710     function = memory_address (FUNCTION_MODE, function);
1711 
1712   /* Generate the actual call instruction and save the return value.  */
1713 #ifdef HAVE_untyped_call
1714   if (HAVE_untyped_call)
1715     emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1716 				      result, result_vector (1, result)));
1717   else
1718 #endif
1719 #ifdef HAVE_call_value
1720   if (HAVE_call_value)
1721     {
1722       rtx valreg = 0;
1723 
1724       /* Locate the unique return register.  It is not possible to
1725 	 express a call that sets more than one return register using
1726 	 call_value; use untyped_call for that.  In fact, untyped_call
1727 	 only needs to save the return registers in the given block.  */
1728       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1729 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1730 	  {
1731 	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */
1732 
1733 	    valreg = gen_rtx_REG (mode, regno);
1734 	  }
1735 
1736       emit_call_insn (GEN_CALL_VALUE (valreg,
1737 				      gen_rtx_MEM (FUNCTION_MODE, function),
1738 				      const0_rtx, NULL_RTX, const0_rtx));
1739 
1740       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1741     }
1742   else
1743 #endif
1744     gcc_unreachable ();
1745 
1746   /* Find the CALL insn we just emitted, and attach the register usage
1747      information.  */
1748   call_insn = last_call_insn ();
1749   add_function_usage_to (call_insn, call_fusage);
1750 
1751   /* Restore the stack.  */
1752 #ifdef HAVE_save_stack_nonlocal
1753   if (HAVE_save_stack_nonlocal)
1754     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1755   else
1756 #endif
1757     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1758   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1759 
1760   OK_DEFER_POP;
1761 
1762   /* Return the address of the result block.  */
1763   result = copy_addr_to_reg (XEXP (result, 0));
1764   return convert_memory_address (ptr_mode, result);
1765 }
1766 
1767 /* Perform an untyped return.  */
1768 
1769 static void
1770 expand_builtin_return (rtx result)
1771 {
1772   int size, align, regno;
1773   machine_mode mode;
1774   rtx reg;
1775   rtx_insn *call_fusage = 0;
1776 
1777   result = convert_memory_address (Pmode, result);
1778 
1779   apply_result_size ();
1780   result = gen_rtx_MEM (BLKmode, result);
1781 
1782 #ifdef HAVE_untyped_return
1783   if (HAVE_untyped_return)
1784     {
1785       emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1786       emit_barrier ();
1787       return;
1788     }
1789 #endif
1790 
1791   /* Restore the return value and note that each value is used.  */
1792   size = 0;
1793   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1794     if ((mode = apply_result_mode[regno]) != VOIDmode)
1795       {
1796 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1797 	if (size % align != 0)
1798 	  size = CEIL (size, align) * align;
1799 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1800 	emit_move_insn (reg, adjust_address (result, mode, size));
1801 
1802 	push_to_sequence (call_fusage);
1803 	emit_use (reg);
1804 	call_fusage = get_insns ();
1805 	end_sequence ();
1806 	size += GET_MODE_SIZE (mode);
1807       }
1808 
1809   /* Put the USE insns before the return.  */
1810   emit_insn (call_fusage);
1811 
1812   /* Return whatever values was restored by jumping directly to the end
1813      of the function.  */
1814   expand_naked_return ();
1815 }
1816 
1817 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1818 
1819 static enum type_class
1820 type_to_class (tree type)
1821 {
1822   switch (TREE_CODE (type))
1823     {
1824     case VOID_TYPE:	   return void_type_class;
1825     case INTEGER_TYPE:	   return integer_type_class;
1826     case ENUMERAL_TYPE:	   return enumeral_type_class;
1827     case BOOLEAN_TYPE:	   return boolean_type_class;
1828     case POINTER_TYPE:	   return pointer_type_class;
1829     case REFERENCE_TYPE:   return reference_type_class;
1830     case OFFSET_TYPE:	   return offset_type_class;
1831     case REAL_TYPE:	   return real_type_class;
1832     case COMPLEX_TYPE:	   return complex_type_class;
1833     case FUNCTION_TYPE:	   return function_type_class;
1834     case METHOD_TYPE:	   return method_type_class;
1835     case RECORD_TYPE:	   return record_type_class;
1836     case UNION_TYPE:
1837     case QUAL_UNION_TYPE:  return union_type_class;
1838     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1839 				   ? string_type_class : array_type_class);
1840     case LANG_TYPE:	   return lang_type_class;
1841     default:		   return no_type_class;
1842     }
1843 }
1844 
1845 /* Expand a call EXP to __builtin_classify_type.  */
1846 
1847 static rtx
1848 expand_builtin_classify_type (tree exp)
1849 {
1850   if (call_expr_nargs (exp))
1851     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1852   return GEN_INT (no_type_class);
1853 }
1854 
1855 /* This helper macro, meant to be used in mathfn_built_in below,
1856    determines which among a set of three builtin math functions is
1857    appropriate for a given type mode.  The `F' and `L' cases are
1858    automatically generated from the `double' case.  */
1859 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1860   case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1861   fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1862   fcodel = BUILT_IN_MATHFN##L ; break;
1863 /* Similar to above, but appends _R after any F/L suffix.  */
1864 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1865   case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1866   fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1867   fcodel = BUILT_IN_MATHFN##L_R ; break;
1868 
1869 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1870    if available.  If IMPLICIT is true use the implicit builtin declaration,
1871    otherwise use the explicit declaration.  If we can't do the conversion,
1872    return zero.  */
1873 
1874 static tree
1875 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1876 {
1877   enum built_in_function fcode, fcodef, fcodel, fcode2;
1878 
1879   switch (fn)
1880     {
1881       CASE_MATHFN (BUILT_IN_ACOS)
1882       CASE_MATHFN (BUILT_IN_ACOSH)
1883       CASE_MATHFN (BUILT_IN_ASIN)
1884       CASE_MATHFN (BUILT_IN_ASINH)
1885       CASE_MATHFN (BUILT_IN_ATAN)
1886       CASE_MATHFN (BUILT_IN_ATAN2)
1887       CASE_MATHFN (BUILT_IN_ATANH)
1888       CASE_MATHFN (BUILT_IN_CBRT)
1889       CASE_MATHFN (BUILT_IN_CEIL)
1890       CASE_MATHFN (BUILT_IN_CEXPI)
1891       CASE_MATHFN (BUILT_IN_COPYSIGN)
1892       CASE_MATHFN (BUILT_IN_COS)
1893       CASE_MATHFN (BUILT_IN_COSH)
1894       CASE_MATHFN (BUILT_IN_DREM)
1895       CASE_MATHFN (BUILT_IN_ERF)
1896       CASE_MATHFN (BUILT_IN_ERFC)
1897       CASE_MATHFN (BUILT_IN_EXP)
1898       CASE_MATHFN (BUILT_IN_EXP10)
1899       CASE_MATHFN (BUILT_IN_EXP2)
1900       CASE_MATHFN (BUILT_IN_EXPM1)
1901       CASE_MATHFN (BUILT_IN_FABS)
1902       CASE_MATHFN (BUILT_IN_FDIM)
1903       CASE_MATHFN (BUILT_IN_FLOOR)
1904       CASE_MATHFN (BUILT_IN_FMA)
1905       CASE_MATHFN (BUILT_IN_FMAX)
1906       CASE_MATHFN (BUILT_IN_FMIN)
1907       CASE_MATHFN (BUILT_IN_FMOD)
1908       CASE_MATHFN (BUILT_IN_FREXP)
1909       CASE_MATHFN (BUILT_IN_GAMMA)
1910       CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1911       CASE_MATHFN (BUILT_IN_HUGE_VAL)
1912       CASE_MATHFN (BUILT_IN_HYPOT)
1913       CASE_MATHFN (BUILT_IN_ILOGB)
1914       CASE_MATHFN (BUILT_IN_ICEIL)
1915       CASE_MATHFN (BUILT_IN_IFLOOR)
1916       CASE_MATHFN (BUILT_IN_INF)
1917       CASE_MATHFN (BUILT_IN_IRINT)
1918       CASE_MATHFN (BUILT_IN_IROUND)
1919       CASE_MATHFN (BUILT_IN_ISINF)
1920       CASE_MATHFN (BUILT_IN_J0)
1921       CASE_MATHFN (BUILT_IN_J1)
1922       CASE_MATHFN (BUILT_IN_JN)
1923       CASE_MATHFN (BUILT_IN_LCEIL)
1924       CASE_MATHFN (BUILT_IN_LDEXP)
1925       CASE_MATHFN (BUILT_IN_LFLOOR)
1926       CASE_MATHFN (BUILT_IN_LGAMMA)
1927       CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1928       CASE_MATHFN (BUILT_IN_LLCEIL)
1929       CASE_MATHFN (BUILT_IN_LLFLOOR)
1930       CASE_MATHFN (BUILT_IN_LLRINT)
1931       CASE_MATHFN (BUILT_IN_LLROUND)
1932       CASE_MATHFN (BUILT_IN_LOG)
1933       CASE_MATHFN (BUILT_IN_LOG10)
1934       CASE_MATHFN (BUILT_IN_LOG1P)
1935       CASE_MATHFN (BUILT_IN_LOG2)
1936       CASE_MATHFN (BUILT_IN_LOGB)
1937       CASE_MATHFN (BUILT_IN_LRINT)
1938       CASE_MATHFN (BUILT_IN_LROUND)
1939       CASE_MATHFN (BUILT_IN_MODF)
1940       CASE_MATHFN (BUILT_IN_NAN)
1941       CASE_MATHFN (BUILT_IN_NANS)
1942       CASE_MATHFN (BUILT_IN_NEARBYINT)
1943       CASE_MATHFN (BUILT_IN_NEXTAFTER)
1944       CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1945       CASE_MATHFN (BUILT_IN_POW)
1946       CASE_MATHFN (BUILT_IN_POWI)
1947       CASE_MATHFN (BUILT_IN_POW10)
1948       CASE_MATHFN (BUILT_IN_REMAINDER)
1949       CASE_MATHFN (BUILT_IN_REMQUO)
1950       CASE_MATHFN (BUILT_IN_RINT)
1951       CASE_MATHFN (BUILT_IN_ROUND)
1952       CASE_MATHFN (BUILT_IN_SCALB)
1953       CASE_MATHFN (BUILT_IN_SCALBLN)
1954       CASE_MATHFN (BUILT_IN_SCALBN)
1955       CASE_MATHFN (BUILT_IN_SIGNBIT)
1956       CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1957       CASE_MATHFN (BUILT_IN_SIN)
1958       CASE_MATHFN (BUILT_IN_SINCOS)
1959       CASE_MATHFN (BUILT_IN_SINH)
1960       CASE_MATHFN (BUILT_IN_SQRT)
1961       CASE_MATHFN (BUILT_IN_TAN)
1962       CASE_MATHFN (BUILT_IN_TANH)
1963       CASE_MATHFN (BUILT_IN_TGAMMA)
1964       CASE_MATHFN (BUILT_IN_TRUNC)
1965       CASE_MATHFN (BUILT_IN_Y0)
1966       CASE_MATHFN (BUILT_IN_Y1)
1967       CASE_MATHFN (BUILT_IN_YN)
1968 
1969       default:
1970 	return NULL_TREE;
1971       }
1972 
1973   if (TYPE_MAIN_VARIANT (type) == double_type_node)
1974     fcode2 = fcode;
1975   else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1976     fcode2 = fcodef;
1977   else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1978     fcode2 = fcodel;
1979   else
1980     return NULL_TREE;
1981 
1982   if (implicit_p && !builtin_decl_implicit_p (fcode2))
1983     return NULL_TREE;
1984 
1985   return builtin_decl_explicit (fcode2);
1986 }
1987 
1988 /* Like mathfn_built_in_1(), but always use the implicit array.  */
1989 
1990 tree
1991 mathfn_built_in (tree type, enum built_in_function fn)
1992 {
1993   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1994 }
1995 
1996 /* If errno must be maintained, expand the RTL to check if the result,
1997    TARGET, of a built-in function call, EXP, is NaN, and if so set
1998    errno to EDOM.  */
1999 
2000 static void
2001 expand_errno_check (tree exp, rtx target)
2002 {
2003   rtx_code_label *lab = gen_label_rtx ();
2004 
2005   /* Test the result; if it is NaN, set errno=EDOM because
2006      the argument was not in the domain.  */
2007   do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2008 			   NULL_RTX, NULL_RTX, lab,
2009 			   /* The jump is very likely.  */
2010 			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2011 
2012 #ifdef TARGET_EDOM
2013   /* If this built-in doesn't throw an exception, set errno directly.  */
2014   if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2015     {
2016 #ifdef GEN_ERRNO_RTX
2017       rtx errno_rtx = GEN_ERRNO_RTX;
2018 #else
2019       rtx errno_rtx
2020 	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2021 #endif
2022       emit_move_insn (errno_rtx,
2023 		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2024       emit_label (lab);
2025       return;
2026     }
2027 #endif
2028 
2029   /* Make sure the library call isn't expanded as a tail call.  */
2030   CALL_EXPR_TAILCALL (exp) = 0;
2031 
2032   /* We can't set errno=EDOM directly; let the library call do it.
2033      Pop the arguments right away in case the call gets deleted.  */
2034   NO_DEFER_POP;
2035   expand_call (exp, target, 0);
2036   OK_DEFER_POP;
2037   emit_label (lab);
2038 }
2039 
2040 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2041    Return NULL_RTX if a normal call should be emitted rather than expanding
2042    the function in-line.  EXP is the expression that is a call to the builtin
2043    function; if convenient, the result should be placed in TARGET.
2044    SUBTARGET may be used as the target for computing one of EXP's operands.  */
2045 
2046 static rtx
2047 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2048 {
2049   optab builtin_optab;
2050   rtx op0;
2051   rtx_insn *insns;
2052   tree fndecl = get_callee_fndecl (exp);
2053   machine_mode mode;
2054   bool errno_set = false;
2055   bool try_widening = false;
2056   tree arg;
2057 
2058   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2059     return NULL_RTX;
2060 
2061   arg = CALL_EXPR_ARG (exp, 0);
2062 
2063   switch (DECL_FUNCTION_CODE (fndecl))
2064     {
2065     CASE_FLT_FN (BUILT_IN_SQRT):
2066       errno_set = ! tree_expr_nonnegative_p (arg);
2067       try_widening = true;
2068       builtin_optab = sqrt_optab;
2069       break;
2070     CASE_FLT_FN (BUILT_IN_EXP):
2071       errno_set = true; builtin_optab = exp_optab; break;
2072     CASE_FLT_FN (BUILT_IN_EXP10):
2073     CASE_FLT_FN (BUILT_IN_POW10):
2074       errno_set = true; builtin_optab = exp10_optab; break;
2075     CASE_FLT_FN (BUILT_IN_EXP2):
2076       errno_set = true; builtin_optab = exp2_optab; break;
2077     CASE_FLT_FN (BUILT_IN_EXPM1):
2078       errno_set = true; builtin_optab = expm1_optab; break;
2079     CASE_FLT_FN (BUILT_IN_LOGB):
2080       errno_set = true; builtin_optab = logb_optab; break;
2081     CASE_FLT_FN (BUILT_IN_LOG):
2082       errno_set = true; builtin_optab = log_optab; break;
2083     CASE_FLT_FN (BUILT_IN_LOG10):
2084       errno_set = true; builtin_optab = log10_optab; break;
2085     CASE_FLT_FN (BUILT_IN_LOG2):
2086       errno_set = true; builtin_optab = log2_optab; break;
2087     CASE_FLT_FN (BUILT_IN_LOG1P):
2088       errno_set = true; builtin_optab = log1p_optab; break;
2089     CASE_FLT_FN (BUILT_IN_ASIN):
2090       builtin_optab = asin_optab; break;
2091     CASE_FLT_FN (BUILT_IN_ACOS):
2092       builtin_optab = acos_optab; break;
2093     CASE_FLT_FN (BUILT_IN_TAN):
2094       builtin_optab = tan_optab; break;
2095     CASE_FLT_FN (BUILT_IN_ATAN):
2096       builtin_optab = atan_optab; break;
2097     CASE_FLT_FN (BUILT_IN_FLOOR):
2098       builtin_optab = floor_optab; break;
2099     CASE_FLT_FN (BUILT_IN_CEIL):
2100       builtin_optab = ceil_optab; break;
2101     CASE_FLT_FN (BUILT_IN_TRUNC):
2102       builtin_optab = btrunc_optab; break;
2103     CASE_FLT_FN (BUILT_IN_ROUND):
2104       builtin_optab = round_optab; break;
2105     CASE_FLT_FN (BUILT_IN_NEARBYINT):
2106       builtin_optab = nearbyint_optab;
2107       if (flag_trapping_math)
2108 	break;
2109       /* Else fallthrough and expand as rint.  */
2110     CASE_FLT_FN (BUILT_IN_RINT):
2111       builtin_optab = rint_optab; break;
2112     CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2113       builtin_optab = significand_optab; break;
2114     default:
2115       gcc_unreachable ();
2116     }
2117 
2118   /* Make a suitable register to place result in.  */
2119   mode = TYPE_MODE (TREE_TYPE (exp));
2120 
2121   if (! flag_errno_math || ! HONOR_NANS (mode))
2122     errno_set = false;
2123 
2124   /* Before working hard, check whether the instruction is available, but try
2125      to widen the mode for specific operations.  */
2126   if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2127        || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2128       && (!errno_set || !optimize_insn_for_size_p ()))
2129     {
2130       rtx result = gen_reg_rtx (mode);
2131 
2132       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2133 	 need to expand the argument again.  This way, we will not perform
2134 	 side-effects more the once.  */
2135       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2136 
2137       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2138 
2139       start_sequence ();
2140 
2141       /* Compute into RESULT.
2142 	 Set RESULT to wherever the result comes back.  */
2143       result = expand_unop (mode, builtin_optab, op0, result, 0);
2144 
2145       if (result != 0)
2146 	{
2147 	  if (errno_set)
2148 	    expand_errno_check (exp, result);
2149 
2150 	  /* Output the entire sequence.  */
2151 	  insns = get_insns ();
2152 	  end_sequence ();
2153 	  emit_insn (insns);
2154 	  return result;
2155 	}
2156 
2157       /* If we were unable to expand via the builtin, stop the sequence
2158 	 (without outputting the insns) and call to the library function
2159 	 with the stabilized argument list.  */
2160       end_sequence ();
2161     }
2162 
2163   return expand_call (exp, target, target == const0_rtx);
2164 }
2165 
2166 /* Expand a call to the builtin binary math functions (pow and atan2).
2167    Return NULL_RTX if a normal call should be emitted rather than expanding the
2168    function in-line.  EXP is the expression that is a call to the builtin
2169    function; if convenient, the result should be placed in TARGET.
2170    SUBTARGET may be used as the target for computing one of EXP's
2171    operands.  */
2172 
2173 static rtx
2174 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2175 {
2176   optab builtin_optab;
2177   rtx op0, op1, result;
2178   rtx_insn *insns;
2179   int op1_type = REAL_TYPE;
2180   tree fndecl = get_callee_fndecl (exp);
2181   tree arg0, arg1;
2182   machine_mode mode;
2183   bool errno_set = true;
2184 
2185   switch (DECL_FUNCTION_CODE (fndecl))
2186     {
2187     CASE_FLT_FN (BUILT_IN_SCALBN):
2188     CASE_FLT_FN (BUILT_IN_SCALBLN):
2189     CASE_FLT_FN (BUILT_IN_LDEXP):
2190       op1_type = INTEGER_TYPE;
2191     default:
2192       break;
2193     }
2194 
2195   if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2196     return NULL_RTX;
2197 
2198   arg0 = CALL_EXPR_ARG (exp, 0);
2199   arg1 = CALL_EXPR_ARG (exp, 1);
2200 
2201   switch (DECL_FUNCTION_CODE (fndecl))
2202     {
2203     CASE_FLT_FN (BUILT_IN_POW):
2204       builtin_optab = pow_optab; break;
2205     CASE_FLT_FN (BUILT_IN_ATAN2):
2206       builtin_optab = atan2_optab; break;
2207     CASE_FLT_FN (BUILT_IN_SCALB):
2208       if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2209 	return 0;
2210       builtin_optab = scalb_optab; break;
2211     CASE_FLT_FN (BUILT_IN_SCALBN):
2212     CASE_FLT_FN (BUILT_IN_SCALBLN):
2213       if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2214 	return 0;
2215     /* Fall through... */
2216     CASE_FLT_FN (BUILT_IN_LDEXP):
2217       builtin_optab = ldexp_optab; break;
2218     CASE_FLT_FN (BUILT_IN_FMOD):
2219       builtin_optab = fmod_optab; break;
2220     CASE_FLT_FN (BUILT_IN_REMAINDER):
2221     CASE_FLT_FN (BUILT_IN_DREM):
2222       builtin_optab = remainder_optab; break;
2223     default:
2224       gcc_unreachable ();
2225     }
2226 
2227   /* Make a suitable register to place result in.  */
2228   mode = TYPE_MODE (TREE_TYPE (exp));
2229 
2230   /* Before working hard, check whether the instruction is available.  */
2231   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2232     return NULL_RTX;
2233 
2234   result = gen_reg_rtx (mode);
2235 
2236   if (! flag_errno_math || ! HONOR_NANS (mode))
2237     errno_set = false;
2238 
2239   if (errno_set && optimize_insn_for_size_p ())
2240     return 0;
2241 
2242   /* Always stabilize the argument list.  */
2243   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2244   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2245 
2246   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2247   op1 = expand_normal (arg1);
2248 
2249   start_sequence ();
2250 
2251   /* Compute into RESULT.
2252      Set RESULT to wherever the result comes back.  */
2253   result = expand_binop (mode, builtin_optab, op0, op1,
2254 			 result, 0, OPTAB_DIRECT);
2255 
2256   /* If we were unable to expand via the builtin, stop the sequence
2257      (without outputting the insns) and call to the library function
2258      with the stabilized argument list.  */
2259   if (result == 0)
2260     {
2261       end_sequence ();
2262       return expand_call (exp, target, target == const0_rtx);
2263     }
2264 
2265   if (errno_set)
2266     expand_errno_check (exp, result);
2267 
2268   /* Output the entire sequence.  */
2269   insns = get_insns ();
2270   end_sequence ();
2271   emit_insn (insns);
2272 
2273   return result;
2274 }
2275 
2276 /* Expand a call to the builtin trinary math functions (fma).
2277    Return NULL_RTX if a normal call should be emitted rather than expanding the
2278    function in-line.  EXP is the expression that is a call to the builtin
2279    function; if convenient, the result should be placed in TARGET.
2280    SUBTARGET may be used as the target for computing one of EXP's
2281    operands.  */
2282 
2283 static rtx
2284 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2285 {
2286   optab builtin_optab;
2287   rtx op0, op1, op2, result;
2288   rtx_insn *insns;
2289   tree fndecl = get_callee_fndecl (exp);
2290   tree arg0, arg1, arg2;
2291   machine_mode mode;
2292 
2293   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2294     return NULL_RTX;
2295 
2296   arg0 = CALL_EXPR_ARG (exp, 0);
2297   arg1 = CALL_EXPR_ARG (exp, 1);
2298   arg2 = CALL_EXPR_ARG (exp, 2);
2299 
2300   switch (DECL_FUNCTION_CODE (fndecl))
2301     {
2302     CASE_FLT_FN (BUILT_IN_FMA):
2303       builtin_optab = fma_optab; break;
2304     default:
2305       gcc_unreachable ();
2306     }
2307 
2308   /* Make a suitable register to place result in.  */
2309   mode = TYPE_MODE (TREE_TYPE (exp));
2310 
2311   /* Before working hard, check whether the instruction is available.  */
2312   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2313     return NULL_RTX;
2314 
2315   result = gen_reg_rtx (mode);
2316 
2317   /* Always stabilize the argument list.  */
2318   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2319   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2320   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2321 
2322   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2323   op1 = expand_normal (arg1);
2324   op2 = expand_normal (arg2);
2325 
2326   start_sequence ();
2327 
2328   /* Compute into RESULT.
2329      Set RESULT to wherever the result comes back.  */
2330   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2331 			      result, 0);
2332 
2333   /* If we were unable to expand via the builtin, stop the sequence
2334      (without outputting the insns) and call to the library function
2335      with the stabilized argument list.  */
2336   if (result == 0)
2337     {
2338       end_sequence ();
2339       return expand_call (exp, target, target == const0_rtx);
2340     }
2341 
2342   /* Output the entire sequence.  */
2343   insns = get_insns ();
2344   end_sequence ();
2345   emit_insn (insns);
2346 
2347   return result;
2348 }
2349 
2350 /* Expand a call to the builtin sin and cos math functions.
2351    Return NULL_RTX if a normal call should be emitted rather than expanding the
2352    function in-line.  EXP is the expression that is a call to the builtin
2353    function; if convenient, the result should be placed in TARGET.
2354    SUBTARGET may be used as the target for computing one of EXP's
2355    operands.  */
2356 
2357 static rtx
2358 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2359 {
2360   optab builtin_optab;
2361   rtx op0;
2362   rtx_insn *insns;
2363   tree fndecl = get_callee_fndecl (exp);
2364   machine_mode mode;
2365   tree arg;
2366 
2367   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2368     return NULL_RTX;
2369 
2370   arg = CALL_EXPR_ARG (exp, 0);
2371 
2372   switch (DECL_FUNCTION_CODE (fndecl))
2373     {
2374     CASE_FLT_FN (BUILT_IN_SIN):
2375     CASE_FLT_FN (BUILT_IN_COS):
2376       builtin_optab = sincos_optab; break;
2377     default:
2378       gcc_unreachable ();
2379     }
2380 
2381   /* Make a suitable register to place result in.  */
2382   mode = TYPE_MODE (TREE_TYPE (exp));
2383 
2384   /* Check if sincos insn is available, otherwise fallback
2385      to sin or cos insn.  */
2386   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2387     switch (DECL_FUNCTION_CODE (fndecl))
2388       {
2389       CASE_FLT_FN (BUILT_IN_SIN):
2390 	builtin_optab = sin_optab; break;
2391       CASE_FLT_FN (BUILT_IN_COS):
2392 	builtin_optab = cos_optab; break;
2393       default:
2394 	gcc_unreachable ();
2395       }
2396 
2397   /* Before working hard, check whether the instruction is available.  */
2398   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2399     {
2400       rtx result = gen_reg_rtx (mode);
2401 
2402       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2403 	 need to expand the argument again.  This way, we will not perform
2404 	 side-effects more the once.  */
2405       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2406 
2407       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2408 
2409       start_sequence ();
2410 
2411       /* Compute into RESULT.
2412 	 Set RESULT to wherever the result comes back.  */
2413       if (builtin_optab == sincos_optab)
2414 	{
2415 	  int ok;
2416 
2417 	  switch (DECL_FUNCTION_CODE (fndecl))
2418 	    {
2419 	    CASE_FLT_FN (BUILT_IN_SIN):
2420 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2421 	      break;
2422 	    CASE_FLT_FN (BUILT_IN_COS):
2423 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2424 	      break;
2425 	    default:
2426 	      gcc_unreachable ();
2427 	    }
2428 	  gcc_assert (ok);
2429 	}
2430       else
2431 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2432 
2433       if (result != 0)
2434 	{
2435 	  /* Output the entire sequence.  */
2436 	  insns = get_insns ();
2437 	  end_sequence ();
2438 	  emit_insn (insns);
2439 	  return result;
2440 	}
2441 
2442       /* If we were unable to expand via the builtin, stop the sequence
2443 	 (without outputting the insns) and call to the library function
2444 	 with the stabilized argument list.  */
2445       end_sequence ();
2446     }
2447 
2448   return expand_call (exp, target, target == const0_rtx);
2449 }
2450 
2451 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2452    return an RTL instruction code that implements the functionality.
2453    If that isn't possible or available return CODE_FOR_nothing.  */
2454 
2455 static enum insn_code
2456 interclass_mathfn_icode (tree arg, tree fndecl)
2457 {
2458   bool errno_set = false;
2459   optab builtin_optab = unknown_optab;
2460   machine_mode mode;
2461 
2462   switch (DECL_FUNCTION_CODE (fndecl))
2463     {
2464     CASE_FLT_FN (BUILT_IN_ILOGB):
2465       errno_set = true; builtin_optab = ilogb_optab; break;
2466     CASE_FLT_FN (BUILT_IN_ISINF):
2467       builtin_optab = isinf_optab; break;
2468     case BUILT_IN_ISNORMAL:
2469     case BUILT_IN_ISFINITE:
2470     CASE_FLT_FN (BUILT_IN_FINITE):
2471     case BUILT_IN_FINITED32:
2472     case BUILT_IN_FINITED64:
2473     case BUILT_IN_FINITED128:
2474     case BUILT_IN_ISINFD32:
2475     case BUILT_IN_ISINFD64:
2476     case BUILT_IN_ISINFD128:
2477       /* These builtins have no optabs (yet).  */
2478       break;
2479     default:
2480       gcc_unreachable ();
2481     }
2482 
2483   /* There's no easy way to detect the case we need to set EDOM.  */
2484   if (flag_errno_math && errno_set)
2485     return CODE_FOR_nothing;
2486 
2487   /* Optab mode depends on the mode of the input argument.  */
2488   mode = TYPE_MODE (TREE_TYPE (arg));
2489 
2490   if (builtin_optab)
2491     return optab_handler (builtin_optab, mode);
2492   return CODE_FOR_nothing;
2493 }
2494 
2495 /* Expand a call to one of the builtin math functions that operate on
2496    floating point argument and output an integer result (ilogb, isinf,
2497    isnan, etc).
2498    Return 0 if a normal call should be emitted rather than expanding the
2499    function in-line.  EXP is the expression that is a call to the builtin
2500    function; if convenient, the result should be placed in TARGET.  */
2501 
2502 static rtx
2503 expand_builtin_interclass_mathfn (tree exp, rtx target)
2504 {
2505   enum insn_code icode = CODE_FOR_nothing;
2506   rtx op0;
2507   tree fndecl = get_callee_fndecl (exp);
2508   machine_mode mode;
2509   tree arg;
2510 
2511   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2512     return NULL_RTX;
2513 
2514   arg = CALL_EXPR_ARG (exp, 0);
2515   icode = interclass_mathfn_icode (arg, fndecl);
2516   mode = TYPE_MODE (TREE_TYPE (arg));
2517 
2518   if (icode != CODE_FOR_nothing)
2519     {
2520       struct expand_operand ops[1];
2521       rtx_insn *last = get_last_insn ();
2522       tree orig_arg = arg;
2523 
2524       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2525 	 need to expand the argument again.  This way, we will not perform
2526 	 side-effects more the once.  */
2527       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2528 
2529       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2530 
2531       if (mode != GET_MODE (op0))
2532 	op0 = convert_to_mode (mode, op0, 0);
2533 
2534       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2535       if (maybe_legitimize_operands (icode, 0, 1, ops)
2536 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2537 	return ops[0].value;
2538 
2539       delete_insns_since (last);
2540       CALL_EXPR_ARG (exp, 0) = orig_arg;
2541     }
2542 
2543   return NULL_RTX;
2544 }
2545 
2546 /* Expand a call to the builtin sincos math function.
2547    Return NULL_RTX if a normal call should be emitted rather than expanding the
2548    function in-line.  EXP is the expression that is a call to the builtin
2549    function.  */
2550 
2551 static rtx
2552 expand_builtin_sincos (tree exp)
2553 {
2554   rtx op0, op1, op2, target1, target2;
2555   machine_mode mode;
2556   tree arg, sinp, cosp;
2557   int result;
2558   location_t loc = EXPR_LOCATION (exp);
2559   tree alias_type, alias_off;
2560 
2561   if (!validate_arglist (exp, REAL_TYPE,
2562  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2563     return NULL_RTX;
2564 
2565   arg = CALL_EXPR_ARG (exp, 0);
2566   sinp = CALL_EXPR_ARG (exp, 1);
2567   cosp = CALL_EXPR_ARG (exp, 2);
2568 
2569   /* Make a suitable register to place result in.  */
2570   mode = TYPE_MODE (TREE_TYPE (arg));
2571 
2572   /* Check if sincos insn is available, otherwise emit the call.  */
2573   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2574     return NULL_RTX;
2575 
2576   target1 = gen_reg_rtx (mode);
2577   target2 = gen_reg_rtx (mode);
2578 
2579   op0 = expand_normal (arg);
2580   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2581   alias_off = build_int_cst (alias_type, 0);
2582   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2583 					sinp, alias_off));
2584   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2585 					cosp, alias_off));
2586 
2587   /* Compute into target1 and target2.
2588      Set TARGET to wherever the result comes back.  */
2589   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2590   gcc_assert (result);
2591 
2592   /* Move target1 and target2 to the memory locations indicated
2593      by op1 and op2.  */
2594   emit_move_insn (op1, target1);
2595   emit_move_insn (op2, target2);
2596 
2597   return const0_rtx;
2598 }
2599 
2600 /* Expand a call to the internal cexpi builtin to the sincos math function.
2601    EXP is the expression that is a call to the builtin function; if convenient,
2602    the result should be placed in TARGET.  */
2603 
2604 static rtx
2605 expand_builtin_cexpi (tree exp, rtx target)
2606 {
2607   tree fndecl = get_callee_fndecl (exp);
2608   tree arg, type;
2609   machine_mode mode;
2610   rtx op0, op1, op2;
2611   location_t loc = EXPR_LOCATION (exp);
2612 
2613   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2614     return NULL_RTX;
2615 
2616   arg = CALL_EXPR_ARG (exp, 0);
2617   type = TREE_TYPE (arg);
2618   mode = TYPE_MODE (TREE_TYPE (arg));
2619 
2620   /* Try expanding via a sincos optab, fall back to emitting a libcall
2621      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2622      is only generated from sincos, cexp or if we have either of them.  */
2623   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2624     {
2625       op1 = gen_reg_rtx (mode);
2626       op2 = gen_reg_rtx (mode);
2627 
2628       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2629 
2630       /* Compute into op1 and op2.  */
2631       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2632     }
2633   else if (targetm.libc_has_function (function_sincos))
2634     {
2635       tree call, fn = NULL_TREE;
2636       tree top1, top2;
2637       rtx op1a, op2a;
2638 
2639       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2640 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2641       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2642 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2643       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2644 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2645       else
2646 	gcc_unreachable ();
2647 
2648       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2649       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2650       op1a = copy_addr_to_reg (XEXP (op1, 0));
2651       op2a = copy_addr_to_reg (XEXP (op2, 0));
2652       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2653       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2654 
2655       /* Make sure not to fold the sincos call again.  */
2656       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2657       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2658 				      call, 3, arg, top1, top2));
2659     }
2660   else
2661     {
2662       tree call, fn = NULL_TREE, narg;
2663       tree ctype = build_complex_type (type);
2664 
2665       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2666 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2667       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2668 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2669       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2670 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2671       else
2672 	gcc_unreachable ();
2673 
2674       /* If we don't have a decl for cexp create one.  This is the
2675 	 friendliest fallback if the user calls __builtin_cexpi
2676 	 without full target C99 function support.  */
2677       if (fn == NULL_TREE)
2678 	{
2679 	  tree fntype;
2680 	  const char *name = NULL;
2681 
2682 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2683 	    name = "cexpf";
2684 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2685 	    name = "cexp";
2686 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2687 	    name = "cexpl";
2688 
2689 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2690 	  fn = build_fn_decl (name, fntype);
2691 	}
2692 
2693       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2694 			  build_real (type, dconst0), arg);
2695 
2696       /* Make sure not to fold the cexp call again.  */
2697       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2698       return expand_expr (build_call_nary (ctype, call, 1, narg),
2699 			  target, VOIDmode, EXPAND_NORMAL);
2700     }
2701 
2702   /* Now build the proper return type.  */
2703   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2704 			      make_tree (TREE_TYPE (arg), op2),
2705 			      make_tree (TREE_TYPE (arg), op1)),
2706 		      target, VOIDmode, EXPAND_NORMAL);
2707 }
2708 
2709 /* Conveniently construct a function call expression.  FNDECL names the
2710    function to be called, N is the number of arguments, and the "..."
2711    parameters are the argument expressions.  Unlike build_call_exr
2712    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2713 
2714 static tree
2715 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2716 {
2717   va_list ap;
2718   tree fntype = TREE_TYPE (fndecl);
2719   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2720 
2721   va_start (ap, n);
2722   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2723   va_end (ap);
2724   SET_EXPR_LOCATION (fn, loc);
2725   return fn;
2726 }
2727 
2728 /* Expand a call to one of the builtin rounding functions gcc defines
2729    as an extension (lfloor and lceil).  As these are gcc extensions we
2730    do not need to worry about setting errno to EDOM.
2731    If expanding via optab fails, lower expression to (int)(floor(x)).
2732    EXP is the expression that is a call to the builtin function;
2733    if convenient, the result should be placed in TARGET.  */
2734 
2735 static rtx
2736 expand_builtin_int_roundingfn (tree exp, rtx target)
2737 {
2738   convert_optab builtin_optab;
2739   rtx op0, tmp;
2740   rtx_insn *insns;
2741   tree fndecl = get_callee_fndecl (exp);
2742   enum built_in_function fallback_fn;
2743   tree fallback_fndecl;
2744   machine_mode mode;
2745   tree arg;
2746 
2747   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2748     gcc_unreachable ();
2749 
2750   arg = CALL_EXPR_ARG (exp, 0);
2751 
2752   switch (DECL_FUNCTION_CODE (fndecl))
2753     {
2754     CASE_FLT_FN (BUILT_IN_ICEIL):
2755     CASE_FLT_FN (BUILT_IN_LCEIL):
2756     CASE_FLT_FN (BUILT_IN_LLCEIL):
2757       builtin_optab = lceil_optab;
2758       fallback_fn = BUILT_IN_CEIL;
2759       break;
2760 
2761     CASE_FLT_FN (BUILT_IN_IFLOOR):
2762     CASE_FLT_FN (BUILT_IN_LFLOOR):
2763     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2764       builtin_optab = lfloor_optab;
2765       fallback_fn = BUILT_IN_FLOOR;
2766       break;
2767 
2768     default:
2769       gcc_unreachable ();
2770     }
2771 
2772   /* Make a suitable register to place result in.  */
2773   mode = TYPE_MODE (TREE_TYPE (exp));
2774 
2775   target = gen_reg_rtx (mode);
2776 
2777   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2778      need to expand the argument again.  This way, we will not perform
2779      side-effects more the once.  */
2780   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2781 
2782   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2783 
2784   start_sequence ();
2785 
2786   /* Compute into TARGET.  */
2787   if (expand_sfix_optab (target, op0, builtin_optab))
2788     {
2789       /* Output the entire sequence.  */
2790       insns = get_insns ();
2791       end_sequence ();
2792       emit_insn (insns);
2793       return target;
2794     }
2795 
2796   /* If we were unable to expand via the builtin, stop the sequence
2797      (without outputting the insns).  */
2798   end_sequence ();
2799 
2800   /* Fall back to floating point rounding optab.  */
2801   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2802 
2803   /* For non-C99 targets we may end up without a fallback fndecl here
2804      if the user called __builtin_lfloor directly.  In this case emit
2805      a call to the floor/ceil variants nevertheless.  This should result
2806      in the best user experience for not full C99 targets.  */
2807   if (fallback_fndecl == NULL_TREE)
2808     {
2809       tree fntype;
2810       const char *name = NULL;
2811 
2812       switch (DECL_FUNCTION_CODE (fndecl))
2813 	{
2814 	case BUILT_IN_ICEIL:
2815 	case BUILT_IN_LCEIL:
2816 	case BUILT_IN_LLCEIL:
2817 	  name = "ceil";
2818 	  break;
2819 	case BUILT_IN_ICEILF:
2820 	case BUILT_IN_LCEILF:
2821 	case BUILT_IN_LLCEILF:
2822 	  name = "ceilf";
2823 	  break;
2824 	case BUILT_IN_ICEILL:
2825 	case BUILT_IN_LCEILL:
2826 	case BUILT_IN_LLCEILL:
2827 	  name = "ceill";
2828 	  break;
2829 	case BUILT_IN_IFLOOR:
2830 	case BUILT_IN_LFLOOR:
2831 	case BUILT_IN_LLFLOOR:
2832 	  name = "floor";
2833 	  break;
2834 	case BUILT_IN_IFLOORF:
2835 	case BUILT_IN_LFLOORF:
2836 	case BUILT_IN_LLFLOORF:
2837 	  name = "floorf";
2838 	  break;
2839 	case BUILT_IN_IFLOORL:
2840 	case BUILT_IN_LFLOORL:
2841 	case BUILT_IN_LLFLOORL:
2842 	  name = "floorl";
2843 	  break;
2844 	default:
2845 	  gcc_unreachable ();
2846 	}
2847 
2848       fntype = build_function_type_list (TREE_TYPE (arg),
2849 					 TREE_TYPE (arg), NULL_TREE);
2850       fallback_fndecl = build_fn_decl (name, fntype);
2851     }
2852 
2853   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2854 
2855   tmp = expand_normal (exp);
2856   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2857 
2858   /* Truncate the result of floating point optab to integer
2859      via expand_fix ().  */
2860   target = gen_reg_rtx (mode);
2861   expand_fix (target, tmp, 0);
2862 
2863   return target;
2864 }
2865 
2866 /* Expand a call to one of the builtin math functions doing integer
2867    conversion (lrint).
2868    Return 0 if a normal call should be emitted rather than expanding the
2869    function in-line.  EXP is the expression that is a call to the builtin
2870    function; if convenient, the result should be placed in TARGET.  */
2871 
2872 static rtx
2873 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2874 {
2875   convert_optab builtin_optab;
2876   rtx op0;
2877   rtx_insn *insns;
2878   tree fndecl = get_callee_fndecl (exp);
2879   tree arg;
2880   machine_mode mode;
2881   enum built_in_function fallback_fn = BUILT_IN_NONE;
2882 
2883   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2884      gcc_unreachable ();
2885 
2886   arg = CALL_EXPR_ARG (exp, 0);
2887 
2888   switch (DECL_FUNCTION_CODE (fndecl))
2889     {
2890     CASE_FLT_FN (BUILT_IN_IRINT):
2891       fallback_fn = BUILT_IN_LRINT;
2892       /* FALLTHRU */
2893     CASE_FLT_FN (BUILT_IN_LRINT):
2894     CASE_FLT_FN (BUILT_IN_LLRINT):
2895       builtin_optab = lrint_optab;
2896       break;
2897 
2898     CASE_FLT_FN (BUILT_IN_IROUND):
2899       fallback_fn = BUILT_IN_LROUND;
2900       /* FALLTHRU */
2901     CASE_FLT_FN (BUILT_IN_LROUND):
2902     CASE_FLT_FN (BUILT_IN_LLROUND):
2903       builtin_optab = lround_optab;
2904       break;
2905 
2906     default:
2907       gcc_unreachable ();
2908     }
2909 
2910   /* There's no easy way to detect the case we need to set EDOM.  */
2911   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2912     return NULL_RTX;
2913 
2914   /* Make a suitable register to place result in.  */
2915   mode = TYPE_MODE (TREE_TYPE (exp));
2916 
2917   /* There's no easy way to detect the case we need to set EDOM.  */
2918   if (!flag_errno_math)
2919     {
2920       rtx result = gen_reg_rtx (mode);
2921 
2922       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2923 	 need to expand the argument again.  This way, we will not perform
2924 	 side-effects more the once.  */
2925       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2926 
2927       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2928 
2929       start_sequence ();
2930 
2931       if (expand_sfix_optab (result, op0, builtin_optab))
2932 	{
2933 	  /* Output the entire sequence.  */
2934 	  insns = get_insns ();
2935 	  end_sequence ();
2936 	  emit_insn (insns);
2937 	  return result;
2938 	}
2939 
2940       /* If we were unable to expand via the builtin, stop the sequence
2941 	 (without outputting the insns) and call to the library function
2942 	 with the stabilized argument list.  */
2943       end_sequence ();
2944     }
2945 
2946   if (fallback_fn != BUILT_IN_NONE)
2947     {
2948       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2949 	 targets, (int) round (x) should never be transformed into
2950 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2951 	 a call to lround in the hope that the target provides at least some
2952 	 C99 functions.  This should result in the best user experience for
2953 	 not full C99 targets.  */
2954       tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2955 						fallback_fn, 0);
2956 
2957       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2958 				   fallback_fndecl, 1, arg);
2959 
2960       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2961       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2962       return convert_to_mode (mode, target, 0);
2963     }
2964 
2965   return expand_call (exp, target, target == const0_rtx);
2966 }
2967 
2968 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2969    a normal call should be emitted rather than expanding the function
2970    in-line.  EXP is the expression that is a call to the builtin
2971    function; if convenient, the result should be placed in TARGET.  */
2972 
2973 static rtx
2974 expand_builtin_powi (tree exp, rtx target)
2975 {
2976   tree arg0, arg1;
2977   rtx op0, op1;
2978   machine_mode mode;
2979   machine_mode mode2;
2980 
2981   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2982     return NULL_RTX;
2983 
2984   arg0 = CALL_EXPR_ARG (exp, 0);
2985   arg1 = CALL_EXPR_ARG (exp, 1);
2986   mode = TYPE_MODE (TREE_TYPE (exp));
2987 
2988   /* Emit a libcall to libgcc.  */
2989 
2990   /* Mode of the 2nd argument must match that of an int.  */
2991   mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2992 
2993   if (target == NULL_RTX)
2994     target = gen_reg_rtx (mode);
2995 
2996   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2997   if (GET_MODE (op0) != mode)
2998     op0 = convert_to_mode (mode, op0, 0);
2999   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3000   if (GET_MODE (op1) != mode2)
3001     op1 = convert_to_mode (mode2, op1, 0);
3002 
3003   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3004 				    target, LCT_CONST, mode, 2,
3005 				    op0, mode, op1, mode2);
3006 
3007   return target;
3008 }
3009 
3010 /* Expand expression EXP which is a call to the strlen builtin.  Return
3011    NULL_RTX if we failed the caller should emit a normal call, otherwise
3012    try to get the result in TARGET, if convenient.  */
3013 
3014 static rtx
3015 expand_builtin_strlen (tree exp, rtx target,
3016 		       machine_mode target_mode)
3017 {
3018   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3019     return NULL_RTX;
3020   else
3021     {
3022       struct expand_operand ops[4];
3023       rtx pat;
3024       tree len;
3025       tree src = CALL_EXPR_ARG (exp, 0);
3026       rtx src_reg;
3027       rtx_insn *before_strlen;
3028       machine_mode insn_mode = target_mode;
3029       enum insn_code icode = CODE_FOR_nothing;
3030       unsigned int align;
3031 
3032       /* If the length can be computed at compile-time, return it.  */
3033       len = c_strlen (src, 0);
3034       if (len)
3035 	return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3036 
3037       /* If the length can be computed at compile-time and is constant
3038 	 integer, but there are side-effects in src, evaluate
3039 	 src for side-effects, then return len.
3040 	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3041 	 can be optimized into: i++; x = 3;  */
3042       len = c_strlen (src, 1);
3043       if (len && TREE_CODE (len) == INTEGER_CST)
3044 	{
3045 	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3046 	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3047 	}
3048 
3049       align = get_pointer_alignment (src) / BITS_PER_UNIT;
3050 
3051       /* If SRC is not a pointer type, don't do this operation inline.  */
3052       if (align == 0)
3053 	return NULL_RTX;
3054 
3055       /* Bail out if we can't compute strlen in the right mode.  */
3056       while (insn_mode != VOIDmode)
3057 	{
3058 	  icode = optab_handler (strlen_optab, insn_mode);
3059 	  if (icode != CODE_FOR_nothing)
3060 	    break;
3061 
3062 	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3063 	}
3064       if (insn_mode == VOIDmode)
3065 	return NULL_RTX;
3066 
3067       /* Make a place to hold the source address.  We will not expand
3068 	 the actual source until we are sure that the expansion will
3069 	 not fail -- there are trees that cannot be expanded twice.  */
3070       src_reg = gen_reg_rtx (Pmode);
3071 
3072       /* Mark the beginning of the strlen sequence so we can emit the
3073 	 source operand later.  */
3074       before_strlen = get_last_insn ();
3075 
3076       create_output_operand (&ops[0], target, insn_mode);
3077       create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3078       create_integer_operand (&ops[2], 0);
3079       create_integer_operand (&ops[3], align);
3080       if (!maybe_expand_insn (icode, 4, ops))
3081 	return NULL_RTX;
3082 
3083       /* Now that we are assured of success, expand the source.  */
3084       start_sequence ();
3085       pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3086       if (pat != src_reg)
3087 	{
3088 #ifdef POINTERS_EXTEND_UNSIGNED
3089 	  if (GET_MODE (pat) != Pmode)
3090 	    pat = convert_to_mode (Pmode, pat,
3091 				   POINTERS_EXTEND_UNSIGNED);
3092 #endif
3093 	  emit_move_insn (src_reg, pat);
3094 	}
3095       pat = get_insns ();
3096       end_sequence ();
3097 
3098       if (before_strlen)
3099 	emit_insn_after (pat, before_strlen);
3100       else
3101 	emit_insn_before (pat, get_insns ());
3102 
3103       /* Return the value in the proper mode for this function.  */
3104       if (GET_MODE (ops[0].value) == target_mode)
3105 	target = ops[0].value;
3106       else if (target != 0)
3107 	convert_move (target, ops[0].value, 0);
3108       else
3109 	target = convert_to_mode (target_mode, ops[0].value, 0);
3110 
3111       return target;
3112     }
3113 }
3114 
3115 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3116    bytes from constant string DATA + OFFSET and return it as target
3117    constant.  */
3118 
3119 static rtx
3120 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3121 			 machine_mode mode)
3122 {
3123   const char *str = (const char *) data;
3124 
3125   gcc_assert (offset >= 0
3126 	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3127 		  <= strlen (str) + 1));
3128 
3129   return c_readstr (str + offset, mode);
3130 }
3131 
3132 /* LEN specify length of the block of memcpy/memset operation.
3133    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3134    In some cases we can make very likely guess on max size, then we
3135    set it into PROBABLE_MAX_SIZE.  */
3136 
3137 static void
3138 determine_block_size (tree len, rtx len_rtx,
3139 		      unsigned HOST_WIDE_INT *min_size,
3140 		      unsigned HOST_WIDE_INT *max_size,
3141 		      unsigned HOST_WIDE_INT *probable_max_size)
3142 {
3143   if (CONST_INT_P (len_rtx))
3144     {
3145       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3146       return;
3147     }
3148   else
3149     {
3150       wide_int min, max;
3151       enum value_range_type range_type = VR_UNDEFINED;
3152 
3153       /* Determine bounds from the type.  */
3154       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3155 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3156       else
3157 	*min_size = 0;
3158       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3159 	*probable_max_size = *max_size
3160 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3161       else
3162 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3163 
3164       if (TREE_CODE (len) == SSA_NAME)
3165 	range_type = get_range_info (len, &min, &max);
3166       if (range_type == VR_RANGE)
3167 	{
3168 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3169 	    *min_size = min.to_uhwi ();
3170 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3171 	    *probable_max_size = *max_size = max.to_uhwi ();
3172 	}
3173       else if (range_type == VR_ANTI_RANGE)
3174 	{
3175 	  /* Anti range 0...N lets us to determine minimal size to N+1.  */
3176 	  if (min == 0)
3177 	    {
3178 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3179 		*min_size = max.to_uhwi () + 1;
3180 	    }
3181 	  /* Code like
3182 
3183 	     int n;
3184 	     if (n < 100)
3185 	       memcpy (a, b, n)
3186 
3187 	     Produce anti range allowing negative values of N.  We still
3188 	     can use the information and make a guess that N is not negative.
3189 	     */
3190 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3191 	    *probable_max_size = min.to_uhwi () - 1;
3192 	}
3193     }
3194   gcc_checking_assert (*max_size <=
3195 		       (unsigned HOST_WIDE_INT)
3196 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3197 }
3198 
3199 /* Helper function to do the actual work for expand_builtin_memcpy.  */
3200 
3201 static rtx
3202 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3203 {
3204   const char *src_str;
3205   unsigned int src_align = get_pointer_alignment (src);
3206   unsigned int dest_align = get_pointer_alignment (dest);
3207   rtx dest_mem, src_mem, dest_addr, len_rtx;
3208   HOST_WIDE_INT expected_size = -1;
3209   unsigned int expected_align = 0;
3210   unsigned HOST_WIDE_INT min_size;
3211   unsigned HOST_WIDE_INT max_size;
3212   unsigned HOST_WIDE_INT probable_max_size;
3213 
3214   /* If DEST is not a pointer type, call the normal function.  */
3215   if (dest_align == 0)
3216     return NULL_RTX;
3217 
3218   /* If either SRC is not a pointer type, don't do this
3219      operation in-line.  */
3220   if (src_align == 0)
3221     return NULL_RTX;
3222 
3223   if (currently_expanding_gimple_stmt)
3224     stringop_block_profile (currently_expanding_gimple_stmt,
3225 			    &expected_align, &expected_size);
3226 
3227   if (expected_align < dest_align)
3228     expected_align = dest_align;
3229   dest_mem = get_memory_rtx (dest, len);
3230   set_mem_align (dest_mem, dest_align);
3231   len_rtx = expand_normal (len);
3232   determine_block_size (len, len_rtx, &min_size, &max_size,
3233 			&probable_max_size);
3234   src_str = c_getstr (src);
3235 
3236   /* If SRC is a string constant and block move would be done
3237      by pieces, we can avoid loading the string from memory
3238      and only stored the computed constants.  */
3239   if (src_str
3240       && CONST_INT_P (len_rtx)
3241       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3242       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3243 			      CONST_CAST (char *, src_str),
3244 			      dest_align, false))
3245     {
3246       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3247 				  builtin_memcpy_read_str,
3248 				  CONST_CAST (char *, src_str),
3249 				  dest_align, false, 0);
3250       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3251       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3252       return dest_mem;
3253     }
3254 
3255   src_mem = get_memory_rtx (src, len);
3256   set_mem_align (src_mem, src_align);
3257 
3258   /* Copy word part most expediently.  */
3259   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3260 				     CALL_EXPR_TAILCALL (exp)
3261 				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3262 				     expected_align, expected_size,
3263 				     min_size, max_size, probable_max_size);
3264 
3265   if (dest_addr == 0)
3266     {
3267       dest_addr = force_operand (XEXP (dest_mem, 0), target);
3268       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3269     }
3270 
3271   return dest_addr;
3272 }
3273 
3274 /* Expand a call EXP to the memcpy builtin.
3275    Return NULL_RTX if we failed, the caller should emit a normal call,
3276    otherwise try to get the result in TARGET, if convenient (and in
3277    mode MODE if that's convenient).  */
3278 
3279 static rtx
3280 expand_builtin_memcpy (tree exp, rtx target)
3281 {
3282   if (!validate_arglist (exp,
3283  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3284     return NULL_RTX;
3285   else
3286     {
3287       tree dest = CALL_EXPR_ARG (exp, 0);
3288       tree src = CALL_EXPR_ARG (exp, 1);
3289       tree len = CALL_EXPR_ARG (exp, 2);
3290       return expand_builtin_memcpy_args (dest, src, len, target, exp);
3291     }
3292 }
3293 
3294 /* Expand an instrumented call EXP to the memcpy builtin.
3295    Return NULL_RTX if we failed, the caller should emit a normal call,
3296    otherwise try to get the result in TARGET, if convenient (and in
3297    mode MODE if that's convenient).  */
3298 
3299 static rtx
3300 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3301 {
3302   if (!validate_arglist (exp,
3303 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3304 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3305 			 INTEGER_TYPE, VOID_TYPE))
3306     return NULL_RTX;
3307   else
3308     {
3309       tree dest = CALL_EXPR_ARG (exp, 0);
3310       tree src = CALL_EXPR_ARG (exp, 2);
3311       tree len = CALL_EXPR_ARG (exp, 4);
3312       rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3313 
3314       /* Return src bounds with the result.  */
3315       if (res)
3316 	{
3317 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3318 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3319 	  res = chkp_join_splitted_slot (res, bnd);
3320 	}
3321       return res;
3322     }
3323 }
3324 
3325 /* Expand a call EXP to the mempcpy builtin.
3326    Return NULL_RTX if we failed; the caller should emit a normal call,
3327    otherwise try to get the result in TARGET, if convenient (and in
3328    mode MODE if that's convenient).  If ENDP is 0 return the
3329    destination pointer, if ENDP is 1 return the end pointer ala
3330    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3331    stpcpy.  */
3332 
3333 static rtx
3334 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3335 {
3336   if (!validate_arglist (exp,
3337  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3338     return NULL_RTX;
3339   else
3340     {
3341       tree dest = CALL_EXPR_ARG (exp, 0);
3342       tree src = CALL_EXPR_ARG (exp, 1);
3343       tree len = CALL_EXPR_ARG (exp, 2);
3344       return expand_builtin_mempcpy_args (dest, src, len,
3345 					  target, mode, /*endp=*/ 1,
3346 					  exp);
3347     }
3348 }
3349 
3350 /* Expand an instrumented call EXP to the mempcpy builtin.
3351    Return NULL_RTX if we failed, the caller should emit a normal call,
3352    otherwise try to get the result in TARGET, if convenient (and in
3353    mode MODE if that's convenient).  */
3354 
3355 static rtx
3356 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3357 {
3358   if (!validate_arglist (exp,
3359 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3360 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3361 			 INTEGER_TYPE, VOID_TYPE))
3362     return NULL_RTX;
3363   else
3364     {
3365       tree dest = CALL_EXPR_ARG (exp, 0);
3366       tree src = CALL_EXPR_ARG (exp, 2);
3367       tree len = CALL_EXPR_ARG (exp, 4);
3368       rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3369 					     mode, 1, exp);
3370 
3371       /* Return src bounds with the result.  */
3372       if (res)
3373 	{
3374 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3375 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3376 	  res = chkp_join_splitted_slot (res, bnd);
3377 	}
3378       return res;
3379     }
3380 }
3381 
3382 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3383    arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3384    so that this can also be called without constructing an actual CALL_EXPR.
3385    The other arguments and return value are the same as for
3386    expand_builtin_mempcpy.  */
3387 
3388 static rtx
3389 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3390 			     rtx target, machine_mode mode, int endp,
3391 			     tree orig_exp)
3392 {
3393   tree fndecl = get_callee_fndecl (orig_exp);
3394 
3395     /* If return value is ignored, transform mempcpy into memcpy.  */
3396   if (target == const0_rtx
3397       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3398       && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3399     {
3400       tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3401       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3402 					   dest, src, len);
3403       return expand_expr (result, target, mode, EXPAND_NORMAL);
3404     }
3405   else if (target == const0_rtx
3406 	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3407     {
3408       tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3409       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3410 					   dest, src, len);
3411       return expand_expr (result, target, mode, EXPAND_NORMAL);
3412     }
3413   else
3414     {
3415       const char *src_str;
3416       unsigned int src_align = get_pointer_alignment (src);
3417       unsigned int dest_align = get_pointer_alignment (dest);
3418       rtx dest_mem, src_mem, len_rtx;
3419 
3420       /* If either SRC or DEST is not a pointer type, don't do this
3421 	 operation in-line.  */
3422       if (dest_align == 0 || src_align == 0)
3423 	return NULL_RTX;
3424 
3425       /* If LEN is not constant, call the normal function.  */
3426       if (! tree_fits_uhwi_p (len))
3427 	return NULL_RTX;
3428 
3429       len_rtx = expand_normal (len);
3430       src_str = c_getstr (src);
3431 
3432       /* If SRC is a string constant and block move would be done
3433 	 by pieces, we can avoid loading the string from memory
3434 	 and only stored the computed constants.  */
3435       if (src_str
3436 	  && CONST_INT_P (len_rtx)
3437 	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3438 	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3439 				  CONST_CAST (char *, src_str),
3440 				  dest_align, false))
3441 	{
3442 	  dest_mem = get_memory_rtx (dest, len);
3443 	  set_mem_align (dest_mem, dest_align);
3444 	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3445 				      builtin_memcpy_read_str,
3446 				      CONST_CAST (char *, src_str),
3447 				      dest_align, false, endp);
3448 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3449 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3450 	  return dest_mem;
3451 	}
3452 
3453       if (CONST_INT_P (len_rtx)
3454 	  && can_move_by_pieces (INTVAL (len_rtx),
3455 				 MIN (dest_align, src_align)))
3456 	{
3457 	  dest_mem = get_memory_rtx (dest, len);
3458 	  set_mem_align (dest_mem, dest_align);
3459 	  src_mem = get_memory_rtx (src, len);
3460 	  set_mem_align (src_mem, src_align);
3461 	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3462 				     MIN (dest_align, src_align), endp);
3463 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3464 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3465 	  return dest_mem;
3466 	}
3467 
3468       return NULL_RTX;
3469     }
3470 }
3471 
3472 #ifndef HAVE_movstr
3473 # define HAVE_movstr 0
3474 # define CODE_FOR_movstr CODE_FOR_nothing
3475 #endif
3476 
3477 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3478    we failed, the caller should emit a normal call, otherwise try to
3479    get the result in TARGET, if convenient.  If ENDP is 0 return the
3480    destination pointer, if ENDP is 1 return the end pointer ala
3481    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3482    stpcpy.  */
3483 
3484 static rtx
3485 expand_movstr (tree dest, tree src, rtx target, int endp)
3486 {
3487   struct expand_operand ops[3];
3488   rtx dest_mem;
3489   rtx src_mem;
3490 
3491   if (!HAVE_movstr)
3492     return NULL_RTX;
3493 
3494   dest_mem = get_memory_rtx (dest, NULL);
3495   src_mem = get_memory_rtx (src, NULL);
3496   if (!endp)
3497     {
3498       target = force_reg (Pmode, XEXP (dest_mem, 0));
3499       dest_mem = replace_equiv_address (dest_mem, target);
3500     }
3501 
3502   create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3503   create_fixed_operand (&ops[1], dest_mem);
3504   create_fixed_operand (&ops[2], src_mem);
3505   if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3506     return NULL_RTX;
3507 
3508   if (endp && target != const0_rtx)
3509     {
3510       target = ops[0].value;
3511       /* movstr is supposed to set end to the address of the NUL
3512 	 terminator.  If the caller requested a mempcpy-like return value,
3513 	 adjust it.  */
3514       if (endp == 1)
3515 	{
3516 	  rtx tem = plus_constant (GET_MODE (target),
3517 				   gen_lowpart (GET_MODE (target), target), 1);
3518 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3519 	}
3520     }
3521   return target;
3522 }
3523 
3524 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3525    NULL_RTX if we failed the caller should emit a normal call, otherwise
3526    try to get the result in TARGET, if convenient (and in mode MODE if that's
3527    convenient).  */
3528 
3529 static rtx
3530 expand_builtin_strcpy (tree exp, rtx target)
3531 {
3532   if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3533    {
3534      tree dest = CALL_EXPR_ARG (exp, 0);
3535      tree src = CALL_EXPR_ARG (exp, 1);
3536      return expand_builtin_strcpy_args (dest, src, target);
3537    }
3538    return NULL_RTX;
3539 }
3540 
3541 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3542    arguments to the builtin_strcpy call DEST and SRC are broken out
3543    so that this can also be called without constructing an actual CALL_EXPR.
3544    The other arguments and return value are the same as for
3545    expand_builtin_strcpy.  */
3546 
3547 static rtx
3548 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3549 {
3550   return expand_movstr (dest, src, target, /*endp=*/0);
3551 }
3552 
3553 /* Expand a call EXP to the stpcpy builtin.
3554    Return NULL_RTX if we failed the caller should emit a normal call,
3555    otherwise try to get the result in TARGET, if convenient (and in
3556    mode MODE if that's convenient).  */
3557 
3558 static rtx
3559 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3560 {
3561   tree dst, src;
3562   location_t loc = EXPR_LOCATION (exp);
3563 
3564   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3565     return NULL_RTX;
3566 
3567   dst = CALL_EXPR_ARG (exp, 0);
3568   src = CALL_EXPR_ARG (exp, 1);
3569 
3570   /* If return value is ignored, transform stpcpy into strcpy.  */
3571   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3572     {
3573       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3574       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3575       return expand_expr (result, target, mode, EXPAND_NORMAL);
3576     }
3577   else
3578     {
3579       tree len, lenp1;
3580       rtx ret;
3581 
3582       /* Ensure we get an actual string whose length can be evaluated at
3583 	 compile-time, not an expression containing a string.  This is
3584 	 because the latter will potentially produce pessimized code
3585 	 when used to produce the return value.  */
3586       if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3587 	return expand_movstr (dst, src, target, /*endp=*/2);
3588 
3589       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3590       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3591 					 target, mode, /*endp=*/2,
3592 					 exp);
3593 
3594       if (ret)
3595 	return ret;
3596 
3597       if (TREE_CODE (len) == INTEGER_CST)
3598 	{
3599 	  rtx len_rtx = expand_normal (len);
3600 
3601 	  if (CONST_INT_P (len_rtx))
3602 	    {
3603 	      ret = expand_builtin_strcpy_args (dst, src, target);
3604 
3605 	      if (ret)
3606 		{
3607 		  if (! target)
3608 		    {
3609 		      if (mode != VOIDmode)
3610 			target = gen_reg_rtx (mode);
3611 		      else
3612 			target = gen_reg_rtx (GET_MODE (ret));
3613 		    }
3614 		  if (GET_MODE (target) != GET_MODE (ret))
3615 		    ret = gen_lowpart (GET_MODE (target), ret);
3616 
3617 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3618 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3619 		  gcc_assert (ret);
3620 
3621 		  return target;
3622 		}
3623 	    }
3624 	}
3625 
3626       return expand_movstr (dst, src, target, /*endp=*/2);
3627     }
3628 }
3629 
3630 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3631    bytes from constant string DATA + OFFSET and return it as target
3632    constant.  */
3633 
3634 rtx
3635 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3636 			  machine_mode mode)
3637 {
3638   const char *str = (const char *) data;
3639 
3640   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3641     return const0_rtx;
3642 
3643   return c_readstr (str + offset, mode);
3644 }
3645 
3646 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3647    NULL_RTX if we failed the caller should emit a normal call.  */
3648 
3649 static rtx
3650 expand_builtin_strncpy (tree exp, rtx target)
3651 {
3652   location_t loc = EXPR_LOCATION (exp);
3653 
3654   if (validate_arglist (exp,
3655  			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3656     {
3657       tree dest = CALL_EXPR_ARG (exp, 0);
3658       tree src = CALL_EXPR_ARG (exp, 1);
3659       tree len = CALL_EXPR_ARG (exp, 2);
3660       tree slen = c_strlen (src, 1);
3661 
3662       /* We must be passed a constant len and src parameter.  */
3663       if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3664 	return NULL_RTX;
3665 
3666       slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3667 
3668       /* We're required to pad with trailing zeros if the requested
3669 	 len is greater than strlen(s2)+1.  In that case try to
3670 	 use store_by_pieces, if it fails, punt.  */
3671       if (tree_int_cst_lt (slen, len))
3672 	{
3673 	  unsigned int dest_align = get_pointer_alignment (dest);
3674 	  const char *p = c_getstr (src);
3675 	  rtx dest_mem;
3676 
3677 	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3678 	      || !can_store_by_pieces (tree_to_uhwi (len),
3679 				       builtin_strncpy_read_str,
3680 				       CONST_CAST (char *, p),
3681 				       dest_align, false))
3682 	    return NULL_RTX;
3683 
3684 	  dest_mem = get_memory_rtx (dest, len);
3685 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
3686 			   builtin_strncpy_read_str,
3687 			   CONST_CAST (char *, p), dest_align, false, 0);
3688 	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
3689 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3690 	  return dest_mem;
3691 	}
3692     }
3693   return NULL_RTX;
3694 }
3695 
3696 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3697    bytes from constant string DATA + OFFSET and return it as target
3698    constant.  */
3699 
3700 rtx
3701 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3702 			 machine_mode mode)
3703 {
3704   const char *c = (const char *) data;
3705   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3706 
3707   memset (p, *c, GET_MODE_SIZE (mode));
3708 
3709   return c_readstr (p, mode);
3710 }
3711 
3712 /* Callback routine for store_by_pieces.  Return the RTL of a register
3713    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3714    char value given in the RTL register data.  For example, if mode is
3715    4 bytes wide, return the RTL for 0x01010101*data.  */
3716 
3717 static rtx
3718 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3719 			machine_mode mode)
3720 {
3721   rtx target, coeff;
3722   size_t size;
3723   char *p;
3724 
3725   size = GET_MODE_SIZE (mode);
3726   if (size == 1)
3727     return (rtx) data;
3728 
3729   p = XALLOCAVEC (char, size);
3730   memset (p, 1, size);
3731   coeff = c_readstr (p, mode);
3732 
3733   target = convert_to_mode (mode, (rtx) data, 1);
3734   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3735   return force_reg (mode, target);
3736 }
3737 
3738 /* Expand expression EXP, which is a call to the memset builtin.  Return
3739    NULL_RTX if we failed the caller should emit a normal call, otherwise
3740    try to get the result in TARGET, if convenient (and in mode MODE if that's
3741    convenient).  */
3742 
3743 static rtx
3744 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3745 {
3746   if (!validate_arglist (exp,
3747  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3748     return NULL_RTX;
3749   else
3750     {
3751       tree dest = CALL_EXPR_ARG (exp, 0);
3752       tree val = CALL_EXPR_ARG (exp, 1);
3753       tree len = CALL_EXPR_ARG (exp, 2);
3754       return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3755     }
3756 }
3757 
3758 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3759    Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3760    try to get the result in TARGET, if convenient (and in mode MODE if that's
3761    convenient).  */
3762 
3763 static rtx
3764 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3765 {
3766   if (!validate_arglist (exp,
3767 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3768 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3769     return NULL_RTX;
3770   else
3771     {
3772       tree dest = CALL_EXPR_ARG (exp, 0);
3773       tree val = CALL_EXPR_ARG (exp, 2);
3774       tree len = CALL_EXPR_ARG (exp, 3);
3775       rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3776 
3777       /* Return src bounds with the result.  */
3778       if (res)
3779 	{
3780 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3781 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3782 	  res = chkp_join_splitted_slot (res, bnd);
3783 	}
3784       return res;
3785     }
3786 }
3787 
3788 /* Helper function to do the actual work for expand_builtin_memset.  The
3789    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3790    so that this can also be called without constructing an actual CALL_EXPR.
3791    The other arguments and return value are the same as for
3792    expand_builtin_memset.  */
3793 
3794 static rtx
3795 expand_builtin_memset_args (tree dest, tree val, tree len,
3796 			    rtx target, machine_mode mode, tree orig_exp)
3797 {
3798   tree fndecl, fn;
3799   enum built_in_function fcode;
3800   machine_mode val_mode;
3801   char c;
3802   unsigned int dest_align;
3803   rtx dest_mem, dest_addr, len_rtx;
3804   HOST_WIDE_INT expected_size = -1;
3805   unsigned int expected_align = 0;
3806   unsigned HOST_WIDE_INT min_size;
3807   unsigned HOST_WIDE_INT max_size;
3808   unsigned HOST_WIDE_INT probable_max_size;
3809 
3810   dest_align = get_pointer_alignment (dest);
3811 
3812   /* If DEST is not a pointer type, don't do this operation in-line.  */
3813   if (dest_align == 0)
3814     return NULL_RTX;
3815 
3816   if (currently_expanding_gimple_stmt)
3817     stringop_block_profile (currently_expanding_gimple_stmt,
3818 			    &expected_align, &expected_size);
3819 
3820   if (expected_align < dest_align)
3821     expected_align = dest_align;
3822 
3823   /* If the LEN parameter is zero, return DEST.  */
3824   if (integer_zerop (len))
3825     {
3826       /* Evaluate and ignore VAL in case it has side-effects.  */
3827       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3828       return expand_expr (dest, target, mode, EXPAND_NORMAL);
3829     }
3830 
3831   /* Stabilize the arguments in case we fail.  */
3832   dest = builtin_save_expr (dest);
3833   val = builtin_save_expr (val);
3834   len = builtin_save_expr (len);
3835 
3836   len_rtx = expand_normal (len);
3837   determine_block_size (len, len_rtx, &min_size, &max_size,
3838 			&probable_max_size);
3839   dest_mem = get_memory_rtx (dest, len);
3840   val_mode = TYPE_MODE (unsigned_char_type_node);
3841 
3842   if (TREE_CODE (val) != INTEGER_CST)
3843     {
3844       rtx val_rtx;
3845 
3846       val_rtx = expand_normal (val);
3847       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3848 
3849       /* Assume that we can memset by pieces if we can store
3850        * the coefficients by pieces (in the required modes).
3851        * We can't pass builtin_memset_gen_str as that emits RTL.  */
3852       c = 1;
3853       if (tree_fits_uhwi_p (len)
3854 	  && can_store_by_pieces (tree_to_uhwi (len),
3855 				  builtin_memset_read_str, &c, dest_align,
3856 				  true))
3857 	{
3858 	  val_rtx = force_reg (val_mode, val_rtx);
3859 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
3860 			   builtin_memset_gen_str, val_rtx, dest_align,
3861 			   true, 0);
3862 	}
3863       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3864 					dest_align, expected_align,
3865 					expected_size, min_size, max_size,
3866 					probable_max_size))
3867 	goto do_libcall;
3868 
3869       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3870       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3871       return dest_mem;
3872     }
3873 
3874   if (target_char_cast (val, &c))
3875     goto do_libcall;
3876 
3877   if (c)
3878     {
3879       if (tree_fits_uhwi_p (len)
3880 	  && can_store_by_pieces (tree_to_uhwi (len),
3881 				  builtin_memset_read_str, &c, dest_align,
3882 				  true))
3883 	store_by_pieces (dest_mem, tree_to_uhwi (len),
3884 			 builtin_memset_read_str, &c, dest_align, true, 0);
3885       else if (!set_storage_via_setmem (dest_mem, len_rtx,
3886 					gen_int_mode (c, val_mode),
3887 					dest_align, expected_align,
3888 					expected_size, min_size, max_size,
3889 					probable_max_size))
3890 	goto do_libcall;
3891 
3892       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3893       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3894       return dest_mem;
3895     }
3896 
3897   set_mem_align (dest_mem, dest_align);
3898   dest_addr = clear_storage_hints (dest_mem, len_rtx,
3899 				   CALL_EXPR_TAILCALL (orig_exp)
3900 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3901 				   expected_align, expected_size,
3902 				   min_size, max_size,
3903 				   probable_max_size);
3904 
3905   if (dest_addr == 0)
3906     {
3907       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3908       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3909     }
3910 
3911   return dest_addr;
3912 
3913  do_libcall:
3914   fndecl = get_callee_fndecl (orig_exp);
3915   fcode = DECL_FUNCTION_CODE (fndecl);
3916   if (fcode == BUILT_IN_MEMSET
3917       || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3918     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3919 				dest, val, len);
3920   else if (fcode == BUILT_IN_BZERO)
3921     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3922 				dest, len);
3923   else
3924     gcc_unreachable ();
3925   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3926   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3927   return expand_call (fn, target, target == const0_rtx);
3928 }
3929 
3930 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3931    NULL_RTX if we failed the caller should emit a normal call.  */
3932 
3933 static rtx
3934 expand_builtin_bzero (tree exp)
3935 {
3936   tree dest, size;
3937   location_t loc = EXPR_LOCATION (exp);
3938 
3939   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3940     return NULL_RTX;
3941 
3942   dest = CALL_EXPR_ARG (exp, 0);
3943   size = CALL_EXPR_ARG (exp, 1);
3944 
3945   /* New argument list transforming bzero(ptr x, int y) to
3946      memset(ptr x, int 0, size_t y).   This is done this way
3947      so that if it isn't expanded inline, we fallback to
3948      calling bzero instead of memset.  */
3949 
3950   return expand_builtin_memset_args (dest, integer_zero_node,
3951 				     fold_convert_loc (loc,
3952 						       size_type_node, size),
3953 				     const0_rtx, VOIDmode, exp);
3954 }
3955 
3956 /* Expand expression EXP, which is a call to the memcmp built-in function.
3957    Return NULL_RTX if we failed and the caller should emit a normal call,
3958    otherwise try to get the result in TARGET, if convenient (and in mode
3959    MODE, if that's convenient).  */
3960 
3961 static rtx
3962 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3963 		       ATTRIBUTE_UNUSED machine_mode mode)
3964 {
3965   location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3966 
3967   if (!validate_arglist (exp,
3968  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3969     return NULL_RTX;
3970 
3971   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3972      implementing memcmp because it will stop if it encounters two
3973      zero bytes.  */
3974 #if defined HAVE_cmpmemsi
3975   {
3976     rtx arg1_rtx, arg2_rtx, arg3_rtx;
3977     rtx result;
3978     rtx insn;
3979     tree arg1 = CALL_EXPR_ARG (exp, 0);
3980     tree arg2 = CALL_EXPR_ARG (exp, 1);
3981     tree len = CALL_EXPR_ARG (exp, 2);
3982 
3983     unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3984     unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3985     machine_mode insn_mode;
3986 
3987     if (HAVE_cmpmemsi)
3988       insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3989     else
3990       return NULL_RTX;
3991 
3992     /* If we don't have POINTER_TYPE, call the function.  */
3993     if (arg1_align == 0 || arg2_align == 0)
3994       return NULL_RTX;
3995 
3996     /* Make a place to write the result of the instruction.  */
3997     result = target;
3998     if (! (result != 0
3999 	   && REG_P (result) && GET_MODE (result) == insn_mode
4000 	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4001       result = gen_reg_rtx (insn_mode);
4002 
4003     arg1_rtx = get_memory_rtx (arg1, len);
4004     arg2_rtx = get_memory_rtx (arg2, len);
4005     arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4006 
4007     /* Set MEM_SIZE as appropriate.  */
4008     if (CONST_INT_P (arg3_rtx))
4009       {
4010 	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4011 	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4012       }
4013 
4014     if (HAVE_cmpmemsi)
4015       insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4016 			   GEN_INT (MIN (arg1_align, arg2_align)));
4017     else
4018       gcc_unreachable ();
4019 
4020     if (insn)
4021       emit_insn (insn);
4022     else
4023       emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4024 			       TYPE_MODE (integer_type_node), 3,
4025 			       XEXP (arg1_rtx, 0), Pmode,
4026 			       XEXP (arg2_rtx, 0), Pmode,
4027 			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4028 						TYPE_UNSIGNED (sizetype)),
4029 			       TYPE_MODE (sizetype));
4030 
4031     /* Return the value in the proper mode for this function.  */
4032     mode = TYPE_MODE (TREE_TYPE (exp));
4033     if (GET_MODE (result) == mode)
4034       return result;
4035     else if (target != 0)
4036       {
4037 	convert_move (target, result, 0);
4038 	return target;
4039       }
4040     else
4041       return convert_to_mode (mode, result, 0);
4042   }
4043 #endif /* HAVE_cmpmemsi.  */
4044 
4045   return NULL_RTX;
4046 }
4047 
4048 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4049    if we failed the caller should emit a normal call, otherwise try to get
4050    the result in TARGET, if convenient.  */
4051 
4052 static rtx
4053 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4054 {
4055   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4056     return NULL_RTX;
4057 
4058 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4059   if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4060       || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4061     {
4062       rtx arg1_rtx, arg2_rtx;
4063       rtx result, insn = NULL_RTX;
4064       tree fndecl, fn;
4065       tree arg1 = CALL_EXPR_ARG (exp, 0);
4066       tree arg2 = CALL_EXPR_ARG (exp, 1);
4067 
4068       unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4069       unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4070 
4071       /* If we don't have POINTER_TYPE, call the function.  */
4072       if (arg1_align == 0 || arg2_align == 0)
4073 	return NULL_RTX;
4074 
4075       /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
4076       arg1 = builtin_save_expr (arg1);
4077       arg2 = builtin_save_expr (arg2);
4078 
4079       arg1_rtx = get_memory_rtx (arg1, NULL);
4080       arg2_rtx = get_memory_rtx (arg2, NULL);
4081 
4082 #ifdef HAVE_cmpstrsi
4083       /* Try to call cmpstrsi.  */
4084       if (HAVE_cmpstrsi)
4085 	{
4086 	  machine_mode insn_mode
4087 	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4088 
4089 	  /* Make a place to write the result of the instruction.  */
4090 	  result = target;
4091 	  if (! (result != 0
4092 		 && REG_P (result) && GET_MODE (result) == insn_mode
4093 		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4094 	    result = gen_reg_rtx (insn_mode);
4095 
4096 	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4097 			       GEN_INT (MIN (arg1_align, arg2_align)));
4098 	}
4099 #endif
4100 #ifdef HAVE_cmpstrnsi
4101       /* Try to determine at least one length and call cmpstrnsi.  */
4102       if (!insn && HAVE_cmpstrnsi)
4103 	{
4104 	  tree len;
4105 	  rtx arg3_rtx;
4106 
4107 	  machine_mode insn_mode
4108 	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4109 	  tree len1 = c_strlen (arg1, 1);
4110 	  tree len2 = c_strlen (arg2, 1);
4111 
4112 	  if (len1)
4113 	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4114 	  if (len2)
4115 	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4116 
4117 	  /* If we don't have a constant length for the first, use the length
4118 	     of the second, if we know it.  We don't require a constant for
4119 	     this case; some cost analysis could be done if both are available
4120 	     but neither is constant.  For now, assume they're equally cheap,
4121 	     unless one has side effects.  If both strings have constant lengths,
4122 	     use the smaller.  */
4123 
4124 	  if (!len1)
4125 	    len = len2;
4126 	  else if (!len2)
4127 	    len = len1;
4128 	  else if (TREE_SIDE_EFFECTS (len1))
4129 	    len = len2;
4130 	  else if (TREE_SIDE_EFFECTS (len2))
4131 	    len = len1;
4132 	  else if (TREE_CODE (len1) != INTEGER_CST)
4133 	    len = len2;
4134 	  else if (TREE_CODE (len2) != INTEGER_CST)
4135 	    len = len1;
4136 	  else if (tree_int_cst_lt (len1, len2))
4137 	    len = len1;
4138 	  else
4139 	    len = len2;
4140 
4141 	  /* If both arguments have side effects, we cannot optimize.  */
4142 	  if (!len || TREE_SIDE_EFFECTS (len))
4143 	    goto do_libcall;
4144 
4145 	  arg3_rtx = expand_normal (len);
4146 
4147 	  /* Make a place to write the result of the instruction.  */
4148 	  result = target;
4149 	  if (! (result != 0
4150 		 && REG_P (result) && GET_MODE (result) == insn_mode
4151 		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4152 	    result = gen_reg_rtx (insn_mode);
4153 
4154 	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4155 				GEN_INT (MIN (arg1_align, arg2_align)));
4156 	}
4157 #endif
4158 
4159       if (insn)
4160 	{
4161 	  machine_mode mode;
4162 	  emit_insn (insn);
4163 
4164 	  /* Return the value in the proper mode for this function.  */
4165 	  mode = TYPE_MODE (TREE_TYPE (exp));
4166 	  if (GET_MODE (result) == mode)
4167 	    return result;
4168 	  if (target == 0)
4169 	    return convert_to_mode (mode, result, 0);
4170 	  convert_move (target, result, 0);
4171 	  return target;
4172 	}
4173 
4174       /* Expand the library call ourselves using a stabilized argument
4175 	 list to avoid re-evaluating the function's arguments twice.  */
4176 #ifdef HAVE_cmpstrnsi
4177     do_libcall:
4178 #endif
4179       fndecl = get_callee_fndecl (exp);
4180       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4181       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4182       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4183       return expand_call (fn, target, target == const0_rtx);
4184     }
4185 #endif
4186   return NULL_RTX;
4187 }
4188 
4189 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4190    NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4191    the result in TARGET, if convenient.  */
4192 
4193 static rtx
4194 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4195 			ATTRIBUTE_UNUSED machine_mode mode)
4196 {
4197   location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4198 
4199   if (!validate_arglist (exp,
4200  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4201     return NULL_RTX;
4202 
4203   /* If c_strlen can determine an expression for one of the string
4204      lengths, and it doesn't have side effects, then emit cmpstrnsi
4205      using length MIN(strlen(string)+1, arg3).  */
4206 #ifdef HAVE_cmpstrnsi
4207   if (HAVE_cmpstrnsi)
4208   {
4209     tree len, len1, len2;
4210     rtx arg1_rtx, arg2_rtx, arg3_rtx;
4211     rtx result, insn;
4212     tree fndecl, fn;
4213     tree arg1 = CALL_EXPR_ARG (exp, 0);
4214     tree arg2 = CALL_EXPR_ARG (exp, 1);
4215     tree arg3 = CALL_EXPR_ARG (exp, 2);
4216 
4217     unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4218     unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4219     machine_mode insn_mode
4220       = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4221 
4222     len1 = c_strlen (arg1, 1);
4223     len2 = c_strlen (arg2, 1);
4224 
4225     if (len1)
4226       len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4227     if (len2)
4228       len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4229 
4230     /* If we don't have a constant length for the first, use the length
4231        of the second, if we know it.  We don't require a constant for
4232        this case; some cost analysis could be done if both are available
4233        but neither is constant.  For now, assume they're equally cheap,
4234        unless one has side effects.  If both strings have constant lengths,
4235        use the smaller.  */
4236 
4237     if (!len1)
4238       len = len2;
4239     else if (!len2)
4240       len = len1;
4241     else if (TREE_SIDE_EFFECTS (len1))
4242       len = len2;
4243     else if (TREE_SIDE_EFFECTS (len2))
4244       len = len1;
4245     else if (TREE_CODE (len1) != INTEGER_CST)
4246       len = len2;
4247     else if (TREE_CODE (len2) != INTEGER_CST)
4248       len = len1;
4249     else if (tree_int_cst_lt (len1, len2))
4250       len = len1;
4251     else
4252       len = len2;
4253 
4254     /* If both arguments have side effects, we cannot optimize.  */
4255     if (!len || TREE_SIDE_EFFECTS (len))
4256       return NULL_RTX;
4257 
4258     /* The actual new length parameter is MIN(len,arg3).  */
4259     len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4260 		       fold_convert_loc (loc, TREE_TYPE (len), arg3));
4261 
4262     /* If we don't have POINTER_TYPE, call the function.  */
4263     if (arg1_align == 0 || arg2_align == 0)
4264       return NULL_RTX;
4265 
4266     /* Make a place to write the result of the instruction.  */
4267     result = target;
4268     if (! (result != 0
4269 	   && REG_P (result) && GET_MODE (result) == insn_mode
4270 	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4271       result = gen_reg_rtx (insn_mode);
4272 
4273     /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
4274     arg1 = builtin_save_expr (arg1);
4275     arg2 = builtin_save_expr (arg2);
4276     len = builtin_save_expr (len);
4277 
4278     arg1_rtx = get_memory_rtx (arg1, len);
4279     arg2_rtx = get_memory_rtx (arg2, len);
4280     arg3_rtx = expand_normal (len);
4281     insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4282 			  GEN_INT (MIN (arg1_align, arg2_align)));
4283     if (insn)
4284       {
4285 	emit_insn (insn);
4286 
4287 	/* Return the value in the proper mode for this function.  */
4288 	mode = TYPE_MODE (TREE_TYPE (exp));
4289 	if (GET_MODE (result) == mode)
4290 	  return result;
4291 	if (target == 0)
4292 	  return convert_to_mode (mode, result, 0);
4293 	convert_move (target, result, 0);
4294 	return target;
4295       }
4296 
4297     /* Expand the library call ourselves using a stabilized argument
4298        list to avoid re-evaluating the function's arguments twice.  */
4299     fndecl = get_callee_fndecl (exp);
4300     fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4301 				arg1, arg2, len);
4302     gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4303     CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4304     return expand_call (fn, target, target == const0_rtx);
4305   }
4306 #endif
4307   return NULL_RTX;
4308 }
4309 
4310 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4311    if that's convenient.  */
4312 
4313 rtx
4314 expand_builtin_saveregs (void)
4315 {
4316   rtx val;
4317   rtx_insn *seq;
4318 
4319   /* Don't do __builtin_saveregs more than once in a function.
4320      Save the result of the first call and reuse it.  */
4321   if (saveregs_value != 0)
4322     return saveregs_value;
4323 
4324   /* When this function is called, it means that registers must be
4325      saved on entry to this function.  So we migrate the call to the
4326      first insn of this function.  */
4327 
4328   start_sequence ();
4329 
4330   /* Do whatever the machine needs done in this case.  */
4331   val = targetm.calls.expand_builtin_saveregs ();
4332 
4333   seq = get_insns ();
4334   end_sequence ();
4335 
4336   saveregs_value = val;
4337 
4338   /* Put the insns after the NOTE that starts the function.  If this
4339      is inside a start_sequence, make the outer-level insn chain current, so
4340      the code is placed at the start of the function.  */
4341   push_topmost_sequence ();
4342   emit_insn_after (seq, entry_of_function ());
4343   pop_topmost_sequence ();
4344 
4345   return val;
4346 }
4347 
4348 /* Expand a call to __builtin_next_arg.  */
4349 
4350 static rtx
4351 expand_builtin_next_arg (void)
4352 {
4353   /* Checking arguments is already done in fold_builtin_next_arg
4354      that must be called before this function.  */
4355   return expand_binop (ptr_mode, add_optab,
4356 		       crtl->args.internal_arg_pointer,
4357 		       crtl->args.arg_offset_rtx,
4358 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4359 }
4360 
4361 /* Make it easier for the backends by protecting the valist argument
4362    from multiple evaluations.  */
4363 
4364 static tree
4365 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4366 {
4367   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4368 
4369   /* The current way of determining the type of valist is completely
4370      bogus.  We should have the information on the va builtin instead.  */
4371   if (!vatype)
4372     vatype = targetm.fn_abi_va_list (cfun->decl);
4373 
4374   if (TREE_CODE (vatype) == ARRAY_TYPE)
4375     {
4376       if (TREE_SIDE_EFFECTS (valist))
4377 	valist = save_expr (valist);
4378 
4379       /* For this case, the backends will be expecting a pointer to
4380 	 vatype, but it's possible we've actually been given an array
4381 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4382 	 So fix it.  */
4383       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4384 	{
4385 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
4386 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4387 	}
4388     }
4389   else
4390     {
4391       tree pt = build_pointer_type (vatype);
4392 
4393       if (! needs_lvalue)
4394 	{
4395 	  if (! TREE_SIDE_EFFECTS (valist))
4396 	    return valist;
4397 
4398 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4399 	  TREE_SIDE_EFFECTS (valist) = 1;
4400 	}
4401 
4402       if (TREE_SIDE_EFFECTS (valist))
4403 	valist = save_expr (valist);
4404       valist = fold_build2_loc (loc, MEM_REF,
4405 				vatype, valist, build_int_cst (pt, 0));
4406     }
4407 
4408   return valist;
4409 }
4410 
4411 /* The "standard" definition of va_list is void*.  */
4412 
4413 tree
4414 std_build_builtin_va_list (void)
4415 {
4416   return ptr_type_node;
4417 }
4418 
4419 /* The "standard" abi va_list is va_list_type_node.  */
4420 
4421 tree
4422 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4423 {
4424   return va_list_type_node;
4425 }
4426 
4427 /* The "standard" type of va_list is va_list_type_node.  */
4428 
4429 tree
4430 std_canonical_va_list_type (tree type)
4431 {
4432   tree wtype, htype;
4433 
4434   if (INDIRECT_REF_P (type))
4435     type = TREE_TYPE (type);
4436   else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4437     type = TREE_TYPE (type);
4438   wtype = va_list_type_node;
4439   htype = type;
4440   /* Treat structure va_list types.  */
4441   if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4442     htype = TREE_TYPE (htype);
4443   else if (TREE_CODE (wtype) == ARRAY_TYPE)
4444     {
4445       /* If va_list is an array type, the argument may have decayed
4446 	 to a pointer type, e.g. by being passed to another function.
4447 	 In that case, unwrap both types so that we can compare the
4448 	 underlying records.  */
4449       if (TREE_CODE (htype) == ARRAY_TYPE
4450 	  || POINTER_TYPE_P (htype))
4451 	{
4452 	  wtype = TREE_TYPE (wtype);
4453 	  htype = TREE_TYPE (htype);
4454 	}
4455     }
4456   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4457     return va_list_type_node;
4458 
4459   return NULL_TREE;
4460 }
4461 
4462 /* The "standard" implementation of va_start: just assign `nextarg' to
4463    the variable.  */
4464 
4465 void
4466 std_expand_builtin_va_start (tree valist, rtx nextarg)
4467 {
4468   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4469   convert_move (va_r, nextarg, 0);
4470 
4471   /* We do not have any valid bounds for the pointer, so
4472      just store zero bounds for it.  */
4473   if (chkp_function_instrumented_p (current_function_decl))
4474     chkp_expand_bounds_reset_for_mem (valist,
4475 				      make_tree (TREE_TYPE (valist),
4476 						 nextarg));
4477 }
4478 
4479 /* Expand EXP, a call to __builtin_va_start.  */
4480 
4481 static rtx
4482 expand_builtin_va_start (tree exp)
4483 {
4484   rtx nextarg;
4485   tree valist;
4486   location_t loc = EXPR_LOCATION (exp);
4487 
4488   if (call_expr_nargs (exp) < 2)
4489     {
4490       error_at (loc, "too few arguments to function %<va_start%>");
4491       return const0_rtx;
4492     }
4493 
4494   if (fold_builtin_next_arg (exp, true))
4495     return const0_rtx;
4496 
4497   nextarg = expand_builtin_next_arg ();
4498   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4499 
4500   if (targetm.expand_builtin_va_start)
4501     targetm.expand_builtin_va_start (valist, nextarg);
4502   else
4503     std_expand_builtin_va_start (valist, nextarg);
4504 
4505   return const0_rtx;
4506 }
4507 
4508 /* Expand EXP, a call to __builtin_va_end.  */
4509 
4510 static rtx
4511 expand_builtin_va_end (tree exp)
4512 {
4513   tree valist = CALL_EXPR_ARG (exp, 0);
4514 
4515   /* Evaluate for side effects, if needed.  I hate macros that don't
4516      do that.  */
4517   if (TREE_SIDE_EFFECTS (valist))
4518     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4519 
4520   return const0_rtx;
4521 }
4522 
4523 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
4524    builtin rather than just as an assignment in stdarg.h because of the
4525    nastiness of array-type va_list types.  */
4526 
4527 static rtx
4528 expand_builtin_va_copy (tree exp)
4529 {
4530   tree dst, src, t;
4531   location_t loc = EXPR_LOCATION (exp);
4532 
4533   dst = CALL_EXPR_ARG (exp, 0);
4534   src = CALL_EXPR_ARG (exp, 1);
4535 
4536   dst = stabilize_va_list_loc (loc, dst, 1);
4537   src = stabilize_va_list_loc (loc, src, 0);
4538 
4539   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4540 
4541   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4542     {
4543       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4544       TREE_SIDE_EFFECTS (t) = 1;
4545       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4546     }
4547   else
4548     {
4549       rtx dstb, srcb, size;
4550 
4551       /* Evaluate to pointers.  */
4552       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4553       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4554       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4555       		  NULL_RTX, VOIDmode, EXPAND_NORMAL);
4556 
4557       dstb = convert_memory_address (Pmode, dstb);
4558       srcb = convert_memory_address (Pmode, srcb);
4559 
4560       /* "Dereference" to BLKmode memories.  */
4561       dstb = gen_rtx_MEM (BLKmode, dstb);
4562       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4563       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4564       srcb = gen_rtx_MEM (BLKmode, srcb);
4565       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4566       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4567 
4568       /* Copy.  */
4569       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4570     }
4571 
4572   return const0_rtx;
4573 }
4574 
4575 /* Expand a call to one of the builtin functions __builtin_frame_address or
4576    __builtin_return_address.  */
4577 
4578 static rtx
4579 expand_builtin_frame_address (tree fndecl, tree exp)
4580 {
4581   /* The argument must be a nonnegative integer constant.
4582      It counts the number of frames to scan up the stack.
4583      The value is the return address saved in that frame.  */
4584   if (call_expr_nargs (exp) == 0)
4585     /* Warning about missing arg was already issued.  */
4586     return const0_rtx;
4587   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4588     {
4589       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4590 	error ("invalid argument to %<__builtin_frame_address%>");
4591       else
4592 	error ("invalid argument to %<__builtin_return_address%>");
4593       return const0_rtx;
4594     }
4595   else
4596     {
4597       rtx tem
4598 	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4599 				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4600 
4601       /* Some ports cannot access arbitrary stack frames.  */
4602       if (tem == NULL)
4603 	{
4604 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4605 	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
4606 	  else
4607 	    warning (0, "unsupported argument to %<__builtin_return_address%>");
4608 	  return const0_rtx;
4609 	}
4610 
4611       /* For __builtin_frame_address, return what we've got.  */
4612       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4613 	return tem;
4614 
4615       if (!REG_P (tem)
4616 	  && ! CONSTANT_P (tem))
4617 	tem = copy_addr_to_reg (tem);
4618       return tem;
4619     }
4620 }
4621 
4622 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
4623    failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
4624    is the same as for allocate_dynamic_stack_space.  */
4625 
4626 static rtx
4627 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4628 {
4629   rtx op0;
4630   rtx result;
4631   bool valid_arglist;
4632   unsigned int align;
4633   bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4634 			    == BUILT_IN_ALLOCA_WITH_ALIGN);
4635 
4636   valid_arglist
4637     = (alloca_with_align
4638        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4639        : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4640 
4641   if (!valid_arglist)
4642     return NULL_RTX;
4643 
4644   /* Compute the argument.  */
4645   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4646 
4647   /* Compute the alignment.  */
4648   align = (alloca_with_align
4649 	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4650 	   : BIGGEST_ALIGNMENT);
4651 
4652   /* Allocate the desired space.  */
4653   result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4654   result = convert_memory_address (ptr_mode, result);
4655 
4656   return result;
4657 }
4658 
4659 /* Expand a call to bswap builtin in EXP.
4660    Return NULL_RTX if a normal call should be emitted rather than expanding the
4661    function in-line.  If convenient, the result should be placed in TARGET.
4662    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4663 
4664 static rtx
4665 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4666 		      rtx subtarget)
4667 {
4668   tree arg;
4669   rtx op0;
4670 
4671   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4672     return NULL_RTX;
4673 
4674   arg = CALL_EXPR_ARG (exp, 0);
4675   op0 = expand_expr (arg,
4676 		     subtarget && GET_MODE (subtarget) == target_mode
4677 		     ? subtarget : NULL_RTX,
4678 		     target_mode, EXPAND_NORMAL);
4679   if (GET_MODE (op0) != target_mode)
4680     op0 = convert_to_mode (target_mode, op0, 1);
4681 
4682   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4683 
4684   gcc_assert (target);
4685 
4686   return convert_to_mode (target_mode, target, 1);
4687 }
4688 
4689 /* Expand a call to a unary builtin in EXP.
4690    Return NULL_RTX if a normal call should be emitted rather than expanding the
4691    function in-line.  If convenient, the result should be placed in TARGET.
4692    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4693 
4694 static rtx
4695 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4696 		     rtx subtarget, optab op_optab)
4697 {
4698   rtx op0;
4699 
4700   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4701     return NULL_RTX;
4702 
4703   /* Compute the argument.  */
4704   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4705 		     (subtarget
4706 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4707 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4708 		     VOIDmode, EXPAND_NORMAL);
4709   /* Compute op, into TARGET if possible.
4710      Set TARGET to wherever the result comes back.  */
4711   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4712 			op_optab, op0, target, op_optab != clrsb_optab);
4713   gcc_assert (target);
4714 
4715   return convert_to_mode (target_mode, target, 0);
4716 }
4717 
4718 /* Expand a call to __builtin_expect.  We just return our argument
4719    as the builtin_expect semantic should've been already executed by
4720    tree branch prediction pass. */
4721 
4722 static rtx
4723 expand_builtin_expect (tree exp, rtx target)
4724 {
4725   tree arg;
4726 
4727   if (call_expr_nargs (exp) < 2)
4728     return const0_rtx;
4729   arg = CALL_EXPR_ARG (exp, 0);
4730 
4731   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4732   /* When guessing was done, the hints should be already stripped away.  */
4733   gcc_assert (!flag_guess_branch_prob
4734 	      || optimize == 0 || seen_error ());
4735   return target;
4736 }
4737 
4738 /* Expand a call to __builtin_assume_aligned.  We just return our first
4739    argument as the builtin_assume_aligned semantic should've been already
4740    executed by CCP.  */
4741 
4742 static rtx
4743 expand_builtin_assume_aligned (tree exp, rtx target)
4744 {
4745   if (call_expr_nargs (exp) < 2)
4746     return const0_rtx;
4747   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4748 			EXPAND_NORMAL);
4749   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4750 	      && (call_expr_nargs (exp) < 3
4751 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4752   return target;
4753 }
4754 
4755 void
4756 expand_builtin_trap (void)
4757 {
4758 #ifdef HAVE_trap
4759   if (HAVE_trap)
4760     {
4761       rtx insn = emit_insn (gen_trap ());
4762       /* For trap insns when not accumulating outgoing args force
4763 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
4764 	 different args sizes.  */
4765       if (!ACCUMULATE_OUTGOING_ARGS)
4766 	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4767     }
4768   else
4769 #endif
4770     emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4771   emit_barrier ();
4772 }
4773 
4774 /* Expand a call to __builtin_unreachable.  We do nothing except emit
4775    a barrier saying that control flow will not pass here.
4776 
4777    It is the responsibility of the program being compiled to ensure
4778    that control flow does never reach __builtin_unreachable.  */
4779 static void
4780 expand_builtin_unreachable (void)
4781 {
4782   emit_barrier ();
4783 }
4784 
4785 /* Expand EXP, a call to fabs, fabsf or fabsl.
4786    Return NULL_RTX if a normal call should be emitted rather than expanding
4787    the function inline.  If convenient, the result should be placed
4788    in TARGET.  SUBTARGET may be used as the target for computing
4789    the operand.  */
4790 
4791 static rtx
4792 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4793 {
4794   machine_mode mode;
4795   tree arg;
4796   rtx op0;
4797 
4798   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4799     return NULL_RTX;
4800 
4801   arg = CALL_EXPR_ARG (exp, 0);
4802   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4803   mode = TYPE_MODE (TREE_TYPE (arg));
4804   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4805   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4806 }
4807 
4808 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4809    Return NULL is a normal call should be emitted rather than expanding the
4810    function inline.  If convenient, the result should be placed in TARGET.
4811    SUBTARGET may be used as the target for computing the operand.  */
4812 
4813 static rtx
4814 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4815 {
4816   rtx op0, op1;
4817   tree arg;
4818 
4819   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4820     return NULL_RTX;
4821 
4822   arg = CALL_EXPR_ARG (exp, 0);
4823   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4824 
4825   arg = CALL_EXPR_ARG (exp, 1);
4826   op1 = expand_normal (arg);
4827 
4828   return expand_copysign (op0, op1, target);
4829 }
4830 
4831 /* Expand a call to __builtin___clear_cache.  */
4832 
4833 static rtx
4834 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4835 {
4836 #ifndef HAVE_clear_cache
4837 #ifdef CLEAR_INSN_CACHE
4838   /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4839      does something.  Just do the default expansion to a call to
4840      __clear_cache().  */
4841   return NULL_RTX;
4842 #else
4843   /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4844      does nothing.  There is no need to call it.  Do nothing.  */
4845   return const0_rtx;
4846 #endif /* CLEAR_INSN_CACHE */
4847 #else
4848   /* We have a "clear_cache" insn, and it will handle everything.  */
4849   tree begin, end;
4850   rtx begin_rtx, end_rtx;
4851 
4852   /* We must not expand to a library call.  If we did, any
4853      fallback library function in libgcc that might contain a call to
4854      __builtin___clear_cache() would recurse infinitely.  */
4855   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4856     {
4857       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4858       return const0_rtx;
4859     }
4860 
4861   if (HAVE_clear_cache)
4862     {
4863       struct expand_operand ops[2];
4864 
4865       begin = CALL_EXPR_ARG (exp, 0);
4866       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4867 
4868       end = CALL_EXPR_ARG (exp, 1);
4869       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4870 
4871       create_address_operand (&ops[0], begin_rtx);
4872       create_address_operand (&ops[1], end_rtx);
4873       if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4874 	return const0_rtx;
4875     }
4876   return const0_rtx;
4877 #endif /* HAVE_clear_cache */
4878 }
4879 
4880 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
4881 
4882 static rtx
4883 round_trampoline_addr (rtx tramp)
4884 {
4885   rtx temp, addend, mask;
4886 
4887   /* If we don't need too much alignment, we'll have been guaranteed
4888      proper alignment by get_trampoline_type.  */
4889   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4890     return tramp;
4891 
4892   /* Round address up to desired boundary.  */
4893   temp = gen_reg_rtx (Pmode);
4894   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4895   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4896 
4897   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
4898 			       temp, 0, OPTAB_LIB_WIDEN);
4899   tramp = expand_simple_binop (Pmode, AND, temp, mask,
4900 			       temp, 0, OPTAB_LIB_WIDEN);
4901 
4902   return tramp;
4903 }
4904 
4905 static rtx
4906 expand_builtin_init_trampoline (tree exp, bool onstack)
4907 {
4908   tree t_tramp, t_func, t_chain;
4909   rtx m_tramp, r_tramp, r_chain, tmp;
4910 
4911   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4912 			 POINTER_TYPE, VOID_TYPE))
4913     return NULL_RTX;
4914 
4915   t_tramp = CALL_EXPR_ARG (exp, 0);
4916   t_func = CALL_EXPR_ARG (exp, 1);
4917   t_chain = CALL_EXPR_ARG (exp, 2);
4918 
4919   r_tramp = expand_normal (t_tramp);
4920   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4921   MEM_NOTRAP_P (m_tramp) = 1;
4922 
4923   /* If ONSTACK, the TRAMP argument should be the address of a field
4924      within the local function's FRAME decl.  Either way, let's see if
4925      we can fill in the MEM_ATTRs for this memory.  */
4926   if (TREE_CODE (t_tramp) == ADDR_EXPR)
4927     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4928 
4929   /* Creator of a heap trampoline is responsible for making sure the
4930      address is aligned to at least STACK_BOUNDARY.  Normally malloc
4931      will ensure this anyhow.  */
4932   tmp = round_trampoline_addr (r_tramp);
4933   if (tmp != r_tramp)
4934     {
4935       m_tramp = change_address (m_tramp, BLKmode, tmp);
4936       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4937       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4938     }
4939 
4940   /* The FUNC argument should be the address of the nested function.
4941      Extract the actual function decl to pass to the hook.  */
4942   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4943   t_func = TREE_OPERAND (t_func, 0);
4944   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4945 
4946   r_chain = expand_normal (t_chain);
4947 
4948   /* Generate insns to initialize the trampoline.  */
4949   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4950 
4951   if (onstack)
4952     {
4953       trampolines_created = 1;
4954 
4955       warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4956 		  "trampoline generated for nested function %qD", t_func);
4957     }
4958 
4959   return const0_rtx;
4960 }
4961 
4962 static rtx
4963 expand_builtin_adjust_trampoline (tree exp)
4964 {
4965   rtx tramp;
4966 
4967   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4968     return NULL_RTX;
4969 
4970   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4971   tramp = round_trampoline_addr (tramp);
4972   if (targetm.calls.trampoline_adjust_address)
4973     tramp = targetm.calls.trampoline_adjust_address (tramp);
4974 
4975   return tramp;
4976 }
4977 
4978 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4979    function.  The function first checks whether the back end provides
4980    an insn to implement signbit for the respective mode.  If not, it
4981    checks whether the floating point format of the value is such that
4982    the sign bit can be extracted.  If that is not the case, the
4983    function returns NULL_RTX to indicate that a normal call should be
4984    emitted rather than expanding the function in-line.  EXP is the
4985    expression that is a call to the builtin function; if convenient,
4986    the result should be placed in TARGET.  */
4987 static rtx
4988 expand_builtin_signbit (tree exp, rtx target)
4989 {
4990   const struct real_format *fmt;
4991   machine_mode fmode, imode, rmode;
4992   tree arg;
4993   int word, bitpos;
4994   enum insn_code icode;
4995   rtx temp;
4996   location_t loc = EXPR_LOCATION (exp);
4997 
4998   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4999     return NULL_RTX;
5000 
5001   arg = CALL_EXPR_ARG (exp, 0);
5002   fmode = TYPE_MODE (TREE_TYPE (arg));
5003   rmode = TYPE_MODE (TREE_TYPE (exp));
5004   fmt = REAL_MODE_FORMAT (fmode);
5005 
5006   arg = builtin_save_expr (arg);
5007 
5008   /* Expand the argument yielding a RTX expression. */
5009   temp = expand_normal (arg);
5010 
5011   /* Check if the back end provides an insn that handles signbit for the
5012      argument's mode. */
5013   icode = optab_handler (signbit_optab, fmode);
5014   if (icode != CODE_FOR_nothing)
5015     {
5016       rtx_insn *last = get_last_insn ();
5017       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5018       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5019 	return target;
5020       delete_insns_since (last);
5021     }
5022 
5023   /* For floating point formats without a sign bit, implement signbit
5024      as "ARG < 0.0".  */
5025   bitpos = fmt->signbit_ro;
5026   if (bitpos < 0)
5027   {
5028     /* But we can't do this if the format supports signed zero.  */
5029     if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5030       return NULL_RTX;
5031 
5032     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5033 		       build_real (TREE_TYPE (arg), dconst0));
5034     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5035   }
5036 
5037   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5038     {
5039       imode = int_mode_for_mode (fmode);
5040       if (imode == BLKmode)
5041 	return NULL_RTX;
5042       temp = gen_lowpart (imode, temp);
5043     }
5044   else
5045     {
5046       imode = word_mode;
5047       /* Handle targets with different FP word orders.  */
5048       if (FLOAT_WORDS_BIG_ENDIAN)
5049 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5050       else
5051 	word = bitpos / BITS_PER_WORD;
5052       temp = operand_subword_force (temp, word, fmode);
5053       bitpos = bitpos % BITS_PER_WORD;
5054     }
5055 
5056   /* Force the intermediate word_mode (or narrower) result into a
5057      register.  This avoids attempting to create paradoxical SUBREGs
5058      of floating point modes below.  */
5059   temp = force_reg (imode, temp);
5060 
5061   /* If the bitpos is within the "result mode" lowpart, the operation
5062      can be implement with a single bitwise AND.  Otherwise, we need
5063      a right shift and an AND.  */
5064 
5065   if (bitpos < GET_MODE_BITSIZE (rmode))
5066     {
5067       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5068 
5069       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5070 	temp = gen_lowpart (rmode, temp);
5071       temp = expand_binop (rmode, and_optab, temp,
5072 			   immed_wide_int_const (mask, rmode),
5073 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5074     }
5075   else
5076     {
5077       /* Perform a logical right shift to place the signbit in the least
5078 	 significant bit, then truncate the result to the desired mode
5079 	 and mask just this bit.  */
5080       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5081       temp = gen_lowpart (rmode, temp);
5082       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5083 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5084     }
5085 
5086   return temp;
5087 }
5088 
5089 /* Expand fork or exec calls.  TARGET is the desired target of the
5090    call.  EXP is the call. FN is the
5091    identificator of the actual function.  IGNORE is nonzero if the
5092    value is to be ignored.  */
5093 
5094 static rtx
5095 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5096 {
5097   tree id, decl;
5098   tree call;
5099 
5100   /* If we are not profiling, just call the function.  */
5101   if (!profile_arc_flag)
5102     return NULL_RTX;
5103 
5104   /* Otherwise call the wrapper.  This should be equivalent for the rest of
5105      compiler, so the code does not diverge, and the wrapper may run the
5106      code necessary for keeping the profiling sane.  */
5107 
5108   switch (DECL_FUNCTION_CODE (fn))
5109     {
5110     case BUILT_IN_FORK:
5111       id = get_identifier ("__gcov_fork");
5112       break;
5113 
5114     case BUILT_IN_EXECL:
5115       id = get_identifier ("__gcov_execl");
5116       break;
5117 
5118     case BUILT_IN_EXECV:
5119       id = get_identifier ("__gcov_execv");
5120       break;
5121 
5122     case BUILT_IN_EXECLP:
5123       id = get_identifier ("__gcov_execlp");
5124       break;
5125 
5126     case BUILT_IN_EXECLE:
5127       id = get_identifier ("__gcov_execle");
5128       break;
5129 
5130     case BUILT_IN_EXECVP:
5131       id = get_identifier ("__gcov_execvp");
5132       break;
5133 
5134     case BUILT_IN_EXECVE:
5135       id = get_identifier ("__gcov_execve");
5136       break;
5137 
5138     default:
5139       gcc_unreachable ();
5140     }
5141 
5142   decl = build_decl (DECL_SOURCE_LOCATION (fn),
5143 		     FUNCTION_DECL, id, TREE_TYPE (fn));
5144   DECL_EXTERNAL (decl) = 1;
5145   TREE_PUBLIC (decl) = 1;
5146   DECL_ARTIFICIAL (decl) = 1;
5147   TREE_NOTHROW (decl) = 1;
5148   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5149   DECL_VISIBILITY_SPECIFIED (decl) = 1;
5150   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5151   return expand_call (call, target, ignore);
5152  }
5153 
5154 
5155 
5156 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5157    the pointer in these functions is void*, the tree optimizers may remove
5158    casts.  The mode computed in expand_builtin isn't reliable either, due
5159    to __sync_bool_compare_and_swap.
5160 
5161    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5162    group of builtins.  This gives us log2 of the mode size.  */
5163 
5164 static inline machine_mode
5165 get_builtin_sync_mode (int fcode_diff)
5166 {
5167   /* The size is not negotiable, so ask not to get BLKmode in return
5168      if the target indicates that a smaller size would be better.  */
5169   return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5170 }
5171 
5172 /* Expand the memory expression LOC and return the appropriate memory operand
5173    for the builtin_sync operations.  */
5174 
5175 static rtx
5176 get_builtin_sync_mem (tree loc, machine_mode mode)
5177 {
5178   rtx addr, mem;
5179 
5180   addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5181   addr = convert_memory_address (Pmode, addr);
5182 
5183   /* Note that we explicitly do not want any alias information for this
5184      memory, so that we kill all other live memories.  Otherwise we don't
5185      satisfy the full barrier semantics of the intrinsic.  */
5186   mem = validize_mem (gen_rtx_MEM (mode, addr));
5187 
5188   /* The alignment needs to be at least according to that of the mode.  */
5189   set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5190 			   get_pointer_alignment (loc)));
5191   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5192   MEM_VOLATILE_P (mem) = 1;
5193 
5194   return mem;
5195 }
5196 
5197 /* Make sure an argument is in the right mode.
5198    EXP is the tree argument.
5199    MODE is the mode it should be in.  */
5200 
5201 static rtx
5202 expand_expr_force_mode (tree exp, machine_mode mode)
5203 {
5204   rtx val;
5205   machine_mode old_mode;
5206 
5207   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5208   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5209      of CONST_INTs, where we know the old_mode only from the call argument.  */
5210 
5211   old_mode = GET_MODE (val);
5212   if (old_mode == VOIDmode)
5213     old_mode = TYPE_MODE (TREE_TYPE (exp));
5214   val = convert_modes (mode, old_mode, val, 1);
5215   return val;
5216 }
5217 
5218 
5219 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5220    EXP is the CALL_EXPR.  CODE is the rtx code
5221    that corresponds to the arithmetic or logical operation from the name;
5222    an exception here is that NOT actually means NAND.  TARGET is an optional
5223    place for us to store the results; AFTER is true if this is the
5224    fetch_and_xxx form.  */
5225 
5226 static rtx
5227 expand_builtin_sync_operation (machine_mode mode, tree exp,
5228 			       enum rtx_code code, bool after,
5229 			       rtx target)
5230 {
5231   rtx val, mem;
5232   location_t loc = EXPR_LOCATION (exp);
5233 
5234   if (code == NOT && warn_sync_nand)
5235     {
5236       tree fndecl = get_callee_fndecl (exp);
5237       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5238 
5239       static bool warned_f_a_n, warned_n_a_f;
5240 
5241       switch (fcode)
5242 	{
5243 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5244 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5245 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5246 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5247 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5248 	  if (warned_f_a_n)
5249 	    break;
5250 
5251 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5252 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5253 	  warned_f_a_n = true;
5254 	  break;
5255 
5256 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5257 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5258 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5259 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5260 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5261 	  if (warned_n_a_f)
5262 	    break;
5263 
5264 	 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5265 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5266 	  warned_n_a_f = true;
5267 	  break;
5268 
5269 	default:
5270 	  gcc_unreachable ();
5271 	}
5272     }
5273 
5274   /* Expand the operands.  */
5275   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5276   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5277 
5278   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5279 				 after);
5280 }
5281 
5282 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5283    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
5284    true if this is the boolean form.  TARGET is a place for us to store the
5285    results; this is NOT optional if IS_BOOL is true.  */
5286 
5287 static rtx
5288 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5289 				 bool is_bool, rtx target)
5290 {
5291   rtx old_val, new_val, mem;
5292   rtx *pbool, *poval;
5293 
5294   /* Expand the operands.  */
5295   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5296   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5297   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5298 
5299   pbool = poval = NULL;
5300   if (target != const0_rtx)
5301     {
5302       if (is_bool)
5303 	pbool = &target;
5304       else
5305 	poval = &target;
5306     }
5307   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5308 				       false, MEMMODEL_SYNC_SEQ_CST,
5309 				       MEMMODEL_SYNC_SEQ_CST))
5310     return NULL_RTX;
5311 
5312   return target;
5313 }
5314 
5315 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5316    general form is actually an atomic exchange, and some targets only
5317    support a reduced form with the second argument being a constant 1.
5318    EXP is the CALL_EXPR; TARGET is an optional place for us to store
5319    the results.  */
5320 
5321 static rtx
5322 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5323 				       rtx target)
5324 {
5325   rtx val, mem;
5326 
5327   /* Expand the operands.  */
5328   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5329   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5330 
5331   return expand_sync_lock_test_and_set (target, mem, val);
5332 }
5333 
5334 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5335 
5336 static void
5337 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5338 {
5339   rtx mem;
5340 
5341   /* Expand the operands.  */
5342   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5343 
5344   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5345 }
5346 
5347 /* Given an integer representing an ``enum memmodel'', verify its
5348    correctness and return the memory model enum.  */
5349 
5350 static enum memmodel
5351 get_memmodel (tree exp)
5352 {
5353   rtx op;
5354   unsigned HOST_WIDE_INT val;
5355 
5356   /* If the parameter is not a constant, it's a run time value so we'll just
5357      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
5358   if (TREE_CODE (exp) != INTEGER_CST)
5359     return MEMMODEL_SEQ_CST;
5360 
5361   op = expand_normal (exp);
5362 
5363   val = INTVAL (op);
5364   if (targetm.memmodel_check)
5365     val = targetm.memmodel_check (val);
5366   else if (val & ~MEMMODEL_MASK)
5367     {
5368       warning (OPT_Winvalid_memory_model,
5369 	       "Unknown architecture specifier in memory model to builtin.");
5370       return MEMMODEL_SEQ_CST;
5371     }
5372 
5373   /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5374   if (memmodel_base (val) >= MEMMODEL_LAST)
5375     {
5376       warning (OPT_Winvalid_memory_model,
5377 	       "invalid memory model argument to builtin");
5378       return MEMMODEL_SEQ_CST;
5379     }
5380 
5381   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5382      be conservative and promote consume to acquire.  */
5383   if (val == MEMMODEL_CONSUME)
5384     val = MEMMODEL_ACQUIRE;
5385 
5386   return (enum memmodel) val;
5387 }
5388 
5389 /* Expand the __atomic_exchange intrinsic:
5390    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5391    EXP is the CALL_EXPR.
5392    TARGET is an optional place for us to store the results.  */
5393 
5394 static rtx
5395 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5396 {
5397   rtx val, mem;
5398   enum memmodel model;
5399 
5400   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5401 
5402   if (!flag_inline_atomics)
5403     return NULL_RTX;
5404 
5405   /* Expand the operands.  */
5406   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5407   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5408 
5409   return expand_atomic_exchange (target, mem, val, model);
5410 }
5411 
5412 /* Expand the __atomic_compare_exchange intrinsic:
5413    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5414 					TYPE desired, BOOL weak,
5415 					enum memmodel success,
5416 					enum memmodel failure)
5417    EXP is the CALL_EXPR.
5418    TARGET is an optional place for us to store the results.  */
5419 
5420 static rtx
5421 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5422 					rtx target)
5423 {
5424   rtx expect, desired, mem, oldval;
5425   rtx_code_label *label;
5426   enum memmodel success, failure;
5427   tree weak;
5428   bool is_weak;
5429 
5430   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5431   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5432 
5433   if (failure > success)
5434     {
5435       warning (OPT_Winvalid_memory_model,
5436 	       "failure memory model cannot be stronger than success memory "
5437 	       "model for %<__atomic_compare_exchange%>");
5438       success = MEMMODEL_SEQ_CST;
5439     }
5440 
5441   if (is_mm_release (failure) || is_mm_acq_rel (failure))
5442     {
5443       warning (OPT_Winvalid_memory_model,
5444 	       "invalid failure memory model for "
5445 	       "%<__atomic_compare_exchange%>");
5446       failure = MEMMODEL_SEQ_CST;
5447       success = MEMMODEL_SEQ_CST;
5448     }
5449 
5450 
5451   if (!flag_inline_atomics)
5452     return NULL_RTX;
5453 
5454   /* Expand the operands.  */
5455   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5456 
5457   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5458   expect = convert_memory_address (Pmode, expect);
5459   expect = gen_rtx_MEM (mode, expect);
5460   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5461 
5462   weak = CALL_EXPR_ARG (exp, 3);
5463   is_weak = false;
5464   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5465     is_weak = true;
5466 
5467   if (target == const0_rtx)
5468     target = NULL;
5469 
5470   /* Lest the rtl backend create a race condition with an imporoper store
5471      to memory, always create a new pseudo for OLDVAL.  */
5472   oldval = NULL;
5473 
5474   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5475 				       is_weak, success, failure))
5476     return NULL_RTX;
5477 
5478   /* Conditionally store back to EXPECT, lest we create a race condition
5479      with an improper store to memory.  */
5480   /* ??? With a rearrangement of atomics at the gimple level, we can handle
5481      the normal case where EXPECT is totally private, i.e. a register.  At
5482      which point the store can be unconditional.  */
5483   label = gen_label_rtx ();
5484   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5485   emit_move_insn (expect, oldval);
5486   emit_label (label);
5487 
5488   return target;
5489 }
5490 
5491 /* Expand the __atomic_load intrinsic:
5492    	TYPE __atomic_load (TYPE *object, enum memmodel)
5493    EXP is the CALL_EXPR.
5494    TARGET is an optional place for us to store the results.  */
5495 
5496 static rtx
5497 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5498 {
5499   rtx mem;
5500   enum memmodel model;
5501 
5502   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5503   if (is_mm_release (model) || is_mm_acq_rel (model))
5504     {
5505       warning (OPT_Winvalid_memory_model,
5506 	       "invalid memory model for %<__atomic_load%>");
5507       model = MEMMODEL_SEQ_CST;
5508     }
5509 
5510   if (!flag_inline_atomics)
5511     return NULL_RTX;
5512 
5513   /* Expand the operand.  */
5514   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5515 
5516   return expand_atomic_load (target, mem, model);
5517 }
5518 
5519 
5520 /* Expand the __atomic_store intrinsic:
5521    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5522    EXP is the CALL_EXPR.
5523    TARGET is an optional place for us to store the results.  */
5524 
5525 static rtx
5526 expand_builtin_atomic_store (machine_mode mode, tree exp)
5527 {
5528   rtx mem, val;
5529   enum memmodel model;
5530 
5531   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5532   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5533 	|| is_mm_release (model)))
5534     {
5535       warning (OPT_Winvalid_memory_model,
5536 	       "invalid memory model for %<__atomic_store%>");
5537       model = MEMMODEL_SEQ_CST;
5538     }
5539 
5540   if (!flag_inline_atomics)
5541     return NULL_RTX;
5542 
5543   /* Expand the operands.  */
5544   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5545   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5546 
5547   return expand_atomic_store (mem, val, model, false);
5548 }
5549 
5550 /* Expand the __atomic_fetch_XXX intrinsic:
5551    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5552    EXP is the CALL_EXPR.
5553    TARGET is an optional place for us to store the results.
5554    CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5555    FETCH_AFTER is true if returning the result of the operation.
5556    FETCH_AFTER is false if returning the value before the operation.
5557    IGNORE is true if the result is not used.
5558    EXT_CALL is the correct builtin for an external call if this cannot be
5559    resolved to an instruction sequence.  */
5560 
5561 static rtx
5562 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5563 				enum rtx_code code, bool fetch_after,
5564 				bool ignore, enum built_in_function ext_call)
5565 {
5566   rtx val, mem, ret;
5567   enum memmodel model;
5568   tree fndecl;
5569   tree addr;
5570 
5571   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5572 
5573   /* Expand the operands.  */
5574   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5575   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5576 
5577   /* Only try generating instructions if inlining is turned on.  */
5578   if (flag_inline_atomics)
5579     {
5580       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5581       if (ret)
5582 	return ret;
5583     }
5584 
5585   /* Return if a different routine isn't needed for the library call.  */
5586   if (ext_call == BUILT_IN_NONE)
5587     return NULL_RTX;
5588 
5589   /* Change the call to the specified function.  */
5590   fndecl = get_callee_fndecl (exp);
5591   addr = CALL_EXPR_FN (exp);
5592   STRIP_NOPS (addr);
5593 
5594   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5595   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5596 
5597   /* If we will emit code after the call, the call can not be a tail call.
5598      If it is emitted as a tail call, a barrier is emitted after it, and
5599      then all trailing code is removed.  */
5600   if (!ignore)
5601     CALL_EXPR_TAILCALL (exp) = 0;
5602 
5603   /* Expand the call here so we can emit trailing code.  */
5604   ret = expand_call (exp, target, ignore);
5605 
5606   /* Replace the original function just in case it matters.  */
5607   TREE_OPERAND (addr, 0) = fndecl;
5608 
5609   /* Then issue the arithmetic correction to return the right result.  */
5610   if (!ignore)
5611     {
5612       if (code == NOT)
5613 	{
5614 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5615 				     OPTAB_LIB_WIDEN);
5616 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
5617 	}
5618       else
5619 	ret = expand_simple_binop (mode, code, ret, val, target, true,
5620 				   OPTAB_LIB_WIDEN);
5621     }
5622   return ret;
5623 }
5624 
5625 
5626 #ifndef HAVE_atomic_clear
5627 # define HAVE_atomic_clear 0
5628 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5629 #endif
5630 
5631 /* Expand an atomic clear operation.
5632 	void _atomic_clear (BOOL *obj, enum memmodel)
5633    EXP is the call expression.  */
5634 
5635 static rtx
5636 expand_builtin_atomic_clear (tree exp)
5637 {
5638   machine_mode mode;
5639   rtx mem, ret;
5640   enum memmodel model;
5641 
5642   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5643   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5644   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5645 
5646   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5647     {
5648       warning (OPT_Winvalid_memory_model,
5649 	       "invalid memory model for %<__atomic_store%>");
5650       model = MEMMODEL_SEQ_CST;
5651     }
5652 
5653   if (HAVE_atomic_clear)
5654     {
5655       emit_insn (gen_atomic_clear (mem, model));
5656       return const0_rtx;
5657     }
5658 
5659   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5660      Failing that, a store is issued by __atomic_store.  The only way this can
5661      fail is if the bool type is larger than a word size.  Unlikely, but
5662      handle it anyway for completeness.  Assume a single threaded model since
5663      there is no atomic support in this case, and no barriers are required.  */
5664   ret = expand_atomic_store (mem, const0_rtx, model, true);
5665   if (!ret)
5666     emit_move_insn (mem, const0_rtx);
5667   return const0_rtx;
5668 }
5669 
5670 /* Expand an atomic test_and_set operation.
5671 	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5672    EXP is the call expression.  */
5673 
5674 static rtx
5675 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5676 {
5677   rtx mem;
5678   enum memmodel model;
5679   machine_mode mode;
5680 
5681   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5682   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5683   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5684 
5685   return expand_atomic_test_and_set (target, mem, model);
5686 }
5687 
5688 
5689 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5690    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
5691 
5692 static tree
5693 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5694 {
5695   int size;
5696   machine_mode mode;
5697   unsigned int mode_align, type_align;
5698 
5699   if (TREE_CODE (arg0) != INTEGER_CST)
5700     return NULL_TREE;
5701 
5702   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5703   mode = mode_for_size (size, MODE_INT, 0);
5704   mode_align = GET_MODE_ALIGNMENT (mode);
5705 
5706   if (TREE_CODE (arg1) == INTEGER_CST)
5707     {
5708       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5709 
5710       /* Either this argument is null, or it's a fake pointer encoding
5711          the alignment of the object.  */
5712       val = val & -val;
5713       val *= BITS_PER_UNIT;
5714 
5715       if (val == 0 || mode_align < val)
5716         type_align = mode_align;
5717       else
5718         type_align = val;
5719     }
5720   else
5721     {
5722       tree ttype = TREE_TYPE (arg1);
5723 
5724       /* This function is usually invoked and folded immediately by the front
5725 	 end before anything else has a chance to look at it.  The pointer
5726 	 parameter at this point is usually cast to a void *, so check for that
5727 	 and look past the cast.  */
5728       if (CONVERT_EXPR_P (arg1)
5729 	  && POINTER_TYPE_P (ttype)
5730 	  && VOID_TYPE_P (TREE_TYPE (ttype))
5731 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5732 	arg1 = TREE_OPERAND (arg1, 0);
5733 
5734       ttype = TREE_TYPE (arg1);
5735       gcc_assert (POINTER_TYPE_P (ttype));
5736 
5737       /* Get the underlying type of the object.  */
5738       ttype = TREE_TYPE (ttype);
5739       type_align = TYPE_ALIGN (ttype);
5740     }
5741 
5742   /* If the object has smaller alignment, the the lock free routines cannot
5743      be used.  */
5744   if (type_align < mode_align)
5745     return boolean_false_node;
5746 
5747   /* Check if a compare_and_swap pattern exists for the mode which represents
5748      the required size.  The pattern is not allowed to fail, so the existence
5749      of the pattern indicates support is present.  */
5750   if (can_compare_and_swap_p (mode, true))
5751     return boolean_true_node;
5752   else
5753     return boolean_false_node;
5754 }
5755 
5756 /* Return true if the parameters to call EXP represent an object which will
5757    always generate lock free instructions.  The first argument represents the
5758    size of the object, and the second parameter is a pointer to the object
5759    itself.  If NULL is passed for the object, then the result is based on
5760    typical alignment for an object of the specified size.  Otherwise return
5761    false.  */
5762 
5763 static rtx
5764 expand_builtin_atomic_always_lock_free (tree exp)
5765 {
5766   tree size;
5767   tree arg0 = CALL_EXPR_ARG (exp, 0);
5768   tree arg1 = CALL_EXPR_ARG (exp, 1);
5769 
5770   if (TREE_CODE (arg0) != INTEGER_CST)
5771     {
5772       error ("non-constant argument 1 to __atomic_always_lock_free");
5773       return const0_rtx;
5774     }
5775 
5776   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5777   if (size == boolean_true_node)
5778     return const1_rtx;
5779   return const0_rtx;
5780 }
5781 
5782 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5783    is lock free on this architecture.  */
5784 
5785 static tree
5786 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5787 {
5788   if (!flag_inline_atomics)
5789     return NULL_TREE;
5790 
5791   /* If it isn't always lock free, don't generate a result.  */
5792   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5793     return boolean_true_node;
5794 
5795   return NULL_TREE;
5796 }
5797 
5798 /* Return true if the parameters to call EXP represent an object which will
5799    always generate lock free instructions.  The first argument represents the
5800    size of the object, and the second parameter is a pointer to the object
5801    itself.  If NULL is passed for the object, then the result is based on
5802    typical alignment for an object of the specified size.  Otherwise return
5803    NULL*/
5804 
5805 static rtx
5806 expand_builtin_atomic_is_lock_free (tree exp)
5807 {
5808   tree size;
5809   tree arg0 = CALL_EXPR_ARG (exp, 0);
5810   tree arg1 = CALL_EXPR_ARG (exp, 1);
5811 
5812   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5813     {
5814       error ("non-integer argument 1 to __atomic_is_lock_free");
5815       return NULL_RTX;
5816     }
5817 
5818   if (!flag_inline_atomics)
5819     return NULL_RTX;
5820 
5821   /* If the value is known at compile time, return the RTX for it.  */
5822   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5823   if (size == boolean_true_node)
5824     return const1_rtx;
5825 
5826   return NULL_RTX;
5827 }
5828 
5829 /* Expand the __atomic_thread_fence intrinsic:
5830    	void __atomic_thread_fence (enum memmodel)
5831    EXP is the CALL_EXPR.  */
5832 
5833 static void
5834 expand_builtin_atomic_thread_fence (tree exp)
5835 {
5836   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5837   expand_mem_thread_fence (model);
5838 }
5839 
5840 /* Expand the __atomic_signal_fence intrinsic:
5841    	void __atomic_signal_fence (enum memmodel)
5842    EXP is the CALL_EXPR.  */
5843 
5844 static void
5845 expand_builtin_atomic_signal_fence (tree exp)
5846 {
5847   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5848   expand_mem_signal_fence (model);
5849 }
5850 
5851 /* Expand the __sync_synchronize intrinsic.  */
5852 
5853 static void
5854 expand_builtin_sync_synchronize (void)
5855 {
5856   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5857 }
5858 
5859 static rtx
5860 expand_builtin_thread_pointer (tree exp, rtx target)
5861 {
5862   enum insn_code icode;
5863   if (!validate_arglist (exp, VOID_TYPE))
5864     return const0_rtx;
5865   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5866   if (icode != CODE_FOR_nothing)
5867     {
5868       struct expand_operand op;
5869       /* If the target is not sutitable then create a new target. */
5870       if (target == NULL_RTX
5871 	  || !REG_P (target)
5872 	  || GET_MODE (target) != Pmode)
5873 	target = gen_reg_rtx (Pmode);
5874       create_output_operand (&op, target, Pmode);
5875       expand_insn (icode, 1, &op);
5876       return target;
5877     }
5878   error ("__builtin_thread_pointer is not supported on this target");
5879   return const0_rtx;
5880 }
5881 
5882 static void
5883 expand_builtin_set_thread_pointer (tree exp)
5884 {
5885   enum insn_code icode;
5886   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5887     return;
5888   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5889   if (icode != CODE_FOR_nothing)
5890     {
5891       struct expand_operand op;
5892       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5893 			     Pmode, EXPAND_NORMAL);
5894       create_input_operand (&op, val, Pmode);
5895       expand_insn (icode, 1, &op);
5896       return;
5897     }
5898   error ("__builtin_set_thread_pointer is not supported on this target");
5899 }
5900 
5901 
5902 /* Emit code to restore the current value of stack.  */
5903 
5904 static void
5905 expand_stack_restore (tree var)
5906 {
5907   rtx_insn *prev;
5908   rtx sa = expand_normal (var);
5909 
5910   sa = convert_memory_address (Pmode, sa);
5911 
5912   prev = get_last_insn ();
5913   emit_stack_restore (SAVE_BLOCK, sa);
5914   fixup_args_size_notes (prev, get_last_insn (), 0);
5915 }
5916 
5917 
5918 /* Emit code to save the current value of stack.  */
5919 
5920 static rtx
5921 expand_stack_save (void)
5922 {
5923   rtx ret = NULL_RTX;
5924 
5925   do_pending_stack_adjust ();
5926   emit_stack_save (SAVE_BLOCK, &ret);
5927   return ret;
5928 }
5929 
5930 
5931 /* Expand OpenACC acc_on_device.
5932 
5933    This has to happen late (that is, not in early folding; expand_builtin_*,
5934    rather than fold_builtin_*), as we have to act differently for host and
5935    acceleration device (ACCEL_COMPILER conditional).  */
5936 
5937 static rtx
5938 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5939 			      rtx target ATTRIBUTE_UNUSED)
5940 {
5941 #ifdef ACCEL_COMPILER
5942   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5943     return NULL_RTX;
5944 
5945   tree arg = CALL_EXPR_ARG (exp, 0);
5946 
5947   /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
5948   machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5949   rtx v = expand_normal (arg), v1, v2;
5950   v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5951   v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5952   machine_mode target_mode = TYPE_MODE (integer_type_node);
5953   if (!target || !register_operand (target, target_mode))
5954     target = gen_reg_rtx (target_mode);
5955   emit_move_insn (target, const1_rtx);
5956   rtx_code_label *done_label = gen_label_rtx ();
5957   do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5958 			   NULL_RTX, done_label, PROB_EVEN);
5959   do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5960 			   NULL_RTX, done_label, PROB_EVEN);
5961   emit_move_insn (target, const0_rtx);
5962   emit_label (done_label);
5963 
5964   return target;
5965 #else
5966   return NULL;
5967 #endif
5968 }
5969 
5970 
5971 /* Expand an expression EXP that calls a built-in function,
5972    with result going to TARGET if that's convenient
5973    (and in mode MODE if that's convenient).
5974    SUBTARGET may be used as the target for computing one of EXP's operands.
5975    IGNORE is nonzero if the value is to be ignored.  */
5976 
5977 rtx
5978 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5979 		int ignore)
5980 {
5981   tree fndecl = get_callee_fndecl (exp);
5982   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5983   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5984   int flags;
5985 
5986   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5987     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5988 
5989   /* When ASan is enabled, we don't want to expand some memory/string
5990      builtins and rely on libsanitizer's hooks.  This allows us to avoid
5991      redundant checks and be sure, that possible overflow will be detected
5992      by ASan.  */
5993 
5994   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5995     return expand_call (exp, target, ignore);
5996 
5997   /* When not optimizing, generate calls to library functions for a certain
5998      set of builtins.  */
5999   if (!optimize
6000       && !called_as_built_in (fndecl)
6001       && fcode != BUILT_IN_FORK
6002       && fcode != BUILT_IN_EXECL
6003       && fcode != BUILT_IN_EXECV
6004       && fcode != BUILT_IN_EXECLP
6005       && fcode != BUILT_IN_EXECLE
6006       && fcode != BUILT_IN_EXECVP
6007       && fcode != BUILT_IN_EXECVE
6008       && fcode != BUILT_IN_ALLOCA
6009       && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6010       && fcode != BUILT_IN_FREE
6011       && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6012       && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6013       && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6014       && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6015       && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6016       && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6017       && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6018       && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6019       && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6020       && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6021       && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6022       && fcode != BUILT_IN_CHKP_BNDRET)
6023     return expand_call (exp, target, ignore);
6024 
6025   /* The built-in function expanders test for target == const0_rtx
6026      to determine whether the function's result will be ignored.  */
6027   if (ignore)
6028     target = const0_rtx;
6029 
6030   /* If the result of a pure or const built-in function is ignored, and
6031      none of its arguments are volatile, we can avoid expanding the
6032      built-in call and just evaluate the arguments for side-effects.  */
6033   if (target == const0_rtx
6034       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6035       && !(flags & ECF_LOOPING_CONST_OR_PURE))
6036     {
6037       bool volatilep = false;
6038       tree arg;
6039       call_expr_arg_iterator iter;
6040 
6041       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6042 	if (TREE_THIS_VOLATILE (arg))
6043 	  {
6044 	    volatilep = true;
6045 	    break;
6046 	  }
6047 
6048       if (! volatilep)
6049 	{
6050 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6051 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6052 	  return const0_rtx;
6053 	}
6054     }
6055 
6056   /* expand_builtin_with_bounds is supposed to be used for
6057      instrumented builtin calls.  */
6058   gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6059 
6060   switch (fcode)
6061     {
6062     CASE_FLT_FN (BUILT_IN_FABS):
6063     case BUILT_IN_FABSD32:
6064     case BUILT_IN_FABSD64:
6065     case BUILT_IN_FABSD128:
6066       target = expand_builtin_fabs (exp, target, subtarget);
6067       if (target)
6068 	return target;
6069       break;
6070 
6071     CASE_FLT_FN (BUILT_IN_COPYSIGN):
6072       target = expand_builtin_copysign (exp, target, subtarget);
6073       if (target)
6074 	return target;
6075       break;
6076 
6077       /* Just do a normal library call if we were unable to fold
6078 	 the values.  */
6079     CASE_FLT_FN (BUILT_IN_CABS):
6080       break;
6081 
6082     CASE_FLT_FN (BUILT_IN_EXP):
6083     CASE_FLT_FN (BUILT_IN_EXP10):
6084     CASE_FLT_FN (BUILT_IN_POW10):
6085     CASE_FLT_FN (BUILT_IN_EXP2):
6086     CASE_FLT_FN (BUILT_IN_EXPM1):
6087     CASE_FLT_FN (BUILT_IN_LOGB):
6088     CASE_FLT_FN (BUILT_IN_LOG):
6089     CASE_FLT_FN (BUILT_IN_LOG10):
6090     CASE_FLT_FN (BUILT_IN_LOG2):
6091     CASE_FLT_FN (BUILT_IN_LOG1P):
6092     CASE_FLT_FN (BUILT_IN_TAN):
6093     CASE_FLT_FN (BUILT_IN_ASIN):
6094     CASE_FLT_FN (BUILT_IN_ACOS):
6095     CASE_FLT_FN (BUILT_IN_ATAN):
6096     CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6097       /* Treat these like sqrt only if unsafe math optimizations are allowed,
6098 	 because of possible accuracy problems.  */
6099       if (! flag_unsafe_math_optimizations)
6100 	break;
6101     CASE_FLT_FN (BUILT_IN_SQRT):
6102     CASE_FLT_FN (BUILT_IN_FLOOR):
6103     CASE_FLT_FN (BUILT_IN_CEIL):
6104     CASE_FLT_FN (BUILT_IN_TRUNC):
6105     CASE_FLT_FN (BUILT_IN_ROUND):
6106     CASE_FLT_FN (BUILT_IN_NEARBYINT):
6107     CASE_FLT_FN (BUILT_IN_RINT):
6108       target = expand_builtin_mathfn (exp, target, subtarget);
6109       if (target)
6110 	return target;
6111       break;
6112 
6113     CASE_FLT_FN (BUILT_IN_FMA):
6114       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6115       if (target)
6116 	return target;
6117       break;
6118 
6119     CASE_FLT_FN (BUILT_IN_ILOGB):
6120       if (! flag_unsafe_math_optimizations)
6121 	break;
6122     CASE_FLT_FN (BUILT_IN_ISINF):
6123     CASE_FLT_FN (BUILT_IN_FINITE):
6124     case BUILT_IN_ISFINITE:
6125     case BUILT_IN_ISNORMAL:
6126       target = expand_builtin_interclass_mathfn (exp, target);
6127       if (target)
6128 	return target;
6129       break;
6130 
6131     CASE_FLT_FN (BUILT_IN_ICEIL):
6132     CASE_FLT_FN (BUILT_IN_LCEIL):
6133     CASE_FLT_FN (BUILT_IN_LLCEIL):
6134     CASE_FLT_FN (BUILT_IN_LFLOOR):
6135     CASE_FLT_FN (BUILT_IN_IFLOOR):
6136     CASE_FLT_FN (BUILT_IN_LLFLOOR):
6137       target = expand_builtin_int_roundingfn (exp, target);
6138       if (target)
6139 	return target;
6140       break;
6141 
6142     CASE_FLT_FN (BUILT_IN_IRINT):
6143     CASE_FLT_FN (BUILT_IN_LRINT):
6144     CASE_FLT_FN (BUILT_IN_LLRINT):
6145     CASE_FLT_FN (BUILT_IN_IROUND):
6146     CASE_FLT_FN (BUILT_IN_LROUND):
6147     CASE_FLT_FN (BUILT_IN_LLROUND):
6148       target = expand_builtin_int_roundingfn_2 (exp, target);
6149       if (target)
6150 	return target;
6151       break;
6152 
6153     CASE_FLT_FN (BUILT_IN_POWI):
6154       target = expand_builtin_powi (exp, target);
6155       if (target)
6156 	return target;
6157       break;
6158 
6159     CASE_FLT_FN (BUILT_IN_ATAN2):
6160     CASE_FLT_FN (BUILT_IN_LDEXP):
6161     CASE_FLT_FN (BUILT_IN_SCALB):
6162     CASE_FLT_FN (BUILT_IN_SCALBN):
6163     CASE_FLT_FN (BUILT_IN_SCALBLN):
6164       if (! flag_unsafe_math_optimizations)
6165 	break;
6166 
6167     CASE_FLT_FN (BUILT_IN_FMOD):
6168     CASE_FLT_FN (BUILT_IN_REMAINDER):
6169     CASE_FLT_FN (BUILT_IN_DREM):
6170     CASE_FLT_FN (BUILT_IN_POW):
6171       target = expand_builtin_mathfn_2 (exp, target, subtarget);
6172       if (target)
6173 	return target;
6174       break;
6175 
6176     CASE_FLT_FN (BUILT_IN_CEXPI):
6177       target = expand_builtin_cexpi (exp, target);
6178       gcc_assert (target);
6179       return target;
6180 
6181     CASE_FLT_FN (BUILT_IN_SIN):
6182     CASE_FLT_FN (BUILT_IN_COS):
6183       if (! flag_unsafe_math_optimizations)
6184 	break;
6185       target = expand_builtin_mathfn_3 (exp, target, subtarget);
6186       if (target)
6187 	return target;
6188       break;
6189 
6190     CASE_FLT_FN (BUILT_IN_SINCOS):
6191       if (! flag_unsafe_math_optimizations)
6192 	break;
6193       target = expand_builtin_sincos (exp);
6194       if (target)
6195 	return target;
6196       break;
6197 
6198     case BUILT_IN_APPLY_ARGS:
6199       return expand_builtin_apply_args ();
6200 
6201       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6202 	 FUNCTION with a copy of the parameters described by
6203 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
6204 	 allocated on the stack into which is stored all the registers
6205 	 that might possibly be used for returning the result of a
6206 	 function.  ARGUMENTS is the value returned by
6207 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
6208 	 arguments that must be copied.  ??? How should this value be
6209 	 computed?  We'll also need a safe worst case value for varargs
6210 	 functions.  */
6211     case BUILT_IN_APPLY:
6212       if (!validate_arglist (exp, POINTER_TYPE,
6213 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6214 	  && !validate_arglist (exp, REFERENCE_TYPE,
6215 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6216 	return const0_rtx;
6217       else
6218 	{
6219 	  rtx ops[3];
6220 
6221 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6222 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6223 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6224 
6225 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
6226 	}
6227 
6228       /* __builtin_return (RESULT) causes the function to return the
6229 	 value described by RESULT.  RESULT is address of the block of
6230 	 memory returned by __builtin_apply.  */
6231     case BUILT_IN_RETURN:
6232       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6233 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6234       return const0_rtx;
6235 
6236     case BUILT_IN_SAVEREGS:
6237       return expand_builtin_saveregs ();
6238 
6239     case BUILT_IN_VA_ARG_PACK:
6240       /* All valid uses of __builtin_va_arg_pack () are removed during
6241 	 inlining.  */
6242       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6243       return const0_rtx;
6244 
6245     case BUILT_IN_VA_ARG_PACK_LEN:
6246       /* All valid uses of __builtin_va_arg_pack_len () are removed during
6247 	 inlining.  */
6248       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6249       return const0_rtx;
6250 
6251       /* Return the address of the first anonymous stack arg.  */
6252     case BUILT_IN_NEXT_ARG:
6253       if (fold_builtin_next_arg (exp, false))
6254 	return const0_rtx;
6255       return expand_builtin_next_arg ();
6256 
6257     case BUILT_IN_CLEAR_CACHE:
6258       target = expand_builtin___clear_cache (exp);
6259       if (target)
6260         return target;
6261       break;
6262 
6263     case BUILT_IN_CLASSIFY_TYPE:
6264       return expand_builtin_classify_type (exp);
6265 
6266     case BUILT_IN_CONSTANT_P:
6267       return const0_rtx;
6268 
6269     case BUILT_IN_FRAME_ADDRESS:
6270     case BUILT_IN_RETURN_ADDRESS:
6271       return expand_builtin_frame_address (fndecl, exp);
6272 
6273     /* Returns the address of the area where the structure is returned.
6274        0 otherwise.  */
6275     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6276       if (call_expr_nargs (exp) != 0
6277 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6278 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6279 	return const0_rtx;
6280       else
6281 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6282 
6283     case BUILT_IN_ALLOCA:
6284     case BUILT_IN_ALLOCA_WITH_ALIGN:
6285       /* If the allocation stems from the declaration of a variable-sized
6286 	 object, it cannot accumulate.  */
6287       target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6288       if (target)
6289 	return target;
6290       break;
6291 
6292     case BUILT_IN_STACK_SAVE:
6293       return expand_stack_save ();
6294 
6295     case BUILT_IN_STACK_RESTORE:
6296       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6297       return const0_rtx;
6298 
6299     case BUILT_IN_BSWAP16:
6300     case BUILT_IN_BSWAP32:
6301     case BUILT_IN_BSWAP64:
6302       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6303       if (target)
6304 	return target;
6305       break;
6306 
6307     CASE_INT_FN (BUILT_IN_FFS):
6308       target = expand_builtin_unop (target_mode, exp, target,
6309 				    subtarget, ffs_optab);
6310       if (target)
6311 	return target;
6312       break;
6313 
6314     CASE_INT_FN (BUILT_IN_CLZ):
6315       target = expand_builtin_unop (target_mode, exp, target,
6316 				    subtarget, clz_optab);
6317       if (target)
6318 	return target;
6319       break;
6320 
6321     CASE_INT_FN (BUILT_IN_CTZ):
6322       target = expand_builtin_unop (target_mode, exp, target,
6323 				    subtarget, ctz_optab);
6324       if (target)
6325 	return target;
6326       break;
6327 
6328     CASE_INT_FN (BUILT_IN_CLRSB):
6329       target = expand_builtin_unop (target_mode, exp, target,
6330 				    subtarget, clrsb_optab);
6331       if (target)
6332 	return target;
6333       break;
6334 
6335     CASE_INT_FN (BUILT_IN_POPCOUNT):
6336       target = expand_builtin_unop (target_mode, exp, target,
6337 				    subtarget, popcount_optab);
6338       if (target)
6339 	return target;
6340       break;
6341 
6342     CASE_INT_FN (BUILT_IN_PARITY):
6343       target = expand_builtin_unop (target_mode, exp, target,
6344 				    subtarget, parity_optab);
6345       if (target)
6346 	return target;
6347       break;
6348 
6349     case BUILT_IN_STRLEN:
6350       target = expand_builtin_strlen (exp, target, target_mode);
6351       if (target)
6352 	return target;
6353       break;
6354 
6355     case BUILT_IN_STRCPY:
6356       target = expand_builtin_strcpy (exp, target);
6357       if (target)
6358 	return target;
6359       break;
6360 
6361     case BUILT_IN_STRNCPY:
6362       target = expand_builtin_strncpy (exp, target);
6363       if (target)
6364 	return target;
6365       break;
6366 
6367     case BUILT_IN_STPCPY:
6368       target = expand_builtin_stpcpy (exp, target, mode);
6369       if (target)
6370 	return target;
6371       break;
6372 
6373     case BUILT_IN_MEMCPY:
6374       target = expand_builtin_memcpy (exp, target);
6375       if (target)
6376 	return target;
6377       break;
6378 
6379     case BUILT_IN_MEMPCPY:
6380       target = expand_builtin_mempcpy (exp, target, mode);
6381       if (target)
6382 	return target;
6383       break;
6384 
6385     case BUILT_IN_MEMSET:
6386       target = expand_builtin_memset (exp, target, mode);
6387       if (target)
6388 	return target;
6389       break;
6390 
6391     case BUILT_IN_BZERO:
6392       target = expand_builtin_bzero (exp);
6393       if (target)
6394 	return target;
6395       break;
6396 
6397     case BUILT_IN_STRCMP:
6398       target = expand_builtin_strcmp (exp, target);
6399       if (target)
6400 	return target;
6401       break;
6402 
6403     case BUILT_IN_STRNCMP:
6404       target = expand_builtin_strncmp (exp, target, mode);
6405       if (target)
6406 	return target;
6407       break;
6408 
6409     case BUILT_IN_BCMP:
6410     case BUILT_IN_MEMCMP:
6411       target = expand_builtin_memcmp (exp, target, mode);
6412       if (target)
6413 	return target;
6414       break;
6415 
6416     case BUILT_IN_SETJMP:
6417       /* This should have been lowered to the builtins below.  */
6418       gcc_unreachable ();
6419 
6420     case BUILT_IN_SETJMP_SETUP:
6421       /* __builtin_setjmp_setup is passed a pointer to an array of five words
6422           and the receiver label.  */
6423       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6424 	{
6425 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6426 				      VOIDmode, EXPAND_NORMAL);
6427 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6428 	  rtx label_r = label_rtx (label);
6429 
6430 	  /* This is copied from the handling of non-local gotos.  */
6431 	  expand_builtin_setjmp_setup (buf_addr, label_r);
6432 	  nonlocal_goto_handler_labels
6433 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
6434 				 nonlocal_goto_handler_labels);
6435 	  /* ??? Do not let expand_label treat us as such since we would
6436 	     not want to be both on the list of non-local labels and on
6437 	     the list of forced labels.  */
6438 	  FORCED_LABEL (label) = 0;
6439 	  return const0_rtx;
6440 	}
6441       break;
6442 
6443     case BUILT_IN_SETJMP_RECEIVER:
6444        /* __builtin_setjmp_receiver is passed the receiver label.  */
6445       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6446 	{
6447 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6448 	  rtx label_r = label_rtx (label);
6449 
6450 	  expand_builtin_setjmp_receiver (label_r);
6451 	  return const0_rtx;
6452 	}
6453       break;
6454 
6455       /* __builtin_longjmp is passed a pointer to an array of five words.
6456 	 It's similar to the C library longjmp function but works with
6457 	 __builtin_setjmp above.  */
6458     case BUILT_IN_LONGJMP:
6459       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6460 	{
6461 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6462 				      VOIDmode, EXPAND_NORMAL);
6463 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6464 
6465 	  if (value != const1_rtx)
6466 	    {
6467 	      error ("%<__builtin_longjmp%> second argument must be 1");
6468 	      return const0_rtx;
6469 	    }
6470 
6471 	  expand_builtin_longjmp (buf_addr, value);
6472 	  return const0_rtx;
6473 	}
6474       break;
6475 
6476     case BUILT_IN_NONLOCAL_GOTO:
6477       target = expand_builtin_nonlocal_goto (exp);
6478       if (target)
6479 	return target;
6480       break;
6481 
6482       /* This updates the setjmp buffer that is its argument with the value
6483 	 of the current stack pointer.  */
6484     case BUILT_IN_UPDATE_SETJMP_BUF:
6485       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6486 	{
6487 	  rtx buf_addr
6488 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
6489 
6490 	  expand_builtin_update_setjmp_buf (buf_addr);
6491 	  return const0_rtx;
6492 	}
6493       break;
6494 
6495     case BUILT_IN_TRAP:
6496       expand_builtin_trap ();
6497       return const0_rtx;
6498 
6499     case BUILT_IN_UNREACHABLE:
6500       expand_builtin_unreachable ();
6501       return const0_rtx;
6502 
6503     CASE_FLT_FN (BUILT_IN_SIGNBIT):
6504     case BUILT_IN_SIGNBITD32:
6505     case BUILT_IN_SIGNBITD64:
6506     case BUILT_IN_SIGNBITD128:
6507       target = expand_builtin_signbit (exp, target);
6508       if (target)
6509 	return target;
6510       break;
6511 
6512       /* Various hooks for the DWARF 2 __throw routine.  */
6513     case BUILT_IN_UNWIND_INIT:
6514       expand_builtin_unwind_init ();
6515       return const0_rtx;
6516     case BUILT_IN_DWARF_CFA:
6517       return virtual_cfa_rtx;
6518 #ifdef DWARF2_UNWIND_INFO
6519     case BUILT_IN_DWARF_SP_COLUMN:
6520       return expand_builtin_dwarf_sp_column ();
6521     case BUILT_IN_INIT_DWARF_REG_SIZES:
6522       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6523       return const0_rtx;
6524 #endif
6525     case BUILT_IN_FROB_RETURN_ADDR:
6526       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6527     case BUILT_IN_EXTRACT_RETURN_ADDR:
6528       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6529     case BUILT_IN_EH_RETURN:
6530       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6531 				CALL_EXPR_ARG (exp, 1));
6532       return const0_rtx;
6533 #ifdef EH_RETURN_DATA_REGNO
6534     case BUILT_IN_EH_RETURN_DATA_REGNO:
6535       return expand_builtin_eh_return_data_regno (exp);
6536 #endif
6537     case BUILT_IN_EXTEND_POINTER:
6538       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6539     case BUILT_IN_EH_POINTER:
6540       return expand_builtin_eh_pointer (exp);
6541     case BUILT_IN_EH_FILTER:
6542       return expand_builtin_eh_filter (exp);
6543     case BUILT_IN_EH_COPY_VALUES:
6544       return expand_builtin_eh_copy_values (exp);
6545 
6546     case BUILT_IN_VA_START:
6547       return expand_builtin_va_start (exp);
6548     case BUILT_IN_VA_END:
6549       return expand_builtin_va_end (exp);
6550     case BUILT_IN_VA_COPY:
6551       return expand_builtin_va_copy (exp);
6552     case BUILT_IN_EXPECT:
6553       return expand_builtin_expect (exp, target);
6554     case BUILT_IN_ASSUME_ALIGNED:
6555       return expand_builtin_assume_aligned (exp, target);
6556     case BUILT_IN_PREFETCH:
6557       expand_builtin_prefetch (exp);
6558       return const0_rtx;
6559 
6560     case BUILT_IN_INIT_TRAMPOLINE:
6561       return expand_builtin_init_trampoline (exp, true);
6562     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6563       return expand_builtin_init_trampoline (exp, false);
6564     case BUILT_IN_ADJUST_TRAMPOLINE:
6565       return expand_builtin_adjust_trampoline (exp);
6566 
6567     case BUILT_IN_FORK:
6568     case BUILT_IN_EXECL:
6569     case BUILT_IN_EXECV:
6570     case BUILT_IN_EXECLP:
6571     case BUILT_IN_EXECLE:
6572     case BUILT_IN_EXECVP:
6573     case BUILT_IN_EXECVE:
6574       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6575       if (target)
6576 	return target;
6577       break;
6578 
6579     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6580     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6581     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6582     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6583     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6584       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6585       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6586       if (target)
6587 	return target;
6588       break;
6589 
6590     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6591     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6592     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6593     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6594     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6595       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6596       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6597       if (target)
6598 	return target;
6599       break;
6600 
6601     case BUILT_IN_SYNC_FETCH_AND_OR_1:
6602     case BUILT_IN_SYNC_FETCH_AND_OR_2:
6603     case BUILT_IN_SYNC_FETCH_AND_OR_4:
6604     case BUILT_IN_SYNC_FETCH_AND_OR_8:
6605     case BUILT_IN_SYNC_FETCH_AND_OR_16:
6606       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6607       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6608       if (target)
6609 	return target;
6610       break;
6611 
6612     case BUILT_IN_SYNC_FETCH_AND_AND_1:
6613     case BUILT_IN_SYNC_FETCH_AND_AND_2:
6614     case BUILT_IN_SYNC_FETCH_AND_AND_4:
6615     case BUILT_IN_SYNC_FETCH_AND_AND_8:
6616     case BUILT_IN_SYNC_FETCH_AND_AND_16:
6617       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6618       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6619       if (target)
6620 	return target;
6621       break;
6622 
6623     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6624     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6625     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6626     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6627     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6628       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6629       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6630       if (target)
6631 	return target;
6632       break;
6633 
6634     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6635     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6636     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6637     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6638     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6639       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6640       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6641       if (target)
6642 	return target;
6643       break;
6644 
6645     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6646     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6647     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6648     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6649     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6650       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6651       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6652       if (target)
6653 	return target;
6654       break;
6655 
6656     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6657     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6658     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6659     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6660     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6661       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6662       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6663       if (target)
6664 	return target;
6665       break;
6666 
6667     case BUILT_IN_SYNC_OR_AND_FETCH_1:
6668     case BUILT_IN_SYNC_OR_AND_FETCH_2:
6669     case BUILT_IN_SYNC_OR_AND_FETCH_4:
6670     case BUILT_IN_SYNC_OR_AND_FETCH_8:
6671     case BUILT_IN_SYNC_OR_AND_FETCH_16:
6672       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6673       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6674       if (target)
6675 	return target;
6676       break;
6677 
6678     case BUILT_IN_SYNC_AND_AND_FETCH_1:
6679     case BUILT_IN_SYNC_AND_AND_FETCH_2:
6680     case BUILT_IN_SYNC_AND_AND_FETCH_4:
6681     case BUILT_IN_SYNC_AND_AND_FETCH_8:
6682     case BUILT_IN_SYNC_AND_AND_FETCH_16:
6683       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6684       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6685       if (target)
6686 	return target;
6687       break;
6688 
6689     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6690     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6691     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6692     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6693     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6694       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6695       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6696       if (target)
6697 	return target;
6698       break;
6699 
6700     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6701     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6702     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6703     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6704     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6705       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6706       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6707       if (target)
6708 	return target;
6709       break;
6710 
6711     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6712     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6713     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6714     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6715     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6716       if (mode == VOIDmode)
6717 	mode = TYPE_MODE (boolean_type_node);
6718       if (!target || !register_operand (target, mode))
6719 	target = gen_reg_rtx (mode);
6720 
6721       mode = get_builtin_sync_mode
6722 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6723       target = expand_builtin_compare_and_swap (mode, exp, true, target);
6724       if (target)
6725 	return target;
6726       break;
6727 
6728     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6729     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6730     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6731     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6732     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6733       mode = get_builtin_sync_mode
6734 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6735       target = expand_builtin_compare_and_swap (mode, exp, false, target);
6736       if (target)
6737 	return target;
6738       break;
6739 
6740     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6741     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6742     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6743     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6744     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6745       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6746       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6747       if (target)
6748 	return target;
6749       break;
6750 
6751     case BUILT_IN_SYNC_LOCK_RELEASE_1:
6752     case BUILT_IN_SYNC_LOCK_RELEASE_2:
6753     case BUILT_IN_SYNC_LOCK_RELEASE_4:
6754     case BUILT_IN_SYNC_LOCK_RELEASE_8:
6755     case BUILT_IN_SYNC_LOCK_RELEASE_16:
6756       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6757       expand_builtin_sync_lock_release (mode, exp);
6758       return const0_rtx;
6759 
6760     case BUILT_IN_SYNC_SYNCHRONIZE:
6761       expand_builtin_sync_synchronize ();
6762       return const0_rtx;
6763 
6764     case BUILT_IN_ATOMIC_EXCHANGE_1:
6765     case BUILT_IN_ATOMIC_EXCHANGE_2:
6766     case BUILT_IN_ATOMIC_EXCHANGE_4:
6767     case BUILT_IN_ATOMIC_EXCHANGE_8:
6768     case BUILT_IN_ATOMIC_EXCHANGE_16:
6769       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6770       target = expand_builtin_atomic_exchange (mode, exp, target);
6771       if (target)
6772 	return target;
6773       break;
6774 
6775     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6776     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6777     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6778     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6779     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6780       {
6781 	unsigned int nargs, z;
6782 	vec<tree, va_gc> *vec;
6783 
6784 	mode =
6785 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6786 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6787 	if (target)
6788 	  return target;
6789 
6790 	/* If this is turned into an external library call, the weak parameter
6791 	   must be dropped to match the expected parameter list.  */
6792 	nargs = call_expr_nargs (exp);
6793 	vec_alloc (vec, nargs - 1);
6794 	for (z = 0; z < 3; z++)
6795 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
6796 	/* Skip the boolean weak parameter.  */
6797 	for (z = 4; z < 6; z++)
6798 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
6799 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6800 	break;
6801       }
6802 
6803     case BUILT_IN_ATOMIC_LOAD_1:
6804     case BUILT_IN_ATOMIC_LOAD_2:
6805     case BUILT_IN_ATOMIC_LOAD_4:
6806     case BUILT_IN_ATOMIC_LOAD_8:
6807     case BUILT_IN_ATOMIC_LOAD_16:
6808       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6809       target = expand_builtin_atomic_load (mode, exp, target);
6810       if (target)
6811 	return target;
6812       break;
6813 
6814     case BUILT_IN_ATOMIC_STORE_1:
6815     case BUILT_IN_ATOMIC_STORE_2:
6816     case BUILT_IN_ATOMIC_STORE_4:
6817     case BUILT_IN_ATOMIC_STORE_8:
6818     case BUILT_IN_ATOMIC_STORE_16:
6819       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6820       target = expand_builtin_atomic_store (mode, exp);
6821       if (target)
6822 	return const0_rtx;
6823       break;
6824 
6825     case BUILT_IN_ATOMIC_ADD_FETCH_1:
6826     case BUILT_IN_ATOMIC_ADD_FETCH_2:
6827     case BUILT_IN_ATOMIC_ADD_FETCH_4:
6828     case BUILT_IN_ATOMIC_ADD_FETCH_8:
6829     case BUILT_IN_ATOMIC_ADD_FETCH_16:
6830       {
6831 	enum built_in_function lib;
6832 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6833 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6834 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6835 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6836 						 ignore, lib);
6837 	if (target)
6838 	  return target;
6839 	break;
6840       }
6841     case BUILT_IN_ATOMIC_SUB_FETCH_1:
6842     case BUILT_IN_ATOMIC_SUB_FETCH_2:
6843     case BUILT_IN_ATOMIC_SUB_FETCH_4:
6844     case BUILT_IN_ATOMIC_SUB_FETCH_8:
6845     case BUILT_IN_ATOMIC_SUB_FETCH_16:
6846       {
6847 	enum built_in_function lib;
6848 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6849 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6850 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6851 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6852 						 ignore, lib);
6853 	if (target)
6854 	  return target;
6855 	break;
6856       }
6857     case BUILT_IN_ATOMIC_AND_FETCH_1:
6858     case BUILT_IN_ATOMIC_AND_FETCH_2:
6859     case BUILT_IN_ATOMIC_AND_FETCH_4:
6860     case BUILT_IN_ATOMIC_AND_FETCH_8:
6861     case BUILT_IN_ATOMIC_AND_FETCH_16:
6862       {
6863 	enum built_in_function lib;
6864 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6865 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6866 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6867 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6868 						 ignore, lib);
6869 	if (target)
6870 	  return target;
6871 	break;
6872       }
6873     case BUILT_IN_ATOMIC_NAND_FETCH_1:
6874     case BUILT_IN_ATOMIC_NAND_FETCH_2:
6875     case BUILT_IN_ATOMIC_NAND_FETCH_4:
6876     case BUILT_IN_ATOMIC_NAND_FETCH_8:
6877     case BUILT_IN_ATOMIC_NAND_FETCH_16:
6878       {
6879 	enum built_in_function lib;
6880 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6881 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6882 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6883 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6884 						 ignore, lib);
6885 	if (target)
6886 	  return target;
6887 	break;
6888       }
6889     case BUILT_IN_ATOMIC_XOR_FETCH_1:
6890     case BUILT_IN_ATOMIC_XOR_FETCH_2:
6891     case BUILT_IN_ATOMIC_XOR_FETCH_4:
6892     case BUILT_IN_ATOMIC_XOR_FETCH_8:
6893     case BUILT_IN_ATOMIC_XOR_FETCH_16:
6894       {
6895 	enum built_in_function lib;
6896 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6897 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6898 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6899 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6900 						 ignore, lib);
6901 	if (target)
6902 	  return target;
6903 	break;
6904       }
6905     case BUILT_IN_ATOMIC_OR_FETCH_1:
6906     case BUILT_IN_ATOMIC_OR_FETCH_2:
6907     case BUILT_IN_ATOMIC_OR_FETCH_4:
6908     case BUILT_IN_ATOMIC_OR_FETCH_8:
6909     case BUILT_IN_ATOMIC_OR_FETCH_16:
6910       {
6911 	enum built_in_function lib;
6912 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6913 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6914 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6915 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6916 						 ignore, lib);
6917 	if (target)
6918 	  return target;
6919 	break;
6920       }
6921     case BUILT_IN_ATOMIC_FETCH_ADD_1:
6922     case BUILT_IN_ATOMIC_FETCH_ADD_2:
6923     case BUILT_IN_ATOMIC_FETCH_ADD_4:
6924     case BUILT_IN_ATOMIC_FETCH_ADD_8:
6925     case BUILT_IN_ATOMIC_FETCH_ADD_16:
6926       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6927       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6928 					       ignore, BUILT_IN_NONE);
6929       if (target)
6930 	return target;
6931       break;
6932 
6933     case BUILT_IN_ATOMIC_FETCH_SUB_1:
6934     case BUILT_IN_ATOMIC_FETCH_SUB_2:
6935     case BUILT_IN_ATOMIC_FETCH_SUB_4:
6936     case BUILT_IN_ATOMIC_FETCH_SUB_8:
6937     case BUILT_IN_ATOMIC_FETCH_SUB_16:
6938       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6939       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6940 					       ignore, BUILT_IN_NONE);
6941       if (target)
6942 	return target;
6943       break;
6944 
6945     case BUILT_IN_ATOMIC_FETCH_AND_1:
6946     case BUILT_IN_ATOMIC_FETCH_AND_2:
6947     case BUILT_IN_ATOMIC_FETCH_AND_4:
6948     case BUILT_IN_ATOMIC_FETCH_AND_8:
6949     case BUILT_IN_ATOMIC_FETCH_AND_16:
6950       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6951       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6952 					       ignore, BUILT_IN_NONE);
6953       if (target)
6954 	return target;
6955       break;
6956 
6957     case BUILT_IN_ATOMIC_FETCH_NAND_1:
6958     case BUILT_IN_ATOMIC_FETCH_NAND_2:
6959     case BUILT_IN_ATOMIC_FETCH_NAND_4:
6960     case BUILT_IN_ATOMIC_FETCH_NAND_8:
6961     case BUILT_IN_ATOMIC_FETCH_NAND_16:
6962       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6963       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6964 					       ignore, BUILT_IN_NONE);
6965       if (target)
6966 	return target;
6967       break;
6968 
6969     case BUILT_IN_ATOMIC_FETCH_XOR_1:
6970     case BUILT_IN_ATOMIC_FETCH_XOR_2:
6971     case BUILT_IN_ATOMIC_FETCH_XOR_4:
6972     case BUILT_IN_ATOMIC_FETCH_XOR_8:
6973     case BUILT_IN_ATOMIC_FETCH_XOR_16:
6974       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6975       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6976 					       ignore, BUILT_IN_NONE);
6977       if (target)
6978 	return target;
6979       break;
6980 
6981     case BUILT_IN_ATOMIC_FETCH_OR_1:
6982     case BUILT_IN_ATOMIC_FETCH_OR_2:
6983     case BUILT_IN_ATOMIC_FETCH_OR_4:
6984     case BUILT_IN_ATOMIC_FETCH_OR_8:
6985     case BUILT_IN_ATOMIC_FETCH_OR_16:
6986       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6987       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6988 					       ignore, BUILT_IN_NONE);
6989       if (target)
6990 	return target;
6991       break;
6992 
6993     case BUILT_IN_ATOMIC_TEST_AND_SET:
6994       return expand_builtin_atomic_test_and_set (exp, target);
6995 
6996     case BUILT_IN_ATOMIC_CLEAR:
6997       return expand_builtin_atomic_clear (exp);
6998 
6999     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7000       return expand_builtin_atomic_always_lock_free (exp);
7001 
7002     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7003       target = expand_builtin_atomic_is_lock_free (exp);
7004       if (target)
7005         return target;
7006       break;
7007 
7008     case BUILT_IN_ATOMIC_THREAD_FENCE:
7009       expand_builtin_atomic_thread_fence (exp);
7010       return const0_rtx;
7011 
7012     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7013       expand_builtin_atomic_signal_fence (exp);
7014       return const0_rtx;
7015 
7016     case BUILT_IN_OBJECT_SIZE:
7017       return expand_builtin_object_size (exp);
7018 
7019     case BUILT_IN_MEMCPY_CHK:
7020     case BUILT_IN_MEMPCPY_CHK:
7021     case BUILT_IN_MEMMOVE_CHK:
7022     case BUILT_IN_MEMSET_CHK:
7023       target = expand_builtin_memory_chk (exp, target, mode, fcode);
7024       if (target)
7025 	return target;
7026       break;
7027 
7028     case BUILT_IN_STRCPY_CHK:
7029     case BUILT_IN_STPCPY_CHK:
7030     case BUILT_IN_STRNCPY_CHK:
7031     case BUILT_IN_STPNCPY_CHK:
7032     case BUILT_IN_STRCAT_CHK:
7033     case BUILT_IN_STRNCAT_CHK:
7034     case BUILT_IN_SNPRINTF_CHK:
7035     case BUILT_IN_VSNPRINTF_CHK:
7036       maybe_emit_chk_warning (exp, fcode);
7037       break;
7038 
7039     case BUILT_IN_SPRINTF_CHK:
7040     case BUILT_IN_VSPRINTF_CHK:
7041       maybe_emit_sprintf_chk_warning (exp, fcode);
7042       break;
7043 
7044     case BUILT_IN_FREE:
7045       if (warn_free_nonheap_object)
7046 	maybe_emit_free_warning (exp);
7047       break;
7048 
7049     case BUILT_IN_THREAD_POINTER:
7050       return expand_builtin_thread_pointer (exp, target);
7051 
7052     case BUILT_IN_SET_THREAD_POINTER:
7053       expand_builtin_set_thread_pointer (exp);
7054       return const0_rtx;
7055 
7056     case BUILT_IN_CILK_DETACH:
7057       expand_builtin_cilk_detach (exp);
7058       return const0_rtx;
7059 
7060     case BUILT_IN_CILK_POP_FRAME:
7061       expand_builtin_cilk_pop_frame (exp);
7062       return const0_rtx;
7063 
7064     case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7065     case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7066     case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7067     case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7068     case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7069     case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7070     case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7071     case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7072     case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7073     case BUILT_IN_CHKP_GET_PTR_LBOUND:
7074     case BUILT_IN_CHKP_GET_PTR_UBOUND:
7075       /* We allow user CHKP builtins if Pointer Bounds
7076 	 Checker is off.  */
7077       if (!chkp_function_instrumented_p (current_function_decl))
7078 	{
7079 	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7080 	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7081 	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7082 	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7083 	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7084 	    return expand_normal (CALL_EXPR_ARG (exp, 0));
7085 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7086 	    return expand_normal (size_zero_node);
7087 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7088 	    return expand_normal (size_int (-1));
7089 	  else
7090 	    return const0_rtx;
7091 	}
7092       /* FALLTHROUGH */
7093 
7094     case BUILT_IN_CHKP_BNDMK:
7095     case BUILT_IN_CHKP_BNDSTX:
7096     case BUILT_IN_CHKP_BNDCL:
7097     case BUILT_IN_CHKP_BNDCU:
7098     case BUILT_IN_CHKP_BNDLDX:
7099     case BUILT_IN_CHKP_BNDRET:
7100     case BUILT_IN_CHKP_INTERSECT:
7101     case BUILT_IN_CHKP_NARROW:
7102     case BUILT_IN_CHKP_EXTRACT_LOWER:
7103     case BUILT_IN_CHKP_EXTRACT_UPPER:
7104       /* Software implementation of Pointer Bounds Checker is NYI.
7105 	 Target support is required.  */
7106       error ("Your target platform does not support -fcheck-pointer-bounds");
7107       break;
7108 
7109     case BUILT_IN_ACC_ON_DEVICE:
7110       target = expand_builtin_acc_on_device (exp, target);
7111       if (target)
7112 	return target;
7113       break;
7114 
7115     default:	/* just do library call, if unknown builtin */
7116       break;
7117     }
7118 
7119   /* The switch statement above can drop through to cause the function
7120      to be called normally.  */
7121   return expand_call (exp, target, ignore);
7122 }
7123 
7124 /* Similar to expand_builtin but is used for instrumented calls.  */
7125 
7126 rtx
7127 expand_builtin_with_bounds (tree exp, rtx target,
7128 			    rtx subtarget ATTRIBUTE_UNUSED,
7129 			    machine_mode mode, int ignore)
7130 {
7131   tree fndecl = get_callee_fndecl (exp);
7132   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7133 
7134   gcc_assert (CALL_WITH_BOUNDS_P (exp));
7135 
7136   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7137     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7138 
7139   gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7140 	      && fcode < END_CHKP_BUILTINS);
7141 
7142   switch (fcode)
7143     {
7144     case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7145       target = expand_builtin_memcpy_with_bounds (exp, target);
7146       if (target)
7147 	return target;
7148       break;
7149 
7150     case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7151       target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7152       if (target)
7153 	return target;
7154       break;
7155 
7156     case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7157       target = expand_builtin_memset_with_bounds (exp, target, mode);
7158       if (target)
7159 	return target;
7160       break;
7161 
7162     default:
7163       break;
7164     }
7165 
7166   /* The switch statement above can drop through to cause the function
7167      to be called normally.  */
7168   return expand_call (exp, target, ignore);
7169  }
7170 
7171 /* Determine whether a tree node represents a call to a built-in
7172    function.  If the tree T is a call to a built-in function with
7173    the right number of arguments of the appropriate types, return
7174    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7175    Otherwise the return value is END_BUILTINS.  */
7176 
7177 enum built_in_function
7178 builtin_mathfn_code (const_tree t)
7179 {
7180   const_tree fndecl, arg, parmlist;
7181   const_tree argtype, parmtype;
7182   const_call_expr_arg_iterator iter;
7183 
7184   if (TREE_CODE (t) != CALL_EXPR
7185       || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7186     return END_BUILTINS;
7187 
7188   fndecl = get_callee_fndecl (t);
7189   if (fndecl == NULL_TREE
7190       || TREE_CODE (fndecl) != FUNCTION_DECL
7191       || ! DECL_BUILT_IN (fndecl)
7192       || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7193     return END_BUILTINS;
7194 
7195   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7196   init_const_call_expr_arg_iterator (t, &iter);
7197   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7198     {
7199       /* If a function doesn't take a variable number of arguments,
7200 	 the last element in the list will have type `void'.  */
7201       parmtype = TREE_VALUE (parmlist);
7202       if (VOID_TYPE_P (parmtype))
7203 	{
7204 	  if (more_const_call_expr_args_p (&iter))
7205 	    return END_BUILTINS;
7206 	  return DECL_FUNCTION_CODE (fndecl);
7207 	}
7208 
7209       if (! more_const_call_expr_args_p (&iter))
7210 	return END_BUILTINS;
7211 
7212       arg = next_const_call_expr_arg (&iter);
7213       argtype = TREE_TYPE (arg);
7214 
7215       if (SCALAR_FLOAT_TYPE_P (parmtype))
7216 	{
7217 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
7218 	    return END_BUILTINS;
7219 	}
7220       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7221 	{
7222 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
7223 	    return END_BUILTINS;
7224 	}
7225       else if (POINTER_TYPE_P (parmtype))
7226 	{
7227 	  if (! POINTER_TYPE_P (argtype))
7228 	    return END_BUILTINS;
7229 	}
7230       else if (INTEGRAL_TYPE_P (parmtype))
7231 	{
7232 	  if (! INTEGRAL_TYPE_P (argtype))
7233 	    return END_BUILTINS;
7234 	}
7235       else
7236 	return END_BUILTINS;
7237     }
7238 
7239   /* Variable-length argument list.  */
7240   return DECL_FUNCTION_CODE (fndecl);
7241 }
7242 
7243 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7244    evaluate to a constant.  */
7245 
7246 static tree
7247 fold_builtin_constant_p (tree arg)
7248 {
7249   /* We return 1 for a numeric type that's known to be a constant
7250      value at compile-time or for an aggregate type that's a
7251      literal constant.  */
7252   STRIP_NOPS (arg);
7253 
7254   /* If we know this is a constant, emit the constant of one.  */
7255   if (CONSTANT_CLASS_P (arg)
7256       || (TREE_CODE (arg) == CONSTRUCTOR
7257 	  && TREE_CONSTANT (arg)))
7258     return integer_one_node;
7259   if (TREE_CODE (arg) == ADDR_EXPR)
7260     {
7261        tree op = TREE_OPERAND (arg, 0);
7262        if (TREE_CODE (op) == STRING_CST
7263 	   || (TREE_CODE (op) == ARRAY_REF
7264 	       && integer_zerop (TREE_OPERAND (op, 1))
7265 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7266 	 return integer_one_node;
7267     }
7268 
7269   /* If this expression has side effects, show we don't know it to be a
7270      constant.  Likewise if it's a pointer or aggregate type since in
7271      those case we only want literals, since those are only optimized
7272      when generating RTL, not later.
7273      And finally, if we are compiling an initializer, not code, we
7274      need to return a definite result now; there's not going to be any
7275      more optimization done.  */
7276   if (TREE_SIDE_EFFECTS (arg)
7277       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7278       || POINTER_TYPE_P (TREE_TYPE (arg))
7279       || cfun == 0
7280       || folding_initializer
7281       || force_folding_builtin_constant_p)
7282     return integer_zero_node;
7283 
7284   return NULL_TREE;
7285 }
7286 
7287 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7288    return it as a truthvalue.  */
7289 
7290 static tree
7291 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7292 				tree predictor)
7293 {
7294   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7295 
7296   fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7297   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7298   ret_type = TREE_TYPE (TREE_TYPE (fn));
7299   pred_type = TREE_VALUE (arg_types);
7300   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7301 
7302   pred = fold_convert_loc (loc, pred_type, pred);
7303   expected = fold_convert_loc (loc, expected_type, expected);
7304   call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7305 				   predictor);
7306 
7307   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7308 		 build_int_cst (ret_type, 0));
7309 }
7310 
7311 /* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
7312    NULL_TREE if no simplification is possible.  */
7313 
7314 tree
7315 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7316 {
7317   tree inner, fndecl, inner_arg0;
7318   enum tree_code code;
7319 
7320   /* Distribute the expected value over short-circuiting operators.
7321      See through the cast from truthvalue_type_node to long.  */
7322   inner_arg0 = arg0;
7323   while (CONVERT_EXPR_P (inner_arg0)
7324 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7325 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7326     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7327 
7328   /* If this is a builtin_expect within a builtin_expect keep the
7329      inner one.  See through a comparison against a constant.  It
7330      might have been added to create a thruthvalue.  */
7331   inner = inner_arg0;
7332 
7333   if (COMPARISON_CLASS_P (inner)
7334       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7335     inner = TREE_OPERAND (inner, 0);
7336 
7337   if (TREE_CODE (inner) == CALL_EXPR
7338       && (fndecl = get_callee_fndecl (inner))
7339       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7340       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7341     return arg0;
7342 
7343   inner = inner_arg0;
7344   code = TREE_CODE (inner);
7345   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7346     {
7347       tree op0 = TREE_OPERAND (inner, 0);
7348       tree op1 = TREE_OPERAND (inner, 1);
7349 
7350       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7351       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7352       inner = build2 (code, TREE_TYPE (inner), op0, op1);
7353 
7354       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7355     }
7356 
7357   /* If the argument isn't invariant then there's nothing else we can do.  */
7358   if (!TREE_CONSTANT (inner_arg0))
7359     return NULL_TREE;
7360 
7361   /* If we expect that a comparison against the argument will fold to
7362      a constant return the constant.  In practice, this means a true
7363      constant or the address of a non-weak symbol.  */
7364   inner = inner_arg0;
7365   STRIP_NOPS (inner);
7366   if (TREE_CODE (inner) == ADDR_EXPR)
7367     {
7368       do
7369 	{
7370 	  inner = TREE_OPERAND (inner, 0);
7371 	}
7372       while (TREE_CODE (inner) == COMPONENT_REF
7373 	     || TREE_CODE (inner) == ARRAY_REF);
7374       if ((TREE_CODE (inner) == VAR_DECL
7375            || TREE_CODE (inner) == FUNCTION_DECL)
7376 	  && DECL_WEAK (inner))
7377 	return NULL_TREE;
7378     }
7379 
7380   /* Otherwise, ARG0 already has the proper type for the return value.  */
7381   return arg0;
7382 }
7383 
7384 /* Fold a call to __builtin_classify_type with argument ARG.  */
7385 
7386 static tree
7387 fold_builtin_classify_type (tree arg)
7388 {
7389   if (arg == 0)
7390     return build_int_cst (integer_type_node, no_type_class);
7391 
7392   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7393 }
7394 
7395 /* Fold a call to __builtin_strlen with argument ARG.  */
7396 
7397 static tree
7398 fold_builtin_strlen (location_t loc, tree type, tree arg)
7399 {
7400   if (!validate_arg (arg, POINTER_TYPE))
7401     return NULL_TREE;
7402   else
7403     {
7404       tree len = c_strlen (arg, 0);
7405 
7406       if (len)
7407 	return fold_convert_loc (loc, type, len);
7408 
7409       return NULL_TREE;
7410     }
7411 }
7412 
7413 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
7414 
7415 static tree
7416 fold_builtin_inf (location_t loc, tree type, int warn)
7417 {
7418   REAL_VALUE_TYPE real;
7419 
7420   /* __builtin_inff is intended to be usable to define INFINITY on all
7421      targets.  If an infinity is not available, INFINITY expands "to a
7422      positive constant of type float that overflows at translation
7423      time", footnote "In this case, using INFINITY will violate the
7424      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7425      Thus we pedwarn to ensure this constraint violation is
7426      diagnosed.  */
7427   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7428     pedwarn (loc, 0, "target format does not support infinity");
7429 
7430   real_inf (&real);
7431   return build_real (type, real);
7432 }
7433 
7434 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */
7435 
7436 static tree
7437 fold_builtin_nan (tree arg, tree type, int quiet)
7438 {
7439   REAL_VALUE_TYPE real;
7440   const char *str;
7441 
7442   if (!validate_arg (arg, POINTER_TYPE))
7443     return NULL_TREE;
7444   str = c_getstr (arg);
7445   if (!str)
7446     return NULL_TREE;
7447 
7448   if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7449     return NULL_TREE;
7450 
7451   return build_real (type, real);
7452 }
7453 
7454 /* Return true if the floating point expression T has an integer value.
7455    We also allow +Inf, -Inf and NaN to be considered integer values.  */
7456 
7457 static bool
7458 integer_valued_real_p (tree t)
7459 {
7460   switch (TREE_CODE (t))
7461     {
7462     case FLOAT_EXPR:
7463       return true;
7464 
7465     case ABS_EXPR:
7466     case SAVE_EXPR:
7467       return integer_valued_real_p (TREE_OPERAND (t, 0));
7468 
7469     case COMPOUND_EXPR:
7470     case MODIFY_EXPR:
7471     case BIND_EXPR:
7472       return integer_valued_real_p (TREE_OPERAND (t, 1));
7473 
7474     case PLUS_EXPR:
7475     case MINUS_EXPR:
7476     case MULT_EXPR:
7477     case MIN_EXPR:
7478     case MAX_EXPR:
7479       return integer_valued_real_p (TREE_OPERAND (t, 0))
7480 	     && integer_valued_real_p (TREE_OPERAND (t, 1));
7481 
7482     case COND_EXPR:
7483       return integer_valued_real_p (TREE_OPERAND (t, 1))
7484 	     && integer_valued_real_p (TREE_OPERAND (t, 2));
7485 
7486     case REAL_CST:
7487       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7488 
7489     CASE_CONVERT:
7490       {
7491 	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7492 	if (TREE_CODE (type) == INTEGER_TYPE)
7493 	  return true;
7494 	if (TREE_CODE (type) == REAL_TYPE)
7495 	  return integer_valued_real_p (TREE_OPERAND (t, 0));
7496 	break;
7497       }
7498 
7499     case CALL_EXPR:
7500       switch (builtin_mathfn_code (t))
7501 	{
7502 	CASE_FLT_FN (BUILT_IN_CEIL):
7503 	CASE_FLT_FN (BUILT_IN_FLOOR):
7504 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
7505 	CASE_FLT_FN (BUILT_IN_RINT):
7506 	CASE_FLT_FN (BUILT_IN_ROUND):
7507 	CASE_FLT_FN (BUILT_IN_TRUNC):
7508 	  return true;
7509 
7510 	CASE_FLT_FN (BUILT_IN_FMIN):
7511 	CASE_FLT_FN (BUILT_IN_FMAX):
7512 	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7513  	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7514 
7515 	default:
7516 	  break;
7517 	}
7518       break;
7519 
7520     default:
7521       break;
7522     }
7523   return false;
7524 }
7525 
7526 /* FNDECL is assumed to be a builtin where truncation can be propagated
7527    across (for instance floor((double)f) == (double)floorf (f).
7528    Do the transformation for a call with argument ARG.  */
7529 
7530 static tree
7531 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7532 {
7533   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7534 
7535   if (!validate_arg (arg, REAL_TYPE))
7536     return NULL_TREE;
7537 
7538   /* Integer rounding functions are idempotent.  */
7539   if (fcode == builtin_mathfn_code (arg))
7540     return arg;
7541 
7542   /* If argument is already integer valued, and we don't need to worry
7543      about setting errno, there's no need to perform rounding.  */
7544   if (! flag_errno_math && integer_valued_real_p (arg))
7545     return arg;
7546 
7547   if (optimize)
7548     {
7549       tree arg0 = strip_float_extensions (arg);
7550       tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7551       tree newtype = TREE_TYPE (arg0);
7552       tree decl;
7553 
7554       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7555 	  && (decl = mathfn_built_in (newtype, fcode)))
7556 	return fold_convert_loc (loc, ftype,
7557 				 build_call_expr_loc (loc, decl, 1,
7558 						  fold_convert_loc (loc,
7559 								    newtype,
7560 								    arg0)));
7561     }
7562   return NULL_TREE;
7563 }
7564 
7565 /* FNDECL is assumed to be builtin which can narrow the FP type of
7566    the argument, for instance lround((double)f) -> lroundf (f).
7567    Do the transformation for a call with argument ARG.  */
7568 
7569 static tree
7570 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7571 {
7572   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7573 
7574   if (!validate_arg (arg, REAL_TYPE))
7575     return NULL_TREE;
7576 
7577   /* If argument is already integer valued, and we don't need to worry
7578      about setting errno, there's no need to perform rounding.  */
7579   if (! flag_errno_math && integer_valued_real_p (arg))
7580     return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7581 			TREE_TYPE (TREE_TYPE (fndecl)), arg);
7582 
7583   if (optimize)
7584     {
7585       tree ftype = TREE_TYPE (arg);
7586       tree arg0 = strip_float_extensions (arg);
7587       tree newtype = TREE_TYPE (arg0);
7588       tree decl;
7589 
7590       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7591 	  && (decl = mathfn_built_in (newtype, fcode)))
7592 	return build_call_expr_loc (loc, decl, 1,
7593 				fold_convert_loc (loc, newtype, arg0));
7594     }
7595 
7596   /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7597      sizeof (int) == sizeof (long).  */
7598   if (TYPE_PRECISION (integer_type_node)
7599       == TYPE_PRECISION (long_integer_type_node))
7600     {
7601       tree newfn = NULL_TREE;
7602       switch (fcode)
7603 	{
7604 	CASE_FLT_FN (BUILT_IN_ICEIL):
7605 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7606 	  break;
7607 
7608 	CASE_FLT_FN (BUILT_IN_IFLOOR):
7609 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7610 	  break;
7611 
7612 	CASE_FLT_FN (BUILT_IN_IROUND):
7613 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7614 	  break;
7615 
7616 	CASE_FLT_FN (BUILT_IN_IRINT):
7617 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7618 	  break;
7619 
7620 	default:
7621 	  break;
7622 	}
7623 
7624       if (newfn)
7625 	{
7626 	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7627 	  return fold_convert_loc (loc,
7628 				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7629 	}
7630     }
7631 
7632   /* Canonicalize llround (x) to lround (x) on LP64 targets where
7633      sizeof (long long) == sizeof (long).  */
7634   if (TYPE_PRECISION (long_long_integer_type_node)
7635       == TYPE_PRECISION (long_integer_type_node))
7636     {
7637       tree newfn = NULL_TREE;
7638       switch (fcode)
7639 	{
7640 	CASE_FLT_FN (BUILT_IN_LLCEIL):
7641 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7642 	  break;
7643 
7644 	CASE_FLT_FN (BUILT_IN_LLFLOOR):
7645 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7646 	  break;
7647 
7648 	CASE_FLT_FN (BUILT_IN_LLROUND):
7649 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7650 	  break;
7651 
7652 	CASE_FLT_FN (BUILT_IN_LLRINT):
7653 	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7654 	  break;
7655 
7656 	default:
7657 	  break;
7658 	}
7659 
7660       if (newfn)
7661 	{
7662 	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7663 	  return fold_convert_loc (loc,
7664 				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7665 	}
7666     }
7667 
7668   return NULL_TREE;
7669 }
7670 
7671 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
7672    return type.  Return NULL_TREE if no simplification can be made.  */
7673 
7674 static tree
7675 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7676 {
7677   tree res;
7678 
7679   if (!validate_arg (arg, COMPLEX_TYPE)
7680       || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7681     return NULL_TREE;
7682 
7683   /* Calculate the result when the argument is a constant.  */
7684   if (TREE_CODE (arg) == COMPLEX_CST
7685       && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7686 			      type, mpfr_hypot)))
7687     return res;
7688 
7689   if (TREE_CODE (arg) == COMPLEX_EXPR)
7690     {
7691       tree real = TREE_OPERAND (arg, 0);
7692       tree imag = TREE_OPERAND (arg, 1);
7693 
7694       /* If either part is zero, cabs is fabs of the other.  */
7695       if (real_zerop (real))
7696 	return fold_build1_loc (loc, ABS_EXPR, type, imag);
7697       if (real_zerop (imag))
7698 	return fold_build1_loc (loc, ABS_EXPR, type, real);
7699 
7700       /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
7701       if (flag_unsafe_math_optimizations
7702 	  && operand_equal_p (real, imag, OEP_PURE_SAME))
7703         {
7704 	  const REAL_VALUE_TYPE sqrt2_trunc
7705 	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7706 	  STRIP_NOPS (real);
7707 	  return fold_build2_loc (loc, MULT_EXPR, type,
7708 			      fold_build1_loc (loc, ABS_EXPR, type, real),
7709 			      build_real (type, sqrt2_trunc));
7710 	}
7711     }
7712 
7713   /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
7714   if (TREE_CODE (arg) == NEGATE_EXPR
7715       || TREE_CODE (arg) == CONJ_EXPR)
7716     return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7717 
7718   /* Don't do this when optimizing for size.  */
7719   if (flag_unsafe_math_optimizations
7720       && optimize && optimize_function_for_speed_p (cfun))
7721     {
7722       tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7723 
7724       if (sqrtfn != NULL_TREE)
7725 	{
7726 	  tree rpart, ipart, result;
7727 
7728 	  arg = builtin_save_expr (arg);
7729 
7730 	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7731 	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7732 
7733 	  rpart = builtin_save_expr (rpart);
7734 	  ipart = builtin_save_expr (ipart);
7735 
7736 	  result = fold_build2_loc (loc, PLUS_EXPR, type,
7737 				fold_build2_loc (loc, MULT_EXPR, type,
7738 					     rpart, rpart),
7739 				fold_build2_loc (loc, MULT_EXPR, type,
7740 					     ipart, ipart));
7741 
7742 	  return build_call_expr_loc (loc, sqrtfn, 1, result);
7743 	}
7744     }
7745 
7746   return NULL_TREE;
7747 }
7748 
7749 /* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
7750    complex tree type of the result.  If NEG is true, the imaginary
7751    zero is negative.  */
7752 
7753 static tree
7754 build_complex_cproj (tree type, bool neg)
7755 {
7756   REAL_VALUE_TYPE rinf, rzero = dconst0;
7757 
7758   real_inf (&rinf);
7759   rzero.sign = neg;
7760   return build_complex (type, build_real (TREE_TYPE (type), rinf),
7761 			build_real (TREE_TYPE (type), rzero));
7762 }
7763 
7764 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
7765    return type.  Return NULL_TREE if no simplification can be made.  */
7766 
7767 static tree
7768 fold_builtin_cproj (location_t loc, tree arg, tree type)
7769 {
7770   if (!validate_arg (arg, COMPLEX_TYPE)
7771       || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7772     return NULL_TREE;
7773 
7774   /* If there are no infinities, return arg.  */
7775   if (! HONOR_INFINITIES (type))
7776     return non_lvalue_loc (loc, arg);
7777 
7778   /* Calculate the result when the argument is a constant.  */
7779   if (TREE_CODE (arg) == COMPLEX_CST)
7780     {
7781       const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7782       const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7783 
7784       if (real_isinf (real) || real_isinf (imag))
7785 	return build_complex_cproj (type, imag->sign);
7786       else
7787 	return arg;
7788     }
7789   else if (TREE_CODE (arg) == COMPLEX_EXPR)
7790     {
7791       tree real = TREE_OPERAND (arg, 0);
7792       tree imag = TREE_OPERAND (arg, 1);
7793 
7794       STRIP_NOPS (real);
7795       STRIP_NOPS (imag);
7796 
7797       /* If the real part is inf and the imag part is known to be
7798 	 nonnegative, return (inf + 0i).  Remember side-effects are
7799 	 possible in the imag part.  */
7800       if (TREE_CODE (real) == REAL_CST
7801 	  && real_isinf (TREE_REAL_CST_PTR (real))
7802 	  && tree_expr_nonnegative_p (imag))
7803 	return omit_one_operand_loc (loc, type,
7804 				     build_complex_cproj (type, false),
7805 				     arg);
7806 
7807       /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7808 	 Remember side-effects are possible in the real part.  */
7809       if (TREE_CODE (imag) == REAL_CST
7810 	  && real_isinf (TREE_REAL_CST_PTR (imag)))
7811 	return
7812 	  omit_one_operand_loc (loc, type,
7813 				build_complex_cproj (type, TREE_REAL_CST_PTR
7814 						     (imag)->sign), arg);
7815     }
7816 
7817   return NULL_TREE;
7818 }
7819 
7820 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7821    Return NULL_TREE if no simplification can be made.  */
7822 
7823 static tree
7824 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7825 {
7826 
7827   enum built_in_function fcode;
7828   tree res;
7829 
7830   if (!validate_arg (arg, REAL_TYPE))
7831     return NULL_TREE;
7832 
7833   /* Calculate the result when the argument is a constant.  */
7834   if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7835     return res;
7836 
7837   /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
7838   fcode = builtin_mathfn_code (arg);
7839   if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7840     {
7841       tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7842       arg = fold_build2_loc (loc, MULT_EXPR, type,
7843 			 CALL_EXPR_ARG (arg, 0),
7844 			 build_real (type, dconsthalf));
7845       return build_call_expr_loc (loc, expfn, 1, arg);
7846     }
7847 
7848   /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
7849   if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7850     {
7851       tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7852 
7853       if (powfn)
7854 	{
7855 	  tree arg0 = CALL_EXPR_ARG (arg, 0);
7856 	  tree tree_root;
7857 	  /* The inner root was either sqrt or cbrt.  */
7858 	  /* This was a conditional expression but it triggered a bug
7859 	     in Sun C 5.5.  */
7860 	  REAL_VALUE_TYPE dconstroot;
7861 	  if (BUILTIN_SQRT_P (fcode))
7862 	    dconstroot = dconsthalf;
7863 	  else
7864 	    dconstroot = dconst_third ();
7865 
7866 	  /* Adjust for the outer root.  */
7867 	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7868 	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7869 	  tree_root = build_real (type, dconstroot);
7870 	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7871 	}
7872     }
7873 
7874   /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
7875   if (flag_unsafe_math_optimizations
7876       && (fcode == BUILT_IN_POW
7877 	  || fcode == BUILT_IN_POWF
7878 	  || fcode == BUILT_IN_POWL))
7879     {
7880       tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7881       tree arg0 = CALL_EXPR_ARG (arg, 0);
7882       tree arg1 = CALL_EXPR_ARG (arg, 1);
7883       tree narg1;
7884       if (!tree_expr_nonnegative_p (arg0))
7885 	arg0 = build1 (ABS_EXPR, type, arg0);
7886       narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7887 			   build_real (type, dconsthalf));
7888       return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7889     }
7890 
7891   return NULL_TREE;
7892 }
7893 
7894 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7895    Return NULL_TREE if no simplification can be made.  */
7896 
7897 static tree
7898 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7899 {
7900   const enum built_in_function fcode = builtin_mathfn_code (arg);
7901   tree res;
7902 
7903   if (!validate_arg (arg, REAL_TYPE))
7904     return NULL_TREE;
7905 
7906   /* Calculate the result when the argument is a constant.  */
7907   if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7908     return res;
7909 
7910   if (flag_unsafe_math_optimizations)
7911     {
7912       /* Optimize cbrt(expN(x)) -> expN(x/3).  */
7913       if (BUILTIN_EXPONENT_P (fcode))
7914 	{
7915 	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7916 	  const REAL_VALUE_TYPE third_trunc =
7917 	    real_value_truncate (TYPE_MODE (type), dconst_third ());
7918 	  arg = fold_build2_loc (loc, MULT_EXPR, type,
7919 			     CALL_EXPR_ARG (arg, 0),
7920 			     build_real (type, third_trunc));
7921 	  return build_call_expr_loc (loc, expfn, 1, arg);
7922 	}
7923 
7924       /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
7925       if (BUILTIN_SQRT_P (fcode))
7926 	{
7927 	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7928 
7929 	  if (powfn)
7930 	    {
7931 	      tree arg0 = CALL_EXPR_ARG (arg, 0);
7932 	      tree tree_root;
7933 	      REAL_VALUE_TYPE dconstroot = dconst_third ();
7934 
7935 	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7936 	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7937 	      tree_root = build_real (type, dconstroot);
7938 	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7939 	    }
7940 	}
7941 
7942       /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
7943       if (BUILTIN_CBRT_P (fcode))
7944 	{
7945 	  tree arg0 = CALL_EXPR_ARG (arg, 0);
7946 	  if (tree_expr_nonnegative_p (arg0))
7947 	    {
7948 	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7949 
7950 	      if (powfn)
7951 		{
7952 		  tree tree_root;
7953 		  REAL_VALUE_TYPE dconstroot;
7954 
7955 		  real_arithmetic (&dconstroot, MULT_EXPR,
7956                                    dconst_third_ptr (), dconst_third_ptr ());
7957 		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7958 		  tree_root = build_real (type, dconstroot);
7959 		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7960 		}
7961 	    }
7962 	}
7963 
7964       /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
7965       if (fcode == BUILT_IN_POW
7966           || fcode == BUILT_IN_POWF
7967 	  || fcode == BUILT_IN_POWL)
7968 	{
7969 	  tree arg00 = CALL_EXPR_ARG (arg, 0);
7970 	  tree arg01 = CALL_EXPR_ARG (arg, 1);
7971 	  if (tree_expr_nonnegative_p (arg00))
7972 	    {
7973 	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7974 	      const REAL_VALUE_TYPE dconstroot
7975 		= real_value_truncate (TYPE_MODE (type), dconst_third ());
7976 	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7977 					 build_real (type, dconstroot));
7978 	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7979 	    }
7980 	}
7981     }
7982   return NULL_TREE;
7983 }
7984 
7985 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7986    TYPE is the type of the return value.  Return NULL_TREE if no
7987    simplification can be made.  */
7988 
7989 static tree
7990 fold_builtin_cos (location_t loc,
7991 		  tree arg, tree type, tree fndecl)
7992 {
7993   tree res, narg;
7994 
7995   if (!validate_arg (arg, REAL_TYPE))
7996     return NULL_TREE;
7997 
7998   /* Calculate the result when the argument is a constant.  */
7999   if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
8000     return res;
8001 
8002   /* Optimize cos(-x) into cos (x).  */
8003   if ((narg = fold_strip_sign_ops (arg)))
8004     return build_call_expr_loc (loc, fndecl, 1, narg);
8005 
8006   return NULL_TREE;
8007 }
8008 
8009 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
8010    Return NULL_TREE if no simplification can be made.  */
8011 
8012 static tree
8013 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
8014 {
8015   if (validate_arg (arg, REAL_TYPE))
8016     {
8017       tree res, narg;
8018 
8019       /* Calculate the result when the argument is a constant.  */
8020       if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
8021 	return res;
8022 
8023       /* Optimize cosh(-x) into cosh (x).  */
8024       if ((narg = fold_strip_sign_ops (arg)))
8025 	return build_call_expr_loc (loc, fndecl, 1, narg);
8026     }
8027 
8028   return NULL_TREE;
8029 }
8030 
8031 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8032    argument ARG.  TYPE is the type of the return value.  Return
8033    NULL_TREE if no simplification can be made.  */
8034 
8035 static tree
8036 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8037 		   bool hyper)
8038 {
8039   if (validate_arg (arg, COMPLEX_TYPE)
8040       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8041     {
8042       tree tmp;
8043 
8044       /* Calculate the result when the argument is a constant.  */
8045       if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8046 	return tmp;
8047 
8048       /* Optimize fn(-x) into fn(x).  */
8049       if ((tmp = fold_strip_sign_ops (arg)))
8050 	return build_call_expr_loc (loc, fndecl, 1, tmp);
8051     }
8052 
8053   return NULL_TREE;
8054 }
8055 
8056 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8057    Return NULL_TREE if no simplification can be made.  */
8058 
8059 static tree
8060 fold_builtin_tan (tree arg, tree type)
8061 {
8062   enum built_in_function fcode;
8063   tree res;
8064 
8065   if (!validate_arg (arg, REAL_TYPE))
8066     return NULL_TREE;
8067 
8068   /* Calculate the result when the argument is a constant.  */
8069   if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8070     return res;
8071 
8072   /* Optimize tan(atan(x)) = x.  */
8073   fcode = builtin_mathfn_code (arg);
8074   if (flag_unsafe_math_optimizations
8075       && (fcode == BUILT_IN_ATAN
8076 	  || fcode == BUILT_IN_ATANF
8077 	  || fcode == BUILT_IN_ATANL))
8078     return CALL_EXPR_ARG (arg, 0);
8079 
8080   return NULL_TREE;
8081 }
8082 
8083 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
8084    NULL_TREE if no simplification can be made.  */
8085 
8086 static tree
8087 fold_builtin_sincos (location_t loc,
8088 		     tree arg0, tree arg1, tree arg2)
8089 {
8090   tree type;
8091   tree res, fn, call;
8092 
8093   if (!validate_arg (arg0, REAL_TYPE)
8094       || !validate_arg (arg1, POINTER_TYPE)
8095       || !validate_arg (arg2, POINTER_TYPE))
8096     return NULL_TREE;
8097 
8098   type = TREE_TYPE (arg0);
8099 
8100   /* Calculate the result when the argument is a constant.  */
8101   if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8102     return res;
8103 
8104   /* Canonicalize sincos to cexpi.  */
8105   if (!targetm.libc_has_function (function_c99_math_complex))
8106     return NULL_TREE;
8107   fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8108   if (!fn)
8109     return NULL_TREE;
8110 
8111   call = build_call_expr_loc (loc, fn, 1, arg0);
8112   call = builtin_save_expr (call);
8113 
8114   return build2 (COMPOUND_EXPR, void_type_node,
8115 		 build2 (MODIFY_EXPR, void_type_node,
8116 			 build_fold_indirect_ref_loc (loc, arg1),
8117 			 build1 (IMAGPART_EXPR, type, call)),
8118 		 build2 (MODIFY_EXPR, void_type_node,
8119 			 build_fold_indirect_ref_loc (loc, arg2),
8120 			 build1 (REALPART_EXPR, type, call)));
8121 }
8122 
8123 /* Fold function call to builtin cexp, cexpf, or cexpl.  Return
8124    NULL_TREE if no simplification can be made.  */
8125 
8126 static tree
8127 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8128 {
8129   tree rtype;
8130   tree realp, imagp, ifn;
8131   tree res;
8132 
8133   if (!validate_arg (arg0, COMPLEX_TYPE)
8134       || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8135     return NULL_TREE;
8136 
8137   /* Calculate the result when the argument is a constant.  */
8138   if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8139     return res;
8140 
8141   rtype = TREE_TYPE (TREE_TYPE (arg0));
8142 
8143   /* In case we can figure out the real part of arg0 and it is constant zero
8144      fold to cexpi.  */
8145   if (!targetm.libc_has_function (function_c99_math_complex))
8146     return NULL_TREE;
8147   ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8148   if (!ifn)
8149     return NULL_TREE;
8150 
8151   if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8152       && real_zerop (realp))
8153     {
8154       tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8155       return build_call_expr_loc (loc, ifn, 1, narg);
8156     }
8157 
8158   /* In case we can easily decompose real and imaginary parts split cexp
8159      to exp (r) * cexpi (i).  */
8160   if (flag_unsafe_math_optimizations
8161       && realp)
8162     {
8163       tree rfn, rcall, icall;
8164 
8165       rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8166       if (!rfn)
8167 	return NULL_TREE;
8168 
8169       imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8170       if (!imagp)
8171 	return NULL_TREE;
8172 
8173       icall = build_call_expr_loc (loc, ifn, 1, imagp);
8174       icall = builtin_save_expr (icall);
8175       rcall = build_call_expr_loc (loc, rfn, 1, realp);
8176       rcall = builtin_save_expr (rcall);
8177       return fold_build2_loc (loc, COMPLEX_EXPR, type,
8178 			  fold_build2_loc (loc, MULT_EXPR, rtype,
8179 				       rcall,
8180 			 	       fold_build1_loc (loc, REALPART_EXPR,
8181 						    rtype, icall)),
8182 			  fold_build2_loc (loc, MULT_EXPR, rtype,
8183 				       rcall,
8184 				       fold_build1_loc (loc, IMAGPART_EXPR,
8185 						    rtype, icall)));
8186     }
8187 
8188   return NULL_TREE;
8189 }
8190 
8191 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8192    Return NULL_TREE if no simplification can be made.  */
8193 
8194 static tree
8195 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8196 {
8197   if (!validate_arg (arg, REAL_TYPE))
8198     return NULL_TREE;
8199 
8200   /* Optimize trunc of constant value.  */
8201   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8202     {
8203       REAL_VALUE_TYPE r, x;
8204       tree type = TREE_TYPE (TREE_TYPE (fndecl));
8205 
8206       x = TREE_REAL_CST (arg);
8207       real_trunc (&r, TYPE_MODE (type), &x);
8208       return build_real (type, r);
8209     }
8210 
8211   return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8212 }
8213 
8214 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8215    Return NULL_TREE if no simplification can be made.  */
8216 
8217 static tree
8218 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8219 {
8220   if (!validate_arg (arg, REAL_TYPE))
8221     return NULL_TREE;
8222 
8223   /* Optimize floor of constant value.  */
8224   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8225     {
8226       REAL_VALUE_TYPE x;
8227 
8228       x = TREE_REAL_CST (arg);
8229       if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8230 	{
8231 	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
8232 	  REAL_VALUE_TYPE r;
8233 
8234 	  real_floor (&r, TYPE_MODE (type), &x);
8235 	  return build_real (type, r);
8236 	}
8237     }
8238 
8239   /* Fold floor (x) where x is nonnegative to trunc (x).  */
8240   if (tree_expr_nonnegative_p (arg))
8241     {
8242       tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8243       if (truncfn)
8244 	return build_call_expr_loc (loc, truncfn, 1, arg);
8245     }
8246 
8247   return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8248 }
8249 
8250 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8251    Return NULL_TREE if no simplification can be made.  */
8252 
8253 static tree
8254 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8255 {
8256   if (!validate_arg (arg, REAL_TYPE))
8257     return NULL_TREE;
8258 
8259   /* Optimize ceil of constant value.  */
8260   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8261     {
8262       REAL_VALUE_TYPE x;
8263 
8264       x = TREE_REAL_CST (arg);
8265       if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8266 	{
8267 	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
8268 	  REAL_VALUE_TYPE r;
8269 
8270 	  real_ceil (&r, TYPE_MODE (type), &x);
8271 	  return build_real (type, r);
8272 	}
8273     }
8274 
8275   return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8276 }
8277 
8278 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8279    Return NULL_TREE if no simplification can be made.  */
8280 
8281 static tree
8282 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8283 {
8284   if (!validate_arg (arg, REAL_TYPE))
8285     return NULL_TREE;
8286 
8287   /* Optimize round of constant value.  */
8288   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8289     {
8290       REAL_VALUE_TYPE x;
8291 
8292       x = TREE_REAL_CST (arg);
8293       if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8294 	{
8295 	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
8296 	  REAL_VALUE_TYPE r;
8297 
8298 	  real_round (&r, TYPE_MODE (type), &x);
8299 	  return build_real (type, r);
8300 	}
8301     }
8302 
8303   return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8304 }
8305 
8306 /* Fold function call to builtin lround, lroundf or lroundl (or the
8307    corresponding long long versions) and other rounding functions.  ARG
8308    is the argument to the call.  Return NULL_TREE if no simplification
8309    can be made.  */
8310 
8311 static tree
8312 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8313 {
8314   if (!validate_arg (arg, REAL_TYPE))
8315     return NULL_TREE;
8316 
8317   /* Optimize lround of constant value.  */
8318   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8319     {
8320       const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8321 
8322       if (real_isfinite (&x))
8323 	{
8324 	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8325 	  tree ftype = TREE_TYPE (arg);
8326 	  REAL_VALUE_TYPE r;
8327 	  bool fail = false;
8328 
8329 	  switch (DECL_FUNCTION_CODE (fndecl))
8330 	    {
8331 	    CASE_FLT_FN (BUILT_IN_IFLOOR):
8332 	    CASE_FLT_FN (BUILT_IN_LFLOOR):
8333 	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
8334 	      real_floor (&r, TYPE_MODE (ftype), &x);
8335 	      break;
8336 
8337 	    CASE_FLT_FN (BUILT_IN_ICEIL):
8338 	    CASE_FLT_FN (BUILT_IN_LCEIL):
8339 	    CASE_FLT_FN (BUILT_IN_LLCEIL):
8340 	      real_ceil (&r, TYPE_MODE (ftype), &x);
8341 	      break;
8342 
8343 	    CASE_FLT_FN (BUILT_IN_IROUND):
8344 	    CASE_FLT_FN (BUILT_IN_LROUND):
8345 	    CASE_FLT_FN (BUILT_IN_LLROUND):
8346 	      real_round (&r, TYPE_MODE (ftype), &x);
8347 	      break;
8348 
8349 	    default:
8350 	      gcc_unreachable ();
8351 	    }
8352 
8353 	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8354 	  if (!fail)
8355 	    return wide_int_to_tree (itype, val);
8356 	}
8357     }
8358 
8359   switch (DECL_FUNCTION_CODE (fndecl))
8360     {
8361     CASE_FLT_FN (BUILT_IN_LFLOOR):
8362     CASE_FLT_FN (BUILT_IN_LLFLOOR):
8363       /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
8364       if (tree_expr_nonnegative_p (arg))
8365 	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8366 			    TREE_TYPE (TREE_TYPE (fndecl)), arg);
8367       break;
8368     default:;
8369     }
8370 
8371   return fold_fixed_mathfn (loc, fndecl, arg);
8372 }
8373 
8374 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8375    and their long and long long variants (i.e. ffsl and ffsll).  ARG is
8376    the argument to the call.  Return NULL_TREE if no simplification can
8377    be made.  */
8378 
8379 static tree
8380 fold_builtin_bitop (tree fndecl, tree arg)
8381 {
8382   if (!validate_arg (arg, INTEGER_TYPE))
8383     return NULL_TREE;
8384 
8385   /* Optimize for constant argument.  */
8386   if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8387     {
8388       tree type = TREE_TYPE (arg);
8389       int result;
8390 
8391       switch (DECL_FUNCTION_CODE (fndecl))
8392 	{
8393 	CASE_INT_FN (BUILT_IN_FFS):
8394 	  result = wi::ffs (arg);
8395 	  break;
8396 
8397 	CASE_INT_FN (BUILT_IN_CLZ):
8398 	  if (wi::ne_p (arg, 0))
8399 	    result = wi::clz (arg);
8400 	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8401 	    result = TYPE_PRECISION (type);
8402 	  break;
8403 
8404 	CASE_INT_FN (BUILT_IN_CTZ):
8405 	  if (wi::ne_p (arg, 0))
8406 	    result = wi::ctz (arg);
8407 	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8408 	    result = TYPE_PRECISION (type);
8409 	  break;
8410 
8411 	CASE_INT_FN (BUILT_IN_CLRSB):
8412 	  result = wi::clrsb (arg);
8413 	  break;
8414 
8415 	CASE_INT_FN (BUILT_IN_POPCOUNT):
8416 	  result = wi::popcount (arg);
8417 	  break;
8418 
8419 	CASE_INT_FN (BUILT_IN_PARITY):
8420 	  result = wi::parity (arg);
8421 	  break;
8422 
8423 	default:
8424 	  gcc_unreachable ();
8425 	}
8426 
8427       return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8428     }
8429 
8430   return NULL_TREE;
8431 }
8432 
8433 /* Fold function call to builtin_bswap and the short, long and long long
8434    variants.  Return NULL_TREE if no simplification can be made.  */
8435 static tree
8436 fold_builtin_bswap (tree fndecl, tree arg)
8437 {
8438   if (! validate_arg (arg, INTEGER_TYPE))
8439     return NULL_TREE;
8440 
8441   /* Optimize constant value.  */
8442   if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8443     {
8444       tree type = TREE_TYPE (TREE_TYPE (fndecl));
8445 
8446       switch (DECL_FUNCTION_CODE (fndecl))
8447 	{
8448 	  case BUILT_IN_BSWAP16:
8449 	  case BUILT_IN_BSWAP32:
8450 	  case BUILT_IN_BSWAP64:
8451 	    {
8452 	      signop sgn = TYPE_SIGN (type);
8453 	      tree result =
8454 		wide_int_to_tree (type,
8455 				  wide_int::from (arg, TYPE_PRECISION (type),
8456 						  sgn).bswap ());
8457 	      return result;
8458 	    }
8459 	default:
8460 	  gcc_unreachable ();
8461 	}
8462     }
8463 
8464   return NULL_TREE;
8465 }
8466 
8467 /* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
8468    NULL_TREE if no simplification can be made.  */
8469 
8470 static tree
8471 fold_builtin_hypot (location_t loc, tree fndecl,
8472 		    tree arg0, tree arg1, tree type)
8473 {
8474   tree res, narg0, narg1;
8475 
8476   if (!validate_arg (arg0, REAL_TYPE)
8477       || !validate_arg (arg1, REAL_TYPE))
8478     return NULL_TREE;
8479 
8480   /* Calculate the result when the argument is a constant.  */
8481   if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8482     return res;
8483 
8484   /* If either argument to hypot has a negate or abs, strip that off.
8485      E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
8486   narg0 = fold_strip_sign_ops (arg0);
8487   narg1 = fold_strip_sign_ops (arg1);
8488   if (narg0 || narg1)
8489     {
8490       return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8491 			      narg1 ? narg1 : arg1);
8492     }
8493 
8494   /* If either argument is zero, hypot is fabs of the other.  */
8495   if (real_zerop (arg0))
8496     return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8497   else if (real_zerop (arg1))
8498     return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8499 
8500   /* hypot(x,x) -> fabs(x)*sqrt(2).  */
8501   if (flag_unsafe_math_optimizations
8502       && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8503     {
8504       const REAL_VALUE_TYPE sqrt2_trunc
8505 	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8506       return fold_build2_loc (loc, MULT_EXPR, type,
8507 			  fold_build1_loc (loc, ABS_EXPR, type, arg0),
8508 			  build_real (type, sqrt2_trunc));
8509     }
8510 
8511   return NULL_TREE;
8512 }
8513 
8514 
8515 /* Fold a builtin function call to pow, powf, or powl.  Return
8516    NULL_TREE if no simplification can be made.  */
8517 static tree
8518 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8519 {
8520   tree res;
8521 
8522   if (!validate_arg (arg0, REAL_TYPE)
8523        || !validate_arg (arg1, REAL_TYPE))
8524     return NULL_TREE;
8525 
8526   /* Calculate the result when the argument is a constant.  */
8527   if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8528     return res;
8529 
8530   /* Optimize pow(1.0,y) = 1.0.  */
8531   if (real_onep (arg0))
8532     return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8533 
8534   if (TREE_CODE (arg1) == REAL_CST
8535       && !TREE_OVERFLOW (arg1))
8536     {
8537       REAL_VALUE_TYPE cint;
8538       REAL_VALUE_TYPE c;
8539       HOST_WIDE_INT n;
8540 
8541       c = TREE_REAL_CST (arg1);
8542 
8543       /* Optimize pow(x,0.0) = 1.0.  */
8544       if (REAL_VALUES_EQUAL (c, dconst0))
8545 	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8546 				 arg0);
8547 
8548       /* Optimize pow(x,1.0) = x.  */
8549       if (REAL_VALUES_EQUAL (c, dconst1))
8550 	return arg0;
8551 
8552       /* Optimize pow(x,-1.0) = 1.0/x.  */
8553       if (REAL_VALUES_EQUAL (c, dconstm1))
8554 	return fold_build2_loc (loc, RDIV_EXPR, type,
8555 			    build_real (type, dconst1), arg0);
8556 
8557       /* Optimize pow(x,0.5) = sqrt(x).  */
8558       if (flag_unsafe_math_optimizations
8559 	  && REAL_VALUES_EQUAL (c, dconsthalf))
8560 	{
8561 	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8562 
8563 	  if (sqrtfn != NULL_TREE)
8564 	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8565 	}
8566 
8567       /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
8568       if (flag_unsafe_math_optimizations)
8569 	{
8570 	  const REAL_VALUE_TYPE dconstroot
8571 	    = real_value_truncate (TYPE_MODE (type), dconst_third ());
8572 
8573 	  if (REAL_VALUES_EQUAL (c, dconstroot))
8574 	    {
8575 	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8576 	      if (cbrtfn != NULL_TREE)
8577 		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8578 	    }
8579 	}
8580 
8581       /* Check for an integer exponent.  */
8582       n = real_to_integer (&c);
8583       real_from_integer (&cint, VOIDmode, n, SIGNED);
8584       if (real_identical (&c, &cint))
8585 	{
8586 	  /* Attempt to evaluate pow at compile-time, unless this should
8587 	     raise an exception.  */
8588 	  if (TREE_CODE (arg0) == REAL_CST
8589 	      && !TREE_OVERFLOW (arg0)
8590 	      && (n > 0
8591 		  || (!flag_trapping_math && !flag_errno_math)
8592 		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8593 	    {
8594 	      REAL_VALUE_TYPE x;
8595 	      bool inexact;
8596 
8597 	      x = TREE_REAL_CST (arg0);
8598 	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8599 	      if (flag_unsafe_math_optimizations || !inexact)
8600 		return build_real (type, x);
8601 	    }
8602 
8603 	  /* Strip sign ops from even integer powers.  */
8604 	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8605 	    {
8606 	      tree narg0 = fold_strip_sign_ops (arg0);
8607 	      if (narg0)
8608 		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8609 	    }
8610 	}
8611     }
8612 
8613   if (flag_unsafe_math_optimizations)
8614     {
8615       const enum built_in_function fcode = builtin_mathfn_code (arg0);
8616 
8617       /* Optimize pow(expN(x),y) = expN(x*y).  */
8618       if (BUILTIN_EXPONENT_P (fcode))
8619 	{
8620 	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8621 	  tree arg = CALL_EXPR_ARG (arg0, 0);
8622 	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8623 	  return build_call_expr_loc (loc, expfn, 1, arg);
8624 	}
8625 
8626       /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
8627       if (BUILTIN_SQRT_P (fcode))
8628 	{
8629 	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
8630 	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8631 				    build_real (type, dconsthalf));
8632 	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8633 	}
8634 
8635       /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
8636       if (BUILTIN_CBRT_P (fcode))
8637 	{
8638 	  tree arg = CALL_EXPR_ARG (arg0, 0);
8639 	  if (tree_expr_nonnegative_p (arg))
8640 	    {
8641 	      const REAL_VALUE_TYPE dconstroot
8642 		= real_value_truncate (TYPE_MODE (type), dconst_third ());
8643 	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8644 					build_real (type, dconstroot));
8645 	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8646 	    }
8647 	}
8648 
8649       /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
8650       if (fcode == BUILT_IN_POW
8651 	  || fcode == BUILT_IN_POWF
8652 	  || fcode == BUILT_IN_POWL)
8653 	{
8654 	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
8655 	  if (tree_expr_nonnegative_p (arg00))
8656 	    {
8657 	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
8658 	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8659 	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8660 	    }
8661 	}
8662     }
8663 
8664   return NULL_TREE;
8665 }
8666 
8667 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8668    Return NULL_TREE if no simplification can be made.  */
8669 static tree
8670 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8671 		   tree arg0, tree arg1, tree type)
8672 {
8673   if (!validate_arg (arg0, REAL_TYPE)
8674       || !validate_arg (arg1, INTEGER_TYPE))
8675     return NULL_TREE;
8676 
8677   /* Optimize pow(1.0,y) = 1.0.  */
8678   if (real_onep (arg0))
8679     return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8680 
8681   if (tree_fits_shwi_p (arg1))
8682     {
8683       HOST_WIDE_INT c = tree_to_shwi (arg1);
8684 
8685       /* Evaluate powi at compile-time.  */
8686       if (TREE_CODE (arg0) == REAL_CST
8687 	  && !TREE_OVERFLOW (arg0))
8688 	{
8689 	  REAL_VALUE_TYPE x;
8690 	  x = TREE_REAL_CST (arg0);
8691 	  real_powi (&x, TYPE_MODE (type), &x, c);
8692 	  return build_real (type, x);
8693 	}
8694 
8695       /* Optimize pow(x,0) = 1.0.  */
8696       if (c == 0)
8697 	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8698 				 arg0);
8699 
8700       /* Optimize pow(x,1) = x.  */
8701       if (c == 1)
8702 	return arg0;
8703 
8704       /* Optimize pow(x,-1) = 1.0/x.  */
8705       if (c == -1)
8706 	return fold_build2_loc (loc, RDIV_EXPR, type,
8707 			   build_real (type, dconst1), arg0);
8708     }
8709 
8710   return NULL_TREE;
8711 }
8712 
8713 /* A subroutine of fold_builtin to fold the various exponent
8714    functions.  Return NULL_TREE if no simplification can be made.
8715    FUNC is the corresponding MPFR exponent function.  */
8716 
8717 static tree
8718 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8719 		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8720 {
8721   if (validate_arg (arg, REAL_TYPE))
8722     {
8723       tree type = TREE_TYPE (TREE_TYPE (fndecl));
8724       tree res;
8725 
8726       /* Calculate the result when the argument is a constant.  */
8727       if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8728 	return res;
8729 
8730       /* Optimize expN(logN(x)) = x.  */
8731       if (flag_unsafe_math_optimizations)
8732 	{
8733 	  const enum built_in_function fcode = builtin_mathfn_code (arg);
8734 
8735 	  if ((func == mpfr_exp
8736 	       && (fcode == BUILT_IN_LOG
8737 		   || fcode == BUILT_IN_LOGF
8738 		   || fcode == BUILT_IN_LOGL))
8739 	      || (func == mpfr_exp2
8740 		  && (fcode == BUILT_IN_LOG2
8741 		      || fcode == BUILT_IN_LOG2F
8742 		      || fcode == BUILT_IN_LOG2L))
8743 	      || (func == mpfr_exp10
8744 		  && (fcode == BUILT_IN_LOG10
8745 		      || fcode == BUILT_IN_LOG10F
8746 		      || fcode == BUILT_IN_LOG10L)))
8747 	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8748 	}
8749     }
8750 
8751   return NULL_TREE;
8752 }
8753 
8754 /* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
8755    arguments to the call, and TYPE is its return type.
8756    Return NULL_TREE if no simplification can be made.  */
8757 
8758 static tree
8759 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8760 {
8761   if (!validate_arg (arg1, POINTER_TYPE)
8762       || !validate_arg (arg2, INTEGER_TYPE)
8763       || !validate_arg (len, INTEGER_TYPE))
8764     return NULL_TREE;
8765   else
8766     {
8767       const char *p1;
8768 
8769       if (TREE_CODE (arg2) != INTEGER_CST
8770 	  || !tree_fits_uhwi_p (len))
8771 	return NULL_TREE;
8772 
8773       p1 = c_getstr (arg1);
8774       if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8775 	{
8776 	  char c;
8777 	  const char *r;
8778 	  tree tem;
8779 
8780 	  if (target_char_cast (arg2, &c))
8781 	    return NULL_TREE;
8782 
8783 	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8784 
8785 	  if (r == NULL)
8786 	    return build_int_cst (TREE_TYPE (arg1), 0);
8787 
8788 	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8789 	  return fold_convert_loc (loc, type, tem);
8790 	}
8791       return NULL_TREE;
8792     }
8793 }
8794 
8795 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8796    Return NULL_TREE if no simplification can be made.  */
8797 
8798 static tree
8799 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8800 {
8801   const char *p1, *p2;
8802 
8803   if (!validate_arg (arg1, POINTER_TYPE)
8804       || !validate_arg (arg2, POINTER_TYPE)
8805       || !validate_arg (len, INTEGER_TYPE))
8806     return NULL_TREE;
8807 
8808   /* If the LEN parameter is zero, return zero.  */
8809   if (integer_zerop (len))
8810     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8811 			      arg1, arg2);
8812 
8813   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8814   if (operand_equal_p (arg1, arg2, 0))
8815     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8816 
8817   p1 = c_getstr (arg1);
8818   p2 = c_getstr (arg2);
8819 
8820   /* If all arguments are constant, and the value of len is not greater
8821      than the lengths of arg1 and arg2, evaluate at compile-time.  */
8822   if (tree_fits_uhwi_p (len) && p1 && p2
8823       && compare_tree_int (len, strlen (p1) + 1) <= 0
8824       && compare_tree_int (len, strlen (p2) + 1) <= 0)
8825     {
8826       const int r = memcmp (p1, p2, tree_to_uhwi (len));
8827 
8828       if (r > 0)
8829 	return integer_one_node;
8830       else if (r < 0)
8831 	return integer_minus_one_node;
8832       else
8833 	return integer_zero_node;
8834     }
8835 
8836   /* If len parameter is one, return an expression corresponding to
8837      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
8838   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8839     {
8840       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8841       tree cst_uchar_ptr_node
8842 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8843 
8844       tree ind1
8845 	= fold_convert_loc (loc, integer_type_node,
8846 			    build1 (INDIRECT_REF, cst_uchar_node,
8847 				    fold_convert_loc (loc,
8848 						      cst_uchar_ptr_node,
8849 						      arg1)));
8850       tree ind2
8851 	= fold_convert_loc (loc, integer_type_node,
8852 			    build1 (INDIRECT_REF, cst_uchar_node,
8853 				    fold_convert_loc (loc,
8854 						      cst_uchar_ptr_node,
8855 						      arg2)));
8856       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8857     }
8858 
8859   return NULL_TREE;
8860 }
8861 
8862 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8863    Return NULL_TREE if no simplification can be made.  */
8864 
8865 static tree
8866 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8867 {
8868   const char *p1, *p2;
8869 
8870   if (!validate_arg (arg1, POINTER_TYPE)
8871       || !validate_arg (arg2, POINTER_TYPE))
8872     return NULL_TREE;
8873 
8874   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8875   if (operand_equal_p (arg1, arg2, 0))
8876     return integer_zero_node;
8877 
8878   p1 = c_getstr (arg1);
8879   p2 = c_getstr (arg2);
8880 
8881   if (p1 && p2)
8882     {
8883       const int i = strcmp (p1, p2);
8884       if (i < 0)
8885 	return integer_minus_one_node;
8886       else if (i > 0)
8887 	return integer_one_node;
8888       else
8889 	return integer_zero_node;
8890     }
8891 
8892   /* If the second arg is "", return *(const unsigned char*)arg1.  */
8893   if (p2 && *p2 == '\0')
8894     {
8895       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8896       tree cst_uchar_ptr_node
8897 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8898 
8899       return fold_convert_loc (loc, integer_type_node,
8900 			       build1 (INDIRECT_REF, cst_uchar_node,
8901 				       fold_convert_loc (loc,
8902 							 cst_uchar_ptr_node,
8903 							 arg1)));
8904     }
8905 
8906   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
8907   if (p1 && *p1 == '\0')
8908     {
8909       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8910       tree cst_uchar_ptr_node
8911 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8912 
8913       tree temp
8914 	= fold_convert_loc (loc, integer_type_node,
8915 			    build1 (INDIRECT_REF, cst_uchar_node,
8916 				    fold_convert_loc (loc,
8917 						      cst_uchar_ptr_node,
8918 						      arg2)));
8919       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8920     }
8921 
8922   return NULL_TREE;
8923 }
8924 
8925 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8926    Return NULL_TREE if no simplification can be made.  */
8927 
8928 static tree
8929 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8930 {
8931   const char *p1, *p2;
8932 
8933   if (!validate_arg (arg1, POINTER_TYPE)
8934       || !validate_arg (arg2, POINTER_TYPE)
8935       || !validate_arg (len, INTEGER_TYPE))
8936     return NULL_TREE;
8937 
8938   /* If the LEN parameter is zero, return zero.  */
8939   if (integer_zerop (len))
8940     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8941 			      arg1, arg2);
8942 
8943   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8944   if (operand_equal_p (arg1, arg2, 0))
8945     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8946 
8947   p1 = c_getstr (arg1);
8948   p2 = c_getstr (arg2);
8949 
8950   if (tree_fits_uhwi_p (len) && p1 && p2)
8951     {
8952       const int i = strncmp (p1, p2, tree_to_uhwi (len));
8953       if (i > 0)
8954 	return integer_one_node;
8955       else if (i < 0)
8956 	return integer_minus_one_node;
8957       else
8958 	return integer_zero_node;
8959     }
8960 
8961   /* If the second arg is "", and the length is greater than zero,
8962      return *(const unsigned char*)arg1.  */
8963   if (p2 && *p2 == '\0'
8964       && TREE_CODE (len) == INTEGER_CST
8965       && tree_int_cst_sgn (len) == 1)
8966     {
8967       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8968       tree cst_uchar_ptr_node
8969 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8970 
8971       return fold_convert_loc (loc, integer_type_node,
8972 			       build1 (INDIRECT_REF, cst_uchar_node,
8973 				       fold_convert_loc (loc,
8974 							 cst_uchar_ptr_node,
8975 							 arg1)));
8976     }
8977 
8978   /* If the first arg is "", and the length is greater than zero,
8979      return -*(const unsigned char*)arg2.  */
8980   if (p1 && *p1 == '\0'
8981       && TREE_CODE (len) == INTEGER_CST
8982       && tree_int_cst_sgn (len) == 1)
8983     {
8984       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8985       tree cst_uchar_ptr_node
8986 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8987 
8988       tree temp = fold_convert_loc (loc, integer_type_node,
8989 				    build1 (INDIRECT_REF, cst_uchar_node,
8990 					    fold_convert_loc (loc,
8991 							      cst_uchar_ptr_node,
8992 							      arg2)));
8993       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8994     }
8995 
8996   /* If len parameter is one, return an expression corresponding to
8997      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
8998   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8999     {
9000       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9001       tree cst_uchar_ptr_node
9002 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9003 
9004       tree ind1 = fold_convert_loc (loc, integer_type_node,
9005 				    build1 (INDIRECT_REF, cst_uchar_node,
9006 					    fold_convert_loc (loc,
9007 							      cst_uchar_ptr_node,
9008 							      arg1)));
9009       tree ind2 = fold_convert_loc (loc, integer_type_node,
9010 				    build1 (INDIRECT_REF, cst_uchar_node,
9011 					    fold_convert_loc (loc,
9012 							      cst_uchar_ptr_node,
9013 							      arg2)));
9014       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9015     }
9016 
9017   return NULL_TREE;
9018 }
9019 
9020 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9021    ARG.  Return NULL_TREE if no simplification can be made.  */
9022 
9023 static tree
9024 fold_builtin_signbit (location_t loc, tree arg, tree type)
9025 {
9026   if (!validate_arg (arg, REAL_TYPE))
9027     return NULL_TREE;
9028 
9029   /* If ARG is a compile-time constant, determine the result.  */
9030   if (TREE_CODE (arg) == REAL_CST
9031       && !TREE_OVERFLOW (arg))
9032     {
9033       REAL_VALUE_TYPE c;
9034 
9035       c = TREE_REAL_CST (arg);
9036       return (REAL_VALUE_NEGATIVE (c)
9037 	      ? build_one_cst (type)
9038 	      : build_zero_cst (type));
9039     }
9040 
9041   /* If ARG is non-negative, the result is always zero.  */
9042   if (tree_expr_nonnegative_p (arg))
9043     return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9044 
9045   /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
9046   if (!HONOR_SIGNED_ZEROS (arg))
9047     return fold_convert (type,
9048 			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9049 			build_real (TREE_TYPE (arg), dconst0)));
9050 
9051   return NULL_TREE;
9052 }
9053 
9054 /* Fold function call to builtin copysign, copysignf or copysignl with
9055    arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
9056    be made.  */
9057 
9058 static tree
9059 fold_builtin_copysign (location_t loc, tree fndecl,
9060 		       tree arg1, tree arg2, tree type)
9061 {
9062   tree tem;
9063 
9064   if (!validate_arg (arg1, REAL_TYPE)
9065       || !validate_arg (arg2, REAL_TYPE))
9066     return NULL_TREE;
9067 
9068   /* copysign(X,X) is X.  */
9069   if (operand_equal_p (arg1, arg2, 0))
9070     return fold_convert_loc (loc, type, arg1);
9071 
9072   /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
9073   if (TREE_CODE (arg1) == REAL_CST
9074       && TREE_CODE (arg2) == REAL_CST
9075       && !TREE_OVERFLOW (arg1)
9076       && !TREE_OVERFLOW (arg2))
9077     {
9078       REAL_VALUE_TYPE c1, c2;
9079 
9080       c1 = TREE_REAL_CST (arg1);
9081       c2 = TREE_REAL_CST (arg2);
9082       /* c1.sign := c2.sign.  */
9083       real_copysign (&c1, &c2);
9084       return build_real (type, c1);
9085     }
9086 
9087   /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9088      Remember to evaluate Y for side-effects.  */
9089   if (tree_expr_nonnegative_p (arg2))
9090     return omit_one_operand_loc (loc, type,
9091 			     fold_build1_loc (loc, ABS_EXPR, type, arg1),
9092 			     arg2);
9093 
9094   /* Strip sign changing operations for the first argument.  */
9095   tem = fold_strip_sign_ops (arg1);
9096   if (tem)
9097     return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9098 
9099   return NULL_TREE;
9100 }
9101 
9102 /* Fold a call to builtin isascii with argument ARG.  */
9103 
9104 static tree
9105 fold_builtin_isascii (location_t loc, tree arg)
9106 {
9107   if (!validate_arg (arg, INTEGER_TYPE))
9108     return NULL_TREE;
9109   else
9110     {
9111       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
9112       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9113 			 build_int_cst (integer_type_node,
9114 					~ (unsigned HOST_WIDE_INT) 0x7f));
9115       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9116 			      arg, integer_zero_node);
9117     }
9118 }
9119 
9120 /* Fold a call to builtin toascii with argument ARG.  */
9121 
9122 static tree
9123 fold_builtin_toascii (location_t loc, tree arg)
9124 {
9125   if (!validate_arg (arg, INTEGER_TYPE))
9126     return NULL_TREE;
9127 
9128   /* Transform toascii(c) -> (c & 0x7f).  */
9129   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9130 			  build_int_cst (integer_type_node, 0x7f));
9131 }
9132 
9133 /* Fold a call to builtin isdigit with argument ARG.  */
9134 
9135 static tree
9136 fold_builtin_isdigit (location_t loc, tree arg)
9137 {
9138   if (!validate_arg (arg, INTEGER_TYPE))
9139     return NULL_TREE;
9140   else
9141     {
9142       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
9143       /* According to the C standard, isdigit is unaffected by locale.
9144 	 However, it definitely is affected by the target character set.  */
9145       unsigned HOST_WIDE_INT target_digit0
9146 	= lang_hooks.to_target_charset ('0');
9147 
9148       if (target_digit0 == 0)
9149 	return NULL_TREE;
9150 
9151       arg = fold_convert_loc (loc, unsigned_type_node, arg);
9152       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9153 			 build_int_cst (unsigned_type_node, target_digit0));
9154       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9155 			  build_int_cst (unsigned_type_node, 9));
9156     }
9157 }
9158 
9159 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
9160 
9161 static tree
9162 fold_builtin_fabs (location_t loc, tree arg, tree type)
9163 {
9164   if (!validate_arg (arg, REAL_TYPE))
9165     return NULL_TREE;
9166 
9167   arg = fold_convert_loc (loc, type, arg);
9168   if (TREE_CODE (arg) == REAL_CST)
9169     return fold_abs_const (arg, type);
9170   return fold_build1_loc (loc, ABS_EXPR, type, arg);
9171 }
9172 
9173 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
9174 
9175 static tree
9176 fold_builtin_abs (location_t loc, tree arg, tree type)
9177 {
9178   if (!validate_arg (arg, INTEGER_TYPE))
9179     return NULL_TREE;
9180 
9181   arg = fold_convert_loc (loc, type, arg);
9182   if (TREE_CODE (arg) == INTEGER_CST)
9183     return fold_abs_const (arg, type);
9184   return fold_build1_loc (loc, ABS_EXPR, type, arg);
9185 }
9186 
9187 /* Fold a fma operation with arguments ARG[012].  */
9188 
9189 tree
9190 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9191 	  tree type, tree arg0, tree arg1, tree arg2)
9192 {
9193   if (TREE_CODE (arg0) == REAL_CST
9194       && TREE_CODE (arg1) == REAL_CST
9195       && TREE_CODE (arg2) == REAL_CST)
9196     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9197 
9198   return NULL_TREE;
9199 }
9200 
9201 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */
9202 
9203 static tree
9204 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9205 {
9206   if (validate_arg (arg0, REAL_TYPE)
9207       && validate_arg (arg1, REAL_TYPE)
9208       && validate_arg (arg2, REAL_TYPE))
9209     {
9210       tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9211       if (tem)
9212 	return tem;
9213 
9214       /* ??? Only expand to FMA_EXPR if it's directly supported.  */
9215       if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9216         return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9217     }
9218   return NULL_TREE;
9219 }
9220 
9221 /* Fold a call to builtin fmin or fmax.  */
9222 
9223 static tree
9224 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9225 			tree type, bool max)
9226 {
9227   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9228     {
9229       /* Calculate the result when the argument is a constant.  */
9230       tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9231 
9232       if (res)
9233 	return res;
9234 
9235       /* If either argument is NaN, return the other one.  Avoid the
9236 	 transformation if we get (and honor) a signalling NaN.  Using
9237 	 omit_one_operand() ensures we create a non-lvalue.  */
9238       if (TREE_CODE (arg0) == REAL_CST
9239 	  && real_isnan (&TREE_REAL_CST (arg0))
9240 	  && (! HONOR_SNANS (arg0)
9241 	      || ! TREE_REAL_CST (arg0).signalling))
9242 	return omit_one_operand_loc (loc, type, arg1, arg0);
9243       if (TREE_CODE (arg1) == REAL_CST
9244 	  && real_isnan (&TREE_REAL_CST (arg1))
9245 	  && (! HONOR_SNANS (arg1)
9246 	      || ! TREE_REAL_CST (arg1).signalling))
9247 	return omit_one_operand_loc (loc, type, arg0, arg1);
9248 
9249       /* Transform fmin/fmax(x,x) -> x.  */
9250       if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9251 	return omit_one_operand_loc (loc, type, arg0, arg1);
9252 
9253       /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
9254 	 functions to return the numeric arg if the other one is NaN.
9255 	 These tree codes don't honor that, so only transform if
9256 	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
9257 	 handled, so we don't have to worry about it either.  */
9258       if (flag_finite_math_only)
9259 	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9260 			    fold_convert_loc (loc, type, arg0),
9261 			    fold_convert_loc (loc, type, arg1));
9262     }
9263   return NULL_TREE;
9264 }
9265 
9266 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
9267 
9268 static tree
9269 fold_builtin_carg (location_t loc, tree arg, tree type)
9270 {
9271   if (validate_arg (arg, COMPLEX_TYPE)
9272       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9273     {
9274       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9275 
9276       if (atan2_fn)
9277         {
9278   	  tree new_arg = builtin_save_expr (arg);
9279 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9280 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9281 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9282 	}
9283     }
9284 
9285   return NULL_TREE;
9286 }
9287 
9288 /* Fold a call to builtin logb/ilogb.  */
9289 
9290 static tree
9291 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9292 {
9293   if (! validate_arg (arg, REAL_TYPE))
9294     return NULL_TREE;
9295 
9296   STRIP_NOPS (arg);
9297 
9298   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9299     {
9300       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9301 
9302       switch (value->cl)
9303       {
9304       case rvc_nan:
9305       case rvc_inf:
9306 	/* If arg is Inf or NaN and we're logb, return it.  */
9307 	if (TREE_CODE (rettype) == REAL_TYPE)
9308 	  {
9309 	    /* For logb(-Inf) we have to return +Inf.  */
9310 	    if (real_isinf (value) && real_isneg (value))
9311 	      {
9312 		REAL_VALUE_TYPE tem;
9313 		real_inf (&tem);
9314 		return build_real (rettype, tem);
9315 	      }
9316 	    return fold_convert_loc (loc, rettype, arg);
9317 	  }
9318 	/* Fall through... */
9319       case rvc_zero:
9320 	/* Zero may set errno and/or raise an exception for logb, also
9321 	   for ilogb we don't know FP_ILOGB0.  */
9322 	return NULL_TREE;
9323       case rvc_normal:
9324 	/* For normal numbers, proceed iff radix == 2.  In GCC,
9325 	   normalized significands are in the range [0.5, 1.0).  We
9326 	   want the exponent as if they were [1.0, 2.0) so get the
9327 	   exponent and subtract 1.  */
9328 	if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9329 	  return fold_convert_loc (loc, rettype,
9330 				   build_int_cst (integer_type_node,
9331 						  REAL_EXP (value)-1));
9332 	break;
9333       }
9334     }
9335 
9336   return NULL_TREE;
9337 }
9338 
9339 /* Fold a call to builtin significand, if radix == 2.  */
9340 
9341 static tree
9342 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9343 {
9344   if (! validate_arg (arg, REAL_TYPE))
9345     return NULL_TREE;
9346 
9347   STRIP_NOPS (arg);
9348 
9349   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9350     {
9351       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9352 
9353       switch (value->cl)
9354       {
9355       case rvc_zero:
9356       case rvc_nan:
9357       case rvc_inf:
9358 	/* If arg is +-0, +-Inf or +-NaN, then return it.  */
9359 	return fold_convert_loc (loc, rettype, arg);
9360       case rvc_normal:
9361 	/* For normal numbers, proceed iff radix == 2.  */
9362 	if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9363 	  {
9364 	    REAL_VALUE_TYPE result = *value;
9365 	    /* In GCC, normalized significands are in the range [0.5,
9366 	       1.0).  We want them to be [1.0, 2.0) so set the
9367 	       exponent to 1.  */
9368 	    SET_REAL_EXP (&result, 1);
9369 	    return build_real (rettype, result);
9370 	  }
9371 	break;
9372       }
9373     }
9374 
9375   return NULL_TREE;
9376 }
9377 
9378 /* Fold a call to builtin frexp, we can assume the base is 2.  */
9379 
9380 static tree
9381 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9382 {
9383   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9384     return NULL_TREE;
9385 
9386   STRIP_NOPS (arg0);
9387 
9388   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9389     return NULL_TREE;
9390 
9391   arg1 = build_fold_indirect_ref_loc (loc, arg1);
9392 
9393   /* Proceed if a valid pointer type was passed in.  */
9394   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9395     {
9396       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9397       tree frac, exp;
9398 
9399       switch (value->cl)
9400       {
9401       case rvc_zero:
9402 	/* For +-0, return (*exp = 0, +-0).  */
9403 	exp = integer_zero_node;
9404 	frac = arg0;
9405 	break;
9406       case rvc_nan:
9407       case rvc_inf:
9408 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
9409 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
9410       case rvc_normal:
9411 	{
9412 	  /* Since the frexp function always expects base 2, and in
9413 	     GCC normalized significands are already in the range
9414 	     [0.5, 1.0), we have exactly what frexp wants.  */
9415 	  REAL_VALUE_TYPE frac_rvt = *value;
9416 	  SET_REAL_EXP (&frac_rvt, 0);
9417 	  frac = build_real (rettype, frac_rvt);
9418 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
9419 	}
9420 	break;
9421       default:
9422 	gcc_unreachable ();
9423       }
9424 
9425       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9426       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9427       TREE_SIDE_EFFECTS (arg1) = 1;
9428       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9429     }
9430 
9431   return NULL_TREE;
9432 }
9433 
9434 /* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
9435    then we can assume the base is two.  If it's false, then we have to
9436    check the mode of the TYPE parameter in certain cases.  */
9437 
9438 static tree
9439 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9440 			    tree type, bool ldexp)
9441 {
9442   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9443     {
9444       STRIP_NOPS (arg0);
9445       STRIP_NOPS (arg1);
9446 
9447       /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
9448       if (real_zerop (arg0) || integer_zerop (arg1)
9449 	  || (TREE_CODE (arg0) == REAL_CST
9450 	      && !real_isfinite (&TREE_REAL_CST (arg0))))
9451 	return omit_one_operand_loc (loc, type, arg0, arg1);
9452 
9453       /* If both arguments are constant, then try to evaluate it.  */
9454       if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9455 	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9456 	  && tree_fits_shwi_p (arg1))
9457         {
9458 	  /* Bound the maximum adjustment to twice the range of the
9459 	     mode's valid exponents.  Use abs to ensure the range is
9460 	     positive as a sanity check.  */
9461 	  const long max_exp_adj = 2 *
9462 	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9463 		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9464 
9465 	  /* Get the user-requested adjustment.  */
9466 	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9467 
9468 	  /* The requested adjustment must be inside this range.  This
9469 	     is a preliminary cap to avoid things like overflow, we
9470 	     may still fail to compute the result for other reasons.  */
9471 	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9472 	    {
9473 	      REAL_VALUE_TYPE initial_result;
9474 
9475 	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9476 
9477 	      /* Ensure we didn't overflow.  */
9478 	      if (! real_isinf (&initial_result))
9479 	        {
9480 		  const REAL_VALUE_TYPE trunc_result
9481 		    = real_value_truncate (TYPE_MODE (type), initial_result);
9482 
9483 		  /* Only proceed if the target mode can hold the
9484 		     resulting value.  */
9485 		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9486 		    return build_real (type, trunc_result);
9487 		}
9488 	    }
9489 	}
9490     }
9491 
9492   return NULL_TREE;
9493 }
9494 
9495 /* Fold a call to builtin modf.  */
9496 
9497 static tree
9498 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9499 {
9500   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9501     return NULL_TREE;
9502 
9503   STRIP_NOPS (arg0);
9504 
9505   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9506     return NULL_TREE;
9507 
9508   arg1 = build_fold_indirect_ref_loc (loc, arg1);
9509 
9510   /* Proceed if a valid pointer type was passed in.  */
9511   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9512     {
9513       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9514       REAL_VALUE_TYPE trunc, frac;
9515 
9516       switch (value->cl)
9517       {
9518       case rvc_nan:
9519       case rvc_zero:
9520 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
9521 	trunc = frac = *value;
9522 	break;
9523       case rvc_inf:
9524 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
9525 	frac = dconst0;
9526 	frac.sign = value->sign;
9527 	trunc = *value;
9528 	break;
9529       case rvc_normal:
9530 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
9531 	real_trunc (&trunc, VOIDmode, value);
9532 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9533 	/* If the original number was negative and already
9534 	   integral, then the fractional part is -0.0.  */
9535 	if (value->sign && frac.cl == rvc_zero)
9536 	  frac.sign = value->sign;
9537 	break;
9538       }
9539 
9540       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9541       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9542 			  build_real (rettype, trunc));
9543       TREE_SIDE_EFFECTS (arg1) = 1;
9544       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9545 			  build_real (rettype, frac));
9546     }
9547 
9548   return NULL_TREE;
9549 }
9550 
9551 /* Given a location LOC, an interclass builtin function decl FNDECL
9552    and its single argument ARG, return an folded expression computing
9553    the same, or NULL_TREE if we either couldn't or didn't want to fold
9554    (the latter happen if there's an RTL instruction available).  */
9555 
9556 static tree
9557 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9558 {
9559   machine_mode mode;
9560 
9561   if (!validate_arg (arg, REAL_TYPE))
9562     return NULL_TREE;
9563 
9564   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9565     return NULL_TREE;
9566 
9567   mode = TYPE_MODE (TREE_TYPE (arg));
9568 
9569   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9570 
9571   /* If there is no optab, try generic code.  */
9572   switch (DECL_FUNCTION_CODE (fndecl))
9573     {
9574       tree result;
9575 
9576     CASE_FLT_FN (BUILT_IN_ISINF):
9577       {
9578 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
9579 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9580 	tree type = TREE_TYPE (arg);
9581 	REAL_VALUE_TYPE r;
9582 	char buf[128];
9583 
9584 	if (is_ibm_extended)
9585 	  {
9586 	    /* NaN and Inf are encoded in the high-order double value
9587 	       only.  The low-order value is not significant.  */
9588 	    type = double_type_node;
9589 	    mode = DFmode;
9590 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9591 	  }
9592 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9593 	real_from_string (&r, buf);
9594 	result = build_call_expr (isgr_fn, 2,
9595 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9596 				  build_real (type, r));
9597 	return result;
9598       }
9599     CASE_FLT_FN (BUILT_IN_FINITE):
9600     case BUILT_IN_ISFINITE:
9601       {
9602 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
9603 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9604 	tree type = TREE_TYPE (arg);
9605 	REAL_VALUE_TYPE r;
9606 	char buf[128];
9607 
9608 	if (is_ibm_extended)
9609 	  {
9610 	    /* NaN and Inf are encoded in the high-order double value
9611 	       only.  The low-order value is not significant.  */
9612 	    type = double_type_node;
9613 	    mode = DFmode;
9614 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9615 	  }
9616 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9617 	real_from_string (&r, buf);
9618 	result = build_call_expr (isle_fn, 2,
9619 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9620 				  build_real (type, r));
9621 	/*result = fold_build2_loc (loc, UNGT_EXPR,
9622 				  TREE_TYPE (TREE_TYPE (fndecl)),
9623 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9624 				  build_real (type, r));
9625 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9626 				  TREE_TYPE (TREE_TYPE (fndecl)),
9627 				  result);*/
9628 	return result;
9629       }
9630     case BUILT_IN_ISNORMAL:
9631       {
9632 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9633 	   islessequal(fabs(x),DBL_MAX).  */
9634 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9635 	tree type = TREE_TYPE (arg);
9636 	tree orig_arg, max_exp, min_exp;
9637 	machine_mode orig_mode = mode;
9638 	REAL_VALUE_TYPE rmax, rmin;
9639 	char buf[128];
9640 
9641 	orig_arg = arg = builtin_save_expr (arg);
9642 	if (is_ibm_extended)
9643 	  {
9644 	    /* Use double to test the normal range of IBM extended
9645 	       precision.  Emin for IBM extended precision is
9646 	       different to emin for IEEE double, being 53 higher
9647 	       since the low double exponent is at least 53 lower
9648 	       than the high double exponent.  */
9649 	    type = double_type_node;
9650 	    mode = DFmode;
9651 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9652 	  }
9653 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9654 
9655 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9656 	real_from_string (&rmax, buf);
9657 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9658 	real_from_string (&rmin, buf);
9659 	max_exp = build_real (type, rmax);
9660 	min_exp = build_real (type, rmin);
9661 
9662 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9663 	if (is_ibm_extended)
9664 	  {
9665 	    /* Testing the high end of the range is done just using
9666 	       the high double, using the same test as isfinite().
9667 	       For the subnormal end of the range we first test the
9668 	       high double, then if its magnitude is equal to the
9669 	       limit of 0x1p-969, we test whether the low double is
9670 	       non-zero and opposite sign to the high double.  */
9671 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9672 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9673 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9674 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9675 				       arg, min_exp);
9676 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
9677 				      complex_double_type_node, orig_arg);
9678 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9679 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9680 	    tree zero = build_real (type, dconst0);
9681 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9682 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9683 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9684 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9685 				      fold_build3 (COND_EXPR,
9686 						   integer_type_node,
9687 						   hilt, logt, lolt));
9688 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9689 				  eq_min, ok_lo);
9690 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9691 				   gt_min, eq_min);
9692 	  }
9693 	else
9694 	  {
9695 	    tree const isge_fn
9696 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9697 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9698 	  }
9699 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9700 			      max_exp, min_exp);
9701 	return result;
9702       }
9703     default:
9704       break;
9705     }
9706 
9707   return NULL_TREE;
9708 }
9709 
9710 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9711    ARG is the argument for the call.  */
9712 
9713 static tree
9714 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9715 {
9716   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9717   REAL_VALUE_TYPE r;
9718 
9719   if (!validate_arg (arg, REAL_TYPE))
9720     return NULL_TREE;
9721 
9722   switch (builtin_index)
9723     {
9724     case BUILT_IN_ISINF:
9725       if (!HONOR_INFINITIES (arg))
9726 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9727 
9728       if (TREE_CODE (arg) == REAL_CST)
9729 	{
9730 	  r = TREE_REAL_CST (arg);
9731 	  if (real_isinf (&r))
9732 	    return real_compare (GT_EXPR, &r, &dconst0)
9733 		   ? integer_one_node : integer_minus_one_node;
9734 	  else
9735 	    return integer_zero_node;
9736 	}
9737 
9738       return NULL_TREE;
9739 
9740     case BUILT_IN_ISINF_SIGN:
9741       {
9742 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9743 	/* In a boolean context, GCC will fold the inner COND_EXPR to
9744 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
9745 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9746 	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9747 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9748 	tree tmp = NULL_TREE;
9749 
9750 	arg = builtin_save_expr (arg);
9751 
9752 	if (signbit_fn && isinf_fn)
9753 	  {
9754 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9755 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9756 
9757 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9758 					signbit_call, integer_zero_node);
9759 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9760 				      isinf_call, integer_zero_node);
9761 
9762 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9763 			       integer_minus_one_node, integer_one_node);
9764 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9765 			       isinf_call, tmp,
9766 			       integer_zero_node);
9767 	  }
9768 
9769 	return tmp;
9770       }
9771 
9772     case BUILT_IN_ISFINITE:
9773       if (!HONOR_NANS (arg)
9774 	  && !HONOR_INFINITIES (arg))
9775 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
9776 
9777       if (TREE_CODE (arg) == REAL_CST)
9778 	{
9779 	  r = TREE_REAL_CST (arg);
9780 	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9781 	}
9782 
9783       return NULL_TREE;
9784 
9785     case BUILT_IN_ISNAN:
9786       if (!HONOR_NANS (arg))
9787 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9788 
9789       if (TREE_CODE (arg) == REAL_CST)
9790 	{
9791 	  r = TREE_REAL_CST (arg);
9792 	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
9793 	}
9794 
9795       {
9796 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9797 	if (is_ibm_extended)
9798 	  {
9799 	    /* NaN and Inf are encoded in the high-order double value
9800 	       only.  The low-order value is not significant.  */
9801 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9802 	  }
9803       }
9804       arg = builtin_save_expr (arg);
9805       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9806 
9807     default:
9808       gcc_unreachable ();
9809     }
9810 }
9811 
9812 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9813    This builtin will generate code to return the appropriate floating
9814    point classification depending on the value of the floating point
9815    number passed in.  The possible return values must be supplied as
9816    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9817    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
9818    one floating point argument which is "type generic".  */
9819 
9820 static tree
9821 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9822 {
9823   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9824     arg, type, res, tmp;
9825   machine_mode mode;
9826   REAL_VALUE_TYPE r;
9827   char buf[128];
9828 
9829   /* Verify the required arguments in the original call.  */
9830   if (nargs != 6
9831       || !validate_arg (args[0], INTEGER_TYPE)
9832       || !validate_arg (args[1], INTEGER_TYPE)
9833       || !validate_arg (args[2], INTEGER_TYPE)
9834       || !validate_arg (args[3], INTEGER_TYPE)
9835       || !validate_arg (args[4], INTEGER_TYPE)
9836       || !validate_arg (args[5], REAL_TYPE))
9837     return NULL_TREE;
9838 
9839   fp_nan = args[0];
9840   fp_infinite = args[1];
9841   fp_normal = args[2];
9842   fp_subnormal = args[3];
9843   fp_zero = args[4];
9844   arg = args[5];
9845   type = TREE_TYPE (arg);
9846   mode = TYPE_MODE (type);
9847   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9848 
9849   /* fpclassify(x) ->
9850        isnan(x) ? FP_NAN :
9851          (fabs(x) == Inf ? FP_INFINITE :
9852 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
9853 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
9854 
9855   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9856 		     build_real (type, dconst0));
9857   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9858 		     tmp, fp_zero, fp_subnormal);
9859 
9860   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9861   real_from_string (&r, buf);
9862   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9863 		     arg, build_real (type, r));
9864   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9865 
9866   if (HONOR_INFINITIES (mode))
9867     {
9868       real_inf (&r);
9869       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9870 			 build_real (type, r));
9871       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9872 			 fp_infinite, res);
9873     }
9874 
9875   if (HONOR_NANS (mode))
9876     {
9877       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9878       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9879     }
9880 
9881   return res;
9882 }
9883 
9884 /* Fold a call to an unordered comparison function such as
9885    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9886    being called and ARG0 and ARG1 are the arguments for the call.
9887    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9888    the opposite of the desired result.  UNORDERED_CODE is used
9889    for modes that can hold NaNs and ORDERED_CODE is used for
9890    the rest.  */
9891 
9892 static tree
9893 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9894 			    enum tree_code unordered_code,
9895 			    enum tree_code ordered_code)
9896 {
9897   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9898   enum tree_code code;
9899   tree type0, type1;
9900   enum tree_code code0, code1;
9901   tree cmp_type = NULL_TREE;
9902 
9903   type0 = TREE_TYPE (arg0);
9904   type1 = TREE_TYPE (arg1);
9905 
9906   code0 = TREE_CODE (type0);
9907   code1 = TREE_CODE (type1);
9908 
9909   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9910     /* Choose the wider of two real types.  */
9911     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9912       ? type0 : type1;
9913   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9914     cmp_type = type0;
9915   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9916     cmp_type = type1;
9917 
9918   arg0 = fold_convert_loc (loc, cmp_type, arg0);
9919   arg1 = fold_convert_loc (loc, cmp_type, arg1);
9920 
9921   if (unordered_code == UNORDERED_EXPR)
9922     {
9923       if (!HONOR_NANS (arg0))
9924 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9925       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9926     }
9927 
9928   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9929   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9930 		      fold_build2_loc (loc, code, type, arg0, arg1));
9931 }
9932 
9933 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9934    arithmetics if it can never overflow, or into internal functions that
9935    return both result of arithmetics and overflowed boolean flag in
9936    a complex integer result, or some other check for overflow.  */
9937 
9938 static tree
9939 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9940 			     tree arg0, tree arg1, tree arg2)
9941 {
9942   enum internal_fn ifn = IFN_LAST;
9943   tree type = TREE_TYPE (TREE_TYPE (arg2));
9944   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9945   switch (fcode)
9946     {
9947     case BUILT_IN_ADD_OVERFLOW:
9948     case BUILT_IN_SADD_OVERFLOW:
9949     case BUILT_IN_SADDL_OVERFLOW:
9950     case BUILT_IN_SADDLL_OVERFLOW:
9951     case BUILT_IN_UADD_OVERFLOW:
9952     case BUILT_IN_UADDL_OVERFLOW:
9953     case BUILT_IN_UADDLL_OVERFLOW:
9954       ifn = IFN_ADD_OVERFLOW;
9955       break;
9956     case BUILT_IN_SUB_OVERFLOW:
9957     case BUILT_IN_SSUB_OVERFLOW:
9958     case BUILT_IN_SSUBL_OVERFLOW:
9959     case BUILT_IN_SSUBLL_OVERFLOW:
9960     case BUILT_IN_USUB_OVERFLOW:
9961     case BUILT_IN_USUBL_OVERFLOW:
9962     case BUILT_IN_USUBLL_OVERFLOW:
9963       ifn = IFN_SUB_OVERFLOW;
9964       break;
9965     case BUILT_IN_MUL_OVERFLOW:
9966     case BUILT_IN_SMUL_OVERFLOW:
9967     case BUILT_IN_SMULL_OVERFLOW:
9968     case BUILT_IN_SMULLL_OVERFLOW:
9969     case BUILT_IN_UMUL_OVERFLOW:
9970     case BUILT_IN_UMULL_OVERFLOW:
9971     case BUILT_IN_UMULLL_OVERFLOW:
9972       ifn = IFN_MUL_OVERFLOW;
9973       break;
9974     default:
9975       gcc_unreachable ();
9976     }
9977   tree ctype = build_complex_type (type);
9978   tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9979 					    2, arg0, arg1);
9980   tree tgt = save_expr (call);
9981   tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9982   tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9983   ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9984   tree store
9985     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9986   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9987 }
9988 
9989 /* Fold a call to built-in function FNDECL with 0 arguments.
9990    This function returns NULL_TREE if no simplification was possible.  */
9991 
9992 static tree
9993 fold_builtin_0 (location_t loc, tree fndecl)
9994 {
9995   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9996   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9997   switch (fcode)
9998     {
9999     CASE_FLT_FN (BUILT_IN_INF):
10000     case BUILT_IN_INFD32:
10001     case BUILT_IN_INFD64:
10002     case BUILT_IN_INFD128:
10003       return fold_builtin_inf (loc, type, true);
10004 
10005     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10006       return fold_builtin_inf (loc, type, false);
10007 
10008     case BUILT_IN_CLASSIFY_TYPE:
10009       return fold_builtin_classify_type (NULL_TREE);
10010 
10011     default:
10012       break;
10013     }
10014   return NULL_TREE;
10015 }
10016 
10017 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10018    This function returns NULL_TREE if no simplification was possible.  */
10019 
10020 static tree
10021 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10022 {
10023   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10024   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10025   switch (fcode)
10026     {
10027     case BUILT_IN_CONSTANT_P:
10028       {
10029 	tree val = fold_builtin_constant_p (arg0);
10030 
10031 	/* Gimplification will pull the CALL_EXPR for the builtin out of
10032 	   an if condition.  When not optimizing, we'll not CSE it back.
10033 	   To avoid link error types of regressions, return false now.  */
10034 	if (!val && !optimize)
10035 	  val = integer_zero_node;
10036 
10037 	return val;
10038       }
10039 
10040     case BUILT_IN_CLASSIFY_TYPE:
10041       return fold_builtin_classify_type (arg0);
10042 
10043     case BUILT_IN_STRLEN:
10044       return fold_builtin_strlen (loc, type, arg0);
10045 
10046     CASE_FLT_FN (BUILT_IN_FABS):
10047     case BUILT_IN_FABSD32:
10048     case BUILT_IN_FABSD64:
10049     case BUILT_IN_FABSD128:
10050       return fold_builtin_fabs (loc, arg0, type);
10051 
10052     case BUILT_IN_ABS:
10053     case BUILT_IN_LABS:
10054     case BUILT_IN_LLABS:
10055     case BUILT_IN_IMAXABS:
10056       return fold_builtin_abs (loc, arg0, type);
10057 
10058     CASE_FLT_FN (BUILT_IN_CONJ):
10059       if (validate_arg (arg0, COMPLEX_TYPE)
10060 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10061 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10062     break;
10063 
10064     CASE_FLT_FN (BUILT_IN_CREAL):
10065       if (validate_arg (arg0, COMPLEX_TYPE)
10066 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10067 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10068     break;
10069 
10070     CASE_FLT_FN (BUILT_IN_CIMAG):
10071       if (validate_arg (arg0, COMPLEX_TYPE)
10072 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10073 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10074     break;
10075 
10076     CASE_FLT_FN (BUILT_IN_CCOS):
10077       return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10078 
10079     CASE_FLT_FN (BUILT_IN_CCOSH):
10080       return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10081 
10082     CASE_FLT_FN (BUILT_IN_CPROJ):
10083       return fold_builtin_cproj (loc, arg0, type);
10084 
10085     CASE_FLT_FN (BUILT_IN_CSIN):
10086       if (validate_arg (arg0, COMPLEX_TYPE)
10087 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10088 	return do_mpc_arg1 (arg0, type, mpc_sin);
10089     break;
10090 
10091     CASE_FLT_FN (BUILT_IN_CSINH):
10092       if (validate_arg (arg0, COMPLEX_TYPE)
10093 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10094 	return do_mpc_arg1 (arg0, type, mpc_sinh);
10095     break;
10096 
10097     CASE_FLT_FN (BUILT_IN_CTAN):
10098       if (validate_arg (arg0, COMPLEX_TYPE)
10099 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10100 	return do_mpc_arg1 (arg0, type, mpc_tan);
10101     break;
10102 
10103     CASE_FLT_FN (BUILT_IN_CTANH):
10104       if (validate_arg (arg0, COMPLEX_TYPE)
10105 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10106 	return do_mpc_arg1 (arg0, type, mpc_tanh);
10107     break;
10108 
10109     CASE_FLT_FN (BUILT_IN_CLOG):
10110       if (validate_arg (arg0, COMPLEX_TYPE)
10111 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10112 	return do_mpc_arg1 (arg0, type, mpc_log);
10113     break;
10114 
10115     CASE_FLT_FN (BUILT_IN_CSQRT):
10116       if (validate_arg (arg0, COMPLEX_TYPE)
10117 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10118 	return do_mpc_arg1 (arg0, type, mpc_sqrt);
10119     break;
10120 
10121     CASE_FLT_FN (BUILT_IN_CASIN):
10122       if (validate_arg (arg0, COMPLEX_TYPE)
10123 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10124 	return do_mpc_arg1 (arg0, type, mpc_asin);
10125     break;
10126 
10127     CASE_FLT_FN (BUILT_IN_CACOS):
10128       if (validate_arg (arg0, COMPLEX_TYPE)
10129 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10130 	return do_mpc_arg1 (arg0, type, mpc_acos);
10131     break;
10132 
10133     CASE_FLT_FN (BUILT_IN_CATAN):
10134       if (validate_arg (arg0, COMPLEX_TYPE)
10135 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10136 	return do_mpc_arg1 (arg0, type, mpc_atan);
10137     break;
10138 
10139     CASE_FLT_FN (BUILT_IN_CASINH):
10140       if (validate_arg (arg0, COMPLEX_TYPE)
10141 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10142 	return do_mpc_arg1 (arg0, type, mpc_asinh);
10143     break;
10144 
10145     CASE_FLT_FN (BUILT_IN_CACOSH):
10146       if (validate_arg (arg0, COMPLEX_TYPE)
10147 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10148 	return do_mpc_arg1 (arg0, type, mpc_acosh);
10149     break;
10150 
10151     CASE_FLT_FN (BUILT_IN_CATANH):
10152       if (validate_arg (arg0, COMPLEX_TYPE)
10153 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10154 	return do_mpc_arg1 (arg0, type, mpc_atanh);
10155     break;
10156 
10157     CASE_FLT_FN (BUILT_IN_CABS):
10158       return fold_builtin_cabs (loc, arg0, type, fndecl);
10159 
10160     CASE_FLT_FN (BUILT_IN_CARG):
10161       return fold_builtin_carg (loc, arg0, type);
10162 
10163     CASE_FLT_FN (BUILT_IN_SQRT):
10164       return fold_builtin_sqrt (loc, arg0, type);
10165 
10166     CASE_FLT_FN (BUILT_IN_CBRT):
10167       return fold_builtin_cbrt (loc, arg0, type);
10168 
10169     CASE_FLT_FN (BUILT_IN_ASIN):
10170       if (validate_arg (arg0, REAL_TYPE))
10171 	return do_mpfr_arg1 (arg0, type, mpfr_asin,
10172 			     &dconstm1, &dconst1, true);
10173     break;
10174 
10175     CASE_FLT_FN (BUILT_IN_ACOS):
10176       if (validate_arg (arg0, REAL_TYPE))
10177 	return do_mpfr_arg1 (arg0, type, mpfr_acos,
10178 			     &dconstm1, &dconst1, true);
10179     break;
10180 
10181     CASE_FLT_FN (BUILT_IN_ATAN):
10182       if (validate_arg (arg0, REAL_TYPE))
10183 	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10184     break;
10185 
10186     CASE_FLT_FN (BUILT_IN_ASINH):
10187       if (validate_arg (arg0, REAL_TYPE))
10188 	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10189     break;
10190 
10191     CASE_FLT_FN (BUILT_IN_ACOSH):
10192       if (validate_arg (arg0, REAL_TYPE))
10193 	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10194 			     &dconst1, NULL, true);
10195     break;
10196 
10197     CASE_FLT_FN (BUILT_IN_ATANH):
10198       if (validate_arg (arg0, REAL_TYPE))
10199 	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10200 			     &dconstm1, &dconst1, false);
10201     break;
10202 
10203     CASE_FLT_FN (BUILT_IN_SIN):
10204       if (validate_arg (arg0, REAL_TYPE))
10205 	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10206     break;
10207 
10208     CASE_FLT_FN (BUILT_IN_COS):
10209       return fold_builtin_cos (loc, arg0, type, fndecl);
10210 
10211     CASE_FLT_FN (BUILT_IN_TAN):
10212       return fold_builtin_tan (arg0, type);
10213 
10214     CASE_FLT_FN (BUILT_IN_CEXP):
10215       return fold_builtin_cexp (loc, arg0, type);
10216 
10217     CASE_FLT_FN (BUILT_IN_CEXPI):
10218       if (validate_arg (arg0, REAL_TYPE))
10219 	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10220     break;
10221 
10222     CASE_FLT_FN (BUILT_IN_SINH):
10223       if (validate_arg (arg0, REAL_TYPE))
10224 	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10225     break;
10226 
10227     CASE_FLT_FN (BUILT_IN_COSH):
10228       return fold_builtin_cosh (loc, arg0, type, fndecl);
10229 
10230     CASE_FLT_FN (BUILT_IN_TANH):
10231       if (validate_arg (arg0, REAL_TYPE))
10232 	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10233     break;
10234 
10235     CASE_FLT_FN (BUILT_IN_ERF):
10236       if (validate_arg (arg0, REAL_TYPE))
10237 	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10238     break;
10239 
10240     CASE_FLT_FN (BUILT_IN_ERFC):
10241       if (validate_arg (arg0, REAL_TYPE))
10242 	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10243     break;
10244 
10245     CASE_FLT_FN (BUILT_IN_TGAMMA):
10246       if (validate_arg (arg0, REAL_TYPE))
10247 	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10248     break;
10249 
10250     CASE_FLT_FN (BUILT_IN_EXP):
10251       return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10252 
10253     CASE_FLT_FN (BUILT_IN_EXP2):
10254       return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10255 
10256     CASE_FLT_FN (BUILT_IN_EXP10):
10257     CASE_FLT_FN (BUILT_IN_POW10):
10258       return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10259 
10260     CASE_FLT_FN (BUILT_IN_EXPM1):
10261       if (validate_arg (arg0, REAL_TYPE))
10262 	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10263       break;
10264 
10265     CASE_FLT_FN (BUILT_IN_LOG):
10266       if (validate_arg (arg0, REAL_TYPE))
10267         return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10268       break;
10269 
10270     CASE_FLT_FN (BUILT_IN_LOG2):
10271       if (validate_arg (arg0, REAL_TYPE))
10272         return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10273       break;
10274 
10275     CASE_FLT_FN (BUILT_IN_LOG10):
10276       if (validate_arg (arg0, REAL_TYPE))
10277         return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10278       break;
10279 
10280     CASE_FLT_FN (BUILT_IN_LOG1P):
10281       if (validate_arg (arg0, REAL_TYPE))
10282 	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10283 			     &dconstm1, NULL, false);
10284     break;
10285 
10286     CASE_FLT_FN (BUILT_IN_J0):
10287       if (validate_arg (arg0, REAL_TYPE))
10288 	return do_mpfr_arg1 (arg0, type, mpfr_j0,
10289 			     NULL, NULL, 0);
10290     break;
10291 
10292     CASE_FLT_FN (BUILT_IN_J1):
10293       if (validate_arg (arg0, REAL_TYPE))
10294 	return do_mpfr_arg1 (arg0, type, mpfr_j1,
10295 			     NULL, NULL, 0);
10296     break;
10297 
10298     CASE_FLT_FN (BUILT_IN_Y0):
10299       if (validate_arg (arg0, REAL_TYPE))
10300 	return do_mpfr_arg1 (arg0, type, mpfr_y0,
10301 			     &dconst0, NULL, false);
10302     break;
10303 
10304     CASE_FLT_FN (BUILT_IN_Y1):
10305       if (validate_arg (arg0, REAL_TYPE))
10306 	return do_mpfr_arg1 (arg0, type, mpfr_y1,
10307 			     &dconst0, NULL, false);
10308     break;
10309 
10310     CASE_FLT_FN (BUILT_IN_NAN):
10311     case BUILT_IN_NAND32:
10312     case BUILT_IN_NAND64:
10313     case BUILT_IN_NAND128:
10314       return fold_builtin_nan (arg0, type, true);
10315 
10316     CASE_FLT_FN (BUILT_IN_NANS):
10317       return fold_builtin_nan (arg0, type, false);
10318 
10319     CASE_FLT_FN (BUILT_IN_FLOOR):
10320       return fold_builtin_floor (loc, fndecl, arg0);
10321 
10322     CASE_FLT_FN (BUILT_IN_CEIL):
10323       return fold_builtin_ceil (loc, fndecl, arg0);
10324 
10325     CASE_FLT_FN (BUILT_IN_TRUNC):
10326       return fold_builtin_trunc (loc, fndecl, arg0);
10327 
10328     CASE_FLT_FN (BUILT_IN_ROUND):
10329       return fold_builtin_round (loc, fndecl, arg0);
10330 
10331     CASE_FLT_FN (BUILT_IN_NEARBYINT):
10332     CASE_FLT_FN (BUILT_IN_RINT):
10333       return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10334 
10335     CASE_FLT_FN (BUILT_IN_ICEIL):
10336     CASE_FLT_FN (BUILT_IN_LCEIL):
10337     CASE_FLT_FN (BUILT_IN_LLCEIL):
10338     CASE_FLT_FN (BUILT_IN_LFLOOR):
10339     CASE_FLT_FN (BUILT_IN_IFLOOR):
10340     CASE_FLT_FN (BUILT_IN_LLFLOOR):
10341     CASE_FLT_FN (BUILT_IN_IROUND):
10342     CASE_FLT_FN (BUILT_IN_LROUND):
10343     CASE_FLT_FN (BUILT_IN_LLROUND):
10344       return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10345 
10346     CASE_FLT_FN (BUILT_IN_IRINT):
10347     CASE_FLT_FN (BUILT_IN_LRINT):
10348     CASE_FLT_FN (BUILT_IN_LLRINT):
10349       return fold_fixed_mathfn (loc, fndecl, arg0);
10350 
10351     case BUILT_IN_BSWAP16:
10352     case BUILT_IN_BSWAP32:
10353     case BUILT_IN_BSWAP64:
10354       return fold_builtin_bswap (fndecl, arg0);
10355 
10356     CASE_INT_FN (BUILT_IN_FFS):
10357     CASE_INT_FN (BUILT_IN_CLZ):
10358     CASE_INT_FN (BUILT_IN_CTZ):
10359     CASE_INT_FN (BUILT_IN_CLRSB):
10360     CASE_INT_FN (BUILT_IN_POPCOUNT):
10361     CASE_INT_FN (BUILT_IN_PARITY):
10362       return fold_builtin_bitop (fndecl, arg0);
10363 
10364     CASE_FLT_FN (BUILT_IN_SIGNBIT):
10365       return fold_builtin_signbit (loc, arg0, type);
10366 
10367     CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10368       return fold_builtin_significand (loc, arg0, type);
10369 
10370     CASE_FLT_FN (BUILT_IN_ILOGB):
10371     CASE_FLT_FN (BUILT_IN_LOGB):
10372       return fold_builtin_logb (loc, arg0, type);
10373 
10374     case BUILT_IN_ISASCII:
10375       return fold_builtin_isascii (loc, arg0);
10376 
10377     case BUILT_IN_TOASCII:
10378       return fold_builtin_toascii (loc, arg0);
10379 
10380     case BUILT_IN_ISDIGIT:
10381       return fold_builtin_isdigit (loc, arg0);
10382 
10383     CASE_FLT_FN (BUILT_IN_FINITE):
10384     case BUILT_IN_FINITED32:
10385     case BUILT_IN_FINITED64:
10386     case BUILT_IN_FINITED128:
10387     case BUILT_IN_ISFINITE:
10388       {
10389 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10390 	if (ret)
10391 	  return ret;
10392 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10393       }
10394 
10395     CASE_FLT_FN (BUILT_IN_ISINF):
10396     case BUILT_IN_ISINFD32:
10397     case BUILT_IN_ISINFD64:
10398     case BUILT_IN_ISINFD128:
10399       {
10400 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10401 	if (ret)
10402 	  return ret;
10403 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10404       }
10405 
10406     case BUILT_IN_ISNORMAL:
10407       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10408 
10409     case BUILT_IN_ISINF_SIGN:
10410       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10411 
10412     CASE_FLT_FN (BUILT_IN_ISNAN):
10413     case BUILT_IN_ISNAND32:
10414     case BUILT_IN_ISNAND64:
10415     case BUILT_IN_ISNAND128:
10416       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10417 
10418     case BUILT_IN_FREE:
10419       if (integer_zerop (arg0))
10420 	return build_empty_stmt (loc);
10421       break;
10422 
10423     default:
10424       break;
10425     }
10426 
10427   return NULL_TREE;
10428 
10429 }
10430 
10431 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10432    This function returns NULL_TREE if no simplification was possible.  */
10433 
10434 static tree
10435 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10436 {
10437   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10438   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10439 
10440   switch (fcode)
10441     {
10442     CASE_FLT_FN (BUILT_IN_JN):
10443       if (validate_arg (arg0, INTEGER_TYPE)
10444 	  && validate_arg (arg1, REAL_TYPE))
10445 	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10446     break;
10447 
10448     CASE_FLT_FN (BUILT_IN_YN):
10449       if (validate_arg (arg0, INTEGER_TYPE)
10450 	  && validate_arg (arg1, REAL_TYPE))
10451 	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10452 				 &dconst0, false);
10453     break;
10454 
10455     CASE_FLT_FN (BUILT_IN_DREM):
10456     CASE_FLT_FN (BUILT_IN_REMAINDER):
10457       if (validate_arg (arg0, REAL_TYPE)
10458           && validate_arg (arg1, REAL_TYPE))
10459         return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10460     break;
10461 
10462     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10463     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10464       if (validate_arg (arg0, REAL_TYPE)
10465 	  && validate_arg (arg1, POINTER_TYPE))
10466 	return do_mpfr_lgamma_r (arg0, arg1, type);
10467     break;
10468 
10469     CASE_FLT_FN (BUILT_IN_ATAN2):
10470       if (validate_arg (arg0, REAL_TYPE)
10471 	  && validate_arg (arg1, REAL_TYPE))
10472 	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10473     break;
10474 
10475     CASE_FLT_FN (BUILT_IN_FDIM):
10476       if (validate_arg (arg0, REAL_TYPE)
10477 	  && validate_arg (arg1, REAL_TYPE))
10478 	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10479     break;
10480 
10481     CASE_FLT_FN (BUILT_IN_HYPOT):
10482       return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10483 
10484     CASE_FLT_FN (BUILT_IN_CPOW):
10485       if (validate_arg (arg0, COMPLEX_TYPE)
10486 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10487 	  && validate_arg (arg1, COMPLEX_TYPE)
10488 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10489 	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10490     break;
10491 
10492     CASE_FLT_FN (BUILT_IN_LDEXP):
10493       return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10494     CASE_FLT_FN (BUILT_IN_SCALBN):
10495     CASE_FLT_FN (BUILT_IN_SCALBLN):
10496       return fold_builtin_load_exponent (loc, arg0, arg1,
10497 					 type, /*ldexp=*/false);
10498 
10499     CASE_FLT_FN (BUILT_IN_FREXP):
10500       return fold_builtin_frexp (loc, arg0, arg1, type);
10501 
10502     CASE_FLT_FN (BUILT_IN_MODF):
10503       return fold_builtin_modf (loc, arg0, arg1, type);
10504 
10505     case BUILT_IN_STRSTR:
10506       return fold_builtin_strstr (loc, arg0, arg1, type);
10507 
10508     case BUILT_IN_STRSPN:
10509       return fold_builtin_strspn (loc, arg0, arg1);
10510 
10511     case BUILT_IN_STRCSPN:
10512       return fold_builtin_strcspn (loc, arg0, arg1);
10513 
10514     case BUILT_IN_STRCHR:
10515     case BUILT_IN_INDEX:
10516       return fold_builtin_strchr (loc, arg0, arg1, type);
10517 
10518     case BUILT_IN_STRRCHR:
10519     case BUILT_IN_RINDEX:
10520       return fold_builtin_strrchr (loc, arg0, arg1, type);
10521 
10522     case BUILT_IN_STRCMP:
10523       return fold_builtin_strcmp (loc, arg0, arg1);
10524 
10525     case BUILT_IN_STRPBRK:
10526       return fold_builtin_strpbrk (loc, arg0, arg1, type);
10527 
10528     case BUILT_IN_EXPECT:
10529       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10530 
10531     CASE_FLT_FN (BUILT_IN_POW):
10532       return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10533 
10534     CASE_FLT_FN (BUILT_IN_POWI):
10535       return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10536 
10537     CASE_FLT_FN (BUILT_IN_COPYSIGN):
10538       return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10539 
10540     CASE_FLT_FN (BUILT_IN_FMIN):
10541       return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10542 
10543     CASE_FLT_FN (BUILT_IN_FMAX):
10544       return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10545 
10546     case BUILT_IN_ISGREATER:
10547       return fold_builtin_unordered_cmp (loc, fndecl,
10548 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
10549     case BUILT_IN_ISGREATEREQUAL:
10550       return fold_builtin_unordered_cmp (loc, fndecl,
10551 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
10552     case BUILT_IN_ISLESS:
10553       return fold_builtin_unordered_cmp (loc, fndecl,
10554 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
10555     case BUILT_IN_ISLESSEQUAL:
10556       return fold_builtin_unordered_cmp (loc, fndecl,
10557 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
10558     case BUILT_IN_ISLESSGREATER:
10559       return fold_builtin_unordered_cmp (loc, fndecl,
10560 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10561     case BUILT_IN_ISUNORDERED:
10562       return fold_builtin_unordered_cmp (loc, fndecl,
10563 					 arg0, arg1, UNORDERED_EXPR,
10564 					 NOP_EXPR);
10565 
10566       /* We do the folding for va_start in the expander.  */
10567     case BUILT_IN_VA_START:
10568       break;
10569 
10570     case BUILT_IN_OBJECT_SIZE:
10571       return fold_builtin_object_size (arg0, arg1);
10572 
10573     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10574       return fold_builtin_atomic_always_lock_free (arg0, arg1);
10575 
10576     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10577       return fold_builtin_atomic_is_lock_free (arg0, arg1);
10578 
10579     default:
10580       break;
10581     }
10582   return NULL_TREE;
10583 }
10584 
10585 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10586    and ARG2.
10587    This function returns NULL_TREE if no simplification was possible.  */
10588 
10589 static tree
10590 fold_builtin_3 (location_t loc, tree fndecl,
10591 		tree arg0, tree arg1, tree arg2)
10592 {
10593   tree type = TREE_TYPE (TREE_TYPE (fndecl));
10594   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10595   switch (fcode)
10596     {
10597 
10598     CASE_FLT_FN (BUILT_IN_SINCOS):
10599       return fold_builtin_sincos (loc, arg0, arg1, arg2);
10600 
10601     CASE_FLT_FN (BUILT_IN_FMA):
10602       return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10603     break;
10604 
10605     CASE_FLT_FN (BUILT_IN_REMQUO):
10606       if (validate_arg (arg0, REAL_TYPE)
10607 	  && validate_arg (arg1, REAL_TYPE)
10608 	  && validate_arg (arg2, POINTER_TYPE))
10609 	return do_mpfr_remquo (arg0, arg1, arg2);
10610     break;
10611 
10612     case BUILT_IN_STRNCMP:
10613       return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10614 
10615     case BUILT_IN_MEMCHR:
10616       return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10617 
10618     case BUILT_IN_BCMP:
10619     case BUILT_IN_MEMCMP:
10620       return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10621 
10622     case BUILT_IN_EXPECT:
10623       return fold_builtin_expect (loc, arg0, arg1, arg2);
10624 
10625     case BUILT_IN_ADD_OVERFLOW:
10626     case BUILT_IN_SUB_OVERFLOW:
10627     case BUILT_IN_MUL_OVERFLOW:
10628     case BUILT_IN_SADD_OVERFLOW:
10629     case BUILT_IN_SADDL_OVERFLOW:
10630     case BUILT_IN_SADDLL_OVERFLOW:
10631     case BUILT_IN_SSUB_OVERFLOW:
10632     case BUILT_IN_SSUBL_OVERFLOW:
10633     case BUILT_IN_SSUBLL_OVERFLOW:
10634     case BUILT_IN_SMUL_OVERFLOW:
10635     case BUILT_IN_SMULL_OVERFLOW:
10636     case BUILT_IN_SMULLL_OVERFLOW:
10637     case BUILT_IN_UADD_OVERFLOW:
10638     case BUILT_IN_UADDL_OVERFLOW:
10639     case BUILT_IN_UADDLL_OVERFLOW:
10640     case BUILT_IN_USUB_OVERFLOW:
10641     case BUILT_IN_USUBL_OVERFLOW:
10642     case BUILT_IN_USUBLL_OVERFLOW:
10643     case BUILT_IN_UMUL_OVERFLOW:
10644     case BUILT_IN_UMULL_OVERFLOW:
10645     case BUILT_IN_UMULLL_OVERFLOW:
10646       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10647 
10648     default:
10649       break;
10650     }
10651   return NULL_TREE;
10652 }
10653 
10654 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10655    arguments.  IGNORE is true if the result of the
10656    function call is ignored.  This function returns NULL_TREE if no
10657    simplification was possible.  */
10658 
10659 tree
10660 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10661 {
10662   tree ret = NULL_TREE;
10663 
10664   switch (nargs)
10665     {
10666     case 0:
10667       ret = fold_builtin_0 (loc, fndecl);
10668       break;
10669     case 1:
10670       ret = fold_builtin_1 (loc, fndecl, args[0]);
10671       break;
10672     case 2:
10673       ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10674       break;
10675     case 3:
10676       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10677       break;
10678     default:
10679       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10680       break;
10681     }
10682   if (ret)
10683     {
10684       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10685       SET_EXPR_LOCATION (ret, loc);
10686       TREE_NO_WARNING (ret) = 1;
10687       return ret;
10688     }
10689   return NULL_TREE;
10690 }
10691 
10692 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10693    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
10694    of arguments in ARGS to be omitted.  OLDNARGS is the number of
10695    elements in ARGS.  */
10696 
10697 static tree
10698 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10699 			  int skip, tree fndecl, int n, va_list newargs)
10700 {
10701   int nargs = oldnargs - skip + n;
10702   tree *buffer;
10703 
10704   if (n > 0)
10705     {
10706       int i, j;
10707 
10708       buffer = XALLOCAVEC (tree, nargs);
10709       for (i = 0; i < n; i++)
10710 	buffer[i] = va_arg (newargs, tree);
10711       for (j = skip; j < oldnargs; j++, i++)
10712 	buffer[i] = args[j];
10713     }
10714   else
10715     buffer = args + skip;
10716 
10717   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10718 }
10719 
10720 /* Return true if FNDECL shouldn't be folded right now.
10721    If a built-in function has an inline attribute always_inline
10722    wrapper, defer folding it after always_inline functions have
10723    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10724    might not be performed.  */
10725 
10726 bool
10727 avoid_folding_inline_builtin (tree fndecl)
10728 {
10729   return (DECL_DECLARED_INLINE_P (fndecl)
10730 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10731 	  && cfun
10732 	  && !cfun->always_inline_functions_inlined
10733 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10734 }
10735 
10736 /* A wrapper function for builtin folding that prevents warnings for
10737    "statement without effect" and the like, caused by removing the
10738    call node earlier than the warning is generated.  */
10739 
10740 tree
10741 fold_call_expr (location_t loc, tree exp, bool ignore)
10742 {
10743   tree ret = NULL_TREE;
10744   tree fndecl = get_callee_fndecl (exp);
10745   if (fndecl
10746       && TREE_CODE (fndecl) == FUNCTION_DECL
10747       && DECL_BUILT_IN (fndecl)
10748       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10749 	 yet.  Defer folding until we see all the arguments
10750 	 (after inlining).  */
10751       && !CALL_EXPR_VA_ARG_PACK (exp))
10752     {
10753       int nargs = call_expr_nargs (exp);
10754 
10755       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10756 	 instead last argument is __builtin_va_arg_pack ().  Defer folding
10757 	 even in that case, until arguments are finalized.  */
10758       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10759 	{
10760 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10761 	  if (fndecl2
10762 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
10763 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10764 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10765 	    return NULL_TREE;
10766 	}
10767 
10768       if (avoid_folding_inline_builtin (fndecl))
10769 	return NULL_TREE;
10770 
10771       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10772         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10773 				     CALL_EXPR_ARGP (exp), ignore);
10774       else
10775 	{
10776 	  tree *args = CALL_EXPR_ARGP (exp);
10777 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10778 	  if (ret)
10779 	    return ret;
10780 	}
10781     }
10782   return NULL_TREE;
10783 }
10784 
10785 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10786    N arguments are passed in the array ARGARRAY.  Return a folded
10787    expression or NULL_TREE if no simplification was possible.  */
10788 
10789 tree
10790 fold_builtin_call_array (location_t loc, tree,
10791 			 tree fn,
10792 			 int n,
10793 			 tree *argarray)
10794 {
10795   if (TREE_CODE (fn) != ADDR_EXPR)
10796     return NULL_TREE;
10797 
10798   tree fndecl = TREE_OPERAND (fn, 0);
10799   if (TREE_CODE (fndecl) == FUNCTION_DECL
10800       && DECL_BUILT_IN (fndecl))
10801     {
10802       /* If last argument is __builtin_va_arg_pack (), arguments to this
10803 	 function are not finalized yet.  Defer folding until they are.  */
10804       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10805 	{
10806 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10807 	  if (fndecl2
10808 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
10809 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10810 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10811 	    return NULL_TREE;
10812 	}
10813       if (avoid_folding_inline_builtin (fndecl))
10814 	return NULL_TREE;
10815       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10816 	return targetm.fold_builtin (fndecl, n, argarray, false);
10817       else
10818 	return fold_builtin_n (loc, fndecl, argarray, n, false);
10819     }
10820 
10821   return NULL_TREE;
10822 }
10823 
10824 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10825    along with N new arguments specified as the "..." parameters.  SKIP
10826    is the number of arguments in EXP to be omitted.  This function is used
10827    to do varargs-to-varargs transformations.  */
10828 
10829 static tree
10830 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10831 {
10832   va_list ap;
10833   tree t;
10834 
10835   va_start (ap, n);
10836   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10837 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10838   va_end (ap);
10839 
10840   return t;
10841 }
10842 
10843 /* Validate a single argument ARG against a tree code CODE representing
10844    a type.  */
10845 
10846 static bool
10847 validate_arg (const_tree arg, enum tree_code code)
10848 {
10849   if (!arg)
10850     return false;
10851   else if (code == POINTER_TYPE)
10852     return POINTER_TYPE_P (TREE_TYPE (arg));
10853   else if (code == INTEGER_TYPE)
10854     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10855   return code == TREE_CODE (TREE_TYPE (arg));
10856 }
10857 
10858 /* This function validates the types of a function call argument list
10859    against a specified list of tree_codes.  If the last specifier is a 0,
10860    that represents an ellipses, otherwise the last specifier must be a
10861    VOID_TYPE.
10862 
10863    This is the GIMPLE version of validate_arglist.  Eventually we want to
10864    completely convert builtins.c to work from GIMPLEs and the tree based
10865    validate_arglist will then be removed.  */
10866 
10867 bool
10868 validate_gimple_arglist (const gcall *call, ...)
10869 {
10870   enum tree_code code;
10871   bool res = 0;
10872   va_list ap;
10873   const_tree arg;
10874   size_t i;
10875 
10876   va_start (ap, call);
10877   i = 0;
10878 
10879   do
10880     {
10881       code = (enum tree_code) va_arg (ap, int);
10882       switch (code)
10883 	{
10884 	case 0:
10885 	  /* This signifies an ellipses, any further arguments are all ok.  */
10886 	  res = true;
10887 	  goto end;
10888 	case VOID_TYPE:
10889 	  /* This signifies an endlink, if no arguments remain, return
10890 	     true, otherwise return false.  */
10891 	  res = (i == gimple_call_num_args (call));
10892 	  goto end;
10893 	default:
10894 	  /* If no parameters remain or the parameter's code does not
10895 	     match the specified code, return false.  Otherwise continue
10896 	     checking any remaining arguments.  */
10897 	  arg = gimple_call_arg (call, i++);
10898 	  if (!validate_arg (arg, code))
10899 	    goto end;
10900 	  break;
10901 	}
10902     }
10903   while (1);
10904 
10905   /* We need gotos here since we can only have one VA_CLOSE in a
10906      function.  */
10907  end: ;
10908   va_end (ap);
10909 
10910   return res;
10911 }
10912 
10913 /* Default target-specific builtin expander that does nothing.  */
10914 
10915 rtx
10916 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10917 			rtx target ATTRIBUTE_UNUSED,
10918 			rtx subtarget ATTRIBUTE_UNUSED,
10919 			machine_mode mode ATTRIBUTE_UNUSED,
10920 			int ignore ATTRIBUTE_UNUSED)
10921 {
10922   return NULL_RTX;
10923 }
10924 
10925 /* Returns true is EXP represents data that would potentially reside
10926    in a readonly section.  */
10927 
10928 bool
10929 readonly_data_expr (tree exp)
10930 {
10931   STRIP_NOPS (exp);
10932 
10933   if (TREE_CODE (exp) != ADDR_EXPR)
10934     return false;
10935 
10936   exp = get_base_address (TREE_OPERAND (exp, 0));
10937   if (!exp)
10938     return false;
10939 
10940   /* Make sure we call decl_readonly_section only for trees it
10941      can handle (since it returns true for everything it doesn't
10942      understand).  */
10943   if (TREE_CODE (exp) == STRING_CST
10944       || TREE_CODE (exp) == CONSTRUCTOR
10945       || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10946     return decl_readonly_section (exp, 0);
10947   else
10948     return false;
10949 }
10950 
10951 /* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
10952    to the call, and TYPE is its return type.
10953 
10954    Return NULL_TREE if no simplification was possible, otherwise return the
10955    simplified form of the call as a tree.
10956 
10957    The simplified form may be a constant or other expression which
10958    computes the same value, but in a more efficient manner (including
10959    calls to other builtin functions).
10960 
10961    The call may contain arguments which need to be evaluated, but
10962    which are not useful to determine the result of the call.  In
10963    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10964    COMPOUND_EXPR will be an argument which must be evaluated.
10965    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10966    COMPOUND_EXPR in the chain will contain the tree for the simplified
10967    form of the builtin function call.  */
10968 
10969 static tree
10970 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10971 {
10972   if (!validate_arg (s1, POINTER_TYPE)
10973       || !validate_arg (s2, POINTER_TYPE))
10974     return NULL_TREE;
10975   else
10976     {
10977       tree fn;
10978       const char *p1, *p2;
10979 
10980       p2 = c_getstr (s2);
10981       if (p2 == NULL)
10982 	return NULL_TREE;
10983 
10984       p1 = c_getstr (s1);
10985       if (p1 != NULL)
10986 	{
10987 	  const char *r = strstr (p1, p2);
10988 	  tree tem;
10989 
10990 	  if (r == NULL)
10991 	    return build_int_cst (TREE_TYPE (s1), 0);
10992 
10993 	  /* Return an offset into the constant string argument.  */
10994 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10995 	  return fold_convert_loc (loc, type, tem);
10996 	}
10997 
10998       /* The argument is const char *, and the result is char *, so we need
10999 	 a type conversion here to avoid a warning.  */
11000       if (p2[0] == '\0')
11001 	return fold_convert_loc (loc, type, s1);
11002 
11003       if (p2[1] != '\0')
11004 	return NULL_TREE;
11005 
11006       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11007       if (!fn)
11008 	return NULL_TREE;
11009 
11010       /* New argument list transforming strstr(s1, s2) to
11011 	 strchr(s1, s2[0]).  */
11012       return build_call_expr_loc (loc, fn, 2, s1,
11013 				  build_int_cst (integer_type_node, p2[0]));
11014     }
11015 }
11016 
11017 /* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
11018    the call, and TYPE is its return type.
11019 
11020    Return NULL_TREE if no simplification was possible, otherwise return the
11021    simplified form of the call as a tree.
11022 
11023    The simplified form may be a constant or other expression which
11024    computes the same value, but in a more efficient manner (including
11025    calls to other builtin functions).
11026 
11027    The call may contain arguments which need to be evaluated, but
11028    which are not useful to determine the result of the call.  In
11029    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11030    COMPOUND_EXPR will be an argument which must be evaluated.
11031    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11032    COMPOUND_EXPR in the chain will contain the tree for the simplified
11033    form of the builtin function call.  */
11034 
11035 static tree
11036 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11037 {
11038   if (!validate_arg (s1, POINTER_TYPE)
11039       || !validate_arg (s2, INTEGER_TYPE))
11040     return NULL_TREE;
11041   else
11042     {
11043       const char *p1;
11044 
11045       if (TREE_CODE (s2) != INTEGER_CST)
11046 	return NULL_TREE;
11047 
11048       p1 = c_getstr (s1);
11049       if (p1 != NULL)
11050 	{
11051 	  char c;
11052 	  const char *r;
11053 	  tree tem;
11054 
11055 	  if (target_char_cast (s2, &c))
11056 	    return NULL_TREE;
11057 
11058 	  r = strchr (p1, c);
11059 
11060 	  if (r == NULL)
11061 	    return build_int_cst (TREE_TYPE (s1), 0);
11062 
11063 	  /* Return an offset into the constant string argument.  */
11064 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11065 	  return fold_convert_loc (loc, type, tem);
11066 	}
11067       return NULL_TREE;
11068     }
11069 }
11070 
11071 /* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
11072    the call, and TYPE is its return type.
11073 
11074    Return NULL_TREE if no simplification was possible, otherwise return the
11075    simplified form of the call as a tree.
11076 
11077    The simplified form may be a constant or other expression which
11078    computes the same value, but in a more efficient manner (including
11079    calls to other builtin functions).
11080 
11081    The call may contain arguments which need to be evaluated, but
11082    which are not useful to determine the result of the call.  In
11083    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11084    COMPOUND_EXPR will be an argument which must be evaluated.
11085    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11086    COMPOUND_EXPR in the chain will contain the tree for the simplified
11087    form of the builtin function call.  */
11088 
11089 static tree
11090 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11091 {
11092   if (!validate_arg (s1, POINTER_TYPE)
11093       || !validate_arg (s2, INTEGER_TYPE))
11094     return NULL_TREE;
11095   else
11096     {
11097       tree fn;
11098       const char *p1;
11099 
11100       if (TREE_CODE (s2) != INTEGER_CST)
11101 	return NULL_TREE;
11102 
11103       p1 = c_getstr (s1);
11104       if (p1 != NULL)
11105 	{
11106 	  char c;
11107 	  const char *r;
11108 	  tree tem;
11109 
11110 	  if (target_char_cast (s2, &c))
11111 	    return NULL_TREE;
11112 
11113 	  r = strrchr (p1, c);
11114 
11115 	  if (r == NULL)
11116 	    return build_int_cst (TREE_TYPE (s1), 0);
11117 
11118 	  /* Return an offset into the constant string argument.  */
11119 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11120 	  return fold_convert_loc (loc, type, tem);
11121 	}
11122 
11123       if (! integer_zerop (s2))
11124 	return NULL_TREE;
11125 
11126       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11127       if (!fn)
11128 	return NULL_TREE;
11129 
11130       /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
11131       return build_call_expr_loc (loc, fn, 2, s1, s2);
11132     }
11133 }
11134 
11135 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
11136    to the call, and TYPE is its return type.
11137 
11138    Return NULL_TREE if no simplification was possible, otherwise return the
11139    simplified form of the call as a tree.
11140 
11141    The simplified form may be a constant or other expression which
11142    computes the same value, but in a more efficient manner (including
11143    calls to other builtin functions).
11144 
11145    The call may contain arguments which need to be evaluated, but
11146    which are not useful to determine the result of the call.  In
11147    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11148    COMPOUND_EXPR will be an argument which must be evaluated.
11149    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11150    COMPOUND_EXPR in the chain will contain the tree for the simplified
11151    form of the builtin function call.  */
11152 
11153 static tree
11154 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11155 {
11156   if (!validate_arg (s1, POINTER_TYPE)
11157       || !validate_arg (s2, POINTER_TYPE))
11158     return NULL_TREE;
11159   else
11160     {
11161       tree fn;
11162       const char *p1, *p2;
11163 
11164       p2 = c_getstr (s2);
11165       if (p2 == NULL)
11166 	return NULL_TREE;
11167 
11168       p1 = c_getstr (s1);
11169       if (p1 != NULL)
11170 	{
11171 	  const char *r = strpbrk (p1, p2);
11172 	  tree tem;
11173 
11174 	  if (r == NULL)
11175 	    return build_int_cst (TREE_TYPE (s1), 0);
11176 
11177 	  /* Return an offset into the constant string argument.  */
11178 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11179 	  return fold_convert_loc (loc, type, tem);
11180 	}
11181 
11182       if (p2[0] == '\0')
11183 	/* strpbrk(x, "") == NULL.
11184 	   Evaluate and ignore s1 in case it had side-effects.  */
11185 	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11186 
11187       if (p2[1] != '\0')
11188 	return NULL_TREE;  /* Really call strpbrk.  */
11189 
11190       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11191       if (!fn)
11192 	return NULL_TREE;
11193 
11194       /* New argument list transforming strpbrk(s1, s2) to
11195 	 strchr(s1, s2[0]).  */
11196       return build_call_expr_loc (loc, fn, 2, s1,
11197 				  build_int_cst (integer_type_node, p2[0]));
11198     }
11199 }
11200 
11201 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
11202    to the call.
11203 
11204    Return NULL_TREE if no simplification was possible, otherwise return the
11205    simplified form of the call as a tree.
11206 
11207    The simplified form may be a constant or other expression which
11208    computes the same value, but in a more efficient manner (including
11209    calls to other builtin functions).
11210 
11211    The call may contain arguments which need to be evaluated, but
11212    which are not useful to determine the result of the call.  In
11213    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11214    COMPOUND_EXPR will be an argument which must be evaluated.
11215    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11216    COMPOUND_EXPR in the chain will contain the tree for the simplified
11217    form of the builtin function call.  */
11218 
11219 static tree
11220 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11221 {
11222   if (!validate_arg (s1, POINTER_TYPE)
11223       || !validate_arg (s2, POINTER_TYPE))
11224     return NULL_TREE;
11225   else
11226     {
11227       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11228 
11229       /* If both arguments are constants, evaluate at compile-time.  */
11230       if (p1 && p2)
11231 	{
11232 	  const size_t r = strspn (p1, p2);
11233 	  return build_int_cst (size_type_node, r);
11234 	}
11235 
11236       /* If either argument is "", return NULL_TREE.  */
11237       if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11238 	/* Evaluate and ignore both arguments in case either one has
11239 	   side-effects.  */
11240 	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11241 				  s1, s2);
11242       return NULL_TREE;
11243     }
11244 }
11245 
11246 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
11247    to the call.
11248 
11249    Return NULL_TREE if no simplification was possible, otherwise return the
11250    simplified form of the call as a tree.
11251 
11252    The simplified form may be a constant or other expression which
11253    computes the same value, but in a more efficient manner (including
11254    calls to other builtin functions).
11255 
11256    The call may contain arguments which need to be evaluated, but
11257    which are not useful to determine the result of the call.  In
11258    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11259    COMPOUND_EXPR will be an argument which must be evaluated.
11260    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11261    COMPOUND_EXPR in the chain will contain the tree for the simplified
11262    form of the builtin function call.  */
11263 
11264 static tree
11265 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11266 {
11267   if (!validate_arg (s1, POINTER_TYPE)
11268       || !validate_arg (s2, POINTER_TYPE))
11269     return NULL_TREE;
11270   else
11271     {
11272       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11273 
11274       /* If both arguments are constants, evaluate at compile-time.  */
11275       if (p1 && p2)
11276 	{
11277 	  const size_t r = strcspn (p1, p2);
11278 	  return build_int_cst (size_type_node, r);
11279 	}
11280 
11281       /* If the first argument is "", return NULL_TREE.  */
11282       if (p1 && *p1 == '\0')
11283 	{
11284 	  /* Evaluate and ignore argument s2 in case it has
11285 	     side-effects.  */
11286 	  return omit_one_operand_loc (loc, size_type_node,
11287 				   size_zero_node, s2);
11288 	}
11289 
11290       /* If the second argument is "", return __builtin_strlen(s1).  */
11291       if (p2 && *p2 == '\0')
11292 	{
11293 	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11294 
11295 	  /* If the replacement _DECL isn't initialized, don't do the
11296 	     transformation.  */
11297 	  if (!fn)
11298 	    return NULL_TREE;
11299 
11300 	  return build_call_expr_loc (loc, fn, 1, s1);
11301 	}
11302       return NULL_TREE;
11303     }
11304 }
11305 
11306 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11307    produced.  False otherwise.  This is done so that we don't output the error
11308    or warning twice or three times.  */
11309 
11310 bool
11311 fold_builtin_next_arg (tree exp, bool va_start_p)
11312 {
11313   tree fntype = TREE_TYPE (current_function_decl);
11314   int nargs = call_expr_nargs (exp);
11315   tree arg;
11316   /* There is good chance the current input_location points inside the
11317      definition of the va_start macro (perhaps on the token for
11318      builtin) in a system header, so warnings will not be emitted.
11319      Use the location in real source code.  */
11320   source_location current_location =
11321     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11322 					      NULL);
11323 
11324   if (!stdarg_p (fntype))
11325     {
11326       error ("%<va_start%> used in function with fixed args");
11327       return true;
11328     }
11329 
11330   if (va_start_p)
11331     {
11332       if (va_start_p && (nargs != 2))
11333 	{
11334 	  error ("wrong number of arguments to function %<va_start%>");
11335 	  return true;
11336 	}
11337       arg = CALL_EXPR_ARG (exp, 1);
11338     }
11339   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11340      when we checked the arguments and if needed issued a warning.  */
11341   else
11342     {
11343       if (nargs == 0)
11344 	{
11345 	  /* Evidently an out of date version of <stdarg.h>; can't validate
11346 	     va_start's second argument, but can still work as intended.  */
11347 	  warning_at (current_location,
11348 		      OPT_Wvarargs,
11349 		   "%<__builtin_next_arg%> called without an argument");
11350 	  return true;
11351 	}
11352       else if (nargs > 1)
11353 	{
11354 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
11355 	  return true;
11356 	}
11357       arg = CALL_EXPR_ARG (exp, 0);
11358     }
11359 
11360   if (TREE_CODE (arg) == SSA_NAME)
11361     arg = SSA_NAME_VAR (arg);
11362 
11363   /* We destructively modify the call to be __builtin_va_start (ap, 0)
11364      or __builtin_next_arg (0) the first time we see it, after checking
11365      the arguments and if needed issuing a warning.  */
11366   if (!integer_zerop (arg))
11367     {
11368       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11369 
11370       /* Strip off all nops for the sake of the comparison.  This
11371 	 is not quite the same as STRIP_NOPS.  It does more.
11372 	 We must also strip off INDIRECT_EXPR for C++ reference
11373 	 parameters.  */
11374       while (CONVERT_EXPR_P (arg)
11375 	     || TREE_CODE (arg) == INDIRECT_REF)
11376 	arg = TREE_OPERAND (arg, 0);
11377       if (arg != last_parm)
11378 	{
11379 	  /* FIXME: Sometimes with the tree optimizers we can get the
11380 	     not the last argument even though the user used the last
11381 	     argument.  We just warn and set the arg to be the last
11382 	     argument so that we will get wrong-code because of
11383 	     it.  */
11384 	  warning_at (current_location,
11385 		      OPT_Wvarargs,
11386 		      "second parameter of %<va_start%> not last named argument");
11387 	}
11388 
11389       /* Undefined by C99 7.15.1.4p4 (va_start):
11390          "If the parameter parmN is declared with the register storage
11391          class, with a function or array type, or with a type that is
11392          not compatible with the type that results after application of
11393          the default argument promotions, the behavior is undefined."
11394       */
11395       else if (DECL_REGISTER (arg))
11396 	{
11397 	  warning_at (current_location,
11398 		      OPT_Wvarargs,
11399 		      "undefined behaviour when second parameter of "
11400 		      "%<va_start%> is declared with %<register%> storage");
11401 	}
11402 
11403       /* We want to verify the second parameter just once before the tree
11404 	 optimizers are run and then avoid keeping it in the tree,
11405 	 as otherwise we could warn even for correct code like:
11406 	 void foo (int i, ...)
11407 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
11408       if (va_start_p)
11409 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11410       else
11411 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11412     }
11413   return false;
11414 }
11415 
11416 
11417 /* Expand a call EXP to __builtin_object_size.  */
11418 
11419 static rtx
11420 expand_builtin_object_size (tree exp)
11421 {
11422   tree ost;
11423   int object_size_type;
11424   tree fndecl = get_callee_fndecl (exp);
11425 
11426   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11427     {
11428       error ("%Kfirst argument of %D must be a pointer, second integer constant",
11429 	     exp, fndecl);
11430       expand_builtin_trap ();
11431       return const0_rtx;
11432     }
11433 
11434   ost = CALL_EXPR_ARG (exp, 1);
11435   STRIP_NOPS (ost);
11436 
11437   if (TREE_CODE (ost) != INTEGER_CST
11438       || tree_int_cst_sgn (ost) < 0
11439       || compare_tree_int (ost, 3) > 0)
11440     {
11441       error ("%Klast argument of %D is not integer constant between 0 and 3",
11442 	     exp, fndecl);
11443       expand_builtin_trap ();
11444       return const0_rtx;
11445     }
11446 
11447   object_size_type = tree_to_shwi (ost);
11448 
11449   return object_size_type < 2 ? constm1_rtx : const0_rtx;
11450 }
11451 
11452 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11453    FCODE is the BUILT_IN_* to use.
11454    Return NULL_RTX if we failed; the caller should emit a normal call,
11455    otherwise try to get the result in TARGET, if convenient (and in
11456    mode MODE if that's convenient).  */
11457 
11458 static rtx
11459 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11460 			   enum built_in_function fcode)
11461 {
11462   tree dest, src, len, size;
11463 
11464   if (!validate_arglist (exp,
11465 			 POINTER_TYPE,
11466 			 fcode == BUILT_IN_MEMSET_CHK
11467 			 ? INTEGER_TYPE : POINTER_TYPE,
11468 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11469     return NULL_RTX;
11470 
11471   dest = CALL_EXPR_ARG (exp, 0);
11472   src = CALL_EXPR_ARG (exp, 1);
11473   len = CALL_EXPR_ARG (exp, 2);
11474   size = CALL_EXPR_ARG (exp, 3);
11475 
11476   if (! tree_fits_uhwi_p (size))
11477     return NULL_RTX;
11478 
11479   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11480     {
11481       tree fn;
11482 
11483       if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11484 	{
11485 	  warning_at (tree_nonartificial_location (exp),
11486 		      0, "%Kcall to %D will always overflow destination buffer",
11487 		      exp, get_callee_fndecl (exp));
11488 	  return NULL_RTX;
11489 	}
11490 
11491       fn = NULL_TREE;
11492       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11493 	 mem{cpy,pcpy,move,set} is available.  */
11494       switch (fcode)
11495 	{
11496 	case BUILT_IN_MEMCPY_CHK:
11497 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11498 	  break;
11499 	case BUILT_IN_MEMPCPY_CHK:
11500 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11501 	  break;
11502 	case BUILT_IN_MEMMOVE_CHK:
11503 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11504 	  break;
11505 	case BUILT_IN_MEMSET_CHK:
11506 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11507 	  break;
11508 	default:
11509 	  break;
11510 	}
11511 
11512       if (! fn)
11513 	return NULL_RTX;
11514 
11515       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11516       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11517       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11518       return expand_expr (fn, target, mode, EXPAND_NORMAL);
11519     }
11520   else if (fcode == BUILT_IN_MEMSET_CHK)
11521     return NULL_RTX;
11522   else
11523     {
11524       unsigned int dest_align = get_pointer_alignment (dest);
11525 
11526       /* If DEST is not a pointer type, call the normal function.  */
11527       if (dest_align == 0)
11528 	return NULL_RTX;
11529 
11530       /* If SRC and DEST are the same (and not volatile), do nothing.  */
11531       if (operand_equal_p (src, dest, 0))
11532 	{
11533 	  tree expr;
11534 
11535 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
11536 	    {
11537 	      /* Evaluate and ignore LEN in case it has side-effects.  */
11538 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11539 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
11540 	    }
11541 
11542 	  expr = fold_build_pointer_plus (dest, len);
11543 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
11544 	}
11545 
11546       /* __memmove_chk special case.  */
11547       if (fcode == BUILT_IN_MEMMOVE_CHK)
11548 	{
11549 	  unsigned int src_align = get_pointer_alignment (src);
11550 
11551 	  if (src_align == 0)
11552 	    return NULL_RTX;
11553 
11554 	  /* If src is categorized for a readonly section we can use
11555 	     normal __memcpy_chk.  */
11556 	  if (readonly_data_expr (src))
11557 	    {
11558 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11559 	      if (!fn)
11560 		return NULL_RTX;
11561 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11562 					  dest, src, len, size);
11563 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11564 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11565 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
11566 	    }
11567 	}
11568       return NULL_RTX;
11569     }
11570 }
11571 
11572 /* Emit warning if a buffer overflow is detected at compile time.  */
11573 
11574 static void
11575 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11576 {
11577   int is_strlen = 0;
11578   tree len, size;
11579   location_t loc = tree_nonartificial_location (exp);
11580 
11581   switch (fcode)
11582     {
11583     case BUILT_IN_STRCPY_CHK:
11584     case BUILT_IN_STPCPY_CHK:
11585     /* For __strcat_chk the warning will be emitted only if overflowing
11586        by at least strlen (dest) + 1 bytes.  */
11587     case BUILT_IN_STRCAT_CHK:
11588       len = CALL_EXPR_ARG (exp, 1);
11589       size = CALL_EXPR_ARG (exp, 2);
11590       is_strlen = 1;
11591       break;
11592     case BUILT_IN_STRNCAT_CHK:
11593     case BUILT_IN_STRNCPY_CHK:
11594     case BUILT_IN_STPNCPY_CHK:
11595       len = CALL_EXPR_ARG (exp, 2);
11596       size = CALL_EXPR_ARG (exp, 3);
11597       break;
11598     case BUILT_IN_SNPRINTF_CHK:
11599     case BUILT_IN_VSNPRINTF_CHK:
11600       len = CALL_EXPR_ARG (exp, 1);
11601       size = CALL_EXPR_ARG (exp, 3);
11602       break;
11603     default:
11604       gcc_unreachable ();
11605     }
11606 
11607   if (!len || !size)
11608     return;
11609 
11610   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11611     return;
11612 
11613   if (is_strlen)
11614     {
11615       len = c_strlen (len, 1);
11616       if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11617 	return;
11618     }
11619   else if (fcode == BUILT_IN_STRNCAT_CHK)
11620     {
11621       tree src = CALL_EXPR_ARG (exp, 1);
11622       if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11623 	return;
11624       src = c_strlen (src, 1);
11625       if (! src || ! tree_fits_uhwi_p (src))
11626 	{
11627 	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11628 		      exp, get_callee_fndecl (exp));
11629 	  return;
11630 	}
11631       else if (tree_int_cst_lt (src, size))
11632 	return;
11633     }
11634   else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11635     return;
11636 
11637   warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11638 	      exp, get_callee_fndecl (exp));
11639 }
11640 
11641 /* Emit warning if a buffer overflow is detected at compile time
11642    in __sprintf_chk/__vsprintf_chk calls.  */
11643 
11644 static void
11645 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11646 {
11647   tree size, len, fmt;
11648   const char *fmt_str;
11649   int nargs = call_expr_nargs (exp);
11650 
11651   /* Verify the required arguments in the original call.  */
11652 
11653   if (nargs < 4)
11654     return;
11655   size = CALL_EXPR_ARG (exp, 2);
11656   fmt = CALL_EXPR_ARG (exp, 3);
11657 
11658   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11659     return;
11660 
11661   /* Check whether the format is a literal string constant.  */
11662   fmt_str = c_getstr (fmt);
11663   if (fmt_str == NULL)
11664     return;
11665 
11666   if (!init_target_chars ())
11667     return;
11668 
11669   /* If the format doesn't contain % args or %%, we know its size.  */
11670   if (strchr (fmt_str, target_percent) == 0)
11671     len = build_int_cstu (size_type_node, strlen (fmt_str));
11672   /* If the format is "%s" and first ... argument is a string literal,
11673      we know it too.  */
11674   else if (fcode == BUILT_IN_SPRINTF_CHK
11675 	   && strcmp (fmt_str, target_percent_s) == 0)
11676     {
11677       tree arg;
11678 
11679       if (nargs < 5)
11680 	return;
11681       arg = CALL_EXPR_ARG (exp, 4);
11682       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11683 	return;
11684 
11685       len = c_strlen (arg, 1);
11686       if (!len || ! tree_fits_uhwi_p (len))
11687 	return;
11688     }
11689   else
11690     return;
11691 
11692   if (! tree_int_cst_lt (len, size))
11693     warning_at (tree_nonartificial_location (exp),
11694 		0, "%Kcall to %D will always overflow destination buffer",
11695 		exp, get_callee_fndecl (exp));
11696 }
11697 
11698 /* Emit warning if a free is called with address of a variable.  */
11699 
11700 static void
11701 maybe_emit_free_warning (tree exp)
11702 {
11703   tree arg = CALL_EXPR_ARG (exp, 0);
11704 
11705   STRIP_NOPS (arg);
11706   if (TREE_CODE (arg) != ADDR_EXPR)
11707     return;
11708 
11709   arg = get_base_address (TREE_OPERAND (arg, 0));
11710   if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11711     return;
11712 
11713   if (SSA_VAR_P (arg))
11714     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11715 		"%Kattempt to free a non-heap object %qD", exp, arg);
11716   else
11717     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11718 		"%Kattempt to free a non-heap object", exp);
11719 }
11720 
11721 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11722    if possible.  */
11723 
11724 static tree
11725 fold_builtin_object_size (tree ptr, tree ost)
11726 {
11727   unsigned HOST_WIDE_INT bytes;
11728   int object_size_type;
11729 
11730   if (!validate_arg (ptr, POINTER_TYPE)
11731       || !validate_arg (ost, INTEGER_TYPE))
11732     return NULL_TREE;
11733 
11734   STRIP_NOPS (ost);
11735 
11736   if (TREE_CODE (ost) != INTEGER_CST
11737       || tree_int_cst_sgn (ost) < 0
11738       || compare_tree_int (ost, 3) > 0)
11739     return NULL_TREE;
11740 
11741   object_size_type = tree_to_shwi (ost);
11742 
11743   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11744      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11745      and (size_t) 0 for types 2 and 3.  */
11746   if (TREE_SIDE_EFFECTS (ptr))
11747     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11748 
11749   if (TREE_CODE (ptr) == ADDR_EXPR)
11750     {
11751       bytes = compute_builtin_object_size (ptr, object_size_type);
11752       if (wi::fits_to_tree_p (bytes, size_type_node))
11753 	return build_int_cstu (size_type_node, bytes);
11754     }
11755   else if (TREE_CODE (ptr) == SSA_NAME)
11756     {
11757       /* If object size is not known yet, delay folding until
11758        later.  Maybe subsequent passes will help determining
11759        it.  */
11760       bytes = compute_builtin_object_size (ptr, object_size_type);
11761       if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11762           && wi::fits_to_tree_p (bytes, size_type_node))
11763 	return build_int_cstu (size_type_node, bytes);
11764     }
11765 
11766   return NULL_TREE;
11767 }
11768 
11769 /* Builtins with folding operations that operate on "..." arguments
11770    need special handling; we need to store the arguments in a convenient
11771    data structure before attempting any folding.  Fortunately there are
11772    only a few builtins that fall into this category.  FNDECL is the
11773    function, EXP is the CALL_EXPR for the call.  */
11774 
11775 static tree
11776 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11777 {
11778   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11779   tree ret = NULL_TREE;
11780 
11781   switch (fcode)
11782     {
11783     case BUILT_IN_FPCLASSIFY:
11784       ret = fold_builtin_fpclassify (loc, args, nargs);
11785       break;
11786 
11787     default:
11788       break;
11789     }
11790   if (ret)
11791     {
11792       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11793       SET_EXPR_LOCATION (ret, loc);
11794       TREE_NO_WARNING (ret) = 1;
11795       return ret;
11796     }
11797   return NULL_TREE;
11798 }
11799 
11800 /* Initialize format string characters in the target charset.  */
11801 
11802 bool
11803 init_target_chars (void)
11804 {
11805   static bool init;
11806   if (!init)
11807     {
11808       target_newline = lang_hooks.to_target_charset ('\n');
11809       target_percent = lang_hooks.to_target_charset ('%');
11810       target_c = lang_hooks.to_target_charset ('c');
11811       target_s = lang_hooks.to_target_charset ('s');
11812       if (target_newline == 0 || target_percent == 0 || target_c == 0
11813 	  || target_s == 0)
11814 	return false;
11815 
11816       target_percent_c[0] = target_percent;
11817       target_percent_c[1] = target_c;
11818       target_percent_c[2] = '\0';
11819 
11820       target_percent_s[0] = target_percent;
11821       target_percent_s[1] = target_s;
11822       target_percent_s[2] = '\0';
11823 
11824       target_percent_s_newline[0] = target_percent;
11825       target_percent_s_newline[1] = target_s;
11826       target_percent_s_newline[2] = target_newline;
11827       target_percent_s_newline[3] = '\0';
11828 
11829       init = true;
11830     }
11831   return true;
11832 }
11833 
11834 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
11835    and no overflow/underflow occurred.  INEXACT is true if M was not
11836    exactly calculated.  TYPE is the tree type for the result.  This
11837    function assumes that you cleared the MPFR flags and then
11838    calculated M to see if anything subsequently set a flag prior to
11839    entering this function.  Return NULL_TREE if any checks fail.  */
11840 
11841 static tree
11842 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11843 {
11844   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11845      overflow/underflow occurred.  If -frounding-math, proceed iff the
11846      result of calling FUNC was exact.  */
11847   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11848       && (!flag_rounding_math || !inexact))
11849     {
11850       REAL_VALUE_TYPE rr;
11851 
11852       real_from_mpfr (&rr, m, type, GMP_RNDN);
11853       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11854 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
11855 	 but the mpft_t is not, then we underflowed in the
11856 	 conversion.  */
11857       if (real_isfinite (&rr)
11858 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11859         {
11860 	  REAL_VALUE_TYPE rmode;
11861 
11862 	  real_convert (&rmode, TYPE_MODE (type), &rr);
11863 	  /* Proceed iff the specified mode can hold the value.  */
11864 	  if (real_identical (&rmode, &rr))
11865 	    return build_real (type, rmode);
11866 	}
11867     }
11868   return NULL_TREE;
11869 }
11870 
11871 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
11872    number and no overflow/underflow occurred.  INEXACT is true if M
11873    was not exactly calculated.  TYPE is the tree type for the result.
11874    This function assumes that you cleared the MPFR flags and then
11875    calculated M to see if anything subsequently set a flag prior to
11876    entering this function.  Return NULL_TREE if any checks fail, if
11877    FORCE_CONVERT is true, then bypass the checks.  */
11878 
11879 static tree
11880 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11881 {
11882   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11883      overflow/underflow occurred.  If -frounding-math, proceed iff the
11884      result of calling FUNC was exact.  */
11885   if (force_convert
11886       || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11887 	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
11888 	  && (!flag_rounding_math || !inexact)))
11889     {
11890       REAL_VALUE_TYPE re, im;
11891 
11892       real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11893       real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11894       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11895 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
11896 	 but the mpft_t is not, then we underflowed in the
11897 	 conversion.  */
11898       if (force_convert
11899 	  || (real_isfinite (&re) && real_isfinite (&im)
11900 	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11901 	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11902         {
11903 	  REAL_VALUE_TYPE re_mode, im_mode;
11904 
11905 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11906 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11907 	  /* Proceed iff the specified mode can hold the value.  */
11908 	  if (force_convert
11909 	      || (real_identical (&re_mode, &re)
11910 		  && real_identical (&im_mode, &im)))
11911 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11912 				  build_real (TREE_TYPE (type), im_mode));
11913 	}
11914     }
11915   return NULL_TREE;
11916 }
11917 
11918 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11919    FUNC on it and return the resulting value as a tree with type TYPE.
11920    If MIN and/or MAX are not NULL, then the supplied ARG must be
11921    within those bounds.  If INCLUSIVE is true, then MIN/MAX are
11922    acceptable values, otherwise they are not.  The mpfr precision is
11923    set to the precision of TYPE.  We assume that function FUNC returns
11924    zero if the result could be calculated exactly within the requested
11925    precision.  */
11926 
11927 static tree
11928 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11929 	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11930 	      bool inclusive)
11931 {
11932   tree result = NULL_TREE;
11933 
11934   STRIP_NOPS (arg);
11935 
11936   /* To proceed, MPFR must exactly represent the target floating point
11937      format, which only happens when the target base equals two.  */
11938   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11939       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11940     {
11941       const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11942 
11943       if (real_isfinite (ra)
11944 	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11945 	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11946         {
11947 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11948 	  const int prec = fmt->p;
11949 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11950 	  int inexact;
11951 	  mpfr_t m;
11952 
11953 	  mpfr_init2 (m, prec);
11954 	  mpfr_from_real (m, ra, GMP_RNDN);
11955 	  mpfr_clear_flags ();
11956 	  inexact = func (m, m, rnd);
11957 	  result = do_mpfr_ckconv (m, type, inexact);
11958 	  mpfr_clear (m);
11959 	}
11960     }
11961 
11962   return result;
11963 }
11964 
11965 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11966    FUNC on it and return the resulting value as a tree with type TYPE.
11967    The mpfr precision is set to the precision of TYPE.  We assume that
11968    function FUNC returns zero if the result could be calculated
11969    exactly within the requested precision.  */
11970 
11971 static tree
11972 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11973 	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11974 {
11975   tree result = NULL_TREE;
11976 
11977   STRIP_NOPS (arg1);
11978   STRIP_NOPS (arg2);
11979 
11980   /* To proceed, MPFR must exactly represent the target floating point
11981      format, which only happens when the target base equals two.  */
11982   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11983       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11984       && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11985     {
11986       const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11987       const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11988 
11989       if (real_isfinite (ra1) && real_isfinite (ra2))
11990         {
11991 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11992 	  const int prec = fmt->p;
11993 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11994 	  int inexact;
11995 	  mpfr_t m1, m2;
11996 
11997 	  mpfr_inits2 (prec, m1, m2, NULL);
11998 	  mpfr_from_real (m1, ra1, GMP_RNDN);
11999 	  mpfr_from_real (m2, ra2, GMP_RNDN);
12000 	  mpfr_clear_flags ();
12001 	  inexact = func (m1, m1, m2, rnd);
12002 	  result = do_mpfr_ckconv (m1, type, inexact);
12003 	  mpfr_clears (m1, m2, NULL);
12004 	}
12005     }
12006 
12007   return result;
12008 }
12009 
12010 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12011    FUNC on it and return the resulting value as a tree with type TYPE.
12012    The mpfr precision is set to the precision of TYPE.  We assume that
12013    function FUNC returns zero if the result could be calculated
12014    exactly within the requested precision.  */
12015 
12016 static tree
12017 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12018 	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12019 {
12020   tree result = NULL_TREE;
12021 
12022   STRIP_NOPS (arg1);
12023   STRIP_NOPS (arg2);
12024   STRIP_NOPS (arg3);
12025 
12026   /* To proceed, MPFR must exactly represent the target floating point
12027      format, which only happens when the target base equals two.  */
12028   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12029       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12030       && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12031       && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12032     {
12033       const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12034       const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12035       const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12036 
12037       if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12038         {
12039 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12040 	  const int prec = fmt->p;
12041 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12042 	  int inexact;
12043 	  mpfr_t m1, m2, m3;
12044 
12045 	  mpfr_inits2 (prec, m1, m2, m3, NULL);
12046 	  mpfr_from_real (m1, ra1, GMP_RNDN);
12047 	  mpfr_from_real (m2, ra2, GMP_RNDN);
12048 	  mpfr_from_real (m3, ra3, GMP_RNDN);
12049 	  mpfr_clear_flags ();
12050 	  inexact = func (m1, m1, m2, m3, rnd);
12051 	  result = do_mpfr_ckconv (m1, type, inexact);
12052 	  mpfr_clears (m1, m2, m3, NULL);
12053 	}
12054     }
12055 
12056   return result;
12057 }
12058 
12059 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12060    the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12061    If ARG_SINP and ARG_COSP are NULL then the result is returned
12062    as a complex value.
12063    The type is taken from the type of ARG and is used for setting the
12064    precision of the calculation and results.  */
12065 
12066 static tree
12067 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12068 {
12069   tree const type = TREE_TYPE (arg);
12070   tree result = NULL_TREE;
12071 
12072   STRIP_NOPS (arg);
12073 
12074   /* To proceed, MPFR must exactly represent the target floating point
12075      format, which only happens when the target base equals two.  */
12076   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12077       && TREE_CODE (arg) == REAL_CST
12078       && !TREE_OVERFLOW (arg))
12079     {
12080       const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12081 
12082       if (real_isfinite (ra))
12083         {
12084 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12085 	  const int prec = fmt->p;
12086 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12087 	  tree result_s, result_c;
12088 	  int inexact;
12089 	  mpfr_t m, ms, mc;
12090 
12091 	  mpfr_inits2 (prec, m, ms, mc, NULL);
12092 	  mpfr_from_real (m, ra, GMP_RNDN);
12093 	  mpfr_clear_flags ();
12094 	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
12095 	  result_s = do_mpfr_ckconv (ms, type, inexact);
12096 	  result_c = do_mpfr_ckconv (mc, type, inexact);
12097 	  mpfr_clears (m, ms, mc, NULL);
12098 	  if (result_s && result_c)
12099 	    {
12100 	      /* If we are to return in a complex value do so.  */
12101 	      if (!arg_sinp && !arg_cosp)
12102 		return build_complex (build_complex_type (type),
12103 				      result_c, result_s);
12104 
12105 	      /* Dereference the sin/cos pointer arguments.  */
12106 	      arg_sinp = build_fold_indirect_ref (arg_sinp);
12107 	      arg_cosp = build_fold_indirect_ref (arg_cosp);
12108 	      /* Proceed if valid pointer type were passed in.  */
12109 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12110 		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12111 	        {
12112 		  /* Set the values. */
12113 		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12114 		      			  result_s);
12115 		  TREE_SIDE_EFFECTS (result_s) = 1;
12116 		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12117 		      			  result_c);
12118 		  TREE_SIDE_EFFECTS (result_c) = 1;
12119 		  /* Combine the assignments into a compound expr.  */
12120 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12121 						    result_s, result_c));
12122 		}
12123 	    }
12124 	}
12125     }
12126   return result;
12127 }
12128 
12129 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12130    two-argument mpfr order N Bessel function FUNC on them and return
12131    the resulting value as a tree with type TYPE.  The mpfr precision
12132    is set to the precision of TYPE.  We assume that function FUNC
12133    returns zero if the result could be calculated exactly within the
12134    requested precision.  */
12135 static tree
12136 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12137 		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12138 		  const REAL_VALUE_TYPE *min, bool inclusive)
12139 {
12140   tree result = NULL_TREE;
12141 
12142   STRIP_NOPS (arg1);
12143   STRIP_NOPS (arg2);
12144 
12145   /* To proceed, MPFR must exactly represent the target floating point
12146      format, which only happens when the target base equals two.  */
12147   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12148       && tree_fits_shwi_p (arg1)
12149       && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12150     {
12151       const HOST_WIDE_INT n = tree_to_shwi (arg1);
12152       const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12153 
12154       if (n == (long)n
12155 	  && real_isfinite (ra)
12156 	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12157         {
12158 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12159 	  const int prec = fmt->p;
12160 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12161 	  int inexact;
12162 	  mpfr_t m;
12163 
12164 	  mpfr_init2 (m, prec);
12165 	  mpfr_from_real (m, ra, GMP_RNDN);
12166 	  mpfr_clear_flags ();
12167 	  inexact = func (m, n, m, rnd);
12168 	  result = do_mpfr_ckconv (m, type, inexact);
12169 	  mpfr_clear (m);
12170 	}
12171     }
12172 
12173   return result;
12174 }
12175 
12176 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12177    the pointer *(ARG_QUO) and return the result.  The type is taken
12178    from the type of ARG0 and is used for setting the precision of the
12179    calculation and results.  */
12180 
12181 static tree
12182 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12183 {
12184   tree const type = TREE_TYPE (arg0);
12185   tree result = NULL_TREE;
12186 
12187   STRIP_NOPS (arg0);
12188   STRIP_NOPS (arg1);
12189 
12190   /* To proceed, MPFR must exactly represent the target floating point
12191      format, which only happens when the target base equals two.  */
12192   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12193       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12194       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12195     {
12196       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12197       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12198 
12199       if (real_isfinite (ra0) && real_isfinite (ra1))
12200         {
12201 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12202 	  const int prec = fmt->p;
12203 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12204 	  tree result_rem;
12205 	  long integer_quo;
12206 	  mpfr_t m0, m1;
12207 
12208 	  mpfr_inits2 (prec, m0, m1, NULL);
12209 	  mpfr_from_real (m0, ra0, GMP_RNDN);
12210 	  mpfr_from_real (m1, ra1, GMP_RNDN);
12211 	  mpfr_clear_flags ();
12212 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12213 	  /* Remquo is independent of the rounding mode, so pass
12214 	     inexact=0 to do_mpfr_ckconv().  */
12215 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12216 	  mpfr_clears (m0, m1, NULL);
12217 	  if (result_rem)
12218 	    {
12219 	      /* MPFR calculates quo in the host's long so it may
12220 		 return more bits in quo than the target int can hold
12221 		 if sizeof(host long) > sizeof(target int).  This can
12222 		 happen even for native compilers in LP64 mode.  In
12223 		 these cases, modulo the quo value with the largest
12224 		 number that the target int can hold while leaving one
12225 		 bit for the sign.  */
12226 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12227 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12228 
12229 	      /* Dereference the quo pointer argument.  */
12230 	      arg_quo = build_fold_indirect_ref (arg_quo);
12231 	      /* Proceed iff a valid pointer type was passed in.  */
12232 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12233 	        {
12234 		  /* Set the value. */
12235 		  tree result_quo
12236 		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12237 				   build_int_cst (TREE_TYPE (arg_quo),
12238 						  integer_quo));
12239 		  TREE_SIDE_EFFECTS (result_quo) = 1;
12240 		  /* Combine the quo assignment with the rem.  */
12241 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12242 						    result_quo, result_rem));
12243 		}
12244 	    }
12245 	}
12246     }
12247   return result;
12248 }
12249 
12250 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12251    resulting value as a tree with type TYPE.  The mpfr precision is
12252    set to the precision of TYPE.  We assume that this mpfr function
12253    returns zero if the result could be calculated exactly within the
12254    requested precision.  In addition, the integer pointer represented
12255    by ARG_SG will be dereferenced and set to the appropriate signgam
12256    (-1,1) value.  */
12257 
12258 static tree
12259 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12260 {
12261   tree result = NULL_TREE;
12262 
12263   STRIP_NOPS (arg);
12264 
12265   /* To proceed, MPFR must exactly represent the target floating point
12266      format, which only happens when the target base equals two.  Also
12267      verify ARG is a constant and that ARG_SG is an int pointer.  */
12268   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12269       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12270       && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12271       && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12272     {
12273       const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12274 
12275       /* In addition to NaN and Inf, the argument cannot be zero or a
12276 	 negative integer.  */
12277       if (real_isfinite (ra)
12278 	  && ra->cl != rvc_zero
12279 	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12280         {
12281 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12282 	  const int prec = fmt->p;
12283 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12284 	  int inexact, sg;
12285 	  mpfr_t m;
12286 	  tree result_lg;
12287 
12288 	  mpfr_init2 (m, prec);
12289 	  mpfr_from_real (m, ra, GMP_RNDN);
12290 	  mpfr_clear_flags ();
12291 	  inexact = mpfr_lgamma (m, &sg, m, rnd);
12292 	  result_lg = do_mpfr_ckconv (m, type, inexact);
12293 	  mpfr_clear (m);
12294 	  if (result_lg)
12295 	    {
12296 	      tree result_sg;
12297 
12298 	      /* Dereference the arg_sg pointer argument.  */
12299 	      arg_sg = build_fold_indirect_ref (arg_sg);
12300 	      /* Assign the signgam value into *arg_sg. */
12301 	      result_sg = fold_build2 (MODIFY_EXPR,
12302 				       TREE_TYPE (arg_sg), arg_sg,
12303 				       build_int_cst (TREE_TYPE (arg_sg), sg));
12304 	      TREE_SIDE_EFFECTS (result_sg) = 1;
12305 	      /* Combine the signgam assignment with the lgamma result.  */
12306 	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12307 						result_sg, result_lg));
12308 	    }
12309 	}
12310     }
12311 
12312   return result;
12313 }
12314 
12315 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12316    function FUNC on it and return the resulting value as a tree with
12317    type TYPE.  The mpfr precision is set to the precision of TYPE.  We
12318    assume that function FUNC returns zero if the result could be
12319    calculated exactly within the requested precision.  */
12320 
12321 static tree
12322 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12323 {
12324   tree result = NULL_TREE;
12325 
12326   STRIP_NOPS (arg);
12327 
12328   /* To proceed, MPFR must exactly represent the target floating point
12329      format, which only happens when the target base equals two.  */
12330   if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12331       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12332       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12333     {
12334       const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12335       const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12336 
12337       if (real_isfinite (re) && real_isfinite (im))
12338         {
12339 	  const struct real_format *const fmt =
12340 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12341 	  const int prec = fmt->p;
12342 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12343 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12344 	  int inexact;
12345 	  mpc_t m;
12346 
12347 	  mpc_init2 (m, prec);
12348 	  mpfr_from_real (mpc_realref (m), re, rnd);
12349 	  mpfr_from_real (mpc_imagref (m), im, rnd);
12350 	  mpfr_clear_flags ();
12351 	  inexact = func (m, m, crnd);
12352 	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12353 	  mpc_clear (m);
12354 	}
12355     }
12356 
12357   return result;
12358 }
12359 
12360 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12361    mpc function FUNC on it and return the resulting value as a tree
12362    with type TYPE.  The mpfr precision is set to the precision of
12363    TYPE.  We assume that function FUNC returns zero if the result
12364    could be calculated exactly within the requested precision.  If
12365    DO_NONFINITE is true, then fold expressions containing Inf or NaN
12366    in the arguments and/or results.  */
12367 
12368 tree
12369 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12370 	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12371 {
12372   tree result = NULL_TREE;
12373 
12374   STRIP_NOPS (arg0);
12375   STRIP_NOPS (arg1);
12376 
12377   /* To proceed, MPFR must exactly represent the target floating point
12378      format, which only happens when the target base equals two.  */
12379   if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12380       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12381       && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12382       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12383       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12384     {
12385       const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12386       const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12387       const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12388       const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12389 
12390       if (do_nonfinite
12391 	  || (real_isfinite (re0) && real_isfinite (im0)
12392 	      && real_isfinite (re1) && real_isfinite (im1)))
12393         {
12394 	  const struct real_format *const fmt =
12395 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12396 	  const int prec = fmt->p;
12397 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12398 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12399 	  int inexact;
12400 	  mpc_t m0, m1;
12401 
12402 	  mpc_init2 (m0, prec);
12403 	  mpc_init2 (m1, prec);
12404 	  mpfr_from_real (mpc_realref (m0), re0, rnd);
12405 	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
12406 	  mpfr_from_real (mpc_realref (m1), re1, rnd);
12407 	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
12408 	  mpfr_clear_flags ();
12409 	  inexact = func (m0, m0, m1, crnd);
12410 	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12411 	  mpc_clear (m0);
12412 	  mpc_clear (m1);
12413 	}
12414     }
12415 
12416   return result;
12417 }
12418 
12419 /* A wrapper function for builtin folding that prevents warnings for
12420    "statement without effect" and the like, caused by removing the
12421    call node earlier than the warning is generated.  */
12422 
12423 tree
12424 fold_call_stmt (gcall *stmt, bool ignore)
12425 {
12426   tree ret = NULL_TREE;
12427   tree fndecl = gimple_call_fndecl (stmt);
12428   location_t loc = gimple_location (stmt);
12429   if (fndecl
12430       && TREE_CODE (fndecl) == FUNCTION_DECL
12431       && DECL_BUILT_IN (fndecl)
12432       && !gimple_call_va_arg_pack_p (stmt))
12433     {
12434       int nargs = gimple_call_num_args (stmt);
12435       tree *args = (nargs > 0
12436 		    ? gimple_call_arg_ptr (stmt, 0)
12437 		    : &error_mark_node);
12438 
12439       if (avoid_folding_inline_builtin (fndecl))
12440 	return NULL_TREE;
12441       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12442         {
12443 	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
12444         }
12445       else
12446 	{
12447 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12448 	  if (ret)
12449 	    {
12450 	      /* Propagate location information from original call to
12451 		 expansion of builtin.  Otherwise things like
12452 		 maybe_emit_chk_warning, that operate on the expansion
12453 		 of a builtin, will use the wrong location information.  */
12454 	      if (gimple_has_location (stmt))
12455                 {
12456 		  tree realret = ret;
12457 		  if (TREE_CODE (ret) == NOP_EXPR)
12458 		    realret = TREE_OPERAND (ret, 0);
12459 		  if (CAN_HAVE_LOCATION_P (realret)
12460 		      && !EXPR_HAS_LOCATION (realret))
12461 		    SET_EXPR_LOCATION (realret, loc);
12462                   return realret;
12463                 }
12464 	      return ret;
12465 	    }
12466 	}
12467     }
12468   return NULL_TREE;
12469 }
12470 
12471 /* Look up the function in builtin_decl that corresponds to DECL
12472    and set ASMSPEC as its user assembler name.  DECL must be a
12473    function decl that declares a builtin.  */
12474 
12475 void
12476 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12477 {
12478   tree builtin;
12479   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12480 	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12481 	      && asmspec != 0);
12482 
12483   builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12484   set_user_assembler_name (builtin, asmspec);
12485   switch (DECL_FUNCTION_CODE (decl))
12486     {
12487     case BUILT_IN_MEMCPY:
12488       init_block_move_fn (asmspec);
12489       memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12490       break;
12491     case BUILT_IN_MEMSET:
12492       init_block_clear_fn (asmspec);
12493       memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12494       break;
12495     case BUILT_IN_MEMMOVE:
12496       memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12497       break;
12498     case BUILT_IN_MEMCMP:
12499       memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12500       break;
12501     case BUILT_IN_ABORT:
12502       abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12503       break;
12504     case BUILT_IN_FFS:
12505       if (INT_TYPE_SIZE < BITS_PER_WORD)
12506 	{
12507 	  set_user_assembler_libfunc ("ffs", asmspec);
12508 	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12509 						       MODE_INT, 0), "ffs");
12510 	}
12511       break;
12512     default:
12513       break;
12514     }
12515 }
12516 
12517 /* Return true if DECL is a builtin that expands to a constant or similarly
12518    simple code.  */
12519 bool
12520 is_simple_builtin (tree decl)
12521 {
12522   if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12523     switch (DECL_FUNCTION_CODE (decl))
12524       {
12525 	/* Builtins that expand to constants.  */
12526       case BUILT_IN_CONSTANT_P:
12527       case BUILT_IN_EXPECT:
12528       case BUILT_IN_OBJECT_SIZE:
12529       case BUILT_IN_UNREACHABLE:
12530 	/* Simple register moves or loads from stack.  */
12531       case BUILT_IN_ASSUME_ALIGNED:
12532       case BUILT_IN_RETURN_ADDRESS:
12533       case BUILT_IN_EXTRACT_RETURN_ADDR:
12534       case BUILT_IN_FROB_RETURN_ADDR:
12535       case BUILT_IN_RETURN:
12536       case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12537       case BUILT_IN_FRAME_ADDRESS:
12538       case BUILT_IN_VA_END:
12539       case BUILT_IN_STACK_SAVE:
12540       case BUILT_IN_STACK_RESTORE:
12541 	/* Exception state returns or moves registers around.  */
12542       case BUILT_IN_EH_FILTER:
12543       case BUILT_IN_EH_POINTER:
12544       case BUILT_IN_EH_COPY_VALUES:
12545 	return true;
12546 
12547       default:
12548 	return false;
12549       }
12550 
12551   return false;
12552 }
12553 
12554 /* Return true if DECL is a builtin that is not expensive, i.e., they are
12555    most probably expanded inline into reasonably simple code.  This is a
12556    superset of is_simple_builtin.  */
12557 bool
12558 is_inexpensive_builtin (tree decl)
12559 {
12560   if (!decl)
12561     return false;
12562   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12563     return true;
12564   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12565     switch (DECL_FUNCTION_CODE (decl))
12566       {
12567       case BUILT_IN_ABS:
12568       case BUILT_IN_ALLOCA:
12569       case BUILT_IN_ALLOCA_WITH_ALIGN:
12570       case BUILT_IN_BSWAP16:
12571       case BUILT_IN_BSWAP32:
12572       case BUILT_IN_BSWAP64:
12573       case BUILT_IN_CLZ:
12574       case BUILT_IN_CLZIMAX:
12575       case BUILT_IN_CLZL:
12576       case BUILT_IN_CLZLL:
12577       case BUILT_IN_CTZ:
12578       case BUILT_IN_CTZIMAX:
12579       case BUILT_IN_CTZL:
12580       case BUILT_IN_CTZLL:
12581       case BUILT_IN_FFS:
12582       case BUILT_IN_FFSIMAX:
12583       case BUILT_IN_FFSL:
12584       case BUILT_IN_FFSLL:
12585       case BUILT_IN_IMAXABS:
12586       case BUILT_IN_FINITE:
12587       case BUILT_IN_FINITEF:
12588       case BUILT_IN_FINITEL:
12589       case BUILT_IN_FINITED32:
12590       case BUILT_IN_FINITED64:
12591       case BUILT_IN_FINITED128:
12592       case BUILT_IN_FPCLASSIFY:
12593       case BUILT_IN_ISFINITE:
12594       case BUILT_IN_ISINF_SIGN:
12595       case BUILT_IN_ISINF:
12596       case BUILT_IN_ISINFF:
12597       case BUILT_IN_ISINFL:
12598       case BUILT_IN_ISINFD32:
12599       case BUILT_IN_ISINFD64:
12600       case BUILT_IN_ISINFD128:
12601       case BUILT_IN_ISNAN:
12602       case BUILT_IN_ISNANF:
12603       case BUILT_IN_ISNANL:
12604       case BUILT_IN_ISNAND32:
12605       case BUILT_IN_ISNAND64:
12606       case BUILT_IN_ISNAND128:
12607       case BUILT_IN_ISNORMAL:
12608       case BUILT_IN_ISGREATER:
12609       case BUILT_IN_ISGREATEREQUAL:
12610       case BUILT_IN_ISLESS:
12611       case BUILT_IN_ISLESSEQUAL:
12612       case BUILT_IN_ISLESSGREATER:
12613       case BUILT_IN_ISUNORDERED:
12614       case BUILT_IN_VA_ARG_PACK:
12615       case BUILT_IN_VA_ARG_PACK_LEN:
12616       case BUILT_IN_VA_COPY:
12617       case BUILT_IN_TRAP:
12618       case BUILT_IN_SAVEREGS:
12619       case BUILT_IN_POPCOUNTL:
12620       case BUILT_IN_POPCOUNTLL:
12621       case BUILT_IN_POPCOUNTIMAX:
12622       case BUILT_IN_POPCOUNT:
12623       case BUILT_IN_PARITYL:
12624       case BUILT_IN_PARITYLL:
12625       case BUILT_IN_PARITYIMAX:
12626       case BUILT_IN_PARITY:
12627       case BUILT_IN_LABS:
12628       case BUILT_IN_LLABS:
12629       case BUILT_IN_PREFETCH:
12630       case BUILT_IN_ACC_ON_DEVICE:
12631 	return true;
12632 
12633       default:
12634 	return is_simple_builtin (decl);
12635       }
12636 
12637   return false;
12638 }
12639