xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/function.c (revision 929c70cba110089af68ff46da658d45500ef61a1)
1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file handles the generation of rtl code from tree structure
21    at the level of the function as a whole.
22    It creates the rtl expressions for parameters and auto variables
23    and has full responsibility for allocating stack slots.
24 
25    `expand_function_start' is called at the beginning of a function,
26    before the function body is parsed, and `expand_function_end' is
27    called after parsing the body.
28 
29    Call `assign_stack_local' to allocate a stack slot for a local variable.
30    This is usually done during the RTL generation for the function body,
31    but it can also be done in the reload pass when a pseudo-register does
32    not get a hard register.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-chkp.h"
77 #include "rtl-chkp.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 
81 /* So we can assign to cfun in this file.  */
82 #undef cfun
83 
84 #ifndef STACK_ALIGNMENT_NEEDED
85 #define STACK_ALIGNMENT_NEEDED 1
86 #endif
87 
88 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
89 
90 /* Round a value down to the largest multiple of the required alignment
91    that does not exceed it.  Avoid using division in case the value is
92    negative.  Assume the alignment is a power of two.  */
93 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
94 
95 /* Similar, but round up to the smallest multiple of the alignment that
96    is not less than the value.  */
97 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
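
/* Worked example (illustration only, not used by the code below): with an
   8-byte alignment the mask ~(8 - 1) clears the low three bits, so

     FLOOR_ROUND (13, 8)  ==  13       & ~7 ==   8
     FLOOR_ROUND (-13, 8) == -13       & ~7 == -16
     CEIL_ROUND (13, 8)   == (13 + 7)  & ~7 ==  16
     CEIL_ROUND (-13, 8)  == (-13 + 7) & ~7 ==  -8

   Unlike truncating division, the masking rounds toward minus infinity for
   FLOOR_ROUND and toward plus infinity for CEIL_ROUND even when the value
   is negative.  */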
98 
99 /* Nonzero once virtual register instantiation has been done.
100    assign_stack_local uses frame_pointer_rtx when this is nonzero.
101    calls.c:emit_library_call_value_1 uses it to set up
102    post-instantiation libcalls.  */
103 int virtuals_instantiated;
104 
105 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
106 static GTY(()) int funcdef_no;
107 
108 /* This variable holds a pointer to the function used to create
109    target-specific, per-function data structures.  */
110 struct machine_function * (*init_machine_status) (void);
111 
112 /* The currently compiled function.  */
113 struct function *cfun = 0;
114 
115 /* These hashes record the prologue and epilogue insns.  */
116 
117 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
118 {
119   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
120   static bool equal (rtx a, rtx b) { return a == b; }
121 };
122 
123 static GTY((cache))
124   hash_table<insn_cache_hasher> *prologue_insn_hash;
125 static GTY((cache))
126   hash_table<insn_cache_hasher> *epilogue_insn_hash;
127 
128 
129 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
130 vec<tree, va_gc> *types_used_by_cur_var_decl;
131 
132 /* Forward declarations.  */
133 
134 static struct temp_slot *find_temp_slot_from_address (rtx);
135 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
136 static void pad_below (struct args_size *, machine_mode, tree);
137 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
138 static int all_blocks (tree, tree *);
139 static tree *get_block_vector (tree, int *);
140 extern tree debug_find_var_in_block_tree (tree, tree);
141 /* We always define `record_insns' even if it's not used so that we
142    can always export `prologue_epilogue_contains'.  */
143 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
144      ATTRIBUTE_UNUSED;
145 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
146 static void prepare_function_start (void);
147 static void do_clobber_return_reg (rtx, void *);
148 static void do_use_return_reg (rtx, void *);
149 
150 
151 /* Stack of nested functions.  */
152 /* Keep track of the cfun stack.  */
153 
154 static vec<function *> function_context_stack;
155 
156 /* Save the current context for compilation of a nested function.
157    This is called from language-specific code.  */
158 
159 void
160 push_function_context (void)
161 {
162   if (cfun == 0)
163     allocate_struct_function (NULL, false);
164 
165   function_context_stack.safe_push (cfun);
166   set_cfun (NULL);
167 }
168 
169 /* Restore the last saved context, at the end of a nested function.
170    This function is called from language-specific code.  */
171 
172 void
173 pop_function_context (void)
174 {
175   struct function *p = function_context_stack.pop ();
176   set_cfun (p);
177   current_function_decl = p->decl;
178 
179   /* Reset variables that have known state during rtx generation.  */
180   virtuals_instantiated = 0;
181   generating_concat_p = 1;
182 }
183 
184 /* Clear out all parts of the state in F that can safely be discarded
185    after the function has been parsed, but not compiled, to let
186    garbage collection reclaim the memory.  */
187 
188 void
189 free_after_parsing (struct function *f)
190 {
191   f->language = 0;
192 }
193 
194 /* Clear out all parts of the state in F that can safely be discarded
195    after the function has been compiled, to let garbage collection
196    reclaim the memory.  */
197 
198 void
199 free_after_compilation (struct function *f)
200 {
201   prologue_insn_hash = NULL;
202   epilogue_insn_hash = NULL;
203 
204   free (crtl->emit.regno_pointer_align);
205 
206   memset (crtl, 0, sizeof (struct rtl_data));
207   f->eh = NULL;
208   f->machine = NULL;
209   f->cfg = NULL;
210   f->curr_properties &= ~PROP_cfg;
211 
212   regno_reg_rtx = NULL;
213 }
214 
215 /* Return size needed for stack frame based on slots so far allocated.
216    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
217    the caller may have to do that.  */
218 
219 HOST_WIDE_INT
220 get_frame_size (void)
221 {
222   if (FRAME_GROWS_DOWNWARD)
223     return -frame_offset;
224   else
225     return frame_offset;
226 }
227 
228 /* Issue an error message and return TRUE if frame OFFSET overflows in
229    the signed target pointer arithmetic for function FUNC.  Otherwise
230    return FALSE.  */
231 
232 bool
233 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
234 {
235   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
236 
237   if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
238 	       /* Leave room for the fixed part of the frame.  */
239 	       - 64 * UNITS_PER_WORD)
240     {
241       error_at (DECL_SOURCE_LOCATION (func),
242 		"total size of local objects too large");
243       return TRUE;
244     }
245 
246   return FALSE;
247 }
248 
249 /* Return the minimum spill slot alignment for a register of mode MODE.  */
250 
251 unsigned int
252 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
253 {
254   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
255 }
256 
257 /* Return stack slot alignment in bits for TYPE and MODE.  */
258 
259 static unsigned int
260 get_stack_local_alignment (tree type, machine_mode mode)
261 {
262   unsigned int alignment;
263 
264   if (mode == BLKmode)
265     alignment = BIGGEST_ALIGNMENT;
266   else
267     alignment = GET_MODE_ALIGNMENT (mode);
268 
269    /* Allow the front end to (possibly) increase the alignment of this
270      stack slot.  */
271   if (! type)
272     type = lang_hooks.types.type_for_mode (mode, 0);
273 
274   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
275 }
276 
277 /* Determine whether it is possible to fit a stack slot of size SIZE and
278    alignment ALIGNMENT into an area in the stack frame that starts at
279    frame offset START and has a length of LENGTH.  If so, store the frame
280    offset to be used for the stack slot in *POFFSET and return true;
281    return false otherwise.  This function will extend the frame size when
282    given a start/length pair that lies at the end of the frame.  */
283 
284 static bool
285 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
286 		     HOST_WIDE_INT size, unsigned int alignment,
287 		     HOST_WIDE_INT *poffset)
288 {
289   HOST_WIDE_INT this_frame_offset;
290   int frame_off, frame_alignment, frame_phase;
291 
292   /* Calculate how many bytes the start of local variables is off from
293      stack alignment.  */
294   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
295   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
296   frame_phase = frame_off ? frame_alignment - frame_off : 0;
297 
298   /* Round the frame offset to the specified alignment.  */
299 
300   /*  We must be careful here, since FRAME_OFFSET might be negative and
301       division with a negative dividend isn't as well defined as we might
302       like.  So we instead assume that ALIGNMENT is a power of two and
303       use logical operations which are unambiguous.  */
304   if (FRAME_GROWS_DOWNWARD)
305     this_frame_offset
306       = (FLOOR_ROUND (start + length - size - frame_phase,
307 		      (unsigned HOST_WIDE_INT) alignment)
308 	 + frame_phase);
309   else
310     this_frame_offset
311       = (CEIL_ROUND (start - frame_phase,
312 		     (unsigned HOST_WIDE_INT) alignment)
313 	 + frame_phase);
314 
315   /* See if it fits.  If this space is at the edge of the frame,
316      consider extending the frame to make it fit.  Our caller relies on
317      this when allocating a new slot.  */
318   if (frame_offset == start && this_frame_offset < frame_offset)
319     frame_offset = this_frame_offset;
320   else if (this_frame_offset < start)
321     return false;
322   else if (start + length == frame_offset
323 	   && this_frame_offset + size > start + length)
324     frame_offset = this_frame_offset + size;
325   else if (this_frame_offset + size > start + length)
326     return false;
327 
328   *poffset = this_frame_offset;
329   return true;
330 }
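
/* Illustrative numbers for the code above (a sketch, assuming
   FRAME_GROWS_DOWNWARD, a frame_phase of 0 and a free area that is not at
   the current frame edge): for START == -16, LENGTH == 16, SIZE == 8 and
   ALIGNMENT == 8 we get

     this_frame_offset = FLOOR_ROUND (-16 + 16 - 8, 8) = -8

   which passes every fit check, so *POFFSET becomes -8 and the caller
   (assign_stack_local_1) records the leftover bytes [-16, -8) as a new
   frame_space for later requests.  */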
331 
332 /* Create a new frame_space structure describing free space in the stack
333    frame beginning at START and ending at END, and chain it into the
334    function's frame_space_list.  */
335 
336 static void
337 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
338 {
339   struct frame_space *space = ggc_alloc<frame_space> ();
340   space->next = crtl->frame_space_list;
341   crtl->frame_space_list = space;
342   space->start = start;
343   space->length = end - start;
344 }
345 
346 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
347    with machine mode MODE.
348 
349    ALIGN controls the amount of alignment for the address of the slot:
350    0 means according to MODE,
351    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
352    -2 means use BITS_PER_UNIT,
353    positive specifies alignment boundary in bits.
354 
355    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
356    alignment and ASLK_RECORD_PAD bit set if we should remember
357    extra space we allocated for alignment purposes.  When we are
358    called from assign_stack_temp_for_type, it is not set so we don't
359    track the same stack slot in two independent lists.
360 
361    We do not round to stack_boundary here.  */
362 
363 rtx
364 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
365 		      int align, int kind)
366 {
367   rtx x, addr;
368   int bigend_correction = 0;
369   HOST_WIDE_INT slot_offset = 0, old_frame_offset;
370   unsigned int alignment, alignment_in_bits;
371 
372   if (align == 0)
373     {
374       alignment = get_stack_local_alignment (NULL, mode);
375       alignment /= BITS_PER_UNIT;
376     }
377   else if (align == -1)
378     {
379       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
380       size = CEIL_ROUND (size, alignment);
381     }
382   else if (align == -2)
383     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
384   else
385     alignment = align / BITS_PER_UNIT;
386 
387   alignment_in_bits = alignment * BITS_PER_UNIT;
388 
389   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
390   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
391     {
392       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
393       alignment = alignment_in_bits / BITS_PER_UNIT;
394     }
395 
396   if (SUPPORTS_STACK_ALIGNMENT)
397     {
398       if (crtl->stack_alignment_estimated < alignment_in_bits)
399 	{
400           if (!crtl->stack_realign_processed)
401 	    crtl->stack_alignment_estimated = alignment_in_bits;
402           else
403 	    {
404 	      /* If stack is realigned and stack alignment value
405 		 hasn't been finalized, it is OK not to increase
406 		 stack_alignment_estimated.  The bigger alignment
407 		 requirement is recorded in stack_alignment_needed
408 		 below.  */
409 	      gcc_assert (!crtl->stack_realign_finalized);
410 	      if (!crtl->stack_realign_needed)
411 		{
412 		  /* It is OK to reduce the alignment as long as the
413 		     requested size is 0 or the estimated stack
414 		     alignment >= mode alignment.  */
415 		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
416 		              || size == 0
417 			      || (crtl->stack_alignment_estimated
418 				  >= GET_MODE_ALIGNMENT (mode)));
419 		  alignment_in_bits = crtl->stack_alignment_estimated;
420 		  alignment = alignment_in_bits / BITS_PER_UNIT;
421 		}
422 	    }
423 	}
424     }
425 
426   if (crtl->stack_alignment_needed < alignment_in_bits)
427     crtl->stack_alignment_needed = alignment_in_bits;
428   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
429     crtl->max_used_stack_slot_alignment = alignment_in_bits;
430 
431   if (mode != BLKmode || size != 0)
432     {
433       if (kind & ASLK_RECORD_PAD)
434 	{
435 	  struct frame_space **psp;
436 
437 	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
438 	    {
439 	      struct frame_space *space = *psp;
440 	      if (!try_fit_stack_local (space->start, space->length, size,
441 					alignment, &slot_offset))
442 		continue;
443 	      *psp = space->next;
444 	      if (slot_offset > space->start)
445 		add_frame_space (space->start, slot_offset);
446 	      if (slot_offset + size < space->start + space->length)
447 		add_frame_space (slot_offset + size,
448 				 space->start + space->length);
449 	      goto found_space;
450 	    }
451 	}
452     }
453   else if (!STACK_ALIGNMENT_NEEDED)
454     {
455       slot_offset = frame_offset;
456       goto found_space;
457     }
458 
459   old_frame_offset = frame_offset;
460 
461   if (FRAME_GROWS_DOWNWARD)
462     {
463       frame_offset -= size;
464       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
465 
466       if (kind & ASLK_RECORD_PAD)
467 	{
468 	  if (slot_offset > frame_offset)
469 	    add_frame_space (frame_offset, slot_offset);
470 	  if (slot_offset + size < old_frame_offset)
471 	    add_frame_space (slot_offset + size, old_frame_offset);
472 	}
473     }
474   else
475     {
476       frame_offset += size;
477       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
478 
479       if (kind & ASLK_RECORD_PAD)
480 	{
481 	  if (slot_offset > old_frame_offset)
482 	    add_frame_space (old_frame_offset, slot_offset);
483 	  if (slot_offset + size < frame_offset)
484 	    add_frame_space (slot_offset + size, frame_offset);
485 	}
486     }
487 
488  found_space:
489   /* On a big-endian machine, if we are allocating more space than we will use,
490      use the least significant bytes of those that are allocated.  */
491   if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
492     bigend_correction = size - GET_MODE_SIZE (mode);
493 
494   /* If we have already instantiated virtual registers, return the actual
495      address relative to the frame pointer.  */
496   if (virtuals_instantiated)
497     addr = plus_constant (Pmode, frame_pointer_rtx,
498 			  trunc_int_for_mode
499 			  (slot_offset + bigend_correction
500 			   + STARTING_FRAME_OFFSET, Pmode));
501   else
502     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
503 			  trunc_int_for_mode
504 			  (slot_offset + bigend_correction,
505 			   Pmode));
506 
507   x = gen_rtx_MEM (mode, addr);
508   set_mem_align (x, alignment_in_bits);
509   MEM_NOTRAP_P (x) = 1;
510 
511   vec_safe_push (stack_slot_list, x);
512 
513   if (frame_offset_overflow (frame_offset, current_function_decl))
514     frame_offset = 0;
515 
516   return x;
517 }
518 
519 /* Wrap up assign_stack_local_1, passing ASLK_RECORD_PAD for KIND.  */
520 
521 rtx
522 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
523 {
524   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
525 }
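
/* A minimal usage sketch (illustration only; SOME_PSEUDO is hypothetical):
   a caller that needs a word-sized scratch slot in the frame might write

     rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);
     emit_move_insn (slot, some_pseudo);

   ALIGN == 0 requests the natural alignment of word_mode, and the returned
   MEM is addressed off virtual_stack_vars_rtx until the virtual registers
   are instantiated.  */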
526 
527 /* In order to evaluate some expressions, such as function calls returning
528    structures in memory, we need to temporarily allocate stack locations.
529    We record each allocated temporary in the following structure.
530 
531    Associated with each temporary slot is a nesting level.  When we pop up
532    one level, all temporaries associated with the previous level are freed.
533    Normally, all temporaries are freed after the execution of the statement
534    in which they were created.  However, if we are inside a ({...}) grouping,
535    the result may be in a temporary and hence must be preserved.  If the
536    result could be in a temporary, we preserve it if we can determine which
537    one it is in.  If we cannot determine which temporary may contain the
538    result, all temporaries are preserved.  A temporary is preserved by
539    pretending it was allocated at the previous nesting level.  */
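
/* For example (a sketch of the situation above; struct s and make_s are
   hypothetical), in the GNU C statement expression

     int x = ({ struct s tmp = make_s (); tmp.field; });

   the call may return its aggregate result in a stack temporary, and that
   temporary must outlive the inner statement; preserving it at the
   previous nesting level is what keeps it from being freed too early.  */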
540 
541 struct GTY(()) temp_slot {
542   /* Points to next temporary slot.  */
543   struct temp_slot *next;
544   /* Points to previous temporary slot.  */
545   struct temp_slot *prev;
546   /* The rtx used to reference the slot.  */
547   rtx slot;
548   /* The size, in units, of the slot.  */
549   HOST_WIDE_INT size;
550   /* The type of the object in the slot, or zero if it doesn't correspond
551      to a type.  We use this to determine whether a slot can be reused.
552      It can be reused if objects of the type of the new slot will always
553      conflict with objects of the type of the old slot.  */
554   tree type;
555   /* The alignment (in bits) of the slot.  */
556   unsigned int align;
557   /* Nonzero if this temporary is currently in use.  */
558   char in_use;
559   /* Nesting level at which this slot is being used.  */
560   int level;
561   /* The offset of the slot from the frame_pointer, including extra space
562      for alignment.  This info is for combine_temp_slots.  */
563   HOST_WIDE_INT base_offset;
564   /* The size of the slot, including extra space for alignment.  This
565      info is for combine_temp_slots.  */
566   HOST_WIDE_INT full_size;
567 };
568 
569 /* Entry for the below hash table.  */
570 struct GTY((for_user)) temp_slot_address_entry {
571   hashval_t hash;
572   rtx address;
573   struct temp_slot *temp_slot;
574 };
575 
576 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
577 {
578   static hashval_t hash (temp_slot_address_entry *);
579   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
580 };
581 
582 /* A table of addresses that represent a stack slot.  The table is a mapping
583    from address RTXen to a temp slot.  */
584 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
585 static size_t n_temp_slots_in_use;
586 
587 /* Removes temporary slot TEMP from LIST.  */
588 
589 static void
590 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
591 {
592   if (temp->next)
593     temp->next->prev = temp->prev;
594   if (temp->prev)
595     temp->prev->next = temp->next;
596   else
597     *list = temp->next;
598 
599   temp->prev = temp->next = NULL;
600 }
601 
602 /* Inserts temporary slot TEMP to LIST.  */
603 
604 static void
605 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
606 {
607   temp->next = *list;
608   if (*list)
609     (*list)->prev = temp;
610   temp->prev = NULL;
611   *list = temp;
612 }
613 
614 /* Returns the list of used temp slots at LEVEL.  */
615 
616 static struct temp_slot **
617 temp_slots_at_level (int level)
618 {
619   if (level >= (int) vec_safe_length (used_temp_slots))
620     vec_safe_grow_cleared (used_temp_slots, level + 1);
621 
622   return &(*used_temp_slots)[level];
623 }
624 
625 /* Returns the maximal temporary slot level.  */
626 
627 static int
628 max_slot_level (void)
629 {
630   if (!used_temp_slots)
631     return -1;
632 
633   return used_temp_slots->length () - 1;
634 }
635 
636 /* Moves temporary slot TEMP to LEVEL.  */
637 
638 static void
639 move_slot_to_level (struct temp_slot *temp, int level)
640 {
641   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
642   insert_slot_to_list (temp, temp_slots_at_level (level));
643   temp->level = level;
644 }
645 
646 /* Make temporary slot TEMP available.  */
647 
648 static void
649 make_slot_available (struct temp_slot *temp)
650 {
651   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
652   insert_slot_to_list (temp, &avail_temp_slots);
653   temp->in_use = 0;
654   temp->level = -1;
655   n_temp_slots_in_use--;
656 }
657 
658 /* Compute the hash value for an address -> temp slot mapping.
659    The value is cached on the mapping entry.  */
660 static hashval_t
661 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
662 {
663   int do_not_record = 0;
664   return hash_rtx (t->address, GET_MODE (t->address),
665 		   &do_not_record, NULL, false);
666 }
667 
668 /* Return the hash value for an address -> temp slot mapping.  */
669 hashval_t
670 temp_address_hasher::hash (temp_slot_address_entry *t)
671 {
672   return t->hash;
673 }
674 
675 /* Compare two address -> temp slot mapping entries.  */
676 bool
677 temp_address_hasher::equal (temp_slot_address_entry *t1,
678 			    temp_slot_address_entry *t2)
679 {
680   return exp_equiv_p (t1->address, t2->address, 0, true);
681 }
682 
683 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
684 static void
685 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
686 {
687   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
688   t->address = address;
689   t->temp_slot = temp_slot;
690   t->hash = temp_slot_address_compute_hash (t);
691   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
692 }
693 
694 /* Remove an address -> temp slot mapping entry if the temp slot is
695    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
696 int
697 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
698 {
699   const struct temp_slot_address_entry *t = *slot;
700   if (! t->temp_slot->in_use)
701     temp_slot_address_table->clear_slot (slot);
702   return 1;
703 }
704 
705 /* Remove all mappings of addresses to unused temp slots.  */
706 static void
707 remove_unused_temp_slot_addresses (void)
708 {
709   /* Use quicker clearing if there aren't any active temp slots.  */
710   if (n_temp_slots_in_use)
711     temp_slot_address_table->traverse
712       <void *, remove_unused_temp_slot_addresses_1> (NULL);
713   else
714     temp_slot_address_table->empty ();
715 }
716 
717 /* Find the temp slot corresponding to the object at address X.  */
718 
719 static struct temp_slot *
720 find_temp_slot_from_address (rtx x)
721 {
722   struct temp_slot *p;
723   struct temp_slot_address_entry tmp, *t;
724 
725   /* First try the easy way:
726      See if X exists in the address -> temp slot mapping.  */
727   tmp.address = x;
728   tmp.temp_slot = NULL;
729   tmp.hash = temp_slot_address_compute_hash (&tmp);
730   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
731   if (t)
732     return t->temp_slot;
733 
734   /* If we have a sum involving a register, see if it points to a temp
735      slot.  */
736   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
737       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
738     return p;
739   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
740 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
741     return p;
742 
743   /* Last resort: Address is a virtual stack var address.  */
744   if (GET_CODE (x) == PLUS
745       && XEXP (x, 0) == virtual_stack_vars_rtx
746       && CONST_INT_P (XEXP (x, 1)))
747     {
748       int i;
749       for (i = max_slot_level (); i >= 0; i--)
750 	for (p = *temp_slots_at_level (i); p; p = p->next)
751 	  {
752 	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
753 		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
754 	      return p;
755 	  }
756     }
757 
758   return NULL;
759 }
760 
761 /* Allocate a temporary stack slot and record it for possible later
762    reuse.
763 
764    MODE is the machine mode to be given to the returned rtx.
765 
766    SIZE is the size in units of the space required.  We do no rounding here
767    since assign_stack_local will do any required rounding.
768 
769    TYPE is the type that will be used for the stack slot.  */
770 
771 rtx
772 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
773 			    tree type)
774 {
775   unsigned int align;
776   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
777   rtx slot;
778 
779   /* If SIZE is -1 it means that somebody tried to allocate a temporary
780      of a variable size.  */
781   gcc_assert (size != -1);
782 
783   align = get_stack_local_alignment (type, mode);
784 
785   /* Try to find an available, already-allocated temporary of the proper
786      mode which meets the size and alignment requirements.  Choose the
787      smallest one with the closest alignment.
788 
789      If assign_stack_temp is called outside of the tree->rtl expansion,
790      we cannot reuse the stack slots (that may still refer to
791      VIRTUAL_STACK_VARS_REGNUM).  */
792   if (!virtuals_instantiated)
793     {
794       for (p = avail_temp_slots; p; p = p->next)
795 	{
796 	  if (p->align >= align && p->size >= size
797 	      && GET_MODE (p->slot) == mode
798 	      && objects_must_conflict_p (p->type, type)
799 	      && (best_p == 0 || best_p->size > p->size
800 		  || (best_p->size == p->size && best_p->align > p->align)))
801 	    {
802 	      if (p->align == align && p->size == size)
803 		{
804 		  selected = p;
805 		  cut_slot_from_list (selected, &avail_temp_slots);
806 		  best_p = 0;
807 		  break;
808 		}
809 	      best_p = p;
810 	    }
811 	}
812     }
813 
814   /* Make our best, if any, the one to use.  */
815   if (best_p)
816     {
817       selected = best_p;
818       cut_slot_from_list (selected, &avail_temp_slots);
819 
820       /* If there are enough aligned bytes left over, make them into a new
821 	 temp_slot so that the extra bytes don't get wasted.  Do this only
822 	 for BLKmode slots, so that we can be sure of the alignment.  */
823       if (GET_MODE (best_p->slot) == BLKmode)
824 	{
825 	  int alignment = best_p->align / BITS_PER_UNIT;
826 	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
827 
828 	  if (best_p->size - rounded_size >= alignment)
829 	    {
830 	      p = ggc_alloc<temp_slot> ();
831 	      p->in_use = 0;
832 	      p->size = best_p->size - rounded_size;
833 	      p->base_offset = best_p->base_offset + rounded_size;
834 	      p->full_size = best_p->full_size - rounded_size;
835 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
836 	      p->align = best_p->align;
837 	      p->type = best_p->type;
838 	      insert_slot_to_list (p, &avail_temp_slots);
839 
840 	      vec_safe_push (stack_slot_list, p->slot);
841 
842 	      best_p->size = rounded_size;
843 	      best_p->full_size = rounded_size;
844 	    }
845 	}
846     }
847 
848   /* If we still didn't find one, make a new temporary.  */
849   if (selected == 0)
850     {
851       HOST_WIDE_INT frame_offset_old = frame_offset;
852 
853       p = ggc_alloc<temp_slot> ();
854 
855       /* We are passing an explicit alignment request to assign_stack_local.
856 	 One side effect of that is assign_stack_local will not round SIZE
857 	 to ensure the frame offset remains suitably aligned.
858 
859 	 So for requests which depended on the rounding of SIZE, we go ahead
860 	 and round it now.  We also make sure ALIGNMENT is at least
861 	 BIGGEST_ALIGNMENT.  */
862       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
863       p->slot = assign_stack_local_1 (mode,
864 				      (mode == BLKmode
865 				       ? CEIL_ROUND (size,
866 						     (int) align
867 						     / BITS_PER_UNIT)
868 				       : size),
869 				      align, 0);
870 
871       p->align = align;
872 
873       /* The following slot size computation is necessary because we don't
874 	 know the actual size of the temporary slot until assign_stack_local
875 	 has performed all the frame alignment and size rounding for the
876 	 requested temporary.  Note that extra space added for alignment
877 	 can be either above or below this stack slot depending on which
878 	 way the frame grows.  We include the extra space if and only if it
879 	 is above this slot.  */
880       if (FRAME_GROWS_DOWNWARD)
881 	p->size = frame_offset_old - frame_offset;
882       else
883 	p->size = size;
884 
885       /* Now define the fields used by combine_temp_slots.  */
886       if (FRAME_GROWS_DOWNWARD)
887 	{
888 	  p->base_offset = frame_offset;
889 	  p->full_size = frame_offset_old - frame_offset;
890 	}
891       else
892 	{
893 	  p->base_offset = frame_offset_old;
894 	  p->full_size = frame_offset - frame_offset_old;
895 	}
896 
897       selected = p;
898     }
899 
900   p = selected;
901   p->in_use = 1;
902   p->type = type;
903   p->level = temp_slot_level;
904   n_temp_slots_in_use++;
905 
906   pp = temp_slots_at_level (p->level);
907   insert_slot_to_list (p, pp);
908   insert_temp_slot_address (XEXP (p->slot, 0), p);
909 
910   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
911   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
912   vec_safe_push (stack_slot_list, slot);
913 
914   /* If we know the alias set for the memory that will be used, use
915      it.  If there's no TYPE, then we don't know anything about the
916      alias set for the memory.  */
917   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
918   set_mem_align (slot, align);
919 
920   /* If a type is specified, set the relevant flags.  */
921   if (type != 0)
922     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
923   MEM_NOTRAP_P (slot) = 1;
924 
925   return slot;
926 }
927 
928 /* Allocate a temporary stack slot and record it for possible later
929    reuse.  First two arguments are same as in preceding function.  */
930 
931 rtx
932 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
933 {
934   return assign_stack_temp_for_type (mode, size, NULL_TREE);
935 }
936 
937 /* Assign a temporary.
938    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
939    and so that should be used in error messages.  In either case, we
940    allocate a temporary of the given type.
941    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
942    it is 0 if a register is OK.
943    DONT_PROMOTE is 1 if we should not promote values in register
944    to wider modes.  */
945 
946 rtx
947 assign_temp (tree type_or_decl, int memory_required,
948 	     int dont_promote ATTRIBUTE_UNUSED)
949 {
950   tree type, decl;
951   machine_mode mode;
952 #ifdef PROMOTE_MODE
953   int unsignedp;
954 #endif
955 
956   if (DECL_P (type_or_decl))
957     decl = type_or_decl, type = TREE_TYPE (decl);
958   else
959     decl = NULL, type = type_or_decl;
960 
961   mode = TYPE_MODE (type);
962 #ifdef PROMOTE_MODE
963   unsignedp = TYPE_UNSIGNED (type);
964 #endif
965 
966   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
967      end.  See also create_tmp_var for the gimplification-time check.  */
968   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
969 
970   if (mode == BLKmode || memory_required)
971     {
972       HOST_WIDE_INT size = int_size_in_bytes (type);
973       rtx tmp;
974 
975       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
976 	 problems with allocating the stack space.  */
977       if (size == 0)
978 	size = 1;
979 
980       /* Unfortunately, we don't yet know how to allocate variable-sized
981 	 temporaries.  However, sometimes we can find a fixed upper limit on
982 	 the size, so try that instead.  */
983       else if (size == -1)
984 	size = max_int_size_in_bytes (type);
985 
986       /* The size of the temporary may be too large to fit into an integer.  */
987       /* ??? Not sure this should happen except for user silliness, so limit
988 	 this to things that aren't compiler-generated temporaries.  The
989 	 rest of the time we'll die in assign_stack_temp_for_type.  */
990       if (decl && size == -1
991 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
992 	{
993 	  error ("size of variable %q+D is too large", decl);
994 	  size = 1;
995 	}
996 
997       tmp = assign_stack_temp_for_type (mode, size, type);
998       return tmp;
999     }
1000 
1001 #ifdef PROMOTE_MODE
1002   if (! dont_promote)
1003     mode = promote_mode (type, mode, &unsignedp);
1004 #endif
1005 
1006   return gen_reg_rtx (mode);
1007 }
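
/* A short sketch of the two behaviours above (TYPE is a hypothetical
   complete, non-addressable type):

     rtx t1 = assign_temp (type, 1, 0);   (always a stack MEM)
     rtx t2 = assign_temp (type, 0, 0);   (a pseudo REG unless BLKmode)

   For a BLKmode type both calls yield a stack slot; for a scalar type the
   second call returns gen_reg_rtx, possibly in a promoted mode when
   PROMOTE_MODE is defined and DONT_PROMOTE is zero.  */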
1008 
1009 /* Combine temporary stack slots which are adjacent on the stack.
1010 
1011    This allows for better use of already allocated stack space.  This is only
1012    done for BLKmode slots because we can be sure that we won't have alignment
1013    problems in this case.  */
1014 
1015 static void
1016 combine_temp_slots (void)
1017 {
1018   struct temp_slot *p, *q, *next, *next_q;
1019   int num_slots;
1020 
1021   /* We can't combine slots, because the information about which slot
1022      is in which alias set will be lost.  */
1023   if (flag_strict_aliasing)
1024     return;
1025 
1026   /* If there are a lot of temp slots, don't do anything unless
1027      high levels of optimization are enabled.  */
1028   if (! flag_expensive_optimizations)
1029     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1030       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1031 	return;
1032 
1033   for (p = avail_temp_slots; p; p = next)
1034     {
1035       int delete_p = 0;
1036 
1037       next = p->next;
1038 
1039       if (GET_MODE (p->slot) != BLKmode)
1040 	continue;
1041 
1042       for (q = p->next; q; q = next_q)
1043 	{
1044        	  int delete_q = 0;
1045 
1046 	  next_q = q->next;
1047 
1048 	  if (GET_MODE (q->slot) != BLKmode)
1049 	    continue;
1050 
1051 	  if (p->base_offset + p->full_size == q->base_offset)
1052 	    {
1053 	      /* Q comes after P; combine Q into P.  */
1054 	      p->size += q->size;
1055 	      p->full_size += q->full_size;
1056 	      delete_q = 1;
1057 	    }
1058 	  else if (q->base_offset + q->full_size == p->base_offset)
1059 	    {
1060 	      /* P comes after Q; combine P into Q.  */
1061 	      q->size += p->size;
1062 	      q->full_size += p->full_size;
1063 	      delete_p = 1;
1064 	      break;
1065 	    }
1066 	  if (delete_q)
1067 	    cut_slot_from_list (q, &avail_temp_slots);
1068 	}
1069 
1070       /* Either delete P or advance past it.  */
1071       if (delete_p)
1072 	cut_slot_from_list (p, &avail_temp_slots);
1073     }
1074 }
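
/* Illustration of the combining rule above (offsets invented for the
   example): two freed BLKmode slots P with base_offset -32, full_size 16
   and Q with base_offset -16, full_size 16 satisfy

     p->base_offset + p->full_size == q->base_offset

   so Q is merged into P, leaving a single 32-byte slot at offset -32 that
   a later assign_stack_temp_for_type can reuse in full.  */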
1075 
1076 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1077    slot that previously was known by OLD_RTX.  */
1078 
1079 void
1080 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1081 {
1082   struct temp_slot *p;
1083 
1084   if (rtx_equal_p (old_rtx, new_rtx))
1085     return;
1086 
1087   p = find_temp_slot_from_address (old_rtx);
1088 
1089   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1090      NEW_RTX is a register, see if one operand of the PLUS is a
1091      temporary location; if so, NEW_RTX points into it.  Otherwise, if
1092      both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1093      in common between them; if so, try a recursive call on those
1094      values.  */
1095   if (p == 0)
1096     {
1097       if (GET_CODE (old_rtx) != PLUS)
1098 	return;
1099 
1100       if (REG_P (new_rtx))
1101 	{
1102 	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1103 	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1104 	  return;
1105 	}
1106       else if (GET_CODE (new_rtx) != PLUS)
1107 	return;
1108 
1109       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1110 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1111       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1112 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1113       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1114 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1115       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1116 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1117 
1118       return;
1119     }
1120 
1121   /* Otherwise add an alias for the temp's address.  */
1122   insert_temp_slot_address (new_rtx, p);
1123 }
1124 
1125 /* If X could be a reference to a temporary slot, mark that slot as
1126    belonging to the level one higher than the current level.  If X
1127    matched one of our slots, just mark that one.  Otherwise, we can't
1128    easily predict which it is, so upgrade all of them.
1129 
1130    This is called when an ({...}) construct occurs and a statement
1131    returns a value in memory.  */
1132 
1133 void
1134 preserve_temp_slots (rtx x)
1135 {
1136   struct temp_slot *p = 0, *next;
1137 
1138   if (x == 0)
1139     return;
1140 
1141   /* If X is a register that is being used as a pointer, see if we have
1142      a temporary slot we know it points to.  */
1143   if (REG_P (x) && REG_POINTER (x))
1144     p = find_temp_slot_from_address (x);
1145 
1146   /* If X is not in memory or is at a constant address, it cannot be in
1147      a temporary slot.  */
1148   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1149     return;
1150 
1151   /* First see if we can find a match.  */
1152   if (p == 0)
1153     p = find_temp_slot_from_address (XEXP (x, 0));
1154 
1155   if (p != 0)
1156     {
1157       if (p->level == temp_slot_level)
1158 	move_slot_to_level (p, temp_slot_level - 1);
1159       return;
1160     }
1161 
1162   /* Otherwise, preserve all non-kept slots at this level.  */
1163   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1164     {
1165       next = p->next;
1166       move_slot_to_level (p, temp_slot_level - 1);
1167     }
1168 }
1169 
1170 /* Free all temporaries used so far.  This is normally called at the
1171    end of generating code for a statement.  */
1172 
1173 void
1174 free_temp_slots (void)
1175 {
1176   struct temp_slot *p, *next;
1177   bool some_available = false;
1178 
1179   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1180     {
1181       next = p->next;
1182       make_slot_available (p);
1183       some_available = true;
1184     }
1185 
1186   if (some_available)
1187     {
1188       remove_unused_temp_slot_addresses ();
1189       combine_temp_slots ();
1190     }
1191 }
1192 
1193 /* Push deeper into the nesting level for stack temporaries.  */
1194 
1195 void
1196 push_temp_slots (void)
1197 {
1198   temp_slot_level++;
1199 }
1200 
1201 /* Pop a temporary nesting level.  All slots in use in the current level
1202    are freed.  */
1203 
1204 void
1205 pop_temp_slots (void)
1206 {
1207   free_temp_slots ();
1208   temp_slot_level--;
1209 }
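
/* The entry points above are typically used as a bracket around the
   expansion of a single statement, roughly (a sketch, not a quote of any
   particular caller; MODE, SIZE and RESULT are placeholders):

     push_temp_slots ();
     rtx tmp = assign_stack_temp (mode, size);
     ... expand code that uses TMP ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   preserve_temp_slots is needed only when RESULT may live in TMP;
   pop_temp_slots then frees every slot still at the current level, so
   anything not explicitly preserved becomes available for reuse.  */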
1210 
1211 /* Initialize temporary slots.  */
1212 
1213 void
1214 init_temp_slots (void)
1215 {
1216   /* We have not allocated any temporaries yet.  */
1217   avail_temp_slots = 0;
1218   vec_alloc (used_temp_slots, 0);
1219   temp_slot_level = 0;
1220   n_temp_slots_in_use = 0;
1221 
1222   /* Set up the table to map addresses to temp slots.  */
1223   if (! temp_slot_address_table)
1224     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1225   else
1226     temp_slot_address_table->empty ();
1227 }
1228 
1229 /* Functions and data structures to keep track of the values hard regs
1230    had at the start of the function.  */
1231 
1232 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1233    and has_hard_reg_initial_val.  */
1234 struct GTY(()) initial_value_pair {
1235   rtx hard_reg;
1236   rtx pseudo;
1237 };
1238 /* ???  This could be a VEC but there is currently no way to define an
1239    opaque VEC type.  This could be worked around by defining struct
1240    initial_value_pair in function.h.  */
1241 struct GTY(()) initial_value_struct {
1242   int num_entries;
1243   int max_entries;
1244   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1245 };
1246 
1247 /* If a pseudo represents an initial hard reg (or expression), return
1248    it, else return NULL_RTX.  */
1249 
1250 rtx
1251 get_hard_reg_initial_reg (rtx reg)
1252 {
1253   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1254   int i;
1255 
1256   if (ivs == 0)
1257     return NULL_RTX;
1258 
1259   for (i = 0; i < ivs->num_entries; i++)
1260     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1261       return ivs->entries[i].hard_reg;
1262 
1263   return NULL_RTX;
1264 }
1265 
1266 /* Make sure that there's a pseudo register of mode MODE that stores the
1267    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1268 
1269 rtx
1270 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1271 {
1272   struct initial_value_struct *ivs;
1273   rtx rv;
1274 
1275   rv = has_hard_reg_initial_val (mode, regno);
1276   if (rv)
1277     return rv;
1278 
1279   ivs = crtl->hard_reg_initial_vals;
1280   if (ivs == 0)
1281     {
1282       ivs = ggc_alloc<initial_value_struct> ();
1283       ivs->num_entries = 0;
1284       ivs->max_entries = 5;
1285       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1286       crtl->hard_reg_initial_vals = ivs;
1287     }
1288 
1289   if (ivs->num_entries >= ivs->max_entries)
1290     {
1291       ivs->max_entries += 5;
1292       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1293 				    ivs->max_entries);
1294     }
1295 
1296   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1297   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1298 
1299   return ivs->entries[ivs->num_entries++].pseudo;
1300 }
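
/* A hedged example of a typical use (LINK_REGNUM is a hypothetical
   target-specific register number): a backend that needs the value the
   link register had on entry can ask for

     rtx entry_lr = get_hard_reg_initial_val (Pmode, LINK_REGNUM);

   and emit_initial_value_sets later emits the copy from the hard register
   into that pseudo at the function entry.  */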
1301 
1302 /* See if get_hard_reg_initial_val has been used to create a pseudo
1303    for the initial value of hard register REGNO in mode MODE.  Return
1304    the associated pseudo if so, otherwise return NULL.  */
1305 
1306 rtx
1307 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1308 {
1309   struct initial_value_struct *ivs;
1310   int i;
1311 
1312   ivs = crtl->hard_reg_initial_vals;
1313   if (ivs != 0)
1314     for (i = 0; i < ivs->num_entries; i++)
1315       if (GET_MODE (ivs->entries[i].hard_reg) == mode
1316 	  && REGNO (ivs->entries[i].hard_reg) == regno)
1317 	return ivs->entries[i].pseudo;
1318 
1319   return NULL_RTX;
1320 }
1321 
1322 unsigned int
1323 emit_initial_value_sets (void)
1324 {
1325   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1326   int i;
1327   rtx_insn *seq;
1328 
1329   if (ivs == 0)
1330     return 0;
1331 
1332   start_sequence ();
1333   for (i = 0; i < ivs->num_entries; i++)
1334     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1335   seq = get_insns ();
1336   end_sequence ();
1337 
1338   emit_insn_at_entry (seq);
1339   return 0;
1340 }
1341 
1342 /* Store in *HREG and *PREG the hard-reg/pseudo-reg initial values pair
1343    at entry I; return TRUE if I is a valid entry, FALSE otherwise.  */
1344 bool
1345 initial_value_entry (int i, rtx *hreg, rtx *preg)
1346 {
1347   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1348   if (!ivs || i >= ivs->num_entries)
1349     return false;
1350 
1351   *hreg = ivs->entries[i].hard_reg;
1352   *preg = ivs->entries[i].pseudo;
1353   return true;
1354 }
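
/* Callers usually walk the table with a plain index until this returns
   false, e.g. (a sketch; record_pair is a hypothetical consumer):

     rtx hreg, preg;
     for (int i = 0; initial_value_entry (i, &hreg, &preg); i++)
       record_pair (hreg, preg);

   which visits every recorded hard-reg/pseudo pair in order.  */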
1355 
1356 /* These routines are responsible for converting virtual register references
1357    to the actual hard register references once RTL generation is complete.
1358 
1359    The following four variables are used for communication between the
1360    routines.  They contain the offsets of the virtual registers from their
1361    respective hard registers.  */
1362 
1363 static int in_arg_offset;
1364 static int var_offset;
1365 static int dynamic_offset;
1366 static int out_arg_offset;
1367 static int cfa_offset;
1368 
1369 /* In most machines, the stack pointer register is equivalent to the bottom
1370    of the stack.  */
1371 
1372 #ifndef STACK_POINTER_OFFSET
1373 #define STACK_POINTER_OFFSET	0
1374 #endif
1375 
1376 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1377 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1378 #endif
1379 
1380 /* If not defined, pick an appropriate default for the offset of dynamically
1381    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1382    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1383 
1384 #ifndef STACK_DYNAMIC_OFFSET
1385 
1386 /* The bottom of the stack points to the actual arguments.  If
1387    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1388    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1389    stack space for register parameters is not pushed by the caller, but
1390    rather part of the fixed stack areas and hence not included in
1391    `crtl->outgoing_args_size'.  Nevertheless, we must allow
1392    for it when allocating stack dynamic objects.  */
1393 
1394 #ifdef INCOMING_REG_PARM_STACK_SPACE
1395 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1396 ((ACCUMULATE_OUTGOING_ARGS						      \
1397   ? (crtl->outgoing_args_size				      \
1398      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1399 					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1400   : 0) + (STACK_POINTER_OFFSET))
1401 #else
1402 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1403 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)	      \
1404  + (STACK_POINTER_OFFSET))
1405 #endif
1406 #endif
1407 
1408 
1409 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1410    is a virtual register, return the equivalent hard register and set the
1411    offset indirectly through the pointer.  Otherwise, return 0.  */
1412 
1413 static rtx
1414 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1415 {
1416   rtx new_rtx;
1417   HOST_WIDE_INT offset;
1418 
1419   if (x == virtual_incoming_args_rtx)
1420     {
1421       if (stack_realign_drap)
1422         {
1423 	  /* Replace virtual_incoming_args_rtx with internal arg
1424 	     pointer if DRAP is used to realign stack.  */
1425           new_rtx = crtl->args.internal_arg_pointer;
1426           offset = 0;
1427         }
1428       else
1429         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1430     }
1431   else if (x == virtual_stack_vars_rtx)
1432     new_rtx = frame_pointer_rtx, offset = var_offset;
1433   else if (x == virtual_stack_dynamic_rtx)
1434     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1435   else if (x == virtual_outgoing_args_rtx)
1436     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1437   else if (x == virtual_cfa_rtx)
1438     {
1439 #ifdef FRAME_POINTER_CFA_OFFSET
1440       new_rtx = frame_pointer_rtx;
1441 #else
1442       new_rtx = arg_pointer_rtx;
1443 #endif
1444       offset = cfa_offset;
1445     }
1446   else if (x == virtual_preferred_stack_boundary_rtx)
1447     {
1448       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1449       offset = 0;
1450     }
1451   else
1452     return NULL_RTX;
1453 
1454   *poffset = offset;
1455   return new_rtx;
1456 }
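
/* For instance (a sketch with a made-up var_offset of 16): once the
   offsets above have been computed, a reference such as

     (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten by instantiate_virtual_regs_in_rtx below into

     (plus (reg frame-pointer) (const_int 24))

   i.e. the virtual register is replaced by its hard counterpart and the
   accumulated offset is folded into the constant.  */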
1457 
1458 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1459    registers present inside of *LOC.  The expression is simplified,
1460    as much as possible, but is not to be considered "valid" in any sense
1461    implied by the target.  Return true if any change is made.  */
1462 
1463 static bool
1464 instantiate_virtual_regs_in_rtx (rtx *loc)
1465 {
1466   if (!*loc)
1467     return false;
1468   bool changed = false;
1469   subrtx_ptr_iterator::array_type array;
1470   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1471     {
1472       rtx *loc = *iter;
1473       if (rtx x = *loc)
1474 	{
1475 	  rtx new_rtx;
1476 	  HOST_WIDE_INT offset;
1477 	  switch (GET_CODE (x))
1478 	    {
1479 	    case REG:
1480 	      new_rtx = instantiate_new_reg (x, &offset);
1481 	      if (new_rtx)
1482 		{
1483 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1484 		  changed = true;
1485 		}
1486 	      iter.skip_subrtxes ();
1487 	      break;
1488 
1489 	    case PLUS:
1490 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1491 	      if (new_rtx)
1492 		{
1493 		  XEXP (x, 0) = new_rtx;
1494 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1495 		  changed = true;
1496 		  iter.skip_subrtxes ();
1497 		  break;
1498 		}
1499 
1500 	      /* FIXME -- from old code */
1501 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1502 		 we can commute the PLUS and SUBREG because pointers into the
1503 		 frame are well-behaved.  */
1504 	      break;
1505 
1506 	    default:
1507 	      break;
1508 	    }
1509 	}
1510     }
1511   return changed;
1512 }
1513 
1514 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1515    matches the predicate for insn CODE operand OPERAND.  */
1516 
1517 static int
1518 safe_insn_predicate (int code, int operand, rtx x)
1519 {
1520   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1521 }
1522 
1523 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1524    registers present inside of insn.  The result will be a valid insn.  */
1525 
1526 static void
1527 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1528 {
1529   HOST_WIDE_INT offset;
1530   int insn_code, i;
1531   bool any_change = false;
1532   rtx set, new_rtx, x;
1533   rtx_insn *seq;
1534 
1535   /* There are some special cases to be handled first.  */
1536   set = single_set (insn);
1537   if (set)
1538     {
1539       /* We're allowed to assign to a virtual register.  This is interpreted
1540 	 to mean that the underlying register gets assigned the inverse
1541 	 transformation.  This is used, for example, in the handling of
1542 	 non-local gotos.  */
1543       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1544       if (new_rtx)
1545 	{
1546 	  start_sequence ();
1547 
1548 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1549 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1550 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1551 	  x = force_operand (x, new_rtx);
1552 	  if (x != new_rtx)
1553 	    emit_move_insn (new_rtx, x);
1554 
1555 	  seq = get_insns ();
1556 	  end_sequence ();
1557 
1558 	  emit_insn_before (seq, insn);
1559 	  delete_insn (insn);
1560 	  return;
1561 	}
1562 
1563       /* Handle a straight copy from a virtual register by generating a
1564 	 new add insn.  The difference between this and falling through
1565 	 to the generic case is avoiding a new pseudo and eliminating a
1566 	 move insn in the initial rtl stream.  */
1567       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1568       if (new_rtx && offset != 0
1569 	  && REG_P (SET_DEST (set))
1570 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1571 	{
1572 	  start_sequence ();
1573 
1574 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1575 				   gen_int_mode (offset,
1576 						 GET_MODE (SET_DEST (set))),
1577 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1578 	  if (x != SET_DEST (set))
1579 	    emit_move_insn (SET_DEST (set), x);
1580 
1581 	  seq = get_insns ();
1582 	  end_sequence ();
1583 
1584 	  emit_insn_before (seq, insn);
1585 	  delete_insn (insn);
1586 	  return;
1587 	}
1588 
1589       extract_insn (insn);
1590       insn_code = INSN_CODE (insn);
1591 
1592       /* Handle a plus involving a virtual register by determining if the
1593 	 operands remain valid if they're modified in place.  */
1594       if (GET_CODE (SET_SRC (set)) == PLUS
1595 	  && recog_data.n_operands >= 3
1596 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1597 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1598 	  && CONST_INT_P (recog_data.operand[2])
1599 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1600 	{
1601 	  offset += INTVAL (recog_data.operand[2]);
1602 
1603 	  /* If the sum is zero, then replace with a plain move.  */
1604 	  if (offset == 0
1605 	      && REG_P (SET_DEST (set))
1606 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1607 	    {
1608 	      start_sequence ();
1609 	      emit_move_insn (SET_DEST (set), new_rtx);
1610 	      seq = get_insns ();
1611 	      end_sequence ();
1612 
1613 	      emit_insn_before (seq, insn);
1614 	      delete_insn (insn);
1615 	      return;
1616 	    }
1617 
1618 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1619 
1620 	  /* Using validate_change and apply_change_group here leaves
1621 	     recog_data in an invalid state.  Since we know exactly what
1622 	     we want to check, do those two by hand.  */
1623 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1624 	      && safe_insn_predicate (insn_code, 2, x))
1625 	    {
1626 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1627 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1628 	      any_change = true;
1629 
1630 	      /* Fall through into the regular operand fixup loop in
1631 		 order to take care of operands other than 1 and 2.  */
1632 	    }
1633 	}
1634     }
1635   else
1636     {
1637       extract_insn (insn);
1638       insn_code = INSN_CODE (insn);
1639     }
1640 
1641   /* In the general case, we expect virtual registers to appear only in
1642      operands, and then only as either bare registers or inside memories.  */
1643   for (i = 0; i < recog_data.n_operands; ++i)
1644     {
1645       x = recog_data.operand[i];
1646       switch (GET_CODE (x))
1647 	{
1648 	case MEM:
1649 	  {
1650 	    rtx addr = XEXP (x, 0);
1651 
1652 	    if (!instantiate_virtual_regs_in_rtx (&addr))
1653 	      continue;
1654 
1655 	    start_sequence ();
1656 	    x = replace_equiv_address (x, addr, true);
1657 	    /* It may happen that the address with the virtual reg
1658 	       was valid (e.g. based on the virtual stack reg, which might
1659 	       be acceptable to the predicates with all offsets), whereas
1660 	       the address now isn't valid anymore, for instance when the
1661 	       address still has an offset but the base reg isn't virtual-stack-reg
1662 	       anymore.  Below we would do a force_reg on the whole operand,
1663 	       but this insn might actually only accept memory.  Hence,
1664 	       before doing that last resort, try to reload the address into
1665 	       a register, so this operand stays a MEM.  */
1666 	    if (!safe_insn_predicate (insn_code, i, x))
1667 	      {
1668 		addr = force_reg (GET_MODE (addr), addr);
1669 		x = replace_equiv_address (x, addr, true);
1670 	      }
1671 	    seq = get_insns ();
1672 	    end_sequence ();
1673 	    if (seq)
1674 	      emit_insn_before (seq, insn);
1675 	  }
1676 	  break;
1677 
1678 	case REG:
1679 	  new_rtx = instantiate_new_reg (x, &offset);
1680 	  if (new_rtx == NULL)
1681 	    continue;
1682 	  if (offset == 0)
1683 	    x = new_rtx;
1684 	  else
1685 	    {
1686 	      start_sequence ();
1687 
1688 	      /* Careful, special mode predicates may have stuff in
1689 		 insn_data[insn_code].operand[i].mode that isn't useful
1690 		 to us for computing a new value.  */
1691 	      /* ??? Recognize address_operand and/or "p" constraints
1692 		 to see if (plus new offset) is a valid address before we put
1693 		 this through expand_simple_binop.  */
1694 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1695 				       gen_int_mode (offset, GET_MODE (x)),
1696 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1697 	      seq = get_insns ();
1698 	      end_sequence ();
1699 	      emit_insn_before (seq, insn);
1700 	    }
1701 	  break;
1702 
1703 	case SUBREG:
1704 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1705 	  if (new_rtx == NULL)
1706 	    continue;
1707 	  if (offset != 0)
1708 	    {
1709 	      start_sequence ();
1710 	      new_rtx = expand_simple_binop
1711 		(GET_MODE (new_rtx), PLUS, new_rtx,
1712 		 gen_int_mode (offset, GET_MODE (new_rtx)),
1713 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1714 	      seq = get_insns ();
1715 	      end_sequence ();
1716 	      emit_insn_before (seq, insn);
1717 	    }
1718 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1719 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1720 	  gcc_assert (x);
1721 	  break;
1722 
1723 	default:
1724 	  continue;
1725 	}
1726 
1727       /* At this point, X contains the new value for the operand.
1728 	 Validate the new value vs the insn predicate.  Note that
1729 	 asm insns will have insn_code -1 here.  */
1730       if (!safe_insn_predicate (insn_code, i, x))
1731 	{
1732 	  start_sequence ();
1733 	  if (REG_P (x))
1734 	    {
1735 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1736 	      x = copy_to_reg (x);
1737 	    }
1738 	  else
1739 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1740 	  seq = get_insns ();
1741 	  end_sequence ();
1742 	  if (seq)
1743 	    emit_insn_before (seq, insn);
1744 	}
1745 
1746       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1747       any_change = true;
1748     }
1749 
1750   if (any_change)
1751     {
1752       /* Propagate operand changes into the duplicates.  */
1753       for (i = 0; i < recog_data.n_dups; ++i)
1754 	*recog_data.dup_loc[i]
1755 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1756 
1757       /* Force re-recognition of the instruction for validation.  */
1758       INSN_CODE (insn) = -1;
1759     }
1760 
1761   if (asm_noperands (PATTERN (insn)) >= 0)
1762     {
1763       if (!check_asm_operands (PATTERN (insn)))
1764 	{
1765 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1766 	  /* For asm goto, instead of fixing up all the edges
1767 	     just clear the template and clear input operands
1768 	     (asm goto doesn't have any output operands).  */
1769 	  if (JUMP_P (insn))
1770 	    {
1771 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1772 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1773 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1774 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1775 	    }
1776 	  else
1777 	    delete_insn (insn);
1778 	}
1779     }
1780   else
1781     {
1782       if (recog_memoized (insn) < 0)
1783 	fatal_insn_not_found (insn);
1784     }
1785 }
1786 
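/* An illustrative sketch of the rewrites performed above; the concrete
   base register and offsets are target-dependent and not taken from this
   file.  A straight copy such as

     (set (reg:SI 100) (reg:SI virtual-stack-vars))

   is replaced by an add using the real base register, e.g.

     (set (reg:SI 100) (plus:SI (reg/f:SI frame-pointer) (const_int 16)))

   and a PLUS of a virtual register and a CONST_INT is fixed up by folding
   the instantiation offset into the existing constant operand, so no new
   pseudo is created in either case.  */
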
1787 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1788    do any instantiation required.  */
1789 
1790 void
1791 instantiate_decl_rtl (rtx x)
1792 {
1793   rtx addr;
1794 
1795   if (x == 0)
1796     return;
1797 
1798   /* If this is a CONCAT, recurse for the pieces.  */
1799   if (GET_CODE (x) == CONCAT)
1800     {
1801       instantiate_decl_rtl (XEXP (x, 0));
1802       instantiate_decl_rtl (XEXP (x, 1));
1803       return;
1804     }
1805 
1806   /* If this is not a MEM, no need to do anything.  Similarly if the
1807      address is a constant or a register that is not a virtual register.  */
1808   if (!MEM_P (x))
1809     return;
1810 
1811   addr = XEXP (x, 0);
1812   if (CONSTANT_P (addr)
1813       || (REG_P (addr)
1814 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1815 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1816     return;
1817 
1818   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1819 }
1820 
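/* Illustrative example (offsets made up): before this pass the DECL_RTL
   of a stack variable typically looks like

     (mem/c:SI (plus:SI (reg/f:SI virtual-stack-vars) (const_int -4)))

   and instantiate_virtual_regs_in_rtx rewrites the address in place to
   something like

     (mem/c:SI (plus:SI (reg/f:SI frame-pointer) (const_int 12)))

   with the virtual register's offset folded into the displacement.  */
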
1821 /* Helper for instantiate_decls called via walk_tree: Process all decls
1822    in the given DECL_VALUE_EXPR.  */
1823 
1824 static tree
1825 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1826 {
1827   tree t = *tp;
1828   if (! EXPR_P (t))
1829     {
1830       *walk_subtrees = 0;
1831       if (DECL_P (t))
1832 	{
1833 	  if (DECL_RTL_SET_P (t))
1834 	    instantiate_decl_rtl (DECL_RTL (t));
1835 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1836 	      && DECL_INCOMING_RTL (t))
1837 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1838 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1839 	      && DECL_HAS_VALUE_EXPR_P (t))
1840 	    {
1841 	      tree v = DECL_VALUE_EXPR (t);
1842 	      walk_tree (&v, instantiate_expr, NULL, NULL);
1843 	    }
1844 	}
1845     }
1846   return NULL;
1847 }
1848 
1849 /* Subroutine of instantiate_decls: Process all decls in the given
1850    BLOCK node and all its subblocks.  */
1851 
1852 static void
1853 instantiate_decls_1 (tree let)
1854 {
1855   tree t;
1856 
1857   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1858     {
1859       if (DECL_RTL_SET_P (t))
1860 	instantiate_decl_rtl (DECL_RTL (t));
1861       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1862 	{
1863 	  tree v = DECL_VALUE_EXPR (t);
1864 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1865 	}
1866     }
1867 
1868   /* Process all subblocks.  */
1869   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1870     instantiate_decls_1 (t);
1871 }
1872 
1873 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1874    all virtual registers in their DECL_RTL's.  */
1875 
1876 static void
1877 instantiate_decls (tree fndecl)
1878 {
1879   tree decl;
1880   unsigned ix;
1881 
1882   /* Process all parameters of the function.  */
1883   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1884     {
1885       instantiate_decl_rtl (DECL_RTL (decl));
1886       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1887       if (DECL_HAS_VALUE_EXPR_P (decl))
1888 	{
1889 	  tree v = DECL_VALUE_EXPR (decl);
1890 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1891 	}
1892     }
1893 
1894   if ((decl = DECL_RESULT (fndecl))
1895       && TREE_CODE (decl) == RESULT_DECL)
1896     {
1897       if (DECL_RTL_SET_P (decl))
1898 	instantiate_decl_rtl (DECL_RTL (decl));
1899       if (DECL_HAS_VALUE_EXPR_P (decl))
1900 	{
1901 	  tree v = DECL_VALUE_EXPR (decl);
1902 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1903 	}
1904     }
1905 
1906   /* Process the saved static chain if it exists.  */
1907   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1908   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1909     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1910 
1911   /* Now process all variables defined in the function or its subblocks.  */
1912   if (DECL_INITIAL (fndecl))
1913     instantiate_decls_1 (DECL_INITIAL (fndecl));
1914 
1915   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1916     if (DECL_RTL_SET_P (decl))
1917       instantiate_decl_rtl (DECL_RTL (decl));
1918   vec_free (cfun->local_decls);
1919 }
1920 
1921 /* Pass through the INSNS of function FNDECL and convert virtual register
1922    references to hard register references.  */
1923 
1924 static unsigned int
1925 instantiate_virtual_regs (void)
1926 {
1927   rtx_insn *insn;
1928 
1929   /* Compute the offsets to use for this function.  */
1930   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1931   var_offset = STARTING_FRAME_OFFSET;
1932   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1933   out_arg_offset = STACK_POINTER_OFFSET;
1934 #ifdef FRAME_POINTER_CFA_OFFSET
1935   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1936 #else
1937   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1938 #endif
1939 
1940   /* Initialize recognition, indicating that volatile is OK.  */
1941   init_recog ();
1942 
1943   /* Scan through all the insns, instantiating every virtual register still
1944      present.  */
1945   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1946     if (INSN_P (insn))
1947       {
1948 	/* These patterns in the instruction stream can never be recognized.
1949 	   Fortunately, they shouldn't contain virtual registers either.  */
1950         if (GET_CODE (PATTERN (insn)) == USE
1951 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1952 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1953 	  continue;
1954 	else if (DEBUG_INSN_P (insn))
1955 	  instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1956 	else
1957 	  instantiate_virtual_regs_in_insn (insn);
1958 
1959 	if (insn->deleted ())
1960 	  continue;
1961 
1962 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1963 
1964 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1965 	if (CALL_P (insn))
1966 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1967       }
1968 
1969   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1970   instantiate_decls (current_function_decl);
1971 
1972   targetm.instantiate_decls ();
1973 
1974   /* Indicate that, from now on, assign_stack_local should use
1975      frame_pointer_rtx.  */
1976   virtuals_instantiated = 1;
1977 
1978   return 0;
1979 }
1980 
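/* For reference, a sketch of the mapping just applied; the right-hand
   sides come from the offsets computed at the top of
   instantiate_virtual_regs and from instantiate_new_reg:

     virtual-incoming-args   ->  arg pointer    + in_arg_offset
     virtual-stack-vars      ->  frame pointer  + var_offset
     virtual-stack-dynamic   ->  stack pointer  + dynamic_offset
     virtual-outgoing-args   ->  stack pointer  + out_arg_offset
     virtual-cfa             ->  arg or frame pointer + cfa_offset  */
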
1981 namespace {
1982 
1983 const pass_data pass_data_instantiate_virtual_regs =
1984 {
1985   RTL_PASS, /* type */
1986   "vregs", /* name */
1987   OPTGROUP_NONE, /* optinfo_flags */
1988   TV_NONE, /* tv_id */
1989   0, /* properties_required */
1990   0, /* properties_provided */
1991   0, /* properties_destroyed */
1992   0, /* todo_flags_start */
1993   0, /* todo_flags_finish */
1994 };
1995 
1996 class pass_instantiate_virtual_regs : public rtl_opt_pass
1997 {
1998 public:
1999   pass_instantiate_virtual_regs (gcc::context *ctxt)
2000     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2001   {}
2002 
2003   /* opt_pass methods: */
2004   virtual unsigned int execute (function *)
2005     {
2006       return instantiate_virtual_regs ();
2007     }
2008 
2009 }; // class pass_instantiate_virtual_regs
2010 
2011 } // anon namespace
2012 
2013 rtl_opt_pass *
2014 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2015 {
2016   return new pass_instantiate_virtual_regs (ctxt);
2017 }
2018 
2019 
2020 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2021    This means a type for which function calls must pass an address to the
2022    function or get an address back from the function.
2023    EXP may be a type node or an expression (whose type is tested).  */
2024 
2025 int
2026 aggregate_value_p (const_tree exp, const_tree fntype)
2027 {
2028   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2029   int i, regno, nregs;
2030   rtx reg;
2031 
2032   if (fntype)
2033     switch (TREE_CODE (fntype))
2034       {
2035       case CALL_EXPR:
2036 	{
2037 	  tree fndecl = get_callee_fndecl (fntype);
2038 	  if (fndecl)
2039 	    fntype = TREE_TYPE (fndecl);
2040 	  else if (CALL_EXPR_FN (fntype))
2041 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2042 	  else
2043 	    /* For internal functions, assume nothing needs to be
2044 	       returned in memory.  */
2045 	    return 0;
2046 	}
2047 	break;
2048       case FUNCTION_DECL:
2049 	fntype = TREE_TYPE (fntype);
2050 	break;
2051       case FUNCTION_TYPE:
2052       case METHOD_TYPE:
2053         break;
2054       case IDENTIFIER_NODE:
2055 	fntype = NULL_TREE;
2056 	break;
2057       default:
2058 	/* We don't expect other tree types here.  */
2059 	gcc_unreachable ();
2060       }
2061 
2062   if (VOID_TYPE_P (type))
2063     return 0;
2064 
2065   /* If a record should be passed the same as its first (and only) member
2066      don't pass it as an aggregate.  */
2067   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2068     return aggregate_value_p (first_field (type), fntype);
2069 
2070   /* If the front end has decided that this needs to be passed by
2071      reference, do so.  */
2072   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2073       && DECL_BY_REFERENCE (exp))
2074     return 1;
2075 
2076   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2077   if (fntype && TREE_ADDRESSABLE (fntype))
2078     return 1;
2079 
2080   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2081      and thus can't be returned in registers.  */
2082   if (TREE_ADDRESSABLE (type))
2083     return 1;
2084 
2085   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2086     return 1;
2087 
2088   if (targetm.calls.return_in_memory (type, fntype))
2089     return 1;
2090 
2091   /* Make sure we have suitable call-clobbered regs to return
2092      the value in; if not, we must return it in memory.  */
2093   reg = hard_function_value (type, 0, fntype, 0);
2094 
2095   /* If we have something other than a REG (e.g. a PARALLEL), then assume
2096      it is OK.  */
2097   if (!REG_P (reg))
2098     return 0;
2099 
2100   regno = REGNO (reg);
2101   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2102   for (i = 0; i < nregs; i++)
2103     if (! call_used_regs[regno + i])
2104       return 1;
2105 
2106   return 0;
2107 }
2108 
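/* Illustrative uses (the results ultimately depend on the target ABI):

     int f (void);                    aggregate_value_p -> 0: fits in a
				      call-clobbered return register.
     struct big { char c[64]; };
     struct big g (void);             aggregate_value_p -> 1 on typical
				      targets: the caller passes an address
				      for the return value.
     TREE_ADDRESSABLE return types    always 1, since they must be
				      constructed in memory.  */
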
2109 /* Return true if we should assign DECL a pseudo register; false if it
2110    should live on the local stack.  */
2111 
2112 bool
2113 use_register_for_decl (const_tree decl)
2114 {
2115   if (TREE_CODE (decl) == SSA_NAME)
2116     {
2117       /* We often try to use the SSA_NAME, instead of its underlying
2118 	 decl, to get type information and guide decisions, to avoid
2119 	 differences of behavior between anonymous and named
2120 	 variables, but in this one case we have to go for the actual
2121 	 variable if there is one.  The main reason is that, at least
2122 	 at -O0, we want to place user variables on the stack, but we
2123 	 don't mind using pseudos for anonymous or ignored temps.
2124 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2125 	 should go in pseudos, whereas their corresponding variables
2126 	 might have to go on the stack.  So, disregarding the decl
2127 	 here would negatively impact debug info at -O0, enable
2128 	 coalescing between SSA_NAMEs that ought to get different
2129 	 stack/pseudo assignments, and get the incoming argument
2130 	 processing thoroughly confused by PARM_DECLs expected to live
2131 	 in stack slots but assigned to pseudos.  */
2132       if (!SSA_NAME_VAR (decl))
2133 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2134 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2135 
2136       decl = SSA_NAME_VAR (decl);
2137     }
2138 
2139   /* Honor volatile.  */
2140   if (TREE_SIDE_EFFECTS (decl))
2141     return false;
2142 
2143   /* Honor addressability.  */
2144   if (TREE_ADDRESSABLE (decl))
2145     return false;
2146 
2147   /* RESULT_DECLs are a bit special in that they're assigned without
2148      regard to use_register_for_decl, but we generally only store in
2149      them.  If we coalesce their SSA NAMEs, we'd better return a
2150      result that matches the assignment in expand_function_start.  */
2151   if (TREE_CODE (decl) == RESULT_DECL)
2152     {
2153       /* If it's not an aggregate, we're going to use a REG or a
2154 	 PARALLEL containing a REG.  */
2155       if (!aggregate_value_p (decl, current_function_decl))
2156 	return true;
2157 
2158       /* If expand_function_start determines the return value, we'll
2159 	 use MEM if it's not by reference.  */
2160       if (cfun->returns_pcc_struct
2161 	  || (targetm.calls.struct_value_rtx
2162 	      (TREE_TYPE (current_function_decl), 1)))
2163 	return DECL_BY_REFERENCE (decl);
2164 
2165       /* Otherwise, we're taking an extra all.function_result_decl
2166 	 argument.  It's set up in assign_parms_augmented_arg_list,
2167 	 under the (negated) conditions above, and then it's used to
2168 	 set up the RESULT_DECL rtl in assign_parms, after looping
2169 	 over all parameters.  Now, if the RESULT_DECL is not by
2170 	 reference, we'll use a MEM either way.  */
2171       if (!DECL_BY_REFERENCE (decl))
2172 	return false;
2173 
2174       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2175 	 the function_result_decl's assignment.  Since it's a pointer,
2176 	 we can short-circuit a number of the tests below, and we must
2177 	 duplicate them because we don't have the
2178 	 function_result_decl to test.  */
2179       if (!targetm.calls.allocate_stack_slots_for_args ())
2180 	return true;
2181       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
2182       if (optimize)
2183 	return true;
2184       /* We don't set DECL_REGISTER for the function_result_decl.  */
2185       return false;
2186     }
2187 
2188   /* Decl is implicitly addressable by bound stores and loads
2189      if it is an aggregate holding bounds.  */
2190   if (chkp_function_instrumented_p (current_function_decl)
2191       && TREE_TYPE (decl)
2192       && !BOUNDED_P (decl)
2193       && chkp_type_has_pointer (TREE_TYPE (decl)))
2194     return false;
2195 
2196   /* Only register-like things go in registers.  */
2197   if (DECL_MODE (decl) == BLKmode)
2198     return false;
2199 
2200   /* If -ffloat-store specified, don't put explicit float variables
2201      into registers.  */
2202   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2203      propagates values across these stores, and it probably shouldn't.  */
2204   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2205     return false;
2206 
2207   if (!targetm.calls.allocate_stack_slots_for_args ())
2208     return true;
2209 
2210   /* If we're not interested in tracking debugging information for
2211      this decl, then we can certainly put it in a register.  */
2212   if (DECL_IGNORED_P (decl))
2213     return true;
2214 
2215   if (optimize)
2216     return true;
2217 
2218   if (!DECL_REGISTER (decl))
2219     return false;
2220 
2221   switch (TREE_CODE (TREE_TYPE (decl)))
2222     {
2223     case RECORD_TYPE:
2224     case UNION_TYPE:
2225     case QUAL_UNION_TYPE:
2226       /* When not optimizing, disregard register keyword for variables with
2227 	 types containing methods, otherwise the methods won't be callable
2228 	 from the debugger.  */
2229       if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2230 	return false;
2231       break;
2232     default:
2233       break;
2234     }
2235 
2236   return true;
2237 }
2238 
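/* Illustrative behaviour at -O0, assuming a typical target that allocates
   stack slots for arguments:

     void f (void)
     {
       int i;           use_register_for_decl -> false: named user variable,
			kept in memory so the debugger can see it.
       register int r;  -> true: DECL_REGISTER is honored when not optimizing.
       int *p = &i;     taking &i makes i TREE_ADDRESSABLE, so it can never
			go in a pseudo.
     }

   With optimization enabled, any non-addressable, non-volatile scalar is
   given a pseudo regardless of the register keyword.  */
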
2239 /* Structures to communicate between the subroutines of assign_parms.
2240    The first holds data persistent across all parameters, the second
2241    is cleared out for each parameter.  */
2242 
2243 struct assign_parm_data_all
2244 {
2245   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2246      should become a job of the target or otherwise encapsulated.  */
2247   CUMULATIVE_ARGS args_so_far_v;
2248   cumulative_args_t args_so_far;
2249   struct args_size stack_args_size;
2250   tree function_result_decl;
2251   tree orig_fnargs;
2252   rtx_insn *first_conversion_insn;
2253   rtx_insn *last_conversion_insn;
2254   HOST_WIDE_INT pretend_args_size;
2255   HOST_WIDE_INT extra_pretend_bytes;
2256   int reg_parm_stack_space;
2257 };
2258 
2259 struct assign_parm_data_one
2260 {
2261   tree nominal_type;
2262   tree passed_type;
2263   rtx entry_parm;
2264   rtx stack_parm;
2265   machine_mode nominal_mode;
2266   machine_mode passed_mode;
2267   machine_mode promoted_mode;
2268   struct locate_and_pad_arg_data locate;
2269   int partial;
2270   BOOL_BITFIELD named_arg : 1;
2271   BOOL_BITFIELD passed_pointer : 1;
2272   BOOL_BITFIELD on_stack : 1;
2273   BOOL_BITFIELD loaded_in_reg : 1;
2274 };
2275 
2276 struct bounds_parm_data
2277 {
2278   assign_parm_data_one parm_data;
2279   tree bounds_parm;
2280   tree ptr_parm;
2281   rtx ptr_entry;
2282   int bound_no;
2283 };
2284 
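/* A rough sketch, not actual code, of how these structures cooperate in
   the assign_parms driver further below:

     struct assign_parm_data_all all;
     assign_parms_initialize_all (&all);
     for each PARM_DECL parm:
       struct assign_parm_data_one data;
       assign_parm_find_data_types (&all, parm, &data);
       assign_parm_find_entry_rtl (&all, &data);
       ... find or build the stack home, then set the parm up in a
	   register, a stack slot, or a BLKmode block ...

   ALL accumulates per-function state (stack_args_size, the conversion
   insn sequence, pretend args); DATA is recomputed for every parameter.  */
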
2285 /* A subroutine of assign_parms.  Initialize ALL.  */
2286 
2287 static void
2288 assign_parms_initialize_all (struct assign_parm_data_all *all)
2289 {
2290   tree fntype ATTRIBUTE_UNUSED;
2291 
2292   memset (all, 0, sizeof (*all));
2293 
2294   fntype = TREE_TYPE (current_function_decl);
2295 
2296 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2297   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2298 #else
2299   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2300 			current_function_decl, -1);
2301 #endif
2302   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2303 
2304 #ifdef INCOMING_REG_PARM_STACK_SPACE
2305   all->reg_parm_stack_space
2306     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2307 #endif
2308 }
2309 
2310 /* If ARGS contains entries with complex types, split each such entry
2311    into two entries of the component type.  The vector ARGS is modified
2312    in place rather than returning a new list.  */
2313 
2314 static void
2315 split_complex_args (vec<tree> *args)
2316 {
2317   unsigned i;
2318   tree p;
2319 
2320   FOR_EACH_VEC_ELT (*args, i, p)
2321     {
2322       tree type = TREE_TYPE (p);
2323       if (TREE_CODE (type) == COMPLEX_TYPE
2324 	  && targetm.calls.split_complex_arg (type))
2325 	{
2326 	  tree decl;
2327 	  tree subtype = TREE_TYPE (type);
2328 	  bool addressable = TREE_ADDRESSABLE (p);
2329 
2330 	  /* Rewrite the PARM_DECL's type with its component.  */
2331 	  p = copy_node (p);
2332 	  TREE_TYPE (p) = subtype;
2333 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2334 	  SET_DECL_MODE (p, VOIDmode);
2335 	  DECL_SIZE (p) = NULL;
2336 	  DECL_SIZE_UNIT (p) = NULL;
2337 	  /* If this arg must go in memory, put it in a pseudo here.
2338 	     We can't allow it to go in memory as per normal parms,
2339 	     because the usual place might not have the imag part
2340 	     adjacent to the real part.  */
2341 	  DECL_ARTIFICIAL (p) = addressable;
2342 	  DECL_IGNORED_P (p) = addressable;
2343 	  TREE_ADDRESSABLE (p) = 0;
2344 	  layout_decl (p, 0);
2345 	  (*args)[i] = p;
2346 
2347 	  /* Build a second synthetic decl.  */
2348 	  decl = build_decl (EXPR_LOCATION (p),
2349 			     PARM_DECL, NULL_TREE, subtype);
2350 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2351 	  DECL_ARTIFICIAL (decl) = addressable;
2352 	  DECL_IGNORED_P (decl) = addressable;
2353 	  layout_decl (decl, 0);
2354 	  args->safe_insert (++i, decl);
2355 	}
2356     }
2357 }
2358 
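/* Illustrative example, assuming targetm.calls.split_complex_arg accepts
   the type:

     void f (_Complex double z, int n);

   is laid out as if it had been written

     void f (double z_real, double z_imag, int n);

   where the second double is the synthetic, nameless PARM_DECL built
   above (the z_real/z_imag names are only for illustration).  */
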
2359 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2360    the hidden struct return argument, and (abi willing) complex args.
2361    Return the new parameter list.  */
2362 
2363 static vec<tree>
2364 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2365 {
2366   tree fndecl = current_function_decl;
2367   tree fntype = TREE_TYPE (fndecl);
2368   vec<tree> fnargs = vNULL;
2369   tree arg;
2370 
2371   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2372     fnargs.safe_push (arg);
2373 
2374   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2375 
2376   /* If struct value address is treated as the first argument, make it so.  */
2377   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2378       && ! cfun->returns_pcc_struct
2379       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2380     {
2381       tree type = build_pointer_type (TREE_TYPE (fntype));
2382       tree decl;
2383 
2384       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2385 			 PARM_DECL, get_identifier (".result_ptr"), type);
2386       DECL_ARG_TYPE (decl) = type;
2387       DECL_ARTIFICIAL (decl) = 1;
2388       DECL_NAMELESS (decl) = 1;
2389       TREE_CONSTANT (decl) = 1;
2390       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
2391 	 changes, the end of the RESULT_DECL handling block in
2392 	 use_register_for_decl must be adjusted to match.  */
2393 
2394       DECL_CHAIN (decl) = all->orig_fnargs;
2395       all->orig_fnargs = decl;
2396       fnargs.safe_insert (0, decl);
2397 
2398       all->function_result_decl = decl;
2399 
2400       /* If the function is instrumented then the bounds of the
2401 	 passed structure address are the second argument.  */
2402       if (chkp_function_instrumented_p (fndecl))
2403 	{
2404 	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2405 			     PARM_DECL, get_identifier (".result_bnd"),
2406 			     pointer_bounds_type_node);
2407 	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2408 	  DECL_ARTIFICIAL (decl) = 1;
2409 	  DECL_NAMELESS (decl) = 1;
2410 	  TREE_CONSTANT (decl) = 1;
2411 
2412 	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2413 	  DECL_CHAIN (all->orig_fnargs) = decl;
2414 	  fnargs.safe_insert (1, decl);
2415 	}
2416     }
2417 
2418   /* If the target wants to split complex arguments into scalars, do so.  */
2419   if (targetm.calls.split_complex_arg)
2420     split_complex_args (&fnargs);
2421 
2422   return fnargs;
2423 }
2424 
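/* Illustrative example: given

     struct big { int a[32]; };
     struct big f (int n);

   on a target that returns large aggregates in memory and has no special
   struct-value register, the augmented list built above behaves like

     f (struct big *.result_ptr, int n)

   and, when the function is chkp-instrumented, a ".result_bnd" bounds
   parameter is inserted right after the pointer.  */
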
2425 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2426    data for the parameter.  Incorporate ABI specifics such as pass-by-
2427    reference and type promotion.  */
2428 
2429 static void
2430 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2431 			     struct assign_parm_data_one *data)
2432 {
2433   tree nominal_type, passed_type;
2434   machine_mode nominal_mode, passed_mode, promoted_mode;
2435   int unsignedp;
2436 
2437   memset (data, 0, sizeof (*data));
2438 
2439   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2440   if (!cfun->stdarg)
2441     data->named_arg = 1;  /* No variadic parms.  */
2442   else if (DECL_CHAIN (parm))
2443     data->named_arg = 1;  /* Not the last non-variadic parm. */
2444   else if (targetm.calls.strict_argument_naming (all->args_so_far))
2445     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2446   else
2447     data->named_arg = 0;  /* Treat as variadic.  */
2448 
2449   nominal_type = TREE_TYPE (parm);
2450   passed_type = DECL_ARG_TYPE (parm);
2451 
2452   /* Look out for errors propagating this far.  Also, if the parameter's
2453      type is void then its value doesn't matter.  */
2454   if (TREE_TYPE (parm) == error_mark_node
2455       /* This can happen after weird syntax errors
2456 	 or if an enum type is defined among the parms.  */
2457       || TREE_CODE (parm) != PARM_DECL
2458       || passed_type == NULL
2459       || VOID_TYPE_P (nominal_type))
2460     {
2461       nominal_type = passed_type = void_type_node;
2462       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2463       goto egress;
2464     }
2465 
2466   /* Find mode of arg as it is passed, and mode of arg as it should be
2467      during execution of this function.  */
2468   passed_mode = TYPE_MODE (passed_type);
2469   nominal_mode = TYPE_MODE (nominal_type);
2470 
2471   /* If the parm is to be passed as a transparent union or record, use the
2472      type of the first field for the tests below.  We have already verified
2473      that the modes are the same.  */
2474   if ((TREE_CODE (passed_type) == UNION_TYPE
2475        || TREE_CODE (passed_type) == RECORD_TYPE)
2476       && TYPE_TRANSPARENT_AGGR (passed_type))
2477     passed_type = TREE_TYPE (first_field (passed_type));
2478 
2479   /* See if this arg was passed by invisible reference.  */
2480   if (pass_by_reference (&all->args_so_far_v, passed_mode,
2481 			 passed_type, data->named_arg))
2482     {
2483       passed_type = nominal_type = build_pointer_type (passed_type);
2484       data->passed_pointer = true;
2485       passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2486     }
2487 
2488   /* Find mode as it is passed by the ABI.  */
2489   unsignedp = TYPE_UNSIGNED (passed_type);
2490   promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2491 				         TREE_TYPE (current_function_decl), 0);
2492 
2493  egress:
2494   data->nominal_type = nominal_type;
2495   data->passed_type = passed_type;
2496   data->nominal_mode = nominal_mode;
2497   data->passed_mode = passed_mode;
2498   data->promoted_mode = promoted_mode;
2499 }
2500 
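/* Illustrative outcomes (all of them target-dependent):

     void f (short s);        nominal_mode is HImode; on targets whose
			      promote_function_mode widens small integers,
			      promoted_mode ends up as SImode.
     void g (struct big b);   if pass_by_reference is true, passed_pointer
			      is set and both passed_type and nominal_type
			      become 'struct big *', with pointer mode.  */
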
2501 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2502 
2503 static void
2504 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2505 			    struct assign_parm_data_one *data, bool no_rtl)
2506 {
2507   int varargs_pretend_bytes = 0;
2508 
2509   targetm.calls.setup_incoming_varargs (all->args_so_far,
2510 					data->promoted_mode,
2511 					data->passed_type,
2512 					&varargs_pretend_bytes, no_rtl);
2513 
2514   /* If the back-end has requested extra stack space, record how much is
2515      needed.  Do not change pretend_args_size otherwise since it may be
2516      nonzero from an earlier partial argument.  */
2517   if (varargs_pretend_bytes > 0)
2518     all->pretend_args_size = varargs_pretend_bytes;
2519 }
2520 
2521 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2522    the incoming location of the current parameter.  */
2523 
2524 static void
2525 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2526 			    struct assign_parm_data_one *data)
2527 {
2528   HOST_WIDE_INT pretend_bytes = 0;
2529   rtx entry_parm;
2530   bool in_regs;
2531 
2532   if (data->promoted_mode == VOIDmode)
2533     {
2534       data->entry_parm = data->stack_parm = const0_rtx;
2535       return;
2536     }
2537 
2538   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2539 						    data->promoted_mode,
2540 						    data->passed_type,
2541 						    data->named_arg);
2542 
2543   if (entry_parm == 0)
2544     data->promoted_mode = data->passed_mode;
2545 
2546   /* Determine parm's home in the stack, in case it arrives in the stack
2547      or we should pretend it did.  Compute the stack position and rtx where
2548      the argument arrives and its size.
2549 
2550      There is one complexity here:  If this was a parameter that would
2551      have been passed in registers, but wasn't only because it is
2552      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2553      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2554      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2555      as it was the previous time.  */
2556   in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2557 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2558   in_regs = true;
2559 #endif
2560   if (!in_regs && !data->named_arg)
2561     {
2562       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2563 	{
2564 	  rtx tem;
2565 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2566 						     data->promoted_mode,
2567 						     data->passed_type, true);
2568 	  in_regs = tem != NULL;
2569 	}
2570     }
2571 
2572   /* If this parameter was passed both in registers and in the stack, use
2573      the copy on the stack.  */
2574   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2575 					data->passed_type))
2576     entry_parm = 0;
2577 
2578   if (entry_parm)
2579     {
2580       int partial;
2581 
2582       partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2583 						 data->promoted_mode,
2584 						 data->passed_type,
2585 						 data->named_arg);
2586       data->partial = partial;
2587 
2588       /* The caller might already have allocated stack space for the
2589 	 register parameters.  */
2590       if (partial != 0 && all->reg_parm_stack_space == 0)
2591 	{
2592 	  /* Part of this argument is passed in registers and part
2593 	     is passed on the stack.  Ask the prologue code to extend
2594 	     the stack part so that we can recreate the full value.
2595 
2596 	     PRETEND_BYTES is the size of the registers we need to store.
2597 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2598 	     stack space that the prologue should allocate.
2599 
2600 	     Internally, gcc assumes that the argument pointer is aligned
2601 	     to STACK_BOUNDARY bits.  This is used both for alignment
2602 	     optimizations (see init_emit) and to locate arguments that are
2603 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2604 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2605 	     a stack boundary.  */
2606 
2607 	  /* We assume at most one partial arg, and it must be the first
2608 	     argument on the stack.  */
2609 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2610 
2611 	  pretend_bytes = partial;
2612 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2613 
2614 	  /* We want to align relative to the actual stack pointer, so
2615 	     don't include this in the stack size until later.  */
2616 	  all->extra_pretend_bytes = all->pretend_args_size;
2617 	}
2618     }
2619 
2620   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2621 		       all->reg_parm_stack_space,
2622 		       entry_parm ? data->partial : 0, current_function_decl,
2623 		       &all->stack_args_size, &data->locate);
2624 
2625   /* Update parm_stack_boundary if this parameter is passed in the
2626      stack.  */
2627   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2628     crtl->parm_stack_boundary = data->locate.boundary;
2629 
2630   /* Adjust offsets to include the pretend args.  */
2631   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2632   data->locate.slot_offset.constant += pretend_bytes;
2633   data->locate.offset.constant += pretend_bytes;
2634 
2635   data->entry_parm = entry_parm;
2636 }
2637 
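/* Illustrative example (numbers made up): if arg_partial_bytes reports
   that the first 8 bytes of a 12-byte argument arrive in registers and
   there is no reg_parm_stack_space, pretend_args_size becomes
   CEIL_ROUND (8, STACK_BYTES), asking the prologue to allocate room just
   below the stack part so the register part can be stored there and the
   argument recreated contiguously in memory.  */
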
2638 /* A subroutine of assign_parms.  If there is actually space on the stack
2639    for this parm, count it in stack_args_size and return true.  */
2640 
2641 static bool
2642 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2643 			   struct assign_parm_data_one *data)
2644 {
2645   /* Bounds are never passed on the stack to keep compatibility
2646      with not instrumented code.  */
2647   if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2648     return false;
2649   /* Trivially true if we've no incoming register.  */
2650   else if (data->entry_parm == NULL)
2651     ;
2652   /* Also true if we're partially in registers and partially not,
2653      since we've arranged to drop the entire argument on the stack.  */
2654   else if (data->partial != 0)
2655     ;
2656   /* Also true if the target says that it's passed in both registers
2657      and on the stack.  */
2658   else if (GET_CODE (data->entry_parm) == PARALLEL
2659 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2660     ;
2661   /* Also true if the target says that there's stack allocated for
2662      all register parameters.  */
2663   else if (all->reg_parm_stack_space > 0)
2664     ;
2665   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2666   else
2667     return false;
2668 
2669   all->stack_args_size.constant += data->locate.size.constant;
2670   if (data->locate.size.var)
2671     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2672 
2673   return true;
2674 }
2675 
2676 /* A subroutine of assign_parms.  Given that this parameter is allocated
2677    stack space by the ABI, find it.  */
2678 
2679 static void
2680 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2681 {
2682   rtx offset_rtx, stack_parm;
2683   unsigned int align, boundary;
2684 
2685   /* If we're passing this arg using a reg, make its stack home the
2686      aligned stack slot.  */
2687   if (data->entry_parm)
2688     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2689   else
2690     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2691 
2692   stack_parm = crtl->args.internal_arg_pointer;
2693   if (offset_rtx != const0_rtx)
2694     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2695   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2696 
2697   if (!data->passed_pointer)
2698     {
2699       set_mem_attributes (stack_parm, parm, 1);
2700       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2701 	 while the promoted mode's size is needed.  */
2702       if (data->promoted_mode != BLKmode
2703 	  && data->promoted_mode != DECL_MODE (parm))
2704 	{
2705 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2706 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2707 	    {
2708 	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
2709 						  data->promoted_mode);
2710 	      if (offset)
2711 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2712 	    }
2713 	}
2714     }
2715 
2716   boundary = data->locate.boundary;
2717   align = BITS_PER_UNIT;
2718 
2719   /* If we're padding upward, we know that the alignment of the slot
2720      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2721      intentionally forcing upward padding.  Otherwise we have to come
2722      up with a guess at the alignment based on OFFSET_RTX.  */
2723   if (data->locate.where_pad != downward || data->entry_parm)
2724     align = boundary;
2725   else if (CONST_INT_P (offset_rtx))
2726     {
2727       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2728       align = least_bit_hwi (align);
2729     }
2730   set_mem_align (stack_parm, align);
2731 
2732   if (data->entry_parm)
2733     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2734 
2735   data->stack_parm = stack_parm;
2736 }
2737 
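/* Illustrative shape of the rtl built above (the offset is made up):

     (mem/c:SI (plus:SI (reg/f:SI internal-arg-pointer) (const_int 8)))

   i.e. a MEM based on crtl->args.internal_arg_pointer plus the slot or
   argument offset, carrying the attributes, size and alignment computed
   here.  */
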
2738 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2739    always valid and contiguous.  */
2740 
2741 static void
2742 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2743 {
2744   rtx entry_parm = data->entry_parm;
2745   rtx stack_parm = data->stack_parm;
2746 
2747   /* If this parm was passed part in regs and part in memory, pretend it
2748      arrived entirely in memory by pushing the register-part onto the stack.
2749      In the special case of a DImode or DFmode that is split, we could put
2750      it together in a pseudoreg directly, but for now that's not worth
2751      bothering with.  */
2752   if (data->partial != 0)
2753     {
2754       /* Handle calls that pass values in multiple non-contiguous
2755 	 locations.  The Irix 6 ABI has examples of this.  */
2756       if (GET_CODE (entry_parm) == PARALLEL)
2757 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2758 			  data->passed_type,
2759 			  int_size_in_bytes (data->passed_type));
2760       else
2761 	{
2762 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2763 	  move_block_from_reg (REGNO (entry_parm),
2764 			       validize_mem (copy_rtx (stack_parm)),
2765 			       data->partial / UNITS_PER_WORD);
2766 	}
2767 
2768       entry_parm = stack_parm;
2769     }
2770 
2771   /* If we didn't decide this parm came in a register, by default it came
2772      on the stack.  */
2773   else if (entry_parm == NULL)
2774     entry_parm = stack_parm;
2775 
2776   /* When an argument is passed in multiple locations, we can't make use
2777      of this information, but we can save some copying if the whole argument
2778      is passed in a single register.  */
2779   else if (GET_CODE (entry_parm) == PARALLEL
2780 	   && data->nominal_mode != BLKmode
2781 	   && data->passed_mode != BLKmode)
2782     {
2783       size_t i, len = XVECLEN (entry_parm, 0);
2784 
2785       for (i = 0; i < len; i++)
2786 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2787 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2788 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2789 		== data->passed_mode)
2790 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2791 	  {
2792 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2793 	    break;
2794 	  }
2795     }
2796 
2797   data->entry_parm = entry_parm;
2798 }
2799 
2800 /* A subroutine of assign_parms.  Reconstitute any values which were
2801    passed in multiple registers and would fit in a single register.  */
2802 
2803 static void
2804 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2805 {
2806   rtx entry_parm = data->entry_parm;
2807 
2808   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2809      This can be done with register operations rather than on the
2810      stack, even if we will store the reconstituted parameter on the
2811      stack later.  */
2812   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2813     {
2814       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2815       emit_group_store (parmreg, entry_parm, data->passed_type,
2816 			GET_MODE_SIZE (GET_MODE (entry_parm)));
2817       entry_parm = parmreg;
2818     }
2819 
2820   data->entry_parm = entry_parm;
2821 }
2822 
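/* Illustrative example (register numbers made up): a DImode argument
   described by the target as

     (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   is reassembled by emit_group_store into a single DImode pseudo, so the
   rest of parameter setup can treat it as an ordinary register value.  */
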
2823 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2824    always valid and properly aligned.  */
2825 
2826 static void
2827 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2828 {
2829   rtx stack_parm = data->stack_parm;
2830 
2831   /* If we can't trust the parm stack slot to be aligned enough for its
2832      ultimate type, don't use that slot after entry.  We'll make another
2833      stack slot, if we need one.  */
2834   if (stack_parm
2835       && ((STRICT_ALIGNMENT
2836 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2837 	  || (data->nominal_type
2838 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2839 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2840     stack_parm = NULL;
2841 
2842   /* If parm was passed in memory, and we need to convert it on entry,
2843      don't store it back in that same slot.  */
2844   else if (data->entry_parm == stack_parm
2845 	   && data->nominal_mode != BLKmode
2846 	   && data->nominal_mode != data->passed_mode)
2847     stack_parm = NULL;
2848 
2849   /* If stack protection is in effect for this function, don't leave any
2850      pointers in their passed stack slots.  */
2851   else if (crtl->stack_protect_guard
2852 	   && (flag_stack_protect == 2
2853 	       || data->passed_pointer
2854 	       || POINTER_TYPE_P (data->nominal_type)))
2855     stack_parm = NULL;
2856 
2857   data->stack_parm = stack_parm;
2858 }
2859 
2860 /* A subroutine of assign_parms.  Return true if the current parameter
2861    should be stored as a BLKmode in the current frame.  */
2862 
2863 static bool
2864 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2865 {
2866   if (data->nominal_mode == BLKmode)
2867     return true;
2868   if (GET_MODE (data->entry_parm) == BLKmode)
2869     return true;
2870 
2871 #ifdef BLOCK_REG_PADDING
2872   /* Only assign_parm_setup_block knows how to deal with register arguments
2873      that are padded at the least significant end.  */
2874   if (REG_P (data->entry_parm)
2875       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2876       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2877 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2878     return true;
2879 #endif
2880 
2881   return false;
2882 }
2883 
2884 /* A subroutine of assign_parms.  Arrange for the parameter to be
2885    present and valid in DATA->STACK_RTL.  */
2886 
2887 static void
2888 assign_parm_setup_block (struct assign_parm_data_all *all,
2889 			 tree parm, struct assign_parm_data_one *data)
2890 {
2891   rtx entry_parm = data->entry_parm;
2892   rtx stack_parm = data->stack_parm;
2893   rtx target_reg = NULL_RTX;
2894   bool in_conversion_seq = false;
2895   HOST_WIDE_INT size;
2896   HOST_WIDE_INT size_stored;
2897 
2898   if (GET_CODE (entry_parm) == PARALLEL)
2899     entry_parm = emit_group_move_into_temps (entry_parm);
2900 
2901   /* If we want the parameter in a pseudo, don't use a stack slot.  */
2902   if (is_gimple_reg (parm) && use_register_for_decl (parm))
2903     {
2904       tree def = ssa_default_def (cfun, parm);
2905       gcc_assert (def);
2906       machine_mode mode = promote_ssa_mode (def, NULL);
2907       rtx reg = gen_reg_rtx (mode);
2908       if (GET_CODE (reg) != CONCAT)
2909 	stack_parm = reg;
2910       else
2911 	{
2912 	  target_reg = reg;
2913 	  /* Avoid allocating a stack slot, if there isn't one
2914 	     preallocated by the ABI.  It might seem like we should
2915 	     always prefer a pseudo, but converting between
2916 	     floating-point and integer modes goes through the stack
2917 	     on various machines, so it's better to use the reserved
2918 	     stack slot than to risk wasting it and allocating more
2919 	     for the conversion.  */
2920 	  if (stack_parm == NULL_RTX)
2921 	    {
2922 	      int save = generating_concat_p;
2923 	      generating_concat_p = 0;
2924 	      stack_parm = gen_reg_rtx (mode);
2925 	      generating_concat_p = save;
2926 	    }
2927 	}
2928       data->stack_parm = NULL;
2929     }
2930 
2931   size = int_size_in_bytes (data->passed_type);
2932   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2933   if (stack_parm == 0)
2934     {
2935       SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
2936       stack_parm = assign_stack_local (BLKmode, size_stored,
2937 				       DECL_ALIGN (parm));
2938       if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2939 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2940       set_mem_attributes (stack_parm, parm, 1);
2941     }
2942 
2943   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2944      calls that pass values in multiple non-contiguous locations.  */
2945   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2946     {
2947       rtx mem;
2948 
2949       /* Note that we will be storing an integral number of words.
2950 	 So we have to be careful to ensure that we allocate an
2951 	 integral number of words.  We do this above when we call
2952 	 assign_stack_local if space was not allocated in the argument
2953 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2954 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2955 	 if it becomes a problem.  The exception is when a BLKmode value
2956 	 arrives in registers not conforming to word_mode.  */
2957 
2958       if (data->stack_parm == 0)
2959 	;
2960       else if (GET_CODE (entry_parm) == PARALLEL)
2961 	;
2962       else
2963 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2964 
2965       mem = validize_mem (copy_rtx (stack_parm));
2966 
2967       /* Handle values in multiple non-contiguous locations.  */
2968       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2969 	emit_group_store (mem, entry_parm, data->passed_type, size);
2970       else if (GET_CODE (entry_parm) == PARALLEL)
2971 	{
2972 	  push_to_sequence2 (all->first_conversion_insn,
2973 			     all->last_conversion_insn);
2974 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2975 	  all->first_conversion_insn = get_insns ();
2976 	  all->last_conversion_insn = get_last_insn ();
2977 	  end_sequence ();
2978 	  in_conversion_seq = true;
2979 	}
2980 
2981       else if (size == 0)
2982 	;
2983 
2984       /* If SIZE is that of a mode no bigger than a word, just use
2985 	 that mode's store operation.  */
2986       else if (size <= UNITS_PER_WORD)
2987 	{
2988 	  machine_mode mode
2989 	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2990 
2991 	  if (mode != BLKmode
2992 #ifdef BLOCK_REG_PADDING
2993 	      && (size == UNITS_PER_WORD
2994 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2995 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2996 #endif
2997 	      )
2998 	    {
2999 	      rtx reg;
3000 
3001 	      /* We are really truncating a word_mode value containing
3002 		 SIZE bytes into a value of mode MODE.  If such an
3003 		 operation requires no actual instructions, we can refer
3004 		 to the value directly in mode MODE, otherwise we must
3005 		 start with the register in word_mode and explicitly
3006 		 convert it.  */
3007 	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
3008 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
3009 	      else
3010 		{
3011 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3012 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3013 		}
3014 	      emit_move_insn (change_address (mem, mode, 0), reg);
3015 	    }
3016 
3017 #ifdef BLOCK_REG_PADDING
3018 	  /* Storing the register in memory as a full word, as
3019 	     move_block_from_reg below would do, and then using the
3020 	     MEM in a smaller mode, has the effect of shifting right
3021 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
3022 	     shifting must be explicit.  */
3023 	  else if (!MEM_P (mem))
3024 	    {
3025 	      rtx x;
3026 
3027 	      /* If the assert below fails, we should have taken the
3028 		 mode != BLKmode path above, unless we have downward
3029 		 padding of smaller-than-word arguments on a machine
3030 		 with little-endian bytes, which would likely require
3031 		 additional changes to work correctly.  */
3032 	      gcc_checking_assert (BYTES_BIG_ENDIAN
3033 				   && (BLOCK_REG_PADDING (mode,
3034 							  data->passed_type, 1)
3035 				       == upward));
3036 
3037 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3038 
3039 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3040 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3041 				NULL_RTX, 1);
3042 	      x = force_reg (word_mode, x);
3043 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3044 
3045 	      emit_move_insn (mem, x);
3046 	    }
3047 #endif
3048 
3049 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3050 	     machine must be aligned to the left before storing
3051 	     to memory.  Note that the previous test doesn't
3052 	     handle all cases (e.g. SIZE == 3).  */
3053 	  else if (size != UNITS_PER_WORD
3054 #ifdef BLOCK_REG_PADDING
3055 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3056 		       == downward)
3057 #else
3058 		   && BYTES_BIG_ENDIAN
3059 #endif
3060 		   )
3061 	    {
3062 	      rtx tem, x;
3063 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3064 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3065 
3066 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3067 	      tem = change_address (mem, word_mode, 0);
3068 	      emit_move_insn (tem, x);
3069 	    }
3070 	  else
3071 	    move_block_from_reg (REGNO (entry_parm), mem,
3072 				 size_stored / UNITS_PER_WORD);
3073 	}
3074       else if (!MEM_P (mem))
3075 	{
3076 	  gcc_checking_assert (size > UNITS_PER_WORD);
3077 #ifdef BLOCK_REG_PADDING
3078 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3079 						  data->passed_type, 0)
3080 			       == upward);
3081 #endif
3082 	  emit_move_insn (mem, entry_parm);
3083 	}
3084       else
3085 	move_block_from_reg (REGNO (entry_parm), mem,
3086 			     size_stored / UNITS_PER_WORD);
3087     }
3088   else if (data->stack_parm == 0)
3089     {
3090       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3091       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3092 		       BLOCK_OP_NORMAL);
3093       all->first_conversion_insn = get_insns ();
3094       all->last_conversion_insn = get_last_insn ();
3095       end_sequence ();
3096       in_conversion_seq = true;
3097     }
3098 
3099   if (target_reg)
3100     {
3101       if (!in_conversion_seq)
3102 	emit_move_insn (target_reg, stack_parm);
3103       else
3104 	{
3105 	  push_to_sequence2 (all->first_conversion_insn,
3106 			     all->last_conversion_insn);
3107 	  emit_move_insn (target_reg, stack_parm);
3108 	  all->first_conversion_insn = get_insns ();
3109 	  all->last_conversion_insn = get_last_insn ();
3110 	  end_sequence ();
3111 	}
3112       stack_parm = target_reg;
3113     }
3114 
3115   data->stack_parm = stack_parm;
3116   set_parm_rtl (parm, stack_parm);
3117 }
3118 
3119 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3120    parameter.  Get it there.  Perform all ABI specified conversions.  */
3121 
3122 static void
3123 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3124 		       struct assign_parm_data_one *data)
3125 {
3126   rtx parmreg, validated_mem;
3127   rtx equiv_stack_parm;
3128   machine_mode promoted_nominal_mode;
3129   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3130   bool did_conversion = false;
3131   bool need_conversion, moved;
3132   rtx rtl;
3133 
3134   /* Store the parm in a pseudoregister during the function, but we may
3135      need to do it in a wider mode.  Using 2 here makes the result
3136      consistent with promote_decl_mode and thus expand_expr_real_1.  */
3137   promoted_nominal_mode
3138     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3139 			     TREE_TYPE (current_function_decl), 2);
3140 
3141   parmreg = gen_reg_rtx (promoted_nominal_mode);
3142   if (!DECL_ARTIFICIAL (parm))
3143     mark_user_reg (parmreg);
3144 
3145   /* If this was an item that we received a pointer to,
3146      set rtl appropriately.  */
3147   if (data->passed_pointer)
3148     {
3149       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3150       set_mem_attributes (rtl, parm, 1);
3151     }
3152   else
3153     rtl = parmreg;
3154 
3155   assign_parm_remove_parallels (data);
3156 
3157   /* Copy the value into the register, thus bridging between
3158      assign_parm_find_data_types and expand_expr_real_1.  */
3159 
3160   equiv_stack_parm = data->stack_parm;
3161   validated_mem = validize_mem (copy_rtx (data->entry_parm));
3162 
3163   need_conversion = (data->nominal_mode != data->passed_mode
3164 		     || promoted_nominal_mode != data->promoted_mode);
3165   moved = false;
3166 
3167   if (need_conversion
3168       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3169       && data->nominal_mode == data->passed_mode
3170       && data->nominal_mode == GET_MODE (data->entry_parm))
3171     {
3172       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3173 	 mode, by the caller.  We now have to convert it to
3174 	 NOMINAL_MODE, if different.  However, PARMREG may be in
3175 	 a different mode than NOMINAL_MODE if it is being stored
3176 	 promoted.
3177 
3178 	 If ENTRY_PARM is a hard register, it might be in a register
3179 	 not valid for operating in its mode (e.g., an odd-numbered
3180 	 register for a DFmode).  In that case, moves are the only
3181 	 thing valid, so we can't do a convert from there.  This
3182 	 occurs when the calling sequence allow such misaligned
3183 	 occurs when the calling sequence allows such misaligned
3184 
3185 	 In addition, the conversion may involve a call, which could
3186 	 clobber parameters which haven't been copied to pseudo
3187 	 registers yet.
3188 
3189 	 First, we try to emit an insn which performs the necessary
3190 	 conversion.  We verify that this insn does not clobber any
3191 	 hard registers.  */
3192 
3193       enum insn_code icode;
3194       rtx op0, op1;
3195 
3196       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3197 			    unsignedp);
3198 
3199       op0 = parmreg;
3200       op1 = validated_mem;
3201       if (icode != CODE_FOR_nothing
3202 	  && insn_operand_matches (icode, 0, op0)
3203 	  && insn_operand_matches (icode, 1, op1))
3204 	{
3205 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3206 	  rtx_insn *insn, *insns;
3207 	  rtx t = op1;
3208 	  HARD_REG_SET hardregs;
3209 
3210 	  start_sequence ();
3211 	  /* If op1 is a hard register that is likely spilled, first
3212 	     force it into a pseudo, otherwise the combiner might extend
3213 	     its lifetime too much.  */
3214 	  if (GET_CODE (t) == SUBREG)
3215 	    t = SUBREG_REG (t);
3216 	  if (REG_P (t)
3217 	      && HARD_REGISTER_P (t)
3218 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3219 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3220 	    {
3221 	      t = gen_reg_rtx (GET_MODE (op1));
3222 	      emit_move_insn (t, op1);
3223 	    }
3224 	  else
3225 	    t = op1;
3226 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3227 					   data->passed_mode, unsignedp);
3228 	  emit_insn (pat);
3229 	  insns = get_insns ();
3230 
3231 	  moved = true;
3232 	  CLEAR_HARD_REG_SET (hardregs);
3233 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3234 	    {
3235 	      if (INSN_P (insn))
3236 		note_stores (PATTERN (insn), record_hard_reg_sets,
3237 			     &hardregs);
3238 	      if (!hard_reg_set_empty_p (hardregs))
3239 		moved = false;
3240 	    }
3241 
3242 	  end_sequence ();
3243 
3244 	  if (moved)
3245 	    {
3246 	      emit_insn (insns);
3247 	      if (equiv_stack_parm != NULL_RTX)
3248 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3249 						  equiv_stack_parm);
3250 	    }
3251 	}
3252     }
3253 
3254   if (moved)
3255     /* Nothing to do.  */
3256     ;
3257   else if (need_conversion)
3258     {
3259       /* We did not have an insn to convert directly, or the sequence
3260 	 generated appeared unsafe.  We must first copy the parm to a
3261 	 pseudo reg, and save the conversion until after all
3262 	 parameters have been moved.  */
3263 
3264       int save_tree_used;
3265       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3266 
3267       emit_move_insn (tempreg, validated_mem);
3268 
3269       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3270       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3271 
3272       if (GET_CODE (tempreg) == SUBREG
3273 	  && GET_MODE (tempreg) == data->nominal_mode
3274 	  && REG_P (SUBREG_REG (tempreg))
3275 	  && data->nominal_mode == data->passed_mode
3276 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3277 	  && GET_MODE_SIZE (GET_MODE (tempreg))
3278 	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3279 	{
3280 	  /* The argument is already sign/zero extended, so note it
3281 	     into the subreg.  */
3282 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3283 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3284 	}
3285 
3286       /* TREE_USED gets set erroneously during expand_assignment.  */
3287       save_tree_used = TREE_USED (parm);
3288       SET_DECL_RTL (parm, rtl);
3289       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3290       SET_DECL_RTL (parm, NULL_RTX);
3291       TREE_USED (parm) = save_tree_used;
3292       all->first_conversion_insn = get_insns ();
3293       all->last_conversion_insn = get_last_insn ();
3294       end_sequence ();
3295 
3296       did_conversion = true;
3297     }
3298   else
3299     emit_move_insn (parmreg, validated_mem);
3300 
3301   /* If we were passed a pointer but the actual value can safely live
3302      in a register, retrieve it and use it directly.  */
3303   if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3304     {
3305       /* We can't use nominal_mode, because it will have been set to
3306 	 Pmode above.  We must use the actual mode of the parm.  */
3307       if (use_register_for_decl (parm))
3308 	{
3309 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3310 	  mark_user_reg (parmreg);
3311 	}
3312       else
3313 	{
3314 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3315 					    TYPE_MODE (TREE_TYPE (parm)),
3316 					    TYPE_ALIGN (TREE_TYPE (parm)));
3317 	  parmreg
3318 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3319 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3320 				  align);
3321 	  set_mem_attributes (parmreg, parm, 1);
3322 	}
3323 
3324       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3325 	 the debug info in case it is not legitimate.  */
3326       if (GET_MODE (parmreg) != GET_MODE (rtl))
3327 	{
3328 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3329 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3330 
3331 	  push_to_sequence2 (all->first_conversion_insn,
3332 			     all->last_conversion_insn);
3333 	  emit_move_insn (tempreg, rtl);
3334 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3335 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3336 			  tempreg);
3337 	  all->first_conversion_insn = get_insns ();
3338 	  all->last_conversion_insn = get_last_insn ();
3339 	  end_sequence ();
3340 
3341 	  did_conversion = true;
3342 	}
3343       else
3344 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3345 
3346       rtl = parmreg;
3347 
3348       /* STACK_PARM is the pointer, not the parm, and PARMREG is
3349 	 now the parm.  */
3350       data->stack_parm = NULL;
3351     }
3352 
3353   set_parm_rtl (parm, rtl);
3354 
3355   /* Mark the register as eliminable if we did no conversion and it was
3356      copied from memory at a fixed offset, and the arg pointer was not
3357      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3358      offset formed an invalid address, such memory-equivalences as we
3359      make here would screw up life analysis for it.  */
3360   if (data->nominal_mode == data->passed_mode
3361       && !did_conversion
3362       && data->stack_parm != 0
3363       && MEM_P (data->stack_parm)
3364       && data->locate.offset.var == 0
3365       && reg_mentioned_p (virtual_incoming_args_rtx,
3366 			  XEXP (data->stack_parm, 0)))
3367     {
3368       rtx_insn *linsn = get_last_insn ();
3369       rtx_insn *sinsn;
3370       rtx set;
3371 
3372       /* Mark complex types separately.  */
3373       if (GET_CODE (parmreg) == CONCAT)
3374 	{
3375 	  machine_mode submode
3376 	    = GET_MODE_INNER (GET_MODE (parmreg));
3377 	  int regnor = REGNO (XEXP (parmreg, 0));
3378 	  int regnoi = REGNO (XEXP (parmreg, 1));
3379 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3380 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3381 					  GET_MODE_SIZE (submode));
3382 
3383 	  /* Scan backwards for the set of the real and
3384 	     imaginary parts.  */
3385 	  for (sinsn = linsn; sinsn != 0;
3386 	       sinsn = prev_nonnote_insn (sinsn))
3387 	    {
3388 	      set = single_set (sinsn);
3389 	      if (set == 0)
3390 		continue;
3391 
3392 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3393 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3394 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3395 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3396 	    }
3397 	}
3398       else
3399 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3400     }
3401 
3402   /* For pointer data type, suggest pointer register.  */
3403   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3404     mark_reg_pointer (parmreg,
3405 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3406 }
3407 
3408 /* A subroutine of assign_parms.  Allocate stack space to hold the current
3409    parameter.  Get it there.  Perform all ABI specified conversions.  */
3410 
3411 static void
3412 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3413 		         struct assign_parm_data_one *data)
3414 {
3415   /* Value must be stored in the stack slot STACK_PARM during function
3416      execution.  */
3417   bool to_conversion = false;
3418 
3419   assign_parm_remove_parallels (data);
3420 
3421   if (data->promoted_mode != data->nominal_mode)
3422     {
3423       /* Conversion is required.  */
3424       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3425 
3426       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3427 
3428       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3429       to_conversion = true;
3430 
3431       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3432 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3433 
3434       if (data->stack_parm)
3435 	{
3436 	  int offset = subreg_lowpart_offset (data->nominal_mode,
3437 					      GET_MODE (data->stack_parm));
3438 	  /* ??? This may need a big-endian conversion on sparc64.  */
3439 	  data->stack_parm
3440 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3441 	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3442 	    set_mem_offset (data->stack_parm,
3443 			    MEM_OFFSET (data->stack_parm) + offset);
3444 	}
3445     }
3446 
3447   if (data->entry_parm != data->stack_parm)
3448     {
3449       rtx src, dest;
3450 
3451       if (data->stack_parm == 0)
3452 	{
3453 	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3454 					    GET_MODE (data->entry_parm),
3455 					    TYPE_ALIGN (data->passed_type));
3456 	  data->stack_parm
3457 	    = assign_stack_local (GET_MODE (data->entry_parm),
3458 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3459 				  align);
3460 	  set_mem_attributes (data->stack_parm, parm, 1);
3461 	}
3462 
3463       dest = validize_mem (copy_rtx (data->stack_parm));
3464       src = validize_mem (copy_rtx (data->entry_parm));
3465 
3466       if (MEM_P (src))
3467 	{
3468 	  /* Use a block move to handle potentially misaligned entry_parm.  */
3469 	  if (!to_conversion)
3470 	    push_to_sequence2 (all->first_conversion_insn,
3471 			       all->last_conversion_insn);
3472 	  to_conversion = true;
3473 
3474 	  emit_block_move (dest, src,
3475 			   GEN_INT (int_size_in_bytes (data->passed_type)),
3476 			   BLOCK_OP_NORMAL);
3477 	}
3478       else
3479 	{
3480 	  if (!REG_P (src))
3481 	    src = force_reg (GET_MODE (src), src);
3482 	  emit_move_insn (dest, src);
3483 	}
3484     }
3485 
3486   if (to_conversion)
3487     {
3488       all->first_conversion_insn = get_insns ();
3489       all->last_conversion_insn = get_last_insn ();
3490       end_sequence ();
3491     }
3492 
3493   set_parm_rtl (parm, data->stack_parm);
3494 }
3495 
3496 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3497    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3498 
3499 static void
3500 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3501 			      vec<tree> fnargs)
3502 {
3503   tree parm;
3504   tree orig_fnargs = all->orig_fnargs;
3505   unsigned i = 0;
3506 
3507   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3508     {
3509       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3510 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3511 	{
3512 	  rtx tmp, real, imag;
3513 	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3514 
3515 	  real = DECL_RTL (fnargs[i]);
3516 	  imag = DECL_RTL (fnargs[i + 1]);
3517 	  if (inner != GET_MODE (real))
3518 	    {
3519 	      real = gen_lowpart_SUBREG (inner, real);
3520 	      imag = gen_lowpart_SUBREG (inner, imag);
3521 	    }
3522 
3523 	  if (TREE_ADDRESSABLE (parm))
3524 	    {
3525 	      rtx rmem, imem;
3526 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3527 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3528 						DECL_MODE (parm),
3529 						TYPE_ALIGN (TREE_TYPE (parm)));
3530 
3531 	      /* split_complex_arg put the real and imag parts in
3532 		 pseudos.  Move them to memory.  */
3533 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3534 	      set_mem_attributes (tmp, parm, 1);
3535 	      rmem = adjust_address_nv (tmp, inner, 0);
3536 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3537 	      push_to_sequence2 (all->first_conversion_insn,
3538 				 all->last_conversion_insn);
3539 	      emit_move_insn (rmem, real);
3540 	      emit_move_insn (imem, imag);
3541 	      all->first_conversion_insn = get_insns ();
3542 	      all->last_conversion_insn = get_last_insn ();
3543 	      end_sequence ();
3544 	    }
3545 	  else
3546 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3547 	  set_parm_rtl (parm, tmp);
3548 
3549 	  real = DECL_INCOMING_RTL (fnargs[i]);
3550 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3551 	  if (inner != GET_MODE (real))
3552 	    {
3553 	      real = gen_lowpart_SUBREG (inner, real);
3554 	      imag = gen_lowpart_SUBREG (inner, imag);
3555 	    }
3556 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3557 	  set_decl_incoming_rtl (parm, tmp, false);
3558 	  i++;
3559 	}
3560     }
3561 }
3562 
3563 /* Load bounds of PARM from bounds table.  */
3564 static void
3565 assign_parm_load_bounds (struct assign_parm_data_one *data,
3566 			 tree parm,
3567 			 rtx entry,
3568 			 unsigned bound_no)
3569 {
3570   bitmap_iterator bi;
3571   unsigned i, offs = 0;
3572   int bnd_no = -1;
3573   rtx slot = NULL, ptr = NULL;
3574 
3575   if (parm)
3576     {
3577       bitmap slots;
3578       bitmap_obstack_initialize (NULL);
3579       slots = BITMAP_ALLOC (NULL);
3580       chkp_find_bound_slots (TREE_TYPE (parm), slots);
3581       EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3582 	{
3583 	  if (bound_no)
3584 	    bound_no--;
3585 	  else
3586 	    {
3587 	      bnd_no = i;
3588 	      break;
3589 	    }
3590 	}
3591       BITMAP_FREE (slots);
3592       bitmap_obstack_release (NULL);
3593     }
3594 
3595   /* We may have bounds not associated with any pointer.  */
3596   if (bnd_no != -1)
3597     offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
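  /* E.g. with 64-bit pointers, bound slot 2 corresponds to a byte offset
     of 16 within ENTRY (illustrative).  */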
3598 
3599   /* Find associated pointer.  */
3600   if (bnd_no == -1)
3601     {
3602       /* If the bounds are not associated with any pointer,
3603 	 then they are passed in a register or special slot.  */
3604       gcc_assert (data->entry_parm);
3605       ptr = const0_rtx;
3606     }
3607   else if (MEM_P (entry))
3608     slot = adjust_address (entry, Pmode, offs);
3609   else if (REG_P (entry))
3610     ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3611   else if (GET_CODE (entry) == PARALLEL)
3612     ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3613   else
3614     gcc_unreachable ();
3615   data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3616 							data->entry_parm);
3617 }
3618 
3619 /* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
3620 
3621 static void
3622 assign_bounds (vec<bounds_parm_data> &bndargs,
3623 	       struct assign_parm_data_all &all,
3624 	       bool assign_regs, bool assign_special,
3625 	       bool assign_bt)
3626 {
3627   unsigned i, pass;
3628   bounds_parm_data *pbdata;
3629 
3630   if (!bndargs.exists ())
3631     return;
3632 
3633   /* We make a few passes to store input bounds.  First we handle bounds
3634      passed in registers.  After that we load bounds passed in special
3635      slots.  Finally we load bounds from the Bounds Table.  */
3636   for (pass = 0; pass < 3; pass++)
3637     FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3638       {
3639 	/* Pass 0 => regs only.  */
3640 	if (pass == 0
3641 	    && (!assign_regs
3642 		||(!pbdata->parm_data.entry_parm
3643 		|| (!pbdata->parm_data.entry_parm
3644 	  continue;
3645 	/* Pass 1 => slots only.  */
3646 	else if (pass == 1
3647 		 && (!assign_special
3648 		     || (!pbdata->parm_data.entry_parm
3649 			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3650 	  continue;
3651 	/* Pass 2 => BT only.  */
3652 	else if (pass == 2
3653 		 && (!assign_bt
3654 		     || pbdata->parm_data.entry_parm))
3655 	  continue;
3656 
3657 	if (!pbdata->parm_data.entry_parm
3658 	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3659 	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3660 				   pbdata->ptr_entry, pbdata->bound_no);
3661 
3662 	set_decl_incoming_rtl (pbdata->bounds_parm,
3663 			       pbdata->parm_data.entry_parm, false);
3664 
3665 	if (assign_parm_setup_block_p (&pbdata->parm_data))
3666 	  assign_parm_setup_block (&all, pbdata->bounds_parm,
3667 				   &pbdata->parm_data);
3668 	else if (pbdata->parm_data.passed_pointer
3669 		 || use_register_for_decl (pbdata->bounds_parm))
3670 	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
3671 				 &pbdata->parm_data);
3672 	else
3673 	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
3674 				   &pbdata->parm_data);
3675       }
3676 }
3677 
3678 /* Assign RTL expressions to the function's parameters.  This may involve
3679    copying them into registers and using those registers as the DECL_RTL.  */
3680 
3681 static void
3682 assign_parms (tree fndecl)
3683 {
3684   struct assign_parm_data_all all;
3685   tree parm;
3686   vec<tree> fnargs;
3687   unsigned i, bound_no = 0;
3688   tree last_arg = NULL;
3689   rtx last_arg_entry = NULL;
3690   vec<bounds_parm_data> bndargs = vNULL;
3691   bounds_parm_data bdata;
3692 
3693   crtl->args.internal_arg_pointer
3694     = targetm.calls.internal_arg_pointer ();
3695 
3696   assign_parms_initialize_all (&all);
3697   fnargs = assign_parms_augmented_arg_list (&all);
3698 
3699   FOR_EACH_VEC_ELT (fnargs, i, parm)
3700     {
3701       struct assign_parm_data_one data;
3702 
3703       /* Extract the type of PARM; adjust it according to ABI.  */
3704       assign_parm_find_data_types (&all, parm, &data);
3705 
3706       /* Early out for errors and void parameters.  */
3707       if (data.passed_mode == VOIDmode)
3708 	{
3709 	  SET_DECL_RTL (parm, const0_rtx);
3710 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3711 	  continue;
3712 	}
3713 
3714       /* Estimate stack alignment from parameter alignment.  */
3715       if (SUPPORTS_STACK_ALIGNMENT)
3716         {
3717           unsigned int align
3718 	    = targetm.calls.function_arg_boundary (data.promoted_mode,
3719 						   data.passed_type);
3720 	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3721 				     align);
3722 	  if (TYPE_ALIGN (data.nominal_type) > align)
3723 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3724 				       TYPE_MODE (data.nominal_type),
3725 				       TYPE_ALIGN (data.nominal_type));
3726 	  if (crtl->stack_alignment_estimated < align)
3727 	    {
3728 	      gcc_assert (!crtl->stack_realign_processed);
3729 	      crtl->stack_alignment_estimated = align;
3730 	    }
3731 	}
3732 
3733       /* Find out where the parameter arrives in this function.  */
3734       assign_parm_find_entry_rtl (&all, &data);
3735 
3736       /* Find out where stack space for this parameter might be.  */
3737       if (assign_parm_is_stack_parm (&all, &data))
3738 	{
3739 	  assign_parm_find_stack_rtl (parm, &data);
3740 	  assign_parm_adjust_entry_rtl (&data);
3741 	}
3742       if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3743 	{
3744 	  /* Remember where the last non-bounds arg was passed in case
3745 	     we have to load associated bounds for it from the Bounds
3746 	     Table.  */
3747 	  last_arg = parm;
3748 	  last_arg_entry = data.entry_parm;
3749 	  bound_no = 0;
3750 	}
3751       /* Record permanently how this parm was passed.  */
3752       if (data.passed_pointer)
3753 	{
3754 	  rtx incoming_rtl
3755 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3756 			   data.entry_parm);
3757 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3758 	}
3759       else
3760 	set_decl_incoming_rtl (parm, data.entry_parm, false);
3761 
3762       assign_parm_adjust_stack_rtl (&data);
3763 
3764       /* Bounds should be loaded in a particular order so that
3765 	 registers are allocated correctly.  Collect info about
3766 	 input bounds and load them later.  */
3767       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3768 	{
3769 	  /* Expect bounds in instrumented functions only.  */
3770 	  gcc_assert (chkp_function_instrumented_p (fndecl));
3771 
3772 	  bdata.parm_data = data;
3773 	  bdata.bounds_parm = parm;
3774 	  bdata.ptr_parm = last_arg;
3775 	  bdata.ptr_entry = last_arg_entry;
3776 	  bdata.bound_no = bound_no;
3777 	  bndargs.safe_push (bdata);
3778 	}
3779       else
3780 	{
3781 	  if (assign_parm_setup_block_p (&data))
3782 	    assign_parm_setup_block (&all, parm, &data);
3783 	  else if (data.passed_pointer || use_register_for_decl (parm))
3784 	    assign_parm_setup_reg (&all, parm, &data);
3785 	  else
3786 	    assign_parm_setup_stack (&all, parm, &data);
3787 	}
3788 
3789       if (cfun->stdarg && !DECL_CHAIN (parm))
3790 	{
3791 	  int pretend_bytes = 0;
3792 
3793 	  assign_parms_setup_varargs (&all, &data, false);
3794 
3795 	  if (chkp_function_instrumented_p (fndecl))
3796 	    {
3797 	      /* We expect this to be the last parm.  Otherwise it would be
3798 		 wrong to assign bounds right now.  */
3799 	      gcc_assert (i == (fnargs.length () - 1));
3800 	      assign_bounds (bndargs, all, true, false, false);
3801 	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3802 							  data.promoted_mode,
3803 							  data.passed_type,
3804 							  &pretend_bytes,
3805 							  false);
3806 	      assign_bounds (bndargs, all, false, true, true);
3807 	      bndargs.release ();
3808 	    }
3809 	}
3810 
3811       /* Update info on where next arg arrives in registers.  */
3812       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3813 					  data.passed_type, data.named_arg);
3814 
3815       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3816 	bound_no++;
3817     }
3818 
3819   assign_bounds (bndargs, all, true, true, true);
3820   bndargs.release ();
3821 
3822   if (targetm.calls.split_complex_arg)
3823     assign_parms_unsplit_complex (&all, fnargs);
3824 
3825   fnargs.release ();
3826 
3827   /* Output all parameter conversion instructions (possibly including calls)
3828      now that all parameters have been copied out of hard registers.  */
3829   emit_insn (all.first_conversion_insn);
3830 
3831   /* Estimate reload stack alignment from scalar return mode.  */
3832   if (SUPPORTS_STACK_ALIGNMENT)
3833     {
3834       if (DECL_RESULT (fndecl))
3835 	{
3836 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3837 	  machine_mode mode = TYPE_MODE (type);
3838 
3839 	  if (mode != BLKmode
3840 	      && mode != VOIDmode
3841 	      && !AGGREGATE_TYPE_P (type))
3842 	    {
3843 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3844 	      if (crtl->stack_alignment_estimated < align)
3845 		{
3846 		  gcc_assert (!crtl->stack_realign_processed);
3847 		  crtl->stack_alignment_estimated = align;
3848 		}
3849 	    }
3850 	}
3851     }
3852 
3853   /* If we are receiving a struct value address as the first argument, set up
3854      the RTL for the function result. As this might require code to convert
3855      the transmitted address to Pmode, we do this here to ensure that possible
3856      preliminary conversions of the address have been emitted already.  */
3857   if (all.function_result_decl)
3858     {
3859       tree result = DECL_RESULT (current_function_decl);
3860       rtx addr = DECL_RTL (all.function_result_decl);
3861       rtx x;
3862 
3863       if (DECL_BY_REFERENCE (result))
3864 	{
3865 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3866 	  x = addr;
3867 	}
3868       else
3869 	{
3870 	  SET_DECL_VALUE_EXPR (result,
3871 			       build1 (INDIRECT_REF, TREE_TYPE (result),
3872 				       all.function_result_decl));
3873 	  addr = convert_memory_address (Pmode, addr);
3874 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3875 	  set_mem_attributes (x, result, 1);
3876 	}
3877 
3878       DECL_HAS_VALUE_EXPR_P (result) = 1;
3879 
3880       set_parm_rtl (result, x);
3881     }
3882 
3883   /* We have aligned all the args, so add space for the pretend args.  */
3884   crtl->args.pretend_args_size = all.pretend_args_size;
3885   all.stack_args_size.constant += all.extra_pretend_bytes;
3886   crtl->args.size = all.stack_args_size.constant;
3887 
3888   /* Adjust function incoming argument size for alignment and
3889      minimum length.  */
3890 
3891   crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3892   crtl->args.size = CEIL_ROUND (crtl->args.size,
3893 				PARM_BOUNDARY / BITS_PER_UNIT);
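  /* For instance, with a 64-bit PARM_BOUNDARY an argument-area size of
     13 bytes is rounded up to 16 bytes here (illustrative).  */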
3894 
3895   if (ARGS_GROW_DOWNWARD)
3896     {
3897       crtl->args.arg_offset_rtx
3898 	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3899 	   : expand_expr (size_diffop (all.stack_args_size.var,
3900 				       size_int (-all.stack_args_size.constant)),
3901 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
3902     }
3903   else
3904     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3905 
3906   /* See how many bytes, if any, of its args a function should try to pop
3907      on return.  */
3908 
3909   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3910 							 TREE_TYPE (fndecl),
3911 							 crtl->args.size);
3912 
3913   /* For a stdarg.h function, save info about the
3914      regs and stack space used by the named args.  */
3915 
3916   crtl->args.info = all.args_so_far_v;
3917 
3918   /* Set the rtx used for the function return value.  Put this in its
3919      own variable so any optimizers that need this information don't have
3920      to include tree.h.  Do this here so it gets done when an inlined
3921      function gets output.  */
3922 
3923   crtl->return_rtx
3924     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3925        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3926 
3927   /* If scalar return value was computed in a pseudo-reg, or was a named
3928      return value that got dumped to the stack, copy that to the hard
3929      return register.  */
3930   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3931     {
3932       tree decl_result = DECL_RESULT (fndecl);
3933       rtx decl_rtl = DECL_RTL (decl_result);
3934 
3935       if (REG_P (decl_rtl)
3936 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3937 	  : DECL_REGISTER (decl_result))
3938 	{
3939 	  rtx real_decl_rtl;
3940 
3941 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3942 							fndecl, true);
3943 	  if (chkp_function_instrumented_p (fndecl))
3944 	    crtl->return_bnd
3945 	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3946 							  fndecl, true);
3947 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3948 	  /* The delay slot scheduler assumes that crtl->return_rtx
3949 	     holds the hard register containing the return value, not a
3950 	     temporary pseudo.  */
3951 	  crtl->return_rtx = real_decl_rtl;
3952 	}
3953     }
3954 }
3955 
3956 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3957    For all seen types, gimplify their sizes.  */
3958 
3959 static tree
3960 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3961 {
3962   tree t = *tp;
3963 
3964   *walk_subtrees = 0;
3965   if (TYPE_P (t))
3966     {
3967       if (POINTER_TYPE_P (t))
3968 	*walk_subtrees = 1;
3969       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3970 	       && !TYPE_SIZES_GIMPLIFIED (t))
3971 	{
3972 	  gimplify_type_sizes (t, (gimple_seq *) data);
3973 	  *walk_subtrees = 1;
3974 	}
3975     }
3976 
3977   return NULL;
3978 }
3979 
3980 /* Gimplify the parameter list for current_function_decl.  This involves
3981    evaluating SAVE_EXPRs of variable sized parameters and generating code
3982    to implement callee-copies reference parameters.  Returns a sequence of
3983    statements to add to the beginning of the function.  */
3984 
3985 gimple_seq
3986 gimplify_parameters (void)
3987 {
3988   struct assign_parm_data_all all;
3989   tree parm;
3990   gimple_seq stmts = NULL;
3991   vec<tree> fnargs;
3992   unsigned i;
3993 
3994   assign_parms_initialize_all (&all);
3995   fnargs = assign_parms_augmented_arg_list (&all);
3996 
3997   FOR_EACH_VEC_ELT (fnargs, i, parm)
3998     {
3999       struct assign_parm_data_one data;
4000 
4001       /* Extract the type of PARM; adjust it according to ABI.  */
4002       assign_parm_find_data_types (&all, parm, &data);
4003 
4004       /* Early out for errors and void parameters.  */
4005       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
4006 	continue;
4007 
4008       /* Update info on where next arg arrives in registers.  */
4009       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
4010 					  data.passed_type, data.named_arg);
4011 
4012       /* ??? Once upon a time variable_size stuffed parameter list
4013 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
4014 	 turned out to be less than manageable in the gimple world.
4015 	 Now we have to hunt them down ourselves.  */
4016       walk_tree_without_duplicates (&data.passed_type,
4017 				    gimplify_parm_type, &stmts);
4018 
4019       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4020 	{
4021 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4022 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4023 	}
4024 
4025       if (data.passed_pointer)
4026 	{
4027           tree type = TREE_TYPE (data.passed_type);
4028 	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4029 				       type, data.named_arg))
4030 	    {
4031 	      tree local, t;
4032 
4033 	      /* For constant-sized objects, this is trivial; for
4034 		 variable-sized objects, we have to play games.  */
4035 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4036 		  && !(flag_stack_check == GENERIC_STACK_CHECK
4037 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
4038 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
4039 		{
4040 		  local = create_tmp_var (type, get_name (parm));
4041 		  DECL_IGNORED_P (local) = 0;
4042 		  /* If PARM was addressable, move that flag over
4043 		     to the local copy, as its address will be taken,
4044 		     not the PARM's.  Keep the parm marked address-taken,
4045 		     as we'll query that flag during gimplification.  */
4046 		  if (TREE_ADDRESSABLE (parm))
4047 		    TREE_ADDRESSABLE (local) = 1;
4048 		  else if (TREE_CODE (type) == COMPLEX_TYPE
4049 			   || TREE_CODE (type) == VECTOR_TYPE)
4050 		    DECL_GIMPLE_REG_P (local) = 1;
4051 		}
4052 	      else
4053 		{
4054 		  tree ptr_type, addr;
4055 
4056 		  ptr_type = build_pointer_type (type);
4057 		  addr = create_tmp_reg (ptr_type, get_name (parm));
4058 		  DECL_IGNORED_P (addr) = 0;
4059 		  local = build_fold_indirect_ref (addr);
4060 
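		  /* The sequence built below is roughly (illustrative)
		       addr = (T *) __builtin_alloca_with_align (size, align);
		     with the store *addr = parm supplied by the
		     gimplify_assign further down, so the callee works on
		     its own copy of the variable-sized object.  */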
4061 		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4062 		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
4063 				       size_int (DECL_ALIGN (parm)));
4064 
4065 		  /* The call has been built for a variable-sized object.  */
4066 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
4067 		  t = fold_convert (ptr_type, t);
4068 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4069 		  gimplify_and_add (t, &stmts);
4070 		}
4071 
4072 	      gimplify_assign (local, parm, &stmts);
4073 
4074 	      SET_DECL_VALUE_EXPR (parm, local);
4075 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
4076 	    }
4077 	}
4078     }
4079 
4080   fnargs.release ();
4081 
4082   return stmts;
4083 }
4084 
4085 /* Compute the size and offset from the start of the stacked arguments for a
4086    parm passed in mode PASSED_MODE and with type TYPE.
4087 
4088    INITIAL_OFFSET_PTR points to the current offset into the stacked
4089    arguments.
4090 
4091    The starting offset and size for this parm are returned in
4092    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
4093    nonzero, the offset is that of stack slot, which is returned in
4094    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
4095    padding required from the initial offset ptr to the stack slot.
4096 
4097    IN_REGS is nonzero if the argument will be passed in registers.  It will
4098    never be set if REG_PARM_STACK_SPACE is not defined.
4099 
4100    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4101    for arguments which are passed in registers.
4102 
4103    FNDECL is the function in which the argument was defined.
4104 
4105    There are two types of rounding that are done.  The first, controlled by
4106    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4107    argument list to be aligned to the specific boundary (in bits).  This
4108    rounding affects the initial and starting offsets, but not the argument
4109    size.
4110 
4111    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4112    optionally rounds the size of the parm to PARM_BOUNDARY.  The
4113    initial offset is not affected by this rounding, while the size always
4114    is and the starting offset may be.  */
4115 
4116 /*  LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
4117     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4118     callers pass in the total size of args so far as
4119     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
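/*  As a concrete illustration (assuming args grow upward, sp_offset of 0
    and no reserved register-parm stack space): an 8-byte argument with a
    64-bit boundary arriving at an initial offset of 4 has that offset
    rounded up to 8, so SLOT_OFFSET = OFFSET = 8 and SIZE = 8
    (illustrative).  */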
4120 
4121 void
4122 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4123 		     int reg_parm_stack_space, int partial,
4124 		     tree fndecl ATTRIBUTE_UNUSED,
4125 		     struct args_size *initial_offset_ptr,
4126 		     struct locate_and_pad_arg_data *locate)
4127 {
4128   tree sizetree;
4129   enum direction where_pad;
4130   unsigned int boundary, round_boundary;
4131   int part_size_in_regs;
4132 
4133   /* If we have found a stack parm before we reach the end of the
4134      area reserved for registers, skip that area.  */
4135   if (! in_regs)
4136     {
4137       if (reg_parm_stack_space > 0)
4138 	{
4139 	  if (initial_offset_ptr->var)
4140 	    {
4141 	      initial_offset_ptr->var
4142 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4143 			      ssize_int (reg_parm_stack_space));
4144 	      initial_offset_ptr->constant = 0;
4145 	    }
4146 	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
4147 	    initial_offset_ptr->constant = reg_parm_stack_space;
4148 	}
4149     }
4150 
4151   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4152 
4153   sizetree
4154     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4155   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4156   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4157   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4158 							      type);
4159   locate->where_pad = where_pad;
4160 
4161   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
4162   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4163     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4164 
4165   locate->boundary = boundary;
4166 
4167   if (SUPPORTS_STACK_ALIGNMENT)
4168     {
4169       /* stack_alignment_estimated can't change after stack has been
4170 	 realigned.  */
4171       if (crtl->stack_alignment_estimated < boundary)
4172         {
4173           if (!crtl->stack_realign_processed)
4174 	    crtl->stack_alignment_estimated = boundary;
4175 	  else
4176 	    {
4177 	      /* If stack is realigned and stack alignment value
4178 		 hasn't been finalized, it is OK not to increase
4179 		 stack_alignment_estimated.  The bigger alignment
4180 		 requirement is recorded in stack_alignment_needed
4181 		 below.  */
4182 	      gcc_assert (!crtl->stack_realign_finalized
4183 			  && crtl->stack_realign_needed);
4184 	    }
4185 	}
4186     }
4187 
4188   /* Remember if the outgoing parameter requires extra alignment on the
4189      calling function side.  */
4190   if (crtl->stack_alignment_needed < boundary)
4191     crtl->stack_alignment_needed = boundary;
4192   if (crtl->preferred_stack_boundary < boundary)
4193     crtl->preferred_stack_boundary = boundary;
4194 
4195   if (ARGS_GROW_DOWNWARD)
4196     {
4197       locate->slot_offset.constant = -initial_offset_ptr->constant;
4198       if (initial_offset_ptr->var)
4199 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4200 					      initial_offset_ptr->var);
4201 
4202       {
4203 	tree s2 = sizetree;
4204 	if (where_pad != none
4205 	    && (!tree_fits_uhwi_p (sizetree)
4206 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4207 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4208 	SUB_PARM_SIZE (locate->slot_offset, s2);
4209       }
4210 
4211       locate->slot_offset.constant += part_size_in_regs;
4212 
4213       if (!in_regs || reg_parm_stack_space > 0)
4214 	pad_to_arg_alignment (&locate->slot_offset, boundary,
4215 			      &locate->alignment_pad);
4216 
4217       locate->size.constant = (-initial_offset_ptr->constant
4218 			       - locate->slot_offset.constant);
4219       if (initial_offset_ptr->var)
4220 	locate->size.var = size_binop (MINUS_EXPR,
4221 				       size_binop (MINUS_EXPR,
4222 						   ssize_int (0),
4223 						   initial_offset_ptr->var),
4224 				       locate->slot_offset.var);
4225 
4226       /* Pad_below needs the pre-rounded size to know how much to pad
4227 	 below.  */
4228       locate->offset = locate->slot_offset;
4229       if (where_pad == downward)
4230 	pad_below (&locate->offset, passed_mode, sizetree);
4231 
4232     }
4233   else
4234     {
4235       if (!in_regs || reg_parm_stack_space > 0)
4236 	pad_to_arg_alignment (initial_offset_ptr, boundary,
4237 			      &locate->alignment_pad);
4238       locate->slot_offset = *initial_offset_ptr;
4239 
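      /* On targets whose push instructions operate in fixed units,
	 PUSH_ROUNDING may round the size up; e.g. a 1-byte argument can
	 end up reserving a whole word here (illustrative).  */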
4240 #ifdef PUSH_ROUNDING
4241       if (passed_mode != BLKmode)
4242 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4243 #endif
4244 
4245       /* Pad_below needs the pre-rounded size to know how much to pad below
4246 	 so this must be done before rounding up.  */
4247       locate->offset = locate->slot_offset;
4248       if (where_pad == downward)
4249 	pad_below (&locate->offset, passed_mode, sizetree);
4250 
4251       if (where_pad != none
4252 	  && (!tree_fits_uhwi_p (sizetree)
4253 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4254 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4255 
4256       ADD_PARM_SIZE (locate->size, sizetree);
4257 
4258       locate->size.constant -= part_size_in_regs;
4259     }
4260 
4261 #ifdef FUNCTION_ARG_OFFSET
4262   locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4263 #endif
4264 }
4265 
4266 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4267    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
4268 
4269 static void
4270 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4271 		      struct args_size *alignment_pad)
4272 {
4273   tree save_var = NULL_TREE;
4274   HOST_WIDE_INT save_constant = 0;
4275   int boundary_in_bytes = boundary / BITS_PER_UNIT;
4276   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4277 
4278 #ifdef SPARC_STACK_BOUNDARY_HACK
4279   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4280      the real alignment of %sp.  However, when it does this, the
4281      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4282   if (SPARC_STACK_BOUNDARY_HACK)
4283     sp_offset = 0;
4284 #endif
4285 
4286   if (boundary > PARM_BOUNDARY)
4287     {
4288       save_var = offset_ptr->var;
4289       save_constant = offset_ptr->constant;
4290     }
4291 
4292   alignment_pad->var = NULL_TREE;
4293   alignment_pad->constant = 0;
4294 
4295   if (boundary > BITS_PER_UNIT)
4296     {
4297       if (offset_ptr->var)
4298 	{
4299 	  tree sp_offset_tree = ssize_int (sp_offset);
4300 	  tree offset = size_binop (PLUS_EXPR,
4301 				    ARGS_SIZE_TREE (*offset_ptr),
4302 				    sp_offset_tree);
4303 	  tree rounded;
4304 	  if (ARGS_GROW_DOWNWARD)
4305 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
4306 	  else
4307 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4308 
4309 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4310 	  /* ARGS_SIZE_TREE includes constant term.  */
4311 	  offset_ptr->constant = 0;
4312 	  if (boundary > PARM_BOUNDARY)
4313 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4314 					     save_var);
4315 	}
4316       else
4317 	{
4318 	  offset_ptr->constant = -sp_offset +
4319 	    (ARGS_GROW_DOWNWARD
4320 	    ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4321 	    : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
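	  /* E.g. with sp_offset of 0 and a 128-bit boundary, a constant
	     offset of 20 becomes 32 when args grow upward, or 16 when they
	     grow downward (illustrative).  */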
4322 
4323 	  if (boundary > PARM_BOUNDARY)
4324 	    alignment_pad->constant = offset_ptr->constant - save_constant;
4325 	}
4326     }
4327 }
4328 
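/* A subroutine of locate_and_pad_parm.  Advance *OFFSET_PTR past the
   padding placed below an argument of mode PASSED_MODE (or of size
   SIZETREE for BLKmode) when the argument is padded downward, so that
   the offset addresses the value itself.  For instance, an HImode
   argument with a 32-bit PARM_BOUNDARY advances the offset by 2 bytes
   (illustrative).  */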
4329 static void
4330 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4331 {
4332   if (passed_mode != BLKmode)
4333     {
4334       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4335 	offset_ptr->constant
4336 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4337 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4338 	      - GET_MODE_SIZE (passed_mode));
4339     }
4340   else
4341     {
4342       if (TREE_CODE (sizetree) != INTEGER_CST
4343 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4344 	{
4345 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4346 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4347 	  /* Add it in.  */
4348 	  ADD_PARM_SIZE (*offset_ptr, s2);
4349 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4350 	}
4351     }
4352 }
4353 
4354 
4355 /* True if register REGNO was alive at a place where `setjmp' was
4356    called and was set more than once or is an argument.  Such regs may
4357    be clobbered by `longjmp'.  */
4358 
4359 static bool
4360 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4361 {
4362   /* There appear to be cases where some local vars never reach the
4363      backend but have bogus regnos.  */
4364   if (regno >= max_reg_num ())
4365     return false;
4366 
4367   return ((REG_N_SETS (regno) > 1
4368 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4369 			       regno))
4370 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4371 }
4372 
4373 /* Walk the tree of blocks describing the binding levels within a
4374    function and warn about variables that might be killed by setjmp or
4375    vfork.  This is done after flow analysis and before register
4376    allocation, since register allocation will map the pseudo-regs to
4377    hard regs.  */
4378 
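/* For example (illustrative only), in user code such as

     jmp_buf env;
     void f (void)
     {
       int i = 0;
       if (setjmp (env) == 0)
	 {
	   i = 1;
	   g ();	// may call longjmp (env, 1)
	 }
       use (i);		// i may be clobbered here
     }

   the pseudo holding I can be assigned a call-clobbered hard register and
   be smashed by the longjmp; the walk below diagnoses such variables with
   -Wclobbered.  */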
4379 static void
4380 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4381 {
4382   tree decl, sub;
4383 
4384   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4385     {
4386       if (VAR_P (decl)
4387 	  && DECL_RTL_SET_P (decl)
4388 	  && REG_P (DECL_RTL (decl))
4389 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4390 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4391                  " %<longjmp%> or %<vfork%>", decl);
4392     }
4393 
4394   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4395     setjmp_vars_warning (setjmp_crosses, sub);
4396 }
4397 
4398 /* Do the appropriate part of setjmp_vars_warning
4399    but for arguments instead of local variables.  */
4400 
4401 static void
4402 setjmp_args_warning (bitmap setjmp_crosses)
4403 {
4404   tree decl;
4405   for (decl = DECL_ARGUMENTS (current_function_decl);
4406        decl; decl = DECL_CHAIN (decl))
4407     if (DECL_RTL (decl) != 0
4408 	&& REG_P (DECL_RTL (decl))
4409 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4410       warning (OPT_Wclobbered,
4411                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4412 	       decl);
4413 }
4414 
4415 /* Generate warning messages for variables live across setjmp.  */
4416 
4417 void
4418 generate_setjmp_warnings (void)
4419 {
4420   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4421 
4422   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4423       || bitmap_empty_p (setjmp_crosses))
4424     return;
4425 
4426   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4427   setjmp_args_warning (setjmp_crosses);
4428 }
4429 
4430 
4431 /* Reverse the order of elements in the fragment chain T of blocks,
4432    and return the new head of the chain (old last element).
4433    In addition, clear BLOCK_SAME_RANGE flags when needed
4434    and adjust BLOCK_SUPERCONTEXT from the super fragment to
4435    its super fragment origin.  */
4436 
4437 static tree
4438 block_fragments_nreverse (tree t)
4439 {
4440   tree prev = 0, block, next, prev_super = 0;
4441   tree super = BLOCK_SUPERCONTEXT (t);
4442   if (BLOCK_FRAGMENT_ORIGIN (super))
4443     super = BLOCK_FRAGMENT_ORIGIN (super);
4444   for (block = t; block; block = next)
4445     {
4446       next = BLOCK_FRAGMENT_CHAIN (block);
4447       BLOCK_FRAGMENT_CHAIN (block) = prev;
4448       if ((prev && !BLOCK_SAME_RANGE (prev))
4449 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4450 	      != prev_super))
4451 	BLOCK_SAME_RANGE (block) = 0;
4452       prev_super = BLOCK_SUPERCONTEXT (block);
4453       BLOCK_SUPERCONTEXT (block) = super;
4454       prev = block;
4455     }
4456   t = BLOCK_FRAGMENT_ORIGIN (t);
4457   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4458       != prev_super)
4459     BLOCK_SAME_RANGE (t) = 0;
4460   BLOCK_SUPERCONTEXT (t) = super;
4461   return prev;
4462 }
4463 
4464 /* Reverse the order of elements in the chain T of blocks,
4465    and return the new head of the chain (old last element).
4466    Also do the same on subblocks and reverse the order of elements
4467    in BLOCK_FRAGMENT_CHAIN as well.  */
4468 
4469 static tree
4470 blocks_nreverse_all (tree t)
4471 {
4472   tree prev = 0, block, next;
4473   for (block = t; block; block = next)
4474     {
4475       next = BLOCK_CHAIN (block);
4476       BLOCK_CHAIN (block) = prev;
4477       if (BLOCK_FRAGMENT_CHAIN (block)
4478 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4479 	{
4480 	  BLOCK_FRAGMENT_CHAIN (block)
4481 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4482 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4483 	    BLOCK_SAME_RANGE (block) = 0;
4484 	}
4485       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4486       prev = block;
4487     }
4488   return prev;
4489 }
4490 
4491 
4492 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4493    and create duplicate blocks.  */
4494 /* ??? Need an option to either create block fragments or to create
4495    abstract origin duplicates of a source block.  It really depends
4496    on what optimization has been performed.  */
4497 
4498 void
4499 reorder_blocks (void)
4500 {
4501   tree block = DECL_INITIAL (current_function_decl);
4502 
4503   if (block == NULL_TREE)
4504     return;
4505 
4506   auto_vec<tree, 10> block_stack;
4507 
4508   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4509   clear_block_marks (block);
4510 
4511   /* Prune the old trees away, so that they don't get in the way.  */
4512   BLOCK_SUBBLOCKS (block) = NULL_TREE;
4513   BLOCK_CHAIN (block) = NULL_TREE;
4514 
4515   /* Recreate the block tree from the note nesting.  */
4516   reorder_blocks_1 (get_insns (), block, &block_stack);
4517   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4518 }
4519 
4520 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4521 
4522 void
4523 clear_block_marks (tree block)
4524 {
4525   while (block)
4526     {
4527       TREE_ASM_WRITTEN (block) = 0;
4528       clear_block_marks (BLOCK_SUBBLOCKS (block));
4529       block = BLOCK_CHAIN (block);
4530     }
4531 }
4532 
4533 static void
4534 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4535 		  vec<tree> *p_block_stack)
4536 {
4537   rtx_insn *insn;
4538   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4539 
4540   for (insn = insns; insn; insn = NEXT_INSN (insn))
4541     {
4542       if (NOTE_P (insn))
4543 	{
4544 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4545 	    {
4546 	      tree block = NOTE_BLOCK (insn);
4547 	      tree origin;
4548 
4549 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4550 	      origin = block;
4551 
4552 	      if (prev_end)
4553 		BLOCK_SAME_RANGE (prev_end) = 0;
4554 	      prev_end = NULL_TREE;
4555 
4556 	      /* If we have seen this block before, that means it now
4557 		 spans multiple address regions.  Create a new fragment.  */
4558 	      if (TREE_ASM_WRITTEN (block))
4559 		{
4560 		  tree new_block = copy_node (block);
4561 
4562 		  BLOCK_SAME_RANGE (new_block) = 0;
4563 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4564 		  BLOCK_FRAGMENT_CHAIN (new_block)
4565 		    = BLOCK_FRAGMENT_CHAIN (origin);
4566 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4567 
4568 		  NOTE_BLOCK (insn) = new_block;
4569 		  block = new_block;
4570 		}
4571 
4572 	      if (prev_beg == current_block && prev_beg)
4573 		BLOCK_SAME_RANGE (block) = 1;
4574 
4575 	      prev_beg = origin;
4576 
4577 	      BLOCK_SUBBLOCKS (block) = 0;
4578 	      TREE_ASM_WRITTEN (block) = 1;
4579 	      /* When there's only one block for the entire function,
4580 		 current_block == block and we mustn't do this; it
4581 		 would cause infinite recursion.  */
4582 	      if (block != current_block)
4583 		{
4584 		  tree super;
4585 		  if (block != origin)
4586 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4587 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4588 								      (origin))
4589 				   == current_block);
4590 		  if (p_block_stack->is_empty ())
4591 		    super = current_block;
4592 		  else
4593 		    {
4594 		      super = p_block_stack->last ();
4595 		      gcc_assert (super == current_block
4596 				  || BLOCK_FRAGMENT_ORIGIN (super)
4597 				     == current_block);
4598 		    }
4599 		  BLOCK_SUPERCONTEXT (block) = super;
4600 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4601 		  BLOCK_SUBBLOCKS (current_block) = block;
4602 		  current_block = origin;
4603 		}
4604 	      p_block_stack->safe_push (block);
4605 	    }
4606 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4607 	    {
4608 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4609 	      current_block = BLOCK_SUPERCONTEXT (current_block);
4610 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4611 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4612 	      prev_beg = NULL_TREE;
4613 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4614 			 ? NOTE_BLOCK (insn) : NULL_TREE;
4615 	    }
4616 	}
4617       else
4618 	{
4619 	  prev_beg = NULL_TREE;
4620 	  if (prev_end)
4621 	    BLOCK_SAME_RANGE (prev_end) = 0;
4622 	  prev_end = NULL_TREE;
4623 	}
4624     }
4625 }
4626 
4627 /* Reverse the order of elements in the chain T of blocks,
4628    and return the new head of the chain (old last element).  */
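/* E.g. a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) becomes
   B3 -> B2 -> B1, and B3 is returned (illustrative).  */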
4629 
4630 tree
4631 blocks_nreverse (tree t)
4632 {
4633   tree prev = 0, block, next;
4634   for (block = t; block; block = next)
4635     {
4636       next = BLOCK_CHAIN (block);
4637       BLOCK_CHAIN (block) = prev;
4638       prev = block;
4639     }
4640   return prev;
4641 }
4642 
4643 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4644    by modifying the last node in chain 1 to point to chain 2.  */
4645 
4646 tree
4647 block_chainon (tree op1, tree op2)
4648 {
4649   tree t1;
4650 
4651   if (!op1)
4652     return op2;
4653   if (!op2)
4654     return op1;
4655 
4656   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4657     continue;
4658   BLOCK_CHAIN (t1) = op2;
4659 
4660 #ifdef ENABLE_TREE_CHECKING
4661   {
4662     tree t2;
4663     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4664       gcc_assert (t2 != t1);
4665   }
4666 #endif
4667 
4668   return op1;
4669 }
4670 
4671 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4672    non-NULL, list them all into VECTOR, in a depth-first preorder
4673    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4674    blocks.  */
4675 
4676 static int
4677 all_blocks (tree block, tree *vector)
4678 {
4679   int n_blocks = 0;
4680 
4681   while (block)
4682     {
4683       TREE_ASM_WRITTEN (block) = 0;
4684 
4685       /* Record this block.  */
4686       if (vector)
4687 	vector[n_blocks] = block;
4688 
4689       ++n_blocks;
4690 
4691       /* Record the subblocks, and their subblocks...  */
4692       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4693 			      vector ? vector + n_blocks : 0);
4694       block = BLOCK_CHAIN (block);
4695     }
4696 
4697   return n_blocks;
4698 }
4699 
4700 /* Return a vector containing all the blocks rooted at BLOCK.  The
4701    number of elements in the vector is stored in N_BLOCKS_P.  The
4702    vector is dynamically allocated; it is the caller's responsibility
4703    to call `free' on the pointer returned.  */
4704 
4705 static tree *
4706 get_block_vector (tree block, int *n_blocks_p)
4707 {
4708   tree *block_vector;
4709 
4710   *n_blocks_p = all_blocks (block, NULL);
4711   block_vector = XNEWVEC (tree, *n_blocks_p);
4712   all_blocks (block, block_vector);
4713 
4714   return block_vector;
4715 }
4716 
4717 static GTY(()) int next_block_index = 2;
4718 
4719 /* Set BLOCK_NUMBER for all the blocks in FN.  */
4720 
4721 void
4722 number_blocks (tree fn)
4723 {
4724   int i;
4725   int n_blocks;
4726   tree *block_vector;
4727 
4728   /* For SDB and XCOFF debugging output, we start numbering the blocks
4729      from 1 within each function, rather than keeping a running
4730      count.  */
4731 #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
4732   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4733     next_block_index = 1;
4734 #endif
4735 
4736   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4737 
4738   /* The top-level BLOCK isn't numbered at all.  */
4739   for (i = 1; i < n_blocks; ++i)
4740     /* We number the blocks from two.  */
4741     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4742 
4743   free (block_vector);
4744 
4745   return;
4746 }
4747 
4748 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
4749 
4750 DEBUG_FUNCTION tree
4751 debug_find_var_in_block_tree (tree var, tree block)
4752 {
4753   tree t;
4754 
4755   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4756     if (t == var)
4757       return block;
4758 
4759   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4760     {
4761       tree ret = debug_find_var_in_block_tree (var, t);
4762       if (ret)
4763 	return ret;
4764     }
4765 
4766   return NULL_TREE;
4767 }
4768 
4769 /* Keep track of whether we're in a dummy function context.  If we are,
4770    we don't want to invoke the set_current_function hook, because we'll
4771    get into trouble if the hook calls target_reinit () recursively or
4772    when the initial initialization is not yet complete.  */
4773 
4774 static bool in_dummy_function;
4775 
4776 /* Invoke the target hook when setting cfun.  Update the optimization options
4777    if the function uses different options than the default.  */
4778 
4779 static void
4780 invoke_set_current_function_hook (tree fndecl)
4781 {
4782   if (!in_dummy_function)
4783     {
4784       tree opts = ((fndecl)
4785 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4786 		   : optimization_default_node);
4787 
4788       if (!opts)
4789 	opts = optimization_default_node;
4790 
4791       /* Change optimization options if needed.  */
4792       if (optimization_current_node != opts)
4793 	{
4794 	  optimization_current_node = opts;
4795 	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4796 	}
4797 
4798       targetm.set_current_function (fndecl);
4799       this_fn_optabs = this_target_optabs;
4800 
4801       if (opts != optimization_default_node)
4802 	{
4803 	  init_tree_optimization_optabs (opts);
4804 	  if (TREE_OPTIMIZATION_OPTABS (opts))
4805 	    this_fn_optabs = (struct target_optabs *)
4806 	      TREE_OPTIMIZATION_OPTABS (opts);
4807 	}
4808     }
4809 }
4810 
4811 /* cfun should never be set directly; use this function.  */
4812 
4813 void
4814 set_cfun (struct function *new_cfun, bool force)
4815 {
4816   if (cfun != new_cfun || force)
4817     {
4818       cfun = new_cfun;
4819       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4820       redirect_edge_var_map_empty ();
4821     }
4822 }
4823 
4824 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
4825 
4826 static vec<function *> cfun_stack;
4827 
4828 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4829    current_function_decl accordingly.  */
4830 
4831 void
4832 push_cfun (struct function *new_cfun)
4833 {
4834   gcc_assert ((!cfun && !current_function_decl)
4835 	      || (cfun && current_function_decl == cfun->decl));
4836   cfun_stack.safe_push (cfun);
4837   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4838   set_cfun (new_cfun);
4839 }
4840 
4841 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4842 
4843 void
4844 pop_cfun (void)
4845 {
4846   struct function *new_cfun = cfun_stack.pop ();
4847   /* When in_dummy_function, we do have a cfun but current_function_decl is
4848      NULL.  We also allow pushing NULL cfun and subsequently changing
4849      current_function_decl to something else and have both restored by
4850      pop_cfun.  */
4851   gcc_checking_assert (in_dummy_function
4852 		       || !cfun
4853 		       || current_function_decl == cfun->decl);
4854   set_cfun (new_cfun);
4855   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4856 }
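
/* Editorial sketch (not part of the original source): code that needs to
   operate on another function's data typically uses a balanced
   push_cfun/pop_cfun pair, e.g. in a hypothetical helper:

     static void
     touch_function (struct function *fn)
     {
       push_cfun (fn);
       ... inspect or modify cfun / current_function_decl here ...
       pop_cfun ();
     }

   The asserts in push_cfun and pop_cfun check that cfun and
   current_function_decl stay consistent across such pairs.  */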
4857 
4858 /* Return the current value of funcdef_no and increment it.  */
4859 int
4860 get_next_funcdef_no (void)
4861 {
4862   return funcdef_no++;
4863 }
4864 
4865 /* Return the current value of funcdef_no.  */
4866 int
4867 get_last_funcdef_no (void)
4868 {
4869   return funcdef_no;
4870 }
4871 
4872 /* Allocate a function structure for FNDECL and set its contents
4873    to the defaults.  Set cfun to the newly-allocated object.
4874    Some of the helper functions invoked during initialization assume
4875    that cfun has already been set.  Therefore, assign the new object
4876    directly into cfun and invoke the back end hook explicitly at the
4877    very end, rather than initializing a temporary and calling set_cfun
4878    on it.
4879 
4880    ABSTRACT_P is true if this is a function that will never be seen by
4881    the middle-end.  Such functions are front-end concepts (like C++
4882    function templates) that do not correspond directly to functions
4883    placed in object files.  */
4884 
4885 void
4886 allocate_struct_function (tree fndecl, bool abstract_p)
4887 {
4888   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4889 
4890   cfun = ggc_cleared_alloc<function> ();
4891 
4892   init_eh_for_function ();
4893 
4894   if (init_machine_status)
4895     cfun->machine = (*init_machine_status) ();
4896 
4897 #ifdef OVERRIDE_ABI_FORMAT
4898   OVERRIDE_ABI_FORMAT (fndecl);
4899 #endif
4900 
4901   if (fndecl != NULL_TREE)
4902     {
4903       DECL_STRUCT_FUNCTION (fndecl) = cfun;
4904       cfun->decl = fndecl;
4905       current_function_funcdef_no = get_next_funcdef_no ();
4906     }
4907 
4908   invoke_set_current_function_hook (fndecl);
4909 
4910   if (fndecl != NULL_TREE)
4911     {
4912       tree result = DECL_RESULT (fndecl);
4913 
4914       if (!abstract_p)
4915 	{
4916 	  /* Now that we have activated any function-specific attributes
4917 	     that might affect layout, particularly vector modes, relayout
4918 	     each of the parameters and the result.  */
4919 	  relayout_decl (result);
4920 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4921 	       parm = DECL_CHAIN (parm))
4922 	    relayout_decl (parm);
4923 
4924 	  /* Similarly relayout the function decl.  */
4925 	  targetm.target_option.relayout_function (fndecl);
4926 	}
4927 
4928       if (!abstract_p && aggregate_value_p (result, fndecl))
4929 	{
4930 #ifdef PCC_STATIC_STRUCT_RETURN
4931 	  cfun->returns_pcc_struct = 1;
4932 #endif
4933 	  cfun->returns_struct = 1;
4934 	}
4935 
4936       cfun->stdarg = stdarg_p (fntype);
4937 
4938       /* Assume all registers in stdarg functions need to be saved.  */
4939       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4940       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4941 
4942       /* ??? This could be set on a per-function basis by the front-end
4943          but is this worth the hassle?  */
4944       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4945       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4946 
4947       if (!profile_flag && !flag_instrument_function_entry_exit)
4948 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4949     }
4950 }
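
/* Editorial note (not from the original source): callers that need the
   previous function context restored afterwards should normally use
   push_struct_function (below), which saves the old cfun on cfun_stack
   before calling allocate_struct_function, so that a later pop_cfun can
   restore it.  */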
4951 
4952 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4953    instead of just setting it.  */
4954 
4955 void
4956 push_struct_function (tree fndecl)
4957 {
4958   /* When in_dummy_function we might be in the middle of a pop_cfun and
4959      current_function_decl and cfun may not match.  */
4960   gcc_assert (in_dummy_function
4961 	      || (!cfun && !current_function_decl)
4962 	      || (cfun && current_function_decl == cfun->decl));
4963   cfun_stack.safe_push (cfun);
4964   current_function_decl = fndecl;
4965   allocate_struct_function (fndecl, false);
4966 }
4967 
4968 /* Reset crtl and other non-struct-function variables to defaults as
4969    appropriate for emitting rtl at the start of a function.  */
4970 
4971 static void
4972 prepare_function_start (void)
4973 {
4974   gcc_assert (!get_last_insn ());
4975   init_temp_slots ();
4976   init_emit ();
4977   init_varasm_status ();
4978   init_expr ();
4979   default_rtl_profile ();
4980 
4981   if (flag_stack_usage_info)
4982     {
4983       cfun->su = ggc_cleared_alloc<stack_usage> ();
4984       cfun->su->static_stack_size = -1;
4985     }
4986 
4987   cse_not_expected = ! optimize;
4988 
4989   /* Caller save not needed yet.  */
4990   caller_save_needed = 0;
4991 
4992   /* We haven't done register allocation yet.  */
4993   reg_renumber = 0;
4994 
4995   /* Indicate that we have not instantiated virtual registers yet.  */
4996   virtuals_instantiated = 0;
4997 
4998   /* Indicate that we want CONCATs now.  */
4999   generating_concat_p = 1;
5000 
5001   /* Indicate we have no need of a frame pointer yet.  */
5002   frame_pointer_needed = 0;
5003 }
5004 
5005 void
5006 push_dummy_function (bool with_decl)
5007 {
5008   tree fn_decl, fn_type, fn_result_decl;
5009 
5010   gcc_assert (!in_dummy_function);
5011   in_dummy_function = true;
5012 
5013   if (with_decl)
5014     {
5015       fn_type = build_function_type_list (void_type_node, NULL_TREE);
5016       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5017 			    fn_type);
5018       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5019 					 NULL_TREE, void_type_node);
5020       DECL_RESULT (fn_decl) = fn_result_decl;
5021     }
5022   else
5023     fn_decl = NULL_TREE;
5024 
5025   push_struct_function (fn_decl);
5026 }
5027 
5028 /* Initialize the rtl expansion mechanism so that we can do simple things
5029    like generate sequences.  This is used to provide a context during global
5030    initialization of some passes.  You must call expand_dummy_function_end
5031    to exit this context.  */
5032 
5033 void
5034 init_dummy_function_start (void)
5035 {
5036   push_dummy_function (false);
5037   prepare_function_start ();
5038 }
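
/* Editorial sketch (not part of the original source): a typical use of the
   dummy context during global initialization looks like

     init_dummy_function_start ();
     ... emit or inspect scratch RTL, e.g. gen_reg_rtx (Pmode) ...
     expand_dummy_function_end ();

   where expand_dummy_function_end (defined below) tears the context down
   again.  */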
5039 
5040 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5041    and initialize static variables for generating RTL for the statements
5042    of the function.  */
5043 
5044 void
5045 init_function_start (tree subr)
5046 {
5047   /* Initialize backend, if needed.  */
5048   initialize_rtl ();
5049 
5050   prepare_function_start ();
5051   decide_function_section (subr);
5052 
5053   /* Warn if the return value is an aggregate type,
5054      regardless of which calling convention we are using for it.  */
5055   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5056     warning (OPT_Waggregate_return, "function returns an aggregate");
5057 }
5058 
5059 /* Expand code to verify the stack_protect_guard.  This is invoked at
5060    the end of a function to be protected.  */
5061 
5062 void
5063 stack_protect_epilogue (void)
5064 {
5065   tree guard_decl = targetm.stack_protect_guard ();
5066   rtx_code_label *label = gen_label_rtx ();
5067   rtx x, y;
5068   rtx_insn *seq;
5069 
5070   x = expand_normal (crtl->stack_protect_guard);
5071   if (guard_decl)
5072     y = expand_normal (guard_decl);
5073   else
5074     y = const0_rtx;
5075 
5076   /* Allow the target to compare Y with X without leaking either into
5077      a register.  */
5078   if (targetm.have_stack_protect_test ()
5079       && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5080     emit_insn (seq);
5081   else
5082     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5083 
5084   /* The noreturn predictor has been moved to the tree level.  The rtl-level
5085      predictors estimate this branch about 20%, which isn't enough to get
5086      things moved out of line.  Since this is the only extant case of adding
5087      a noreturn function at the rtl level, it doesn't seem worth doing anything
5088      except adding the prediction by hand.  */
5089   rtx_insn *tmp = get_last_insn ();
5090   if (JUMP_P (tmp))
5091     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5092 
5093   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5094   free_temp_slots ();
5095   emit_label (label);
5096 }
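
/* Editorial sketch (not from the original source): the sequence emitted
   above is roughly

       if (<guard copy in the frame> == <guard value>)
         goto label;                     ... stack_protect_test or cmp/jump
       <targetm.stack_protect_fail ()>;  ... typically calls __stack_chk_fail
     label:

   with the branch to LABEL predicted taken so that the failure call stays
   out of line.  */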
5097 
5098 /* Start the RTL for a new function, and set variables used for
5099    emitting RTL.
5100    SUBR is the FUNCTION_DECL node of the function being compiled.  */
5103 
5104 void
5105 expand_function_start (tree subr)
5106 {
5107   /* Make sure volatile mem refs aren't considered
5108      valid operands of arithmetic insns.  */
5109   init_recog_no_volatile ();
5110 
5111   crtl->profile
5112     = (profile_flag
5113        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5114 
5115   crtl->limit_stack
5116     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5117 
5118   /* Make the label for return statements to jump to.  Do not special
5119      case machines with special return instructions -- they will be
5120      handled later during jump, ifcvt, or epilogue creation.  */
5121   return_label = gen_label_rtx ();
5122 
5123   /* Initialize rtx used to return the value.  */
5124   /* Do this before assign_parms so that we copy the struct value address
5125      before any library calls that assign parms might generate.  */
5126 
5127   /* Decide whether to return the value in memory or in a register.  */
5128   tree res = DECL_RESULT (subr);
5129   if (aggregate_value_p (res, subr))
5130     {
5131       /* Returning something that won't go in a register.  */
5132       rtx value_address = 0;
5133 
5134 #ifdef PCC_STATIC_STRUCT_RETURN
5135       if (cfun->returns_pcc_struct)
5136 	{
5137 	  int size = int_size_in_bytes (TREE_TYPE (res));
5138 	  value_address = assemble_static_space (size);
5139 	}
5140       else
5141 #endif
5142 	{
5143 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5144 	  /* Expect to be passed the address of a place to store the value.
5145 	     If it is passed as an argument, assign_parms will take care of
5146 	     it.  */
5147 	  if (sv)
5148 	    {
5149 	      value_address = gen_reg_rtx (Pmode);
5150 	      emit_move_insn (value_address, sv);
5151 	    }
5152 	}
5153       if (value_address)
5154 	{
5155 	  rtx x = value_address;
5156 	  if (!DECL_BY_REFERENCE (res))
5157 	    {
5158 	      x = gen_rtx_MEM (DECL_MODE (res), x);
5159 	      set_mem_attributes (x, res, 1);
5160 	    }
5161 	  set_parm_rtl (res, x);
5162 	}
5163     }
5164   else if (DECL_MODE (res) == VOIDmode)
5165     /* If return mode is void, this decl rtl should not be used.  */
5166     set_parm_rtl (res, NULL_RTX);
5167   else
5168     {
5169       /* Compute the return values into a pseudo reg, which we will copy
5170 	 into the true return register after the cleanups are done.  */
5171       tree return_type = TREE_TYPE (res);
5172 
5173       /* If we may coalesce this result, make sure it has the expected mode
5174 	 in case it was promoted.  But we need not bother about BLKmode.  */
5175       machine_mode promoted_mode
5176 	= flag_tree_coalesce_vars && is_gimple_reg (res)
5177 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5178 	  : BLKmode;
5179 
5180       if (promoted_mode != BLKmode)
5181 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5182       else if (TYPE_MODE (return_type) != BLKmode
5183 	       && targetm.calls.return_in_msb (return_type))
5184 	/* expand_function_end will insert the appropriate padding in
5185 	   this case.  Use the return value's natural (unpadded) mode
5186 	   within the function proper.  */
5187 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5188       else
5189 	{
5190 	  /* In order to figure out what mode to use for the pseudo, we
5191 	     figure out what the mode of the eventual return register will
5192 	     actually be, and use that.  */
5193 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5194 
5195 	  /* Structures that are returned in registers are not
5196 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5197 	  if (REG_P (hard_reg))
5198 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5199 	  else
5200 	    {
5201 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5202 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
5203 	    }
5204 	}
5205 
5206       /* Set DECL_REGISTER flag so that expand_function_end will copy the
5207 	 result to the real return register(s).  */
5208       DECL_REGISTER (res) = 1;
5209 
5210       if (chkp_function_instrumented_p (current_function_decl))
5211 	{
5212 	  tree return_type = TREE_TYPE (res);
5213 	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5214 								 subr, 1);
5215 	  SET_DECL_BOUNDS_RTL (res, bounds);
5216 	}
5217     }
5218 
5219   /* Initialize rtx for parameters and local variables.
5220      In some cases this requires emitting insns.  */
5221   assign_parms (subr);
5222 
5223   /* If function gets a static chain arg, store it.  */
5224   if (cfun->static_chain_decl)
5225     {
5226       tree parm = cfun->static_chain_decl;
5227       rtx local, chain;
5228       rtx_insn *insn;
5229       int unsignedp;
5230 
5231       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5232       chain = targetm.calls.static_chain (current_function_decl, true);
5233 
5234       set_decl_incoming_rtl (parm, chain, false);
5235       set_parm_rtl (parm, local);
5236       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5237 
5238       if (GET_MODE (local) != GET_MODE (chain))
5239 	{
5240 	  convert_move (local, chain, unsignedp);
5241 	  insn = get_last_insn ();
5242 	}
5243       else
5244 	insn = emit_move_insn (local, chain);
5245 
5246       /* Mark the register as eliminable, similar to parameters.  */
5247       if (MEM_P (chain)
5248 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5249 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5250 
5251       /* If we aren't optimizing, save the static chain onto the stack.  */
5252       if (!optimize)
5253 	{
5254 	  tree saved_static_chain_decl
5255 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5256 			  DECL_NAME (parm), TREE_TYPE (parm));
5257 	  rtx saved_static_chain_rtx
5258 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5259 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5260 	  emit_move_insn (saved_static_chain_rtx, chain);
5261 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5262 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5263 	}
5264     }
5265 
5266   /* The following was moved from init_function_start.
5267      The move is supposed to make sdb output more accurate.  */
5268   /* Indicate the beginning of the function body,
5269      as opposed to parm setup.  */
5270   emit_note (NOTE_INSN_FUNCTION_BEG);
5271 
5272   gcc_assert (NOTE_P (get_last_insn ()));
5273 
5274   parm_birth_insn = get_last_insn ();
5275 
5276   /* If the function receives a non-local goto, then store the
5277      bits we need to restore the frame pointer.  */
5278   if (cfun->nonlocal_goto_save_area)
5279     {
5280       tree t_save;
5281       rtx r_save;
5282 
5283       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5284       gcc_assert (DECL_RTL_SET_P (var));
5285 
5286       t_save = build4 (ARRAY_REF,
5287 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5288 		       cfun->nonlocal_goto_save_area,
5289 		       integer_zero_node, NULL_TREE, NULL_TREE);
5290       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5291       gcc_assert (GET_MODE (r_save) == Pmode);
5292 
5293       emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5294       update_nonlocal_goto_save_area ();
5295     }
5296 
5297   if (crtl->profile)
5298     {
5299 #ifdef PROFILE_HOOK
5300       PROFILE_HOOK (current_function_funcdef_no);
5301 #endif
5302     }
5303 
5304   /* If we are doing generic stack checking, the probe should go here.  */
5305   if (flag_stack_check == GENERIC_STACK_CHECK)
5306     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5307 }
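
/* Editorial summary (not from the original source): on return from
   expand_function_start, the insn stream holds the parameter setup emitted
   by assign_parms, the optional static chain copy, and the
   NOTE_INSN_FUNCTION_BEG recorded in parm_birth_insn; DECL_RESULT (subr)
   has been given a MEM (aggregate return), NULL_RTX (void return), or a
   pseudo/PARALLEL that expand_function_end later copies into the real
   return register(s).  */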
5308 
5309 void
5310 pop_dummy_function (void)
5311 {
5312   pop_cfun ();
5313   in_dummy_function = false;
5314 }
5315 
5316 /* Undo the effects of init_dummy_function_start.  */
5317 void
5318 expand_dummy_function_end (void)
5319 {
5320   gcc_assert (in_dummy_function);
5321 
5322   /* End any sequences that failed to be closed due to syntax errors.  */
5323   while (in_sequence_p ())
5324     end_sequence ();
5325 
5326   /* Outside function body, can't compute type's actual size
5327      until next function's body starts.  */
5328 
5329   free_after_parsing (cfun);
5330   free_after_compilation (cfun);
5331   pop_dummy_function ();
5332 }
5333 
5334 /* Helper for diddle_return_value.  */
5335 
5336 void
5337 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5338 {
5339   if (! outgoing)
5340     return;
5341 
5342   if (REG_P (outgoing))
5343     (*doit) (outgoing, arg);
5344   else if (GET_CODE (outgoing) == PARALLEL)
5345     {
5346       int i;
5347 
5348       for (i = 0; i < XVECLEN (outgoing, 0); i++)
5349 	{
5350 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5351 
5352 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5353 	    (*doit) (x, arg);
5354 	}
5355     }
5356 }
5357 
5358 /* Call DOIT for each hard register used as a return value from
5359    the current function.  */
5360 
5361 void
5362 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5363 {
5364   diddle_return_value_1 (doit, arg, crtl->return_bnd);
5365   diddle_return_value_1 (doit, arg, crtl->return_rtx);
5366 }
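
/* Editorial sketch (not part of the original source): a hypothetical
   callback that collects the hard registers holding the return value
   might look like

     static void
     note_return_reg (rtx reg, void *data)
     {
       add_to_hard_reg_set ((HARD_REG_SET *) data, GET_MODE (reg),
			    REGNO (reg));
     }

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     diddle_return_value (note_return_reg, &used);

   do_clobber_return_reg and do_use_return_reg below are the callbacks
   actually used in this file.  */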
5367 
5368 static void
5369 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5370 {
5371   emit_clobber (reg);
5372 }
5373 
5374 void
5375 clobber_return_register (void)
5376 {
5377   diddle_return_value (do_clobber_return_reg, NULL);
5378 
5379   /* In case we use a pseudo to return the value, clobber it too.  */
5380   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5381     {
5382       tree decl_result = DECL_RESULT (current_function_decl);
5383       rtx decl_rtl = DECL_RTL (decl_result);
5384       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5385 	{
5386 	  do_clobber_return_reg (decl_rtl, NULL);
5387 	}
5388     }
5389 }
5390 
5391 static void
5392 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5393 {
5394   emit_use (reg);
5395 }
5396 
5397 static void
5398 use_return_register (void)
5399 {
5400   diddle_return_value (do_use_return_reg, NULL);
5401 }
5402 
5403 /* Set the location of the insn chain starting at INSN to LOC.  */
5404 
5405 static void
5406 set_insn_locations (rtx_insn *insn, int loc)
5407 {
5408   while (insn != NULL)
5409     {
5410       if (INSN_P (insn))
5411 	INSN_LOCATION (insn) = loc;
5412       insn = NEXT_INSN (insn);
5413     }
5414 }
5415 
5416 /* Generate RTL for the end of the current function.  */
5417 
5418 void
5419 expand_function_end (void)
5420 {
5421   /* If arg_pointer_save_area was referenced only from a nested
5422      function, we will not have initialized it yet.  Do that now.  */
5423   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5424     get_arg_pointer_save_area ();
5425 
5426   /* If we are doing generic stack checking and this function makes calls,
5427      do a stack probe at the start of the function to ensure we have enough
5428      space for another stack frame.  */
5429   if (flag_stack_check == GENERIC_STACK_CHECK)
5430     {
5431       rtx_insn *insn, *seq;
5432 
5433       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5434 	if (CALL_P (insn))
5435 	  {
5436 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5437 	    start_sequence ();
5438 	    if (STACK_CHECK_MOVING_SP)
5439 	      anti_adjust_stack_and_probe (max_frame_size, true);
5440 	    else
5441 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5442 	    seq = get_insns ();
5443 	    end_sequence ();
5444 	    set_insn_locations (seq, prologue_location);
5445 	    emit_insn_before (seq, stack_check_probe_note);
5446 	    break;
5447 	  }
5448     }
5449 
5450   /* End any sequences that failed to be closed due to syntax errors.  */
5451   while (in_sequence_p ())
5452     end_sequence ();
5453 
5454   clear_pending_stack_adjust ();
5455   do_pending_stack_adjust ();
5456 
5457   /* Output a linenumber for the end of the function.
5458      SDB depends on this.  */
5459   set_curr_insn_location (input_location);
5460 
5461   /* Before the return label (if any), clobber the return
5462      registers so that they are not propagated live to the rest of
5463      the function.  This can only happen with functions that drop
5464      through; if there had been a return statement, there would
5465      have either been a return rtx, or a jump to the return label.
5466 
5467      We delay actual code generation after the current_function_value_rtx
5468      is computed.  */
5469   rtx_insn *clobber_after = get_last_insn ();
5470 
5471   /* Output the label for the actual return from the function.  */
5472   emit_label (return_label);
5473 
5474   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5475     {
5476       /* Let except.c know where it should emit the call to unregister
5477 	 the function context for sjlj exceptions.  */
5478       if (flag_exceptions)
5479 	sjlj_emit_function_exit_after (get_last_insn ());
5480     }
5481   else
5482     {
5483       /* We want to ensure that instructions that may trap are not
5484 	 moved into the epilogue by scheduling, because we don't
5485 	 always emit unwind information for the epilogue.  */
5486       if (cfun->can_throw_non_call_exceptions)
5487 	emit_insn (gen_blockage ());
5488     }
5489 
5490   /* If this is an implementation of throw, do what's necessary to
5491      communicate between __builtin_eh_return and the epilogue.  */
5492   expand_eh_return ();
5493 
5494   /* If scalar return value was computed in a pseudo-reg, or was a named
5495      return value that got dumped to the stack, copy that to the hard
5496      return register.  */
5497   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5498     {
5499       tree decl_result = DECL_RESULT (current_function_decl);
5500       rtx decl_rtl = DECL_RTL (decl_result);
5501 
5502       if (REG_P (decl_rtl)
5503 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5504 	  : DECL_REGISTER (decl_result))
5505 	{
5506 	  rtx real_decl_rtl = crtl->return_rtx;
5507 
5508 	  /* This should be set in assign_parms.  */
5509 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5510 
5511 	  /* If this is a BLKmode structure being returned in registers,
5512 	     then use the mode computed in expand_return.  Note that if
5513 	     decl_rtl is memory, then its mode may have been changed,
5514 	     but that crtl->return_rtx has not.  */
5515 	  if (GET_MODE (real_decl_rtl) == BLKmode)
5516 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5517 
5518 	  /* If a non-BLKmode return value should be padded at the least
5519 	     significant end of the register, shift it left by the appropriate
5520 	     amount.  BLKmode results are handled using the group load/store
5521 	     machinery.  */
5522 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5523 	      && REG_P (real_decl_rtl)
5524 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5525 	    {
5526 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5527 					   REGNO (real_decl_rtl)),
5528 			      decl_rtl);
5529 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5530 	    }
5531 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5532 	    {
5533 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5534 		 move the result to the real return registers.  Otherwise, do
5535 		 a group load from decl_rtl for a named return.  */
5536 	      if (GET_CODE (decl_rtl) == PARALLEL)
5537 		emit_group_move (real_decl_rtl, decl_rtl);
5538 	      else
5539 		emit_group_load (real_decl_rtl, decl_rtl,
5540 				 TREE_TYPE (decl_result),
5541 				 int_size_in_bytes (TREE_TYPE (decl_result)));
5542 	    }
5543 	  /* In the case of complex integer modes smaller than a word, we'll
5544 	     need to generate some non-trivial bitfield insertions.  Do that
5545 	     on a pseudo and not the hard register.  */
5546 	  else if (GET_CODE (decl_rtl) == CONCAT
5547 		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5548 		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5549 	    {
5550 	      int old_generating_concat_p;
5551 	      rtx tmp;
5552 
5553 	      old_generating_concat_p = generating_concat_p;
5554 	      generating_concat_p = 0;
5555 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5556 	      generating_concat_p = old_generating_concat_p;
5557 
5558 	      emit_move_insn (tmp, decl_rtl);
5559 	      emit_move_insn (real_decl_rtl, tmp);
5560 	    }
5561 	  /* If a named return value dumped decl_rtl to memory, then
5562 	     we may need to re-do the PROMOTE_MODE signed/unsigned
5563 	     extension.  */
5564 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5565 	    {
5566 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5567 	      promote_function_mode (TREE_TYPE (decl_result),
5568 				     GET_MODE (decl_rtl), &unsignedp,
5569 				     TREE_TYPE (current_function_decl), 1);
5570 
5571 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5572 	    }
5573 	  else
5574 	    emit_move_insn (real_decl_rtl, decl_rtl);
5575 	}
5576     }
5577 
5578   /* If returning a structure, arrange to return the address of the value
5579      in a place where debuggers expect to find it.
5580 
5581      If returning a structure PCC style,
5582      the caller also depends on this value.
5583      And cfun->returns_pcc_struct is not necessarily set.  */
5584   if ((cfun->returns_struct || cfun->returns_pcc_struct)
5585       && !targetm.calls.omit_struct_return_reg)
5586     {
5587       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5588       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5589       rtx outgoing;
5590 
5591       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5592 	type = TREE_TYPE (type);
5593       else
5594 	value_address = XEXP (value_address, 0);
5595 
5596       outgoing = targetm.calls.function_value (build_pointer_type (type),
5597 					       current_function_decl, true);
5598 
5599       /* Mark this as a function return value so integrate will delete the
5600 	 assignment and USE below when inlining this function.  */
5601       REG_FUNCTION_VALUE_P (outgoing) = 1;
5602 
5603       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5604       value_address = convert_memory_address (GET_MODE (outgoing),
5605 					      value_address);
5606 
5607       emit_move_insn (outgoing, value_address);
5608 
5609       /* Show the return register used to hold the result (in this case
5610 	 the address of the result).  */
5611       crtl->return_rtx = outgoing;
5612     }
5613 
5614   /* Emit the actual code to clobber the return register.  Don't emit
5615      it if clobber_after is a barrier; in that case the previous basic
5616      block certainly doesn't fall thru into the exit block.  */
5617   if (!BARRIER_P (clobber_after))
5618     {
5619       start_sequence ();
5620       clobber_return_register ();
5621       rtx_insn *seq = get_insns ();
5622       end_sequence ();
5623 
5624       emit_insn_after (seq, clobber_after);
5625     }
5626 
5627   /* Output the label for the naked return from the function.  */
5628   if (naked_return_label)
5629     emit_label (naked_return_label);
5630 
5631   /* @@@ This is a kludge.  We want to ensure that instructions that
5632      may trap are not moved into the epilogue by scheduling, because
5633      we don't always emit unwind information for the epilogue.  */
5634   if (cfun->can_throw_non_call_exceptions
5635       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5636     emit_insn (gen_blockage ());
5637 
5638   /* If stack protection is enabled for this function, check the guard.  */
5639   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
5640     stack_protect_epilogue ();
5641 
5642   /* If we had calls to alloca, and this machine needs
5643      an accurate stack pointer to exit the function,
5644      insert some code to save and restore the stack pointer.  */
5645   if (! EXIT_IGNORE_STACK
5646       && cfun->calls_alloca)
5647     {
5648       rtx tem = 0;
5649 
5650       start_sequence ();
5651       emit_stack_save (SAVE_FUNCTION, &tem);
5652       rtx_insn *seq = get_insns ();
5653       end_sequence ();
5654       emit_insn_before (seq, parm_birth_insn);
5655 
5656       emit_stack_restore (SAVE_FUNCTION, tem);
5657     }
5658 
5659   /* ??? This should no longer be necessary since the stupid register
5660      allocator is gone, but some parts of the compiler (e.g. reload_combine
5661      and sh mach_dep_reorg) still try to compute their own lifetime info
5662      instead of using the general framework.  */
5663   use_return_register ();
5664 }
5665 
5666 rtx
5667 get_arg_pointer_save_area (void)
5668 {
5669   rtx ret = arg_pointer_save_area;
5670 
5671   if (! ret)
5672     {
5673       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5674       arg_pointer_save_area = ret;
5675     }
5676 
5677   if (! crtl->arg_pointer_save_area_init)
5678     {
5679       /* Save the arg pointer at the beginning of the function.  The
5680 	 generated stack slot may not be a valid memory address, so we
5681 	 have to check it and fix it if necessary.  */
5682       start_sequence ();
5683       emit_move_insn (validize_mem (copy_rtx (ret)),
5684                       crtl->args.internal_arg_pointer);
5685       rtx_insn *seq = get_insns ();
5686       end_sequence ();
5687 
5688       push_topmost_sequence ();
5689       emit_insn_after (seq, entry_of_function ());
5690       pop_topmost_sequence ();
5691 
5692       crtl->arg_pointer_save_area_init = true;
5693     }
5694 
5695   return ret;
5696 }
5697 
5698 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5699    for the first time.  */
5700 
5701 static void
5702 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5703 {
5704   rtx_insn *tmp;
5705   hash_table<insn_cache_hasher> *hash = *hashp;
5706 
5707   if (hash == NULL)
5708     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5709 
5710   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5711     {
5712       rtx *slot = hash->find_slot (tmp, INSERT);
5713       gcc_assert (*slot == NULL);
5714       *slot = tmp;
5715     }
5716 }
5717 
5718 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5719    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5720    insn, then record COPY as well.  */
5721 
5722 void
5723 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5724 {
5725   hash_table<insn_cache_hasher> *hash;
5726   rtx *slot;
5727 
5728   hash = epilogue_insn_hash;
5729   if (!hash || !hash->find (insn))
5730     {
5731       hash = prologue_insn_hash;
5732       if (!hash || !hash->find (insn))
5733 	return;
5734     }
5735 
5736   slot = hash->find_slot (copy, INSERT);
5737   gcc_assert (*slot == NULL);
5738   *slot = copy;
5739 }
5740 
5741 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5742    we can be running after reorg, SEQUENCE rtl is possible.  */
5743 
5744 static bool
5745 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5746 {
5747   if (hash == NULL)
5748     return false;
5749 
5750   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5751     {
5752       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5753       int i;
5754       for (i = seq->len () - 1; i >= 0; i--)
5755 	if (hash->find (seq->element (i)))
5756 	  return true;
5757       return false;
5758     }
5759 
5760   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5761 }
5762 
5763 int
5764 prologue_contains (const rtx_insn *insn)
5765 {
5766   return contains (insn, prologue_insn_hash);
5767 }
5768 
5769 int
5770 epilogue_contains (const rtx_insn *insn)
5771 {
5772   return contains (insn, epilogue_insn_hash);
5773 }
5774 
5775 int
5776 prologue_epilogue_contains (const rtx_insn *insn)
5777 {
5778   if (contains (insn, prologue_insn_hash))
5779     return 1;
5780   if (contains (insn, epilogue_insn_hash))
5781     return 1;
5782   return 0;
5783 }
5784 
5785 void
5786 record_prologue_seq (rtx_insn *seq)
5787 {
5788   record_insns (seq, NULL, &prologue_insn_hash);
5789 }
5790 
5791 void
5792 record_epilogue_seq (rtx_insn *seq)
5793 {
5794   record_insns (seq, NULL, &epilogue_insn_hash);
5795 }
5796 
5797 /* Set JUMP_LABEL for a return insn.  */
5798 
5799 void
5800 set_return_jump_label (rtx_insn *returnjump)
5801 {
5802   rtx pat = PATTERN (returnjump);
5803   if (GET_CODE (pat) == PARALLEL)
5804     pat = XVECEXP (pat, 0, 0);
5805   if (ANY_RETURN_P (pat))
5806     JUMP_LABEL (returnjump) = pat;
5807   else
5808     JUMP_LABEL (returnjump) = ret_rtx;
5809 }
5810 
5811 /* Return a sequence to be used as the split prologue for the current
5812    function, or NULL.  */
5813 
5814 static rtx_insn *
5815 make_split_prologue_seq (void)
5816 {
5817   if (!flag_split_stack
5818       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5819     return NULL;
5820 
5821   start_sequence ();
5822   emit_insn (targetm.gen_split_stack_prologue ());
5823   rtx_insn *seq = get_insns ();
5824   end_sequence ();
5825 
5826   record_insns (seq, NULL, &prologue_insn_hash);
5827   set_insn_locations (seq, prologue_location);
5828 
5829   return seq;
5830 }
5831 
5832 /* Return a sequence to be used as the prologue for the current function,
5833    or NULL.  */
5834 
5835 static rtx_insn *
5836 make_prologue_seq (void)
5837 {
5838   if (!targetm.have_prologue ())
5839     return NULL;
5840 
5841   start_sequence ();
5842   rtx_insn *seq = targetm.gen_prologue ();
5843   emit_insn (seq);
5844 
5845   /* Insert an explicit USE for the frame pointer
5846      if profiling is on and the frame pointer is required.  */
5847   if (crtl->profile && frame_pointer_needed)
5848     emit_use (hard_frame_pointer_rtx);
5849 
5850   /* Retain a map of the prologue insns.  */
5851   record_insns (seq, NULL, &prologue_insn_hash);
5852   emit_note (NOTE_INSN_PROLOGUE_END);
5853 
5854   /* Ensure that instructions are not moved into the prologue when
5855      profiling is on.  The call to the profiling routine can be
5856      emitted within the live range of a call-clobbered register.  */
5857   if (!targetm.profile_before_prologue () && crtl->profile)
5858     emit_insn (gen_blockage ());
5859 
5860   seq = get_insns ();
5861   end_sequence ();
5862   set_insn_locations (seq, prologue_location);
5863 
5864   return seq;
5865 }
5866 
5867 /* Return a sequence to be used as the epilogue for the current function,
5868    or NULL.  */
5869 
5870 static rtx_insn *
5871 make_epilogue_seq (void)
5872 {
5873   if (!targetm.have_epilogue ())
5874     return NULL;
5875 
5876   start_sequence ();
5877   emit_note (NOTE_INSN_EPILOGUE_BEG);
5878   rtx_insn *seq = targetm.gen_epilogue ();
5879   if (seq)
5880     emit_jump_insn (seq);
5881 
5882   /* Retain a map of the epilogue insns.  */
5883   record_insns (seq, NULL, &epilogue_insn_hash);
5884   set_insn_locations (seq, epilogue_location);
5885 
5886   seq = get_insns ();
5887   rtx_insn *returnjump = get_last_insn ();
5888   end_sequence ();
5889 
5890   if (JUMP_P (returnjump))
5891     set_return_jump_label (returnjump);
5892 
5893   return seq;
5894 }
5895 
5896 
5897 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5898    this into place with notes indicating where the prologue ends and where
5899    the epilogue begins.  Update the basic block information when possible.
5900 
5901    Notes on epilogue placement:
5902    There are several kinds of edges to the exit block:
5903    * a single fallthru edge from LAST_BB
5904    * possibly, edges from blocks containing sibcalls
5905    * possibly, fake edges from infinite loops
5906 
5907    The epilogue is always emitted on the fallthru edge from the last basic
5908    block in the function, LAST_BB, into the exit block.
5909 
5910    If LAST_BB is empty except for a label, it is the target of every
5911    other basic block in the function that ends in a return.  If a
5912    target has a return or simple_return pattern (possibly with
5913    conditional variants), these basic blocks can be changed so that a
5914    return insn is emitted into them, and their target is adjusted to
5915    the real exit block.
5916 
5917    Notes on shrink wrapping: We implement a fairly conservative
5918    version of shrink-wrapping rather than the textbook one.  We only
5919    generate a single prologue and a single epilogue.  This is
5920    sufficient to catch a number of interesting cases involving early
5921    exits.
5922 
5923    First, we identify the blocks that require the prologue to occur before
5924    them.  These are the ones that modify a call-saved register, or reference
5925    any of the stack or frame pointer registers.  To simplify things, we then
5926    mark everything reachable from these blocks as also requiring a prologue.
5927    This takes care of loops automatically, and avoids the need to examine
5928    whether MEMs reference the frame, since it is sufficient to check for
5929    occurrences of the stack or frame pointer.
5930 
5931    We then compute the set of blocks for which the need for a prologue
5932    is anticipatable (borrowing terminology from the shrink-wrapping
5933    description in Muchnick's book).  These are the blocks which either
5934    require a prologue themselves, or those that have only successors
5935    where the prologue is anticipatable.  The prologue needs to be
5936    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5937    is not.  For the moment, we ensure that only one such edge exists.
5938 
5939    The epilogue is placed as described above, but we make a
5940    distinction between inserting return and simple_return patterns
5941    when modifying other blocks that end in a return.  Blocks that end
5942    in a sibcall omit the sibcall_epilogue if the block is not in
5943    ANTIC.  */
5944 
5945 void
5946 thread_prologue_and_epilogue_insns (void)
5947 {
5948   df_analyze ();
5949 
5950   /* Can't deal with multiple successors of the entry block at the
5951      moment.  Function should always have at least one entry
5952      point.  */
5953   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5954 
5955   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5956   edge orig_entry_edge = entry_edge;
5957 
5958   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5959   rtx_insn *prologue_seq = make_prologue_seq ();
5960   rtx_insn *epilogue_seq = make_epilogue_seq ();
5961 
5962   /* Try to perform a kind of shrink-wrapping, making sure the
5963      prologue/epilogue is emitted only around those parts of the
5964      function that require it.  */
5965   try_shrink_wrapping (&entry_edge, prologue_seq);
5966 
5967   /* If the target can handle splitting the prologue/epilogue into separate
5968      components, try to shrink-wrap these components separately.  */
5969   try_shrink_wrapping_separate (entry_edge->dest);
5970 
5971   /* If that did anything for any component we now need to generate the
5972      "main" prologue again.  Because some targets require some of these
5973      to be called in a specific order (i386 requires the split prologue
5974      to be first, for example), we create all three sequences again here.
5975      If this does not work for some target, that target should not enable
5976      separate shrink-wrapping.  */
5977   if (crtl->shrink_wrapped_separate)
5978     {
5979       split_prologue_seq = make_split_prologue_seq ();
5980       prologue_seq = make_prologue_seq ();
5981       epilogue_seq = make_epilogue_seq ();
5982     }
5983 
5984   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5985 
5986   /* A small fib -- epilogue is not yet completed, but we wish to re-use
5987      this marker for the splits of EH_RETURN patterns, and nothing else
5988      uses the flag in the meantime.  */
5989   epilogue_completed = 1;
5990 
5991   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
5992      some targets, these get split to a special version of the epilogue
5993      code.  In order to be able to properly annotate these with unwind
5994      info, try to split them now.  If we get a valid split, drop an
5995      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
5996   edge e;
5997   edge_iterator ei;
5998   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5999     {
6000       rtx_insn *prev, *last, *trial;
6001 
6002       if (e->flags & EDGE_FALLTHRU)
6003 	continue;
6004       last = BB_END (e->src);
6005       if (!eh_returnjump_p (last))
6006 	continue;
6007 
6008       prev = PREV_INSN (last);
6009       trial = try_split (PATTERN (last), last, 1);
6010       if (trial == last)
6011 	continue;
6012 
6013       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6014       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6015     }
6016 
6017   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6018 
6019   if (exit_fallthru_edge)
6020     {
6021       if (epilogue_seq)
6022 	{
6023 	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6024 	  commit_edge_insertions ();
6025 
6026 	  /* The epilogue insns we inserted may cause the exit edge to no longer
6027 	     be fallthru.  */
6028 	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6029 	    {
6030 	      if (((e->flags & EDGE_FALLTHRU) != 0)
6031 		  && returnjump_p (BB_END (e->src)))
6032 		e->flags &= ~EDGE_FALLTHRU;
6033 	    }
6034 	}
6035       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6036 	{
6037 	  /* We have a fall-through edge to the exit block, the source is not
6038 	     at the end of the function, and there will be an assembler epilogue
6039 	     at the end of the function.
6040 	     We can't use force_nonfallthru here, because that would try to
6041 	     use return.  Inserting a jump 'by hand' is extremely messy, so
6042 	     we take advantage of cfg_layout_finalize using
6043 	     fixup_fallthru_exit_predecessor.  */
6044 	  cfg_layout_initialize (0);
6045 	  basic_block cur_bb;
6046 	  FOR_EACH_BB_FN (cur_bb, cfun)
6047 	    if (cur_bb->index >= NUM_FIXED_BLOCKS
6048 		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6049 	      cur_bb->aux = cur_bb->next_bb;
6050 	  cfg_layout_finalize ();
6051 	}
6052     }
6053 
6054   /* Insert the prologue.  */
6055 
6056   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6057 
6058   if (split_prologue_seq || prologue_seq)
6059     {
6060       rtx_insn *split_prologue_insn = split_prologue_seq;
6061       if (split_prologue_seq)
6062 	{
6063 	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6064 	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
6065 	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6066 	}
6067 
6068       rtx_insn *prologue_insn = prologue_seq;
6069       if (prologue_seq)
6070 	{
6071 	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6072 	    prologue_insn = NEXT_INSN (prologue_insn);
6073 	  insert_insn_on_edge (prologue_seq, entry_edge);
6074 	}
6075 
6076       commit_edge_insertions ();
6077 
6078       /* Look for basic blocks within the prologue insns.  */
6079       if (split_prologue_insn
6080 	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6081 	split_prologue_insn = NULL;
6082       if (prologue_insn
6083 	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
6084 	prologue_insn = NULL;
6085       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6086       bitmap_clear (blocks);
6087       if (split_prologue_insn)
6088 	bitmap_set_bit (blocks,
6089 			BLOCK_FOR_INSN (split_prologue_insn)->index);
6090       if (prologue_insn)
6091 	bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6092       bitmap_set_bit (blocks, entry_edge->dest->index);
6093       bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6094       find_many_sub_basic_blocks (blocks);
6095     }
6096 
6097   default_rtl_profile ();
6098 
6099   /* Emit sibling epilogues before any sibling call sites.  */
6100   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6101        (e = ei_safe_edge (ei));
6102        ei_next (&ei))
6103     {
6104       /* Skip edges already handled, i.e. the ones that run without a prologue.  */
6105       if (e->flags & EDGE_IGNORE)
6106 	{
6107 	  e->flags &= ~EDGE_IGNORE;
6108 	  continue;
6109 	}
6110 
6111       rtx_insn *insn = BB_END (e->src);
6112 
6113       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6114 	continue;
6115 
6116       if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6117 	{
6118 	  start_sequence ();
6119 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
6120 	  emit_insn (ep_seq);
6121 	  rtx_insn *seq = get_insns ();
6122 	  end_sequence ();
6123 
6124 	  /* Retain a map of the epilogue insns.  Used in life analysis to
6125 	     avoid getting rid of sibcall epilogue insns.  Do this before we
6126 	     actually emit the sequence.  */
6127 	  record_insns (seq, NULL, &epilogue_insn_hash);
6128 	  set_insn_locations (seq, epilogue_location);
6129 
6130 	  emit_insn_before (seq, insn);
6131 	}
6132     }
6133 
6134   if (epilogue_seq)
6135     {
6136       rtx_insn *insn, *next;
6137 
6138 	  /* Move any NOTE_INSN_FUNCTION_BEG notes found within the epilogue
6139 	     sequence to just before it, as those can be relevant for debug
6140 	     info generation; there is no need to insist that such a note
6141 	     exists.  */
6143       for (insn = epilogue_seq; insn; insn = next)
6144 	{
6145 	  next = NEXT_INSN (insn);
6146 	  if (NOTE_P (insn)
6147 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6148 	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6149 	}
6150     }
6151 
6152   /* Threading the prologue and epilogue changes the artificial refs
6153      in the entry and exit blocks.  */
6154   epilogue_completed = 1;
6155   df_update_entry_exit_and_calls ();
6156 }
6157 
6158 /* Reposition the prologue-end and epilogue-begin notes after
6159    instruction scheduling.  */
6160 
6161 void
6162 reposition_prologue_and_epilogue_notes (void)
6163 {
6164   if (!targetm.have_prologue ()
6165       && !targetm.have_epilogue ()
6166       && !targetm.have_sibcall_epilogue ())
6167     return;
6168 
6169   /* Since the hash table is created on demand, the fact that it is
6170      non-null is a signal that it is non-empty.  */
6171   if (prologue_insn_hash != NULL)
6172     {
6173       size_t len = prologue_insn_hash->elements ();
6174       rtx_insn *insn, *last = NULL, *note = NULL;
6175 
6176       /* Scan from the beginning until we reach the last prologue insn.  */
6177       /* ??? While we do have the CFG intact, there are two problems:
6178 	 (1) The prologue can contain loops (typically probing the stack),
6179 	     which means that the end of the prologue isn't in the first bb.
6180 	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
6181       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6182 	{
6183 	  if (NOTE_P (insn))
6184 	    {
6185 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6186 		note = insn;
6187 	    }
6188 	  else if (contains (insn, prologue_insn_hash))
6189 	    {
6190 	      last = insn;
6191 	      if (--len == 0)
6192 		break;
6193 	    }
6194 	}
6195 
6196       if (last)
6197 	{
6198 	  if (note == NULL)
6199 	    {
6200 	      /* Scan forward looking for the PROLOGUE_END note.  It should
6201 		 be right at the beginning of the block, possibly with other
6202 		 insn notes that got moved there.  */
6203 	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6204 		{
6205 		  if (NOTE_P (note)
6206 		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6207 		    break;
6208 		}
6209 	    }
6210 
6211 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
6212 	  if (LABEL_P (last))
6213 	    last = NEXT_INSN (last);
6214 	  reorder_insns (note, note, last);
6215 	}
6216     }
6217 
6218   if (epilogue_insn_hash != NULL)
6219     {
6220       edge_iterator ei;
6221       edge e;
6222 
6223       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6224 	{
6225 	  rtx_insn *insn, *first = NULL, *note = NULL;
6226 	  basic_block bb = e->src;
6227 
6228 	  /* Scan from the beginning until we reach the first epilogue insn.  */
6229 	  FOR_BB_INSNS (bb, insn)
6230 	    {
6231 	      if (NOTE_P (insn))
6232 		{
6233 		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6234 		    {
6235 		      note = insn;
6236 		      if (first != NULL)
6237 			break;
6238 		    }
6239 		}
6240 	      else if (first == NULL && contains (insn, epilogue_insn_hash))
6241 		{
6242 		  first = insn;
6243 		  if (note != NULL)
6244 		    break;
6245 		}
6246 	    }
6247 
6248 	  if (note)
6249 	    {
6250 	      /* If the function has a single basic block, and no real
6251 		 epilogue insns (e.g. sibcall with no cleanup), the
6252 		 epilogue note can get scheduled before the prologue
6253 		 note.  If we have frame related prologue insns, having
6254 		 them scanned during the epilogue will result in a crash.
6255 		 In this case re-order the epilogue note to just before
6256 		 the last insn in the block.  */
6257 	      if (first == NULL)
6258 		first = BB_END (bb);
6259 
6260 	      if (PREV_INSN (first) != note)
6261 		reorder_insns (note, note, PREV_INSN (first));
6262 	    }
6263 	}
6264     }
6265 }
6266 
6267 /* Returns the name of the function declared by FNDECL.  */
6268 const char *
6269 fndecl_name (tree fndecl)
6270 {
6271   if (fndecl == NULL)
6272     return "(nofn)";
6273   return lang_hooks.decl_printable_name (fndecl, 2);
6274 }
6275 
6276 /* Returns the name of function FN.  */
6277 const char *
6278 function_name (struct function *fn)
6279 {
6280   tree fndecl = (fn == NULL) ? NULL : fn->decl;
6281   return fndecl_name (fndecl);
6282 }
6283 
6284 /* Returns the name of the current function.  */
6285 const char *
6286 current_function_name (void)
6287 {
6288   return function_name (cfun);
6289 }
6290 
6291 
6292 static unsigned int
6293 rest_of_handle_check_leaf_regs (void)
6294 {
6295 #ifdef LEAF_REGISTERS
6296   crtl->uses_only_leaf_regs
6297     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6298 #endif
6299   return 0;
6300 }
6301 
6302 /* Insert a TYPE into the used types hash table of CFUN.  */
6303 
6304 static void
6305 used_types_insert_helper (tree type, struct function *func)
6306 {
6307   if (type != NULL && func != NULL)
6308     {
6309       if (func->used_types_hash == NULL)
6310 	func->used_types_hash = hash_set<tree>::create_ggc (37);
6311 
6312       func->used_types_hash->add (type);
6313     }
6314 }
6315 
6316 /* Given a type, insert it into the used types hash table of cfun.  */
6317 void
6318 used_types_insert (tree t)
6319 {
6320   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6321     if (TYPE_NAME (t))
6322       break;
6323     else
6324       t = TREE_TYPE (t);
6325   if (TREE_CODE (t) == ERROR_MARK)
6326     return;
6327   if (TYPE_NAME (t) == NULL_TREE
6328       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6329     t = TYPE_MAIN_VARIANT (t);
6330   if (debug_info_level > DINFO_LEVEL_NONE)
6331     {
6332       if (cfun)
6333 	used_types_insert_helper (t, cfun);
6334       else
6335 	{
6336 	  /* So this might be a type referenced by a global variable.
6337 	     Record that type so that we can later decide to emit its
6338 	     debug information.  */
6339 	  vec_safe_push (types_used_by_cur_var_decl, t);
6340 	}
6341     }
6342 }
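
/* Editorial note (not from the original source): front ends call
   used_types_insert when a type is referenced by a declaration or
   expression, e.g.

     used_types_insert (TREE_TYPE (some_var_decl));

   (some_var_decl being whatever VAR_DECL is currently processed), so that
   the debug-info machinery can later tell which types a function actually
   used.  */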
6343 
6344 /* Helper to hash a struct types_used_by_vars_entry.  */
6345 
6346 static hashval_t
6347 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6348 {
6349   gcc_assert (entry && entry->var_decl && entry->type);
6350 
6351   return iterative_hash_object (entry->type,
6352 				iterative_hash_object (entry->var_decl, 0));
6353 }
6354 
6355 /* Hash function of the types_used_by_vars_entry hash table.  */
6356 
6357 hashval_t
6358 used_type_hasher::hash (types_used_by_vars_entry *entry)
6359 {
6360   return hash_types_used_by_vars_entry (entry);
6361 }
6362 
6363 /* Equality function of the types_used_by_vars_entry hash table.  */
6364 
6365 bool
6366 used_type_hasher::equal (types_used_by_vars_entry *e1,
6367 			 types_used_by_vars_entry *e2)
6368 {
6369   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6370 }
6371 
6372 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6373 
6374 void
6375 types_used_by_var_decl_insert (tree type, tree var_decl)
6376 {
6377   if (type != NULL && var_decl != NULL)
6378     {
6379       types_used_by_vars_entry **slot;
6380       struct types_used_by_vars_entry e;
6381       e.var_decl = var_decl;
6382       e.type = type;
6383       if (types_used_by_vars_hash == NULL)
6384 	types_used_by_vars_hash
6385 	  = hash_table<used_type_hasher>::create_ggc (37);
6386 
6387       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6388       if (*slot == NULL)
6389 	{
6390 	  struct types_used_by_vars_entry *entry;
6391 	  entry = ggc_alloc<types_used_by_vars_entry> ();
6392 	  entry->type = type;
6393 	  entry->var_decl = var_decl;
6394 	  *slot = entry;
6395 	}
6396     }
6397 }
6398 
6399 namespace {
6400 
6401 const pass_data pass_data_leaf_regs =
6402 {
6403   RTL_PASS, /* type */
6404   "*leaf_regs", /* name */
6405   OPTGROUP_NONE, /* optinfo_flags */
6406   TV_NONE, /* tv_id */
6407   0, /* properties_required */
6408   0, /* properties_provided */
6409   0, /* properties_destroyed */
6410   0, /* todo_flags_start */
6411   0, /* todo_flags_finish */
6412 };
6413 
6414 class pass_leaf_regs : public rtl_opt_pass
6415 {
6416 public:
6417   pass_leaf_regs (gcc::context *ctxt)
6418     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6419   {}
6420 
6421   /* opt_pass methods: */
6422   virtual unsigned int execute (function *)
6423     {
6424       return rest_of_handle_check_leaf_regs ();
6425     }
6426 
6427 }; // class pass_leaf_regs
6428 
6429 } // anon namespace
6430 
6431 rtl_opt_pass *
6432 make_pass_leaf_regs (gcc::context *ctxt)
6433 {
6434   return new pass_leaf_regs (ctxt);
6435 }
6436 
6437 static unsigned int
6438 rest_of_handle_thread_prologue_and_epilogue (void)
6439 {
6440   /* prepare_shrink_wrap is sensitive to the block structure of the control
6441      flow graph, so clean it up first.  */
6442   if (optimize)
6443     cleanup_cfg (0);
6444 
6445   /* On some machines, the prologue and epilogue code, or parts thereof,
6446      can be represented as RTL.  Doing so lets us schedule insns between
6447      it and the rest of the code and also allows delayed branch
6448      scheduling to operate in the epilogue.  */
6449   thread_prologue_and_epilogue_insns ();
6450 
6451   /* Some non-cold blocks may now be only reachable from cold blocks.
6452      Fix that up.  */
6453   fixup_partitions ();
6454 
6455   /* Shrink-wrapping can result in unreachable edges in the epilogue,
6456      see PR57320.  */
6457   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6458 
6459   /* The stack usage info is finalized during prologue expansion.  */
6460   if (flag_stack_usage_info)
6461     output_stack_usage ();
6462 
6463   return 0;
6464 }
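
/* For illustration only: flag_stack_usage_info is set by options such as
   -fstack-usage, so a hypothetical invocation like

     gcc -fstack-usage -c foo.c

   makes output_stack_usage () above emit the per-function stack-size
   report (a .su file alongside the object file).  */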
6465 
6466 namespace {
6467 
6468 const pass_data pass_data_thread_prologue_and_epilogue =
6469 {
6470   RTL_PASS, /* type */
6471   "pro_and_epilogue", /* name */
6472   OPTGROUP_NONE, /* optinfo_flags */
6473   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6474   0, /* properties_required */
6475   0, /* properties_provided */
6476   0, /* properties_destroyed */
6477   0, /* todo_flags_start */
6478   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6479 };
6480 
6481 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6482 {
6483 public:
6484   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6485     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6486   {}
6487 
6488   /* opt_pass methods: */
6489   virtual unsigned int execute (function *)
6490     {
6491       return rest_of_handle_thread_prologue_and_epilogue ();
6492     }
6493 
6494 }; // class pass_thread_prologue_and_epilogue
6495 
6496 } // anon namespace
6497 
6498 rtl_opt_pass *
6499 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6500 {
6501   return new pass_thread_prologue_and_epilogue (ctxt);
6502 }
6503 
6504 
6505 /* This mini-pass fixes fall-out from SSA in asm statements that have
6506    in-out constraints.  Say you start with
6507 
6508      orig = inout;
6509      asm ("": "+mr" (inout));
6510      use (orig);
6511 
6512    which is transformed very early to use explicit output and match operands:
6513 
6514      orig = inout;
6515      asm ("": "=mr" (inout) : "0" (inout));
6516      use (orig);
6517 
6518    Or, after SSA and copyprop,
6519 
6520      asm ("": "=mr" (inout_2) : "0" (inout_1));
6521      use (inout_1);
6522 
6523    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6524    they represent two separate values, so they will get different pseudo
6525    registers during expansion.  Then, since the two operands need to match
6526    per the constraints but use different pseudo registers, all reload can
6527    do is generate a reload for these operands.  But reloads can only be
6528    satisfied by hard registers, not by memory, so we need a register for
6529    this reload just because we are presented with non-matching operands.
6530    So, even though we allow memory for this operand, no memory can be
6531    used for it, just because the two operands don't match.  This can
6532    cause reload failures on register-starved targets.
6533 
6534    So this is a symptom either of reload not being able to use memory for
6535    reloads, or of the two operands not coming into reload as matching (in
6536    which case the pseudo could go to memory just fine, as the alternative
6537    allows it, and no reload would be necessary).  We fix the latter
6538    problem here, by transforming
6539 
6540      asm ("": "=mr" (inout_2) : "0" (inout_1));
6541 
6542    back to
6543 
6544      inout_2 = inout_1;
6545      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
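
/* For illustration only (hypothetical user code, not part of GCC): the
   shape described above arises from ordinary in-out asm operands, e.g.

     int foo (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return orig + inout;
     }

   After gimplification, SSA and copy propagation this takes the
   non-matching "=mr" / "0" form shown above, which is what
   match_asm_constraints_1 below repairs by emitting the extra copy.  */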
6546 
6547 static void
6548 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6549 {
6550   int i;
6551   bool changed = false;
6552   rtx op = SET_SRC (p_sets[0]);
6553   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6554   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6555   bool *output_matched = XALLOCAVEC (bool, noutputs);
6556 
6557   memset (output_matched, 0, noutputs * sizeof (bool));
6558   for (i = 0; i < ninputs; i++)
6559     {
6560       rtx input, output;
6561       rtx_insn *insns;
6562       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6563       char *end;
6564       int match, j;
6565 
6566       if (*constraint == '%')
6567 	constraint++;
6568 
6569       match = strtoul (constraint, &end, 10);
6570       if (end == constraint)
6571 	continue;
6572 
6573       gcc_assert (match < noutputs);
6574       output = SET_DEST (p_sets[match]);
6575       input = RTVEC_ELT (inputs, i);
6576       /* Only do the transformation for pseudos.  */
6577       if (! REG_P (output)
6578 	  || rtx_equal_p (output, input)
6579 	  || !(REG_P (input) || SUBREG_P (input)
6580 	       || MEM_P (input) || CONSTANT_P (input))
6581 	  || !general_operand (input, GET_MODE (output)))
6582 	continue;
6583 
6584       /* We can't do anything if the output is also used as input,
6585 	 as we're going to overwrite it.  */
6586       for (j = 0; j < ninputs; j++)
6587         if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6588 	  break;
6589       if (j != ninputs)
6590 	continue;
6591 
6592       /* Avoid changing the same input several times.  For
6593 	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6594 	 only change in once (to out1), rather than changing it
6595 	 first to out1 and afterwards to out2.  */
6596       if (i > 0)
6597 	{
6598 	  for (j = 0; j < noutputs; j++)
6599 	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
6600 	      break;
6601 	  if (j != noutputs)
6602 	    continue;
6603 	}
6604       output_matched[match] = true;
6605 
6606       start_sequence ();
6607       emit_move_insn (output, input);
6608       insns = get_insns ();
6609       end_sequence ();
6610       emit_insn_before (insns, insn);
6611 
6612       /* Now replace all mentions of the input with output.  We can't
6613 	 just replace the occurrence in inputs[i], as the register might
6614 	 also be used in some other input (or even in an address of an
6615 	 output), which would mean possibly increasing the number of
6616 	 inputs by one (namely 'output' in addition), which might pose
6617 	 too complicated a problem for reload to solve.  E.g. this situation:
6618 
6619 	   asm ("" : "=r" (output), "=m" (input) : "0" (input))
6620 
6621 	 Here 'input' is used in two occurrences as input (once for the
6622 	 input operand, once for the address in the second output operand).
6623 	 If we replaced only the occurrence of the input operand (to make
6624 	 the operands match), we would be left with this:
6625 
6626 	   output = input
6627 	   asm ("" : "=r" (output), "=m" (input) : "0" (output))
6628 
6629 	 Now we suddenly have two different input values (containing the same
6630 	 value, but different pseudos) where we formerly had only one.
6631 	 With more complicated asms this might lead to reload failures
6632 	 which wouldn't have happened without this pass.  So, iterate over
6633 	 all operands and replace all occurrences of the register used.  */
6634       for (j = 0; j < noutputs; j++)
6635 	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6636 	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6637 	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6638 					      input, output);
6639       for (j = 0; j < ninputs; j++)
6640 	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6641 	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6642 					       input, output);
6643 
6644       changed = true;
6645     }
6646 
6647   if (changed)
6648     df_insn_rescan (insn);
6649 }
6650 
6651 /* Add the decl D to the local_decls list of FUN.  */
6652 
6653 void
6654 add_local_decl (struct function *fun, tree d)
6655 {
6656   gcc_assert (VAR_P (d));
6657   vec_safe_push (fun->local_decls, d);
6658 }
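
/* For illustration only, a hedged sketch (FUN and TMP are hypothetical,
   not taken from a real call site):

     tree tmp = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    NULL_TREE, integer_type_node);
     add_local_decl (fun, tmp);

   Only VAR_DECLs are accepted, as the assert above enforces.  */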
6659 
6660 namespace {
6661 
6662 const pass_data pass_data_match_asm_constraints =
6663 {
6664   RTL_PASS, /* type */
6665   "asmcons", /* name */
6666   OPTGROUP_NONE, /* optinfo_flags */
6667   TV_NONE, /* tv_id */
6668   0, /* properties_required */
6669   0, /* properties_provided */
6670   0, /* properties_destroyed */
6671   0, /* todo_flags_start */
6672   0, /* todo_flags_finish */
6673 };
6674 
6675 class pass_match_asm_constraints : public rtl_opt_pass
6676 {
6677 public:
6678   pass_match_asm_constraints (gcc::context *ctxt)
6679     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6680   {}
6681 
6682   /* opt_pass methods: */
6683   virtual unsigned int execute (function *);
6684 
6685 }; // class pass_match_asm_constraints
6686 
6687 unsigned
6688 pass_match_asm_constraints::execute (function *fun)
6689 {
6690   basic_block bb;
6691   rtx_insn *insn;
6692   rtx pat, *p_sets;
6693   int noutputs;
6694 
6695   if (!crtl->has_asm_statement)
6696     return 0;
6697 
6698   df_set_flags (DF_DEFER_INSN_RESCAN);
6699   FOR_EACH_BB_FN (bb, fun)
6700     {
6701       FOR_BB_INSNS (bb, insn)
6702 	{
6703 	  if (!INSN_P (insn))
6704 	    continue;
6705 
6706 	  pat = PATTERN (insn);
6707 	  if (GET_CODE (pat) == PARALLEL)
6708 	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6709 	  else if (GET_CODE (pat) == SET)
6710 	    p_sets = &PATTERN (insn), noutputs = 1;
6711 	  else
6712 	    continue;
6713 
6714 	  if (GET_CODE (*p_sets) == SET
6715 	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6716 	    match_asm_constraints_1 (insn, p_sets, noutputs);
6717 	}
6718     }
6719 
6720   return TODO_df_finish;
6721 }
6722 
6723 } // anon namespace
6724 
6725 rtl_opt_pass *
6726 make_pass_match_asm_constraints (gcc::context *ctxt)
6727 {
6728   return new pass_match_asm_constraints (ctxt);
6729 }
6730 
6731 
6732 #include "gt-function.h"
6733