1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
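/* A compressed, illustrative view of how a front end drives this file
   (the call names are real, the sequencing is only a sketch):

     expand_function_start (fndecl);   // set up parameters and result rtl
     ... expand the function body into RTL ...
     expand_function_end ();           // emit the return sequence

   assign_stack_local is called throughout that middle phase, and again
   from reload for pseudos that do not get hard registers.  */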
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-dfa.h"
77 #include "tree-ssa.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "gimple.h"
81 #include "options.h"
82 #include "function-abi.h"
83
84 /* So we can assign to cfun in this file. */
85 #undef cfun
86
87 #ifndef STACK_ALIGNMENT_NEEDED
88 #define STACK_ALIGNMENT_NEEDED 1
89 #endif
90
91 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
92
93 /* Round a value down to the largest multiple of the required alignment
94 that is not greater than it. Avoid using division in case the value is
95 negative. Assume the alignment is a power of two. */
96 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
97
98 /* Similar, but round up to the next multiple of the alignment that is
99 not less than the value. */
100 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
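
/* Worked example (illustrative): with ALIGN == 16,
     FLOOR_ROUND (37, 16) == (37 & ~15) == 32
     CEIL_ROUND  (37, 16) == ((37 + 15) & ~15) == 48
   Both macros assume ALIGN is a power of two, so the mask ~(ALIGN - 1)
   simply clears the low-order bits.  */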
101
102 /* Nonzero once virtual register instantiation has been done.
103 assign_stack_local uses frame_pointer_rtx when this is nonzero.
104 calls.c:emit_library_call_value_1 uses it to set up
105 post-instantiation libcalls. */
106 int virtuals_instantiated;
107
108 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
109 static GTY(()) int funcdef_no;
110
111 /* These variables hold pointers to functions to create and destroy
112 target specific, per-function data structures. */
113 struct machine_function * (*init_machine_status) (void);
114
115 /* The currently compiled function. */
116 struct function *cfun = 0;
117
118 /* These hashes record the prologue and epilogue insns. */
119
120 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
121 {
122 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
123 static bool equal (rtx a, rtx b) { return a == b; }
124 };
125
126 static GTY((cache))
127 hash_table<insn_cache_hasher> *prologue_insn_hash;
128 static GTY((cache))
129 hash_table<insn_cache_hasher> *epilogue_insn_hash;
130
131
132 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
133 vec<tree, va_gc> *types_used_by_cur_var_decl;
134
135 /* Forward declarations. */
136
137 static class temp_slot *find_temp_slot_from_address (rtx);
138 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
139 static void pad_below (struct args_size *, machine_mode, tree);
140 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
141 static int all_blocks (tree, tree *);
142 static tree *get_block_vector (tree, int *);
143 extern tree debug_find_var_in_block_tree (tree, tree);
144 /* We always define `record_insns' even if it's not used so that we
145 can always export `prologue_epilogue_contains'. */
146 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
147 ATTRIBUTE_UNUSED;
148 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
149 static void prepare_function_start (void);
150 static void do_clobber_return_reg (rtx, void *);
151 static void do_use_return_reg (rtx, void *);
152
153
154 /* Stack of nested functions. */
155 /* Keep track of the cfun stack. */
156
157 static vec<function *> function_context_stack;
158
159 /* Save the current context for compilation of a nested function.
160 This is called from language-specific code. */
161
162 void
163 push_function_context (void)
164 {
165 if (cfun == 0)
166 allocate_struct_function (NULL, false);
167
168 function_context_stack.safe_push (cfun);
169 set_cfun (NULL);
170 }
171
172 /* Restore the last saved context, at the end of a nested function.
173 This function is called from language-specific code. */
174
175 void
176 pop_function_context (void)
177 {
178 struct function *p = function_context_stack.pop ();
179 set_cfun (p);
180 current_function_decl = p->decl;
181
182 /* Reset variables that have known state during rtx generation. */
183 virtuals_instantiated = 0;
184 generating_concat_p = 1;
185 }
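
/* A minimal sketch of the intended pairing from language-specific code
   (illustrative only; nested_fndecl is a placeholder):

     push_function_context ();                        // save the enclosing cfun
     allocate_struct_function (nested_fndecl, false);
     ... generate RTL for the nested function ...
     pop_function_context ();                         // restore the enclosing cfun

   Note that pop_function_context also resets virtuals_instantiated and
   generating_concat_p, as seen above.  */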
186
187 /* Clear out all parts of the state in F that can safely be discarded
188 after the function has been parsed, but not compiled, to let
189 garbage collection reclaim the memory. */
190
191 void
192 free_after_parsing (struct function *f)
193 {
194 f->language = 0;
195 }
196
197 /* Clear out all parts of the state in F that can safely be discarded
198 after the function has been compiled, to let garbage collection
199 reclaim the memory. */
200
201 void
202 free_after_compilation (struct function *f)
203 {
204 prologue_insn_hash = NULL;
205 epilogue_insn_hash = NULL;
206
207 free (crtl->emit.regno_pointer_align);
208
209 memset (crtl, 0, sizeof (struct rtl_data));
210 f->eh = NULL;
211 f->machine = NULL;
212 f->cfg = NULL;
213 f->curr_properties &= ~PROP_cfg;
214
215 regno_reg_rtx = NULL;
216 }
217
218 /* Return size needed for stack frame based on slots so far allocated.
219 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
220 the caller may have to do that. */
221
222 poly_int64
223 get_frame_size (void)
224 {
225 if (FRAME_GROWS_DOWNWARD)
226 return -frame_offset;
227 else
228 return frame_offset;
229 }
230
231 /* Issue an error message and return TRUE if frame OFFSET overflows in
232 the signed target pointer arithmetic for function FUNC. Otherwise
233 return FALSE. */
234
235 bool
236 frame_offset_overflow (poly_int64 offset, tree func)
237 {
238 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
239 unsigned HOST_WIDE_INT limit
240 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
241 /* Leave room for the fixed part of the frame. */
242 - 64 * UNITS_PER_WORD);
243
244 if (!coeffs_in_range_p (size, 0U, limit))
245 {
246 unsigned HOST_WIDE_INT hwisize;
247 if (size.is_constant (&hwisize))
248 error_at (DECL_SOURCE_LOCATION (func),
249 "total size of local objects %wu exceeds maximum %wu",
250 hwisize, limit);
251 else
252 error_at (DECL_SOURCE_LOCATION (func),
253 "total size of local objects exceeds maximum %wu",
254 limit);
255 return true;
256 }
257
258 return false;
259 }
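
/* For instance (illustrative), with a 64-bit Pmode and 8-byte words the
   limit above is 2^63 - 512 bytes; any frame whose local objects exceed
   that raises the "total size of local objects" error.  */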
260
261 /* Return the minimum spill slot alignment for a register of mode MODE. */
262
263 unsigned int
264 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
265 {
266 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
267 }
268
269 /* Return stack slot alignment in bits for TYPE and MODE. */
270
271 static unsigned int
272 get_stack_local_alignment (tree type, machine_mode mode)
273 {
274 unsigned int alignment;
275
276 if (mode == BLKmode)
277 alignment = BIGGEST_ALIGNMENT;
278 else
279 alignment = GET_MODE_ALIGNMENT (mode);
280
281 /* Allow the front end to (possibly) increase the alignment of this
282 stack slot. */
283 if (! type)
284 type = lang_hooks.types.type_for_mode (mode, 0);
285
286 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
287 }
288
289 /* Determine whether it is possible to fit a stack slot of size SIZE and
290 alignment ALIGNMENT into an area in the stack frame that starts at
291 frame offset START and has a length of LENGTH. If so, store the frame
292 offset to be used for the stack slot in *POFFSET and return true;
293 return false otherwise. This function will extend the frame size when
294 given a start/length pair that lies at the end of the frame. */
295
296 static bool
297 try_fit_stack_local (poly_int64 start, poly_int64 length,
298 poly_int64 size, unsigned int alignment,
299 poly_int64_pod *poffset)
300 {
301 poly_int64 this_frame_offset;
302 int frame_off, frame_alignment, frame_phase;
303
304 /* Calculate how many bytes the start of local variables is off from
305 stack alignment. */
306 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
307 frame_off = targetm.starting_frame_offset () % frame_alignment;
308 frame_phase = frame_off ? frame_alignment - frame_off : 0;
309
310 /* Round the frame offset to the specified alignment. */
311
312 if (FRAME_GROWS_DOWNWARD)
313 this_frame_offset
314 = (aligned_lower_bound (start + length - size - frame_phase, alignment)
315 + frame_phase);
316 else
317 this_frame_offset
318 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
319
320 /* See if it fits. If this space is at the edge of the frame,
321 consider extending the frame to make it fit. Our caller relies on
322 this when allocating a new slot. */
323 if (maybe_lt (this_frame_offset, start))
324 {
325 if (known_eq (frame_offset, start))
326 frame_offset = this_frame_offset;
327 else
328 return false;
329 }
330 else if (maybe_gt (this_frame_offset + size, start + length))
331 {
332 if (known_eq (frame_offset, start + length))
333 frame_offset = this_frame_offset + size;
334 else
335 return false;
336 }
337
338 *poffset = this_frame_offset;
339 return true;
340 }
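
/* Illustrative example, assuming FRAME_GROWS_DOWNWARD and frame_phase == 0:
   a request for SIZE == 4 with ALIGNMENT == 8 in the free area
   START == -24, LENGTH == 12 computes
     this_frame_offset = aligned_lower_bound (-24 + 12 - 4, 8) = -16,
   which lies inside [-24, -12), so *POFFSET is set to -16 and the
   function returns true without extending the frame.  */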
341
342 /* Create a new frame_space structure describing free space in the stack
343 frame beginning at START and ending at END, and chain it into the
344 function's frame_space_list. */
345
346 static void
347 add_frame_space (poly_int64 start, poly_int64 end)
348 {
349 class frame_space *space = ggc_alloc<frame_space> ();
350 space->next = crtl->frame_space_list;
351 crtl->frame_space_list = space;
352 space->start = start;
353 space->length = end - start;
354 }
355
356 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
357 with machine mode MODE.
358
359 ALIGN controls the amount of alignment for the address of the slot:
360 0 means according to MODE,
361 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
362 -2 means use BITS_PER_UNIT,
363 positive specifies alignment boundary in bits.
364
365 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
366 alignment and ASLK_RECORD_PAD bit set if we should remember
367 extra space we allocated for alignment purposes. When we are
368 called from assign_stack_temp_for_type, it is not set so we don't
369 track the same stack slot in two independent lists.
370
371 We do not round to stack_boundary here. */
372
373 rtx
374 assign_stack_local_1 (machine_mode mode, poly_int64 size,
375 int align, int kind)
376 {
377 rtx x, addr;
378 poly_int64 bigend_correction = 0;
379 poly_int64 slot_offset = 0, old_frame_offset;
380 unsigned int alignment, alignment_in_bits;
381
382 if (align == 0)
383 {
384 alignment = get_stack_local_alignment (NULL, mode);
385 alignment /= BITS_PER_UNIT;
386 }
387 else if (align == -1)
388 {
389 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
390 size = aligned_upper_bound (size, alignment);
391 }
392 else if (align == -2)
393 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
394 else
395 alignment = align / BITS_PER_UNIT;
396
397 alignment_in_bits = alignment * BITS_PER_UNIT;
398
399 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
400 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
401 {
402 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
403 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
404 }
405
406 if (SUPPORTS_STACK_ALIGNMENT)
407 {
408 if (crtl->stack_alignment_estimated < alignment_in_bits)
409 {
410 if (!crtl->stack_realign_processed)
411 crtl->stack_alignment_estimated = alignment_in_bits;
412 else
413 {
414 /* If stack is realigned and stack alignment value
415 hasn't been finalized, it is OK not to increase
416 stack_alignment_estimated. The bigger alignment
417 requirement is recorded in stack_alignment_needed
418 below. */
419 gcc_assert (!crtl->stack_realign_finalized);
420 if (!crtl->stack_realign_needed)
421 {
422 /* It is OK to reduce the alignment as long as the
423 requested size is 0 or the estimated stack
424 alignment >= mode alignment. */
425 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
426 || known_eq (size, 0)
427 || (crtl->stack_alignment_estimated
428 >= GET_MODE_ALIGNMENT (mode)));
429 alignment_in_bits = crtl->stack_alignment_estimated;
430 alignment = alignment_in_bits / BITS_PER_UNIT;
431 }
432 }
433 }
434 }
435
436 if (crtl->stack_alignment_needed < alignment_in_bits)
437 crtl->stack_alignment_needed = alignment_in_bits;
438 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
439 crtl->max_used_stack_slot_alignment = alignment_in_bits;
440
441 if (mode != BLKmode || maybe_ne (size, 0))
442 {
443 if (kind & ASLK_RECORD_PAD)
444 {
445 class frame_space **psp;
446
447 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
448 {
449 class frame_space *space = *psp;
450 if (!try_fit_stack_local (space->start, space->length, size,
451 alignment, &slot_offset))
452 continue;
453 *psp = space->next;
454 if (known_gt (slot_offset, space->start))
455 add_frame_space (space->start, slot_offset);
456 if (known_lt (slot_offset + size, space->start + space->length))
457 add_frame_space (slot_offset + size,
458 space->start + space->length);
459 goto found_space;
460 }
461 }
462 }
463 else if (!STACK_ALIGNMENT_NEEDED)
464 {
465 slot_offset = frame_offset;
466 goto found_space;
467 }
468
469 old_frame_offset = frame_offset;
470
471 if (FRAME_GROWS_DOWNWARD)
472 {
473 frame_offset -= size;
474 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
475
476 if (kind & ASLK_RECORD_PAD)
477 {
478 if (known_gt (slot_offset, frame_offset))
479 add_frame_space (frame_offset, slot_offset);
480 if (known_lt (slot_offset + size, old_frame_offset))
481 add_frame_space (slot_offset + size, old_frame_offset);
482 }
483 }
484 else
485 {
486 frame_offset += size;
487 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
488
489 if (kind & ASLK_RECORD_PAD)
490 {
491 if (known_gt (slot_offset, old_frame_offset))
492 add_frame_space (old_frame_offset, slot_offset);
493 if (known_lt (slot_offset + size, frame_offset))
494 add_frame_space (slot_offset + size, frame_offset);
495 }
496 }
497
498 found_space:
499 /* On a big-endian machine, if we are allocating more space than we will use,
500 use the least significant bytes of those that are allocated. */
501 if (mode != BLKmode)
502 {
503 /* The slot size can sometimes be smaller than the mode size;
504 e.g. the rs6000 port allocates slots with a vector mode
505 that have the size of only one element. However, the slot
506 size must always be ordered wrt to the mode size, in the
507 same way as for a subreg. */
508 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
509 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
510 bigend_correction = size - GET_MODE_SIZE (mode);
511 }
512
513 /* If we have already instantiated virtual registers, return the actual
514 address relative to the frame pointer. */
515 if (virtuals_instantiated)
516 addr = plus_constant (Pmode, frame_pointer_rtx,
517 trunc_int_for_mode
518 (slot_offset + bigend_correction
519 + targetm.starting_frame_offset (), Pmode));
520 else
521 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
522 trunc_int_for_mode
523 (slot_offset + bigend_correction,
524 Pmode));
525
526 x = gen_rtx_MEM (mode, addr);
527 set_mem_align (x, alignment_in_bits);
528 MEM_NOTRAP_P (x) = 1;
529
530 vec_safe_push (stack_slot_list, x);
531
532 if (frame_offset_overflow (frame_offset, current_function_decl))
533 frame_offset = 0;
534
535 return x;
536 }
537
538 /* Wrapper around assign_stack_local_1; pass ASLK_RECORD_PAD as KIND. */
539
540 rtx
541 assign_stack_local (machine_mode mode, poly_int64 size, int align)
542 {
543 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
544 }
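
/* Example usage (illustrative): a caller needing an 8-byte slot aligned
   only as required by its mode would write

     rtx slot = assign_stack_local (DImode, 8, 0);

   whereas passing -1 for ALIGN makes the slot BIGGEST_ALIGNMENT-aligned
   and rounds the size up to a multiple of that alignment.  */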
545
546 /* In order to evaluate some expressions, such as function calls returning
547 structures in memory, we need to temporarily allocate stack locations.
548 We record each allocated temporary in the following structure.
549
550 Associated with each temporary slot is a nesting level. When we pop up
551 one level, all temporaries associated with the previous level are freed.
552 Normally, all temporaries are freed after the execution of the statement
553 in which they were created. However, if we are inside a ({...}) grouping,
554 the result may be in a temporary and hence must be preserved. If the
555 result could be in a temporary, we preserve it if we can determine which
556 one it is in. If we cannot determine which temporary may contain the
557 result, all temporaries are preserved. A temporary is preserved by
558 pretending it was allocated at the previous nesting level. */
559
560 class GTY(()) temp_slot {
561 public:
562 /* Points to next temporary slot. */
563 class temp_slot *next;
564 /* Points to previous temporary slot. */
565 class temp_slot *prev;
566 /* The rtx used to reference the slot. */
567 rtx slot;
568 /* The size, in units, of the slot. */
569 poly_int64 size;
570 /* The type of the object in the slot, or zero if it doesn't correspond
571 to a type. We use this to determine whether a slot can be reused.
572 It can be reused if objects of the type of the new slot will always
573 conflict with objects of the type of the old slot. */
574 tree type;
575 /* The alignment (in bits) of the slot. */
576 unsigned int align;
577 /* Nonzero if this temporary is currently in use. */
578 char in_use;
579 /* Nesting level at which this slot is being used. */
580 int level;
581 /* The offset of the slot from the frame_pointer, including extra space
582 for alignment. This info is for combine_temp_slots. */
583 poly_int64 base_offset;
584 /* The size of the slot, including extra space for alignment. This
585 info is for combine_temp_slots. */
586 poly_int64 full_size;
587 };
588
589 /* Entry for the below hash table. */
590 struct GTY((for_user)) temp_slot_address_entry {
591 hashval_t hash;
592 rtx address;
593 class temp_slot *temp_slot;
594 };
595
596 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
597 {
598 static hashval_t hash (temp_slot_address_entry *);
599 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
600 };
601
602 /* A table of addresses that represent a stack slot. The table is a mapping
603 from address RTXen to a temp slot. */
604 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
605 static size_t n_temp_slots_in_use;
606
607 /* Removes temporary slot TEMP from LIST. */
608
609 static void
610 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
611 {
612 if (temp->next)
613 temp->next->prev = temp->prev;
614 if (temp->prev)
615 temp->prev->next = temp->next;
616 else
617 *list = temp->next;
618
619 temp->prev = temp->next = NULL;
620 }
621
622 /* Inserts temporary slot TEMP to LIST. */
623
624 static void
625 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
626 {
627 temp->next = *list;
628 if (*list)
629 (*list)->prev = temp;
630 temp->prev = NULL;
631 *list = temp;
632 }
633
634 /* Returns the list of used temp slots at LEVEL. */
635
636 static class temp_slot **
637 temp_slots_at_level (int level)
638 {
639 if (level >= (int) vec_safe_length (used_temp_slots))
640 vec_safe_grow_cleared (used_temp_slots, level + 1);
641
642 return &(*used_temp_slots)[level];
643 }
644
645 /* Returns the maximal temporary slot level. */
646
647 static int
648 max_slot_level (void)
649 {
650 if (!used_temp_slots)
651 return -1;
652
653 return used_temp_slots->length () - 1;
654 }
655
656 /* Moves temporary slot TEMP to LEVEL. */
657
658 static void
659 move_slot_to_level (class temp_slot *temp, int level)
660 {
661 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
662 insert_slot_to_list (temp, temp_slots_at_level (level));
663 temp->level = level;
664 }
665
666 /* Make temporary slot TEMP available. */
667
668 static void
669 make_slot_available (class temp_slot *temp)
670 {
671 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
672 insert_slot_to_list (temp, &avail_temp_slots);
673 temp->in_use = 0;
674 temp->level = -1;
675 n_temp_slots_in_use--;
676 }
677
678 /* Compute the hash value for an address -> temp slot mapping.
679 The value is cached on the mapping entry. */
680 static hashval_t
681 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
682 {
683 int do_not_record = 0;
684 return hash_rtx (t->address, GET_MODE (t->address),
685 &do_not_record, NULL, false);
686 }
687
688 /* Return the hash value for an address -> temp slot mapping. */
689 hashval_t
690 temp_address_hasher::hash (temp_slot_address_entry *t)
691 {
692 return t->hash;
693 }
694
695 /* Compare two address -> temp slot mapping entries. */
696 bool
697 temp_address_hasher::equal (temp_slot_address_entry *t1,
698 temp_slot_address_entry *t2)
699 {
700 return exp_equiv_p (t1->address, t2->address, 0, true);
701 }
702
703 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
704 static void
705 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
706 {
707 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
708 t->address = copy_rtx (address);
709 t->temp_slot = temp_slot;
710 t->hash = temp_slot_address_compute_hash (t);
711 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
712 }
713
714 /* Remove an address -> temp slot mapping entry if the temp slot is
715 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
716 int
717 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
718 {
719 const struct temp_slot_address_entry *t = *slot;
720 if (! t->temp_slot->in_use)
721 temp_slot_address_table->clear_slot (slot);
722 return 1;
723 }
724
725 /* Remove all mappings of addresses to unused temp slots. */
726 static void
727 remove_unused_temp_slot_addresses (void)
728 {
729 /* Use quicker clearing if there aren't any active temp slots. */
730 if (n_temp_slots_in_use)
731 temp_slot_address_table->traverse
732 <void *, remove_unused_temp_slot_addresses_1> (NULL);
733 else
734 temp_slot_address_table->empty ();
735 }
736
737 /* Find the temp slot corresponding to the object at address X. */
738
739 static class temp_slot *
740 find_temp_slot_from_address (rtx x)
741 {
742 class temp_slot *p;
743 struct temp_slot_address_entry tmp, *t;
744
745 /* First try the easy way:
746 See if X exists in the address -> temp slot mapping. */
747 tmp.address = x;
748 tmp.temp_slot = NULL;
749 tmp.hash = temp_slot_address_compute_hash (&tmp);
750 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
751 if (t)
752 return t->temp_slot;
753
754 /* If we have a sum involving a register, see if it points to a temp
755 slot. */
756 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
757 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
758 return p;
759 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
760 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
761 return p;
762
763 /* Last resort: Address is a virtual stack var address. */
764 poly_int64 offset;
765 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
766 {
767 int i;
768 for (i = max_slot_level (); i >= 0; i--)
769 for (p = *temp_slots_at_level (i); p; p = p->next)
770 if (known_in_range_p (offset, p->base_offset, p->full_size))
771 return p;
772 }
773
774 return NULL;
775 }
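
/* Illustrative example: if (reg 101) was recorded via
   insert_temp_slot_address, an address such as
   (plus (reg 101) (const_int 4)) is resolved by the recursive call on the
   register operand; an address of the form
   (plus (reg virtual-stack-vars) (const_int -16)) instead falls through
   to the base_offset/full_size scan at the end.  */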
776
777 /* Allocate a temporary stack slot and record it for possible later
778 reuse.
779
780 MODE is the machine mode to be given to the returned rtx.
781
782 SIZE is the size in units of the space required. We do no rounding here
783 since assign_stack_local will do any required rounding.
784
785 TYPE is the type that will be used for the stack slot. */
786
787 rtx
788 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
789 {
790 unsigned int align;
791 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
792 rtx slot;
793
794 gcc_assert (known_size_p (size));
795
796 align = get_stack_local_alignment (type, mode);
797
798 /* Try to find an available, already-allocated temporary of the proper
799 mode which meets the size and alignment requirements. Choose the
800 smallest one with the closest alignment.
801
802 If assign_stack_temp is called outside of the tree->rtl expansion,
803 we cannot reuse the stack slots (that may still refer to
804 VIRTUAL_STACK_VARS_REGNUM). */
805 if (!virtuals_instantiated)
806 {
807 for (p = avail_temp_slots; p; p = p->next)
808 {
809 if (p->align >= align
810 && known_ge (p->size, size)
811 && GET_MODE (p->slot) == mode
812 && objects_must_conflict_p (p->type, type)
813 && (best_p == 0
814 || (known_eq (best_p->size, p->size)
815 ? best_p->align > p->align
816 : known_ge (best_p->size, p->size))))
817 {
818 if (p->align == align && known_eq (p->size, size))
819 {
820 selected = p;
821 cut_slot_from_list (selected, &avail_temp_slots);
822 best_p = 0;
823 break;
824 }
825 best_p = p;
826 }
827 }
828 }
829
830 /* Make our best, if any, the one to use. */
831 if (best_p)
832 {
833 selected = best_p;
834 cut_slot_from_list (selected, &avail_temp_slots);
835
836 /* If there are enough aligned bytes left over, make them into a new
837 temp_slot so that the extra bytes don't get wasted. Do this only
838 for BLKmode slots, so that we can be sure of the alignment. */
839 if (GET_MODE (best_p->slot) == BLKmode)
840 {
841 int alignment = best_p->align / BITS_PER_UNIT;
842 poly_int64 rounded_size = aligned_upper_bound (size, alignment);
843
844 if (known_ge (best_p->size - rounded_size, alignment))
845 {
846 p = ggc_alloc<temp_slot> ();
847 p->in_use = 0;
848 p->size = best_p->size - rounded_size;
849 p->base_offset = best_p->base_offset + rounded_size;
850 p->full_size = best_p->full_size - rounded_size;
851 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
852 p->align = best_p->align;
853 p->type = best_p->type;
854 insert_slot_to_list (p, &avail_temp_slots);
855
856 vec_safe_push (stack_slot_list, p->slot);
857
858 best_p->size = rounded_size;
859 best_p->full_size = rounded_size;
860 }
861 }
862 }
863
864 /* If we still didn't find one, make a new temporary. */
865 if (selected == 0)
866 {
867 poly_int64 frame_offset_old = frame_offset;
868
869 p = ggc_alloc<temp_slot> ();
870
871 /* We are passing an explicit alignment request to assign_stack_local.
872 One side effect of that is assign_stack_local will not round SIZE
873 to ensure the frame offset remains suitably aligned.
874
875 So for requests which depended on the rounding of SIZE, we go ahead
876 and round it now. We also make sure ALIGNMENT is at least
877 BIGGEST_ALIGNMENT. */
878 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
879 p->slot = assign_stack_local_1 (mode,
880 (mode == BLKmode
881 ? aligned_upper_bound (size,
882 (int) align
883 / BITS_PER_UNIT)
884 : size),
885 align, 0);
886
887 p->align = align;
888
889 /* The following slot size computation is necessary because we don't
890 know the actual size of the temporary slot until assign_stack_local
891 has performed all the frame alignment and size rounding for the
892 requested temporary. Note that extra space added for alignment
893 can be either above or below this stack slot depending on which
894 way the frame grows. We include the extra space if and only if it
895 is above this slot. */
896 if (FRAME_GROWS_DOWNWARD)
897 p->size = frame_offset_old - frame_offset;
898 else
899 p->size = size;
900
901 /* Now define the fields used by combine_temp_slots. */
902 if (FRAME_GROWS_DOWNWARD)
903 {
904 p->base_offset = frame_offset;
905 p->full_size = frame_offset_old - frame_offset;
906 }
907 else
908 {
909 p->base_offset = frame_offset_old;
910 p->full_size = frame_offset - frame_offset_old;
911 }
912
913 selected = p;
914 }
915
916 p = selected;
917 p->in_use = 1;
918 p->type = type;
919 p->level = temp_slot_level;
920 n_temp_slots_in_use++;
921
922 pp = temp_slots_at_level (p->level);
923 insert_slot_to_list (p, pp);
924 insert_temp_slot_address (XEXP (p->slot, 0), p);
925
926 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
927 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
928 vec_safe_push (stack_slot_list, slot);
929
930 /* If we know the alias set for the memory that will be used, use
931 it. If there's no TYPE, then we don't know anything about the
932 alias set for the memory. */
933 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
934 set_mem_align (slot, align);
935
936 /* If a type is specified, set the relevant flags. */
937 if (type != 0)
938 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
939 MEM_NOTRAP_P (slot) = 1;
940
941 return slot;
942 }
943
944 /* Allocate a temporary stack slot and record it for possible later
945 reuse. First two arguments are same as in preceding function. */
946
947 rtx
948 assign_stack_temp (machine_mode mode, poly_int64 size)
949 {
950 return assign_stack_temp_for_type (mode, size, NULL_TREE);
951 }
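
/* For example (illustrative), expanding a call whose aggregate return
   value lives in memory might allocate its scratch storage with

     rtx ret_slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));

   and a later statement at the same temp_slot_level can reuse that slot
   once free_temp_slots has released it.  */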
952
953 /* Assign a temporary.
954 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
955 and so it should be used in error messages. In either case, we
956 allocate a temporary of the given type.
957 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
958 it is 0 if a register is OK.
959 DONT_PROMOTE is 1 if we should not promote values in register
960 to wider modes. */
961
962 rtx
963 assign_temp (tree type_or_decl, int memory_required,
964 int dont_promote ATTRIBUTE_UNUSED)
965 {
966 tree type, decl;
967 machine_mode mode;
968 #ifdef PROMOTE_MODE
969 int unsignedp;
970 #endif
971
972 if (DECL_P (type_or_decl))
973 decl = type_or_decl, type = TREE_TYPE (decl);
974 else
975 decl = NULL, type = type_or_decl;
976
977 mode = TYPE_MODE (type);
978 #ifdef PROMOTE_MODE
979 unsignedp = TYPE_UNSIGNED (type);
980 #endif
981
982 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
983 end. See also create_tmp_var for the gimplification-time check. */
984 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
985
986 if (mode == BLKmode || memory_required)
987 {
988 poly_int64 size;
989 rtx tmp;
990
991 /* Unfortunately, we don't yet know how to allocate variable-sized
992 temporaries. However, sometimes we can find a fixed upper limit on
993 the size, so try that instead. */
994 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
995 size = max_int_size_in_bytes (type);
996
997 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
998 problems with allocating the stack space. */
999 if (known_eq (size, 0))
1000 size = 1;
1001
1002 /* The size of the temporary may be too large to fit into an integer. */
1003 /* ??? Not sure this should happen except for user silliness, so limit
1004 this to things that aren't compiler-generated temporaries. The
1005 rest of the time we'll die in assign_stack_temp_for_type. */
1006 if (decl
1007 && !known_size_p (size)
1008 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1009 {
1010 error ("size of variable %q+D is too large", decl);
1011 size = 1;
1012 }
1013
1014 tmp = assign_stack_temp_for_type (mode, size, type);
1015 return tmp;
1016 }
1017
1018 #ifdef PROMOTE_MODE
1019 if (! dont_promote)
1020 mode = promote_mode (type, mode, &unsignedp);
1021 #endif
1022
1023 return gen_reg_rtx (mode);
1024 }
1025
1026 /* Combine temporary stack slots which are adjacent on the stack.
1027
1028 This allows for better use of already allocated stack space. This is only
1029 done for BLKmode slots because we can be sure that we won't have alignment
1030 problems in this case. */
1031
1032 static void
1033 combine_temp_slots (void)
1034 {
1035 class temp_slot *p, *q, *next, *next_q;
1036 int num_slots;
1037
1038 /* We can't combine slots, because the information about which slot
1039 is in which alias set will be lost. */
1040 if (flag_strict_aliasing)
1041 return;
1042
1043 /* If there are a lot of temp slots, don't do anything unless
1044 high levels of optimization are enabled. */
1045 if (! flag_expensive_optimizations)
1046 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1047 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1048 return;
1049
1050 for (p = avail_temp_slots; p; p = next)
1051 {
1052 int delete_p = 0;
1053
1054 next = p->next;
1055
1056 if (GET_MODE (p->slot) != BLKmode)
1057 continue;
1058
1059 for (q = p->next; q; q = next_q)
1060 {
1061 int delete_q = 0;
1062
1063 next_q = q->next;
1064
1065 if (GET_MODE (q->slot) != BLKmode)
1066 continue;
1067
1068 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1069 {
1070 /* Q comes after P; combine Q into P. */
1071 p->size += q->size;
1072 p->full_size += q->full_size;
1073 delete_q = 1;
1074 }
1075 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1076 {
1077 /* P comes after Q; combine P into Q. */
1078 q->size += p->size;
1079 q->full_size += p->full_size;
1080 delete_p = 1;
1081 break;
1082 }
1083 if (delete_q)
1084 cut_slot_from_list (q, &avail_temp_slots);
1085 }
1086
1087 /* Either delete P or advance past it. */
1088 if (delete_p)
1089 cut_slot_from_list (p, &avail_temp_slots);
1090 }
1091 }
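
/* Illustrative example: two free BLKmode slots P and Q with
   P->base_offset == -32, P->full_size == 16 and Q->base_offset == -16,
   Q->full_size == 16 satisfy the first known_eq test, so Q is merged
   into P, leaving a single slot covering 32 bytes.  */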
1092
1093 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1094 slot that previously was known by OLD_RTX. */
1095
1096 void
1097 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1098 {
1099 class temp_slot *p;
1100
1101 if (rtx_equal_p (old_rtx, new_rtx))
1102 return;
1103
1104 p = find_temp_slot_from_address (old_rtx);
1105
1106 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1107 NEW_RTX is a register, see if one operand of the PLUS is a
1108 temporary location. If so, NEW_RTX points into it. Otherwise,
1109 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1110 in common between them. If so, try a recursive call on those
1111 values. */
1112 if (p == 0)
1113 {
1114 if (GET_CODE (old_rtx) != PLUS)
1115 return;
1116
1117 if (REG_P (new_rtx))
1118 {
1119 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1120 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1121 return;
1122 }
1123 else if (GET_CODE (new_rtx) != PLUS)
1124 return;
1125
1126 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1127 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1128 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1129 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1130 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1131 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1132 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1133 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1134
1135 return;
1136 }
1137
1138 /* Otherwise add an alias for the temp's address. */
1139 insert_temp_slot_address (new_rtx, p);
1140 }
1141
1142 /* If X could be a reference to a temporary slot, mark that slot as
1143 belonging to the level one higher than the current level. If X
1144 matched one of our slots, just mark that one. Otherwise, we can't
1145 easily predict which it is, so upgrade all of them.
1146
1147 This is called when an ({...}) construct occurs and a statement
1148 returns a value in memory. */
1149
1150 void
1151 preserve_temp_slots (rtx x)
1152 {
1153 class temp_slot *p = 0, *next;
1154
1155 if (x == 0)
1156 return;
1157
1158 /* If X is a register that is being used as a pointer, see if we have
1159 a temporary slot we know it points to. */
1160 if (REG_P (x) && REG_POINTER (x))
1161 p = find_temp_slot_from_address (x);
1162
1163 /* If X is not in memory or is at a constant address, it cannot be in
1164 a temporary slot. */
1165 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1166 return;
1167
1168 /* First see if we can find a match. */
1169 if (p == 0)
1170 p = find_temp_slot_from_address (XEXP (x, 0));
1171
1172 if (p != 0)
1173 {
1174 if (p->level == temp_slot_level)
1175 move_slot_to_level (p, temp_slot_level - 1);
1176 return;
1177 }
1178
1179 /* Otherwise, preserve all non-kept slots at this level. */
1180 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1181 {
1182 next = p->next;
1183 move_slot_to_level (p, temp_slot_level - 1);
1184 }
1185 }
1186
1187 /* Free all temporaries used so far. This is normally called at the
1188 end of generating code for a statement. */
1189
1190 void
1191 free_temp_slots (void)
1192 {
1193 class temp_slot *p, *next;
1194 bool some_available = false;
1195
1196 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1197 {
1198 next = p->next;
1199 make_slot_available (p);
1200 some_available = true;
1201 }
1202
1203 if (some_available)
1204 {
1205 remove_unused_temp_slot_addresses ();
1206 combine_temp_slots ();
1207 }
1208 }
1209
1210 /* Push deeper into the nesting level for stack temporaries. */
1211
1212 void
1213 push_temp_slots (void)
1214 {
1215 temp_slot_level++;
1216 }
1217
1218 /* Pop a temporary nesting level. All slots in use in the current level
1219 are freed. */
1220
1221 void
1222 pop_temp_slots (void)
1223 {
1224 free_temp_slots ();
1225 temp_slot_level--;
1226 }
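
/* A typical (illustrative) pattern around the expansion of a statement
   whose value may land in a temporary:

     push_temp_slots ();
     rtx val = expand_normal (exp);    // may allocate temp slots
     preserve_temp_slots (val);        // keep the slot holding VAL
     pop_temp_slots ();                // frees the rest of this level
*/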
1227
1228 /* Initialize temporary slots. */
1229
1230 void
1231 init_temp_slots (void)
1232 {
1233 /* We have not allocated any temporaries yet. */
1234 avail_temp_slots = 0;
1235 vec_alloc (used_temp_slots, 0);
1236 temp_slot_level = 0;
1237 n_temp_slots_in_use = 0;
1238
1239 /* Set up the table to map addresses to temp slots. */
1240 if (! temp_slot_address_table)
1241 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1242 else
1243 temp_slot_address_table->empty ();
1244 }
1245
1246 /* Functions and data structures to keep track of the values hard regs
1247 had at the start of the function. */
1248
1249 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1250 and has_hard_reg_initial_val. */
1251 struct GTY(()) initial_value_pair {
1252 rtx hard_reg;
1253 rtx pseudo;
1254 };
1255 /* ??? This could be a VEC but there is currently no way to define an
1256 opaque VEC type. This could be worked around by defining struct
1257 initial_value_pair in function.h. */
1258 struct GTY(()) initial_value_struct {
1259 int num_entries;
1260 int max_entries;
1261 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1262 };
1263
1264 /* If a pseudo represents an initial hard reg (or expression), return
1265 it, else return NULL_RTX. */
1266
1267 rtx
1268 get_hard_reg_initial_reg (rtx reg)
1269 {
1270 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1271 int i;
1272
1273 if (ivs == 0)
1274 return NULL_RTX;
1275
1276 for (i = 0; i < ivs->num_entries; i++)
1277 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1278 return ivs->entries[i].hard_reg;
1279
1280 return NULL_RTX;
1281 }
1282
1283 /* Make sure that there's a pseudo register of mode MODE that stores the
1284 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1285
1286 rtx
1287 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1288 {
1289 struct initial_value_struct *ivs;
1290 rtx rv;
1291
1292 rv = has_hard_reg_initial_val (mode, regno);
1293 if (rv)
1294 return rv;
1295
1296 ivs = crtl->hard_reg_initial_vals;
1297 if (ivs == 0)
1298 {
1299 ivs = ggc_alloc<initial_value_struct> ();
1300 ivs->num_entries = 0;
1301 ivs->max_entries = 5;
1302 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1303 crtl->hard_reg_initial_vals = ivs;
1304 }
1305
1306 if (ivs->num_entries >= ivs->max_entries)
1307 {
1308 ivs->max_entries += 5;
1309 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1310 ivs->max_entries);
1311 }
1312
1313 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1314 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1315
1316 return ivs->entries[ivs->num_entries++].pseudo;
1317 }
1318
1319 /* See if get_hard_reg_initial_val has been used to create a pseudo
1320 for the initial value of hard register REGNO in mode MODE. Return
1321 the associated pseudo if so, otherwise return NULL. */
1322
1323 rtx
1324 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1325 {
1326 struct initial_value_struct *ivs;
1327 int i;
1328
1329 ivs = crtl->hard_reg_initial_vals;
1330 if (ivs != 0)
1331 for (i = 0; i < ivs->num_entries; i++)
1332 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1333 && REGNO (ivs->entries[i].hard_reg) == regno)
1334 return ivs->entries[i].pseudo;
1335
1336 return NULL_RTX;
1337 }
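
/* As an illustrative example, a back end that wants the value its link
   register had on entry (say, for __builtin_return_address (0)) can ask

     rtx lr_entry = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   where LR_REGNUM stands for whatever the target's incoming register is;
   emit_initial_value_sets later emits the copy into the pseudo at the
   function entry point.  */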
1338
1339 unsigned int
1340 emit_initial_value_sets (void)
1341 {
1342 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1343 int i;
1344 rtx_insn *seq;
1345
1346 if (ivs == 0)
1347 return 0;
1348
1349 start_sequence ();
1350 for (i = 0; i < ivs->num_entries; i++)
1351 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1352 seq = get_insns ();
1353 end_sequence ();
1354
1355 emit_insn_at_entry (seq);
1356 return 0;
1357 }
1358
1359 /* Return the hardreg-pseudoreg initial values pair entry I and
1360 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1361 bool
1362 initial_value_entry (int i, rtx *hreg, rtx *preg)
1363 {
1364 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1365 if (!ivs || i >= ivs->num_entries)
1366 return false;
1367
1368 *hreg = ivs->entries[i].hard_reg;
1369 *preg = ivs->entries[i].pseudo;
1370 return true;
1371 }
1372
1373 /* These routines are responsible for converting virtual register references
1374 to the actual hard register references once RTL generation is complete.
1375
1376 The following four variables are used for communication between the
1377 routines. They contain the offsets of the virtual registers from their
1378 respective hard registers. */
1379
1380 static poly_int64 in_arg_offset;
1381 static poly_int64 var_offset;
1382 static poly_int64 dynamic_offset;
1383 static poly_int64 out_arg_offset;
1384 static poly_int64 cfa_offset;
1385
1386 /* In most machines, the stack pointer register is equivalent to the bottom
1387 of the stack. */
1388
1389 #ifndef STACK_POINTER_OFFSET
1390 #define STACK_POINTER_OFFSET 0
1391 #endif
1392
1393 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1394 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1395 #endif
1396
1397 /* If not defined, pick an appropriate default for the offset of dynamically
1398 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1399 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1400
1401 #ifndef STACK_DYNAMIC_OFFSET
1402
1403 /* The bottom of the stack points to the actual arguments. If
1404 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1405 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1406 stack space for register parameters is not pushed by the caller, but
1407 rather is part of the fixed stack areas and hence not included in
1408 `crtl->outgoing_args_size'. Nevertheless, we must allow
1409 for it when allocating stack dynamic objects. */
1410
1411 #ifdef INCOMING_REG_PARM_STACK_SPACE
1412 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1413 ((ACCUMULATE_OUTGOING_ARGS \
1414 ? (crtl->outgoing_args_size \
1415 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1416 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1417 : 0) + (STACK_POINTER_OFFSET))
1418 #else
1419 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1420 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1421 + (STACK_POINTER_OFFSET))
1422 #endif
1423 #endif
1424
1425
1426 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1427 is a virtual register, return the equivalent hard register and set the
1428 offset indirectly through the pointer. Otherwise, return 0. */
1429
1430 static rtx
1431 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1432 {
1433 rtx new_rtx;
1434 poly_int64 offset;
1435
1436 if (x == virtual_incoming_args_rtx)
1437 {
1438 if (stack_realign_drap)
1439 {
1440 /* Replace virtual_incoming_args_rtx with internal arg
1441 pointer if DRAP is used to realign stack. */
1442 new_rtx = crtl->args.internal_arg_pointer;
1443 offset = 0;
1444 }
1445 else
1446 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1447 }
1448 else if (x == virtual_stack_vars_rtx)
1449 new_rtx = frame_pointer_rtx, offset = var_offset;
1450 else if (x == virtual_stack_dynamic_rtx)
1451 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1452 else if (x == virtual_outgoing_args_rtx)
1453 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1454 else if (x == virtual_cfa_rtx)
1455 {
1456 #ifdef FRAME_POINTER_CFA_OFFSET
1457 new_rtx = frame_pointer_rtx;
1458 #else
1459 new_rtx = arg_pointer_rtx;
1460 #endif
1461 offset = cfa_offset;
1462 }
1463 else if (x == virtual_preferred_stack_boundary_rtx)
1464 {
1465 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1466 offset = 0;
1467 }
1468 else
1469 return NULL_RTX;
1470
1471 *poffset = offset;
1472 return new_rtx;
1473 }
1474
1475 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1476 registers present inside of *LOC. The expression is simplified,
1477 as much as possible, but is not to be considered "valid" in any sense
1478 implied by the target. Return true if any change is made. */
1479
1480 static bool
1481 instantiate_virtual_regs_in_rtx (rtx *loc)
1482 {
1483 if (!*loc)
1484 return false;
1485 bool changed = false;
1486 subrtx_ptr_iterator::array_type array;
1487 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1488 {
1489 rtx *loc = *iter;
1490 if (rtx x = *loc)
1491 {
1492 rtx new_rtx;
1493 poly_int64 offset;
1494 switch (GET_CODE (x))
1495 {
1496 case REG:
1497 new_rtx = instantiate_new_reg (x, &offset);
1498 if (new_rtx)
1499 {
1500 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1501 changed = true;
1502 }
1503 iter.skip_subrtxes ();
1504 break;
1505
1506 case PLUS:
1507 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1508 if (new_rtx)
1509 {
1510 XEXP (x, 0) = new_rtx;
1511 *loc = plus_constant (GET_MODE (x), x, offset, true);
1512 changed = true;
1513 iter.skip_subrtxes ();
1514 break;
1515 }
1516
1517 /* FIXME -- from old code */
1518 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1519 we can commute the PLUS and SUBREG because pointers into the
1520 frame are well-behaved. */
1521 break;
1522
1523 default:
1524 break;
1525 }
1526 }
1527 }
1528 return changed;
1529 }
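
/* For example (illustrative), with var_offset == -16 an operand such as

     (mem:SI (plus:DI (reg:DI virtual-stack-vars) (const_int 8)))

   is rewritten in place to

     (mem:SI (plus:DI (reg:DI frame-pointer) (const_int -8)))

   by substituting the virtual register and folding the offset through
   plus_constant.  */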
1530
1531 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1532 matches the predicate for insn CODE operand OPERAND. */
1533
1534 static int
1535 safe_insn_predicate (int code, int operand, rtx x)
1536 {
1537 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1538 }
1539
1540 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1541 registers present inside of insn. The result will be a valid insn. */
1542
1543 static void
1544 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1545 {
1546 poly_int64 offset;
1547 int insn_code, i;
1548 bool any_change = false;
1549 rtx set, new_rtx, x;
1550 rtx_insn *seq;
1551
1552 /* There are some special cases to be handled first. */
1553 set = single_set (insn);
1554 if (set)
1555 {
1556 /* We're allowed to assign to a virtual register. This is interpreted
1557 to mean that the underlying register gets assigned the inverse
1558 transformation. This is used, for example, in the handling of
1559 non-local gotos. */
1560 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1561 if (new_rtx)
1562 {
1563 start_sequence ();
1564
1565 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1566 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1567 gen_int_mode (-offset, GET_MODE (new_rtx)));
1568 x = force_operand (x, new_rtx);
1569 if (x != new_rtx)
1570 emit_move_insn (new_rtx, x);
1571
1572 seq = get_insns ();
1573 end_sequence ();
1574
1575 emit_insn_before (seq, insn);
1576 delete_insn (insn);
1577 return;
1578 }
1579
1580 /* Handle a straight copy from a virtual register by generating a
1581 new add insn. The difference between this and falling through
1582 to the generic case is avoiding a new pseudo and eliminating a
1583 move insn in the initial rtl stream. */
1584 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1585 if (new_rtx
1586 && maybe_ne (offset, 0)
1587 && REG_P (SET_DEST (set))
1588 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1589 {
1590 start_sequence ();
1591
1592 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1593 gen_int_mode (offset,
1594 GET_MODE (SET_DEST (set))),
1595 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1596 if (x != SET_DEST (set))
1597 emit_move_insn (SET_DEST (set), x);
1598
1599 seq = get_insns ();
1600 end_sequence ();
1601
1602 emit_insn_before (seq, insn);
1603 delete_insn (insn);
1604 return;
1605 }
1606
1607 extract_insn (insn);
1608 insn_code = INSN_CODE (insn);
1609
1610 /* Handle a plus involving a virtual register by determining if the
1611 operands remain valid if they're modified in place. */
1612 poly_int64 delta;
1613 if (GET_CODE (SET_SRC (set)) == PLUS
1614 && recog_data.n_operands >= 3
1615 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1616 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1617 && poly_int_rtx_p (recog_data.operand[2], &delta)
1618 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1619 {
1620 offset += delta;
1621
1622 /* If the sum is zero, then replace with a plain move. */
1623 if (known_eq (offset, 0)
1624 && REG_P (SET_DEST (set))
1625 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1626 {
1627 start_sequence ();
1628 emit_move_insn (SET_DEST (set), new_rtx);
1629 seq = get_insns ();
1630 end_sequence ();
1631
1632 emit_insn_before (seq, insn);
1633 delete_insn (insn);
1634 return;
1635 }
1636
1637 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1638
1639 /* Using validate_change and apply_change_group here leaves
1640 recog_data in an invalid state. Since we know exactly what
1641 we want to check, do those two by hand. */
1642 if (safe_insn_predicate (insn_code, 1, new_rtx)
1643 && safe_insn_predicate (insn_code, 2, x))
1644 {
1645 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1646 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1647 any_change = true;
1648
1649 /* Fall through into the regular operand fixup loop in
1650 order to take care of operands other than 1 and 2. */
1651 }
1652 }
1653 }
1654 else
1655 {
1656 extract_insn (insn);
1657 insn_code = INSN_CODE (insn);
1658 }
1659
1660 /* In the general case, we expect virtual registers to appear only in
1661 operands, and then only as either bare registers or inside memories. */
1662 for (i = 0; i < recog_data.n_operands; ++i)
1663 {
1664 x = recog_data.operand[i];
1665 switch (GET_CODE (x))
1666 {
1667 case MEM:
1668 {
1669 rtx addr = XEXP (x, 0);
1670
1671 if (!instantiate_virtual_regs_in_rtx (&addr))
1672 continue;
1673
1674 start_sequence ();
1675 x = replace_equiv_address (x, addr, true);
1676 /* It may happen that the address with the virtual reg
1677 was valid (e.g. based on the virtual stack reg, which might
1678 be acceptable to the predicates with all offsets), whereas
1679 the address now isn't anymore, for instance when the address
1680 is still offsetted, but the base reg isn't virtual-stack-reg
1681 anymore. Below we would do a force_reg on the whole operand,
1682 but this insn might actually only accept memory. Hence,
1683 before doing that last resort, try to reload the address into
1684 a register, so this operand stays a MEM. */
1685 if (!safe_insn_predicate (insn_code, i, x))
1686 {
1687 addr = force_reg (GET_MODE (addr), addr);
1688 x = replace_equiv_address (x, addr, true);
1689 }
1690 seq = get_insns ();
1691 end_sequence ();
1692 if (seq)
1693 emit_insn_before (seq, insn);
1694 }
1695 break;
1696
1697 case REG:
1698 new_rtx = instantiate_new_reg (x, &offset);
1699 if (new_rtx == NULL)
1700 continue;
1701 if (known_eq (offset, 0))
1702 x = new_rtx;
1703 else
1704 {
1705 start_sequence ();
1706
1707 /* Careful, special mode predicates may have stuff in
1708 insn_data[insn_code].operand[i].mode that isn't useful
1709 to us for computing a new value. */
1710 /* ??? Recognize address_operand and/or "p" constraints
1711 to see if (plus new offset) is valid before we put
1712 this through expand_simple_binop. */
1713 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1714 gen_int_mode (offset, GET_MODE (x)),
1715 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1716 seq = get_insns ();
1717 end_sequence ();
1718 emit_insn_before (seq, insn);
1719 }
1720 break;
1721
1722 case SUBREG:
1723 #ifdef NB_FIX_VAX_BACKEND
1724 if (MEM_P (XEXP (x, 0)))
1725 {
1726 /* convert a subreg of a MEMORY operand into a
1727 register operand */
1728 rtx mx = XEXP (x, 0); /* memory operand */
1729 rtx addr = XEXP (mx, 0);
1730 instantiate_virtual_regs_in_rtx (&addr);
1731 start_sequence ();
1732 mx = replace_equiv_address (mx, addr, true);
1733 addr = force_reg (GET_MODE (addr), addr);
1734 mx = replace_equiv_address (mx, addr, true);
1735 seq = get_insns ();
1736 end_sequence ();
1737 if (seq)
1738 emit_insn_before (seq, insn);
1739
1740 /* Generate a new subreg expression.  */
1741 x = gen_rtx_SUBREG (GET_MODE (x), mx, SUBREG_BYTE (x));
1742 }
1743 #endif
1744 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1745 if (new_rtx == NULL)
1746 continue;
1747 if (maybe_ne (offset, 0))
1748 {
1749 start_sequence ();
1750 new_rtx = expand_simple_binop
1751 (GET_MODE (new_rtx), PLUS, new_rtx,
1752 gen_int_mode (offset, GET_MODE (new_rtx)),
1753 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1754 seq = get_insns ();
1755 end_sequence ();
1756 emit_insn_before (seq, insn);
1757 }
1758 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1759 GET_MODE (new_rtx), SUBREG_BYTE (x));
1760 gcc_assert (x);
1761 break;
1762
1763 default:
1764 continue;
1765 }
1766
1767 /* At this point, X contains the new value for the operand.
1768 Validate the new value vs the insn predicate. Note that
1769 asm insns will have insn_code -1 here. */
1770 if (!safe_insn_predicate (insn_code, i, x))
1771 {
1772 start_sequence ();
1773 if (REG_P (x))
1774 {
1775 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1776 x = copy_to_reg (x);
1777 }
1778 else
1779 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1780 seq = get_insns ();
1781 end_sequence ();
1782 if (seq)
1783 emit_insn_before (seq, insn);
1784 }
1785
1786 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1787 any_change = true;
1788 }
1789
1790 if (any_change)
1791 {
1792 /* Propagate operand changes into the duplicates. */
1793 for (i = 0; i < recog_data.n_dups; ++i)
1794 *recog_data.dup_loc[i]
1795 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1796
1797 /* Force re-recognition of the instruction for validation. */
1798 INSN_CODE (insn) = -1;
1799 }
1800
1801 if (asm_noperands (PATTERN (insn)) >= 0)
1802 {
1803 if (!check_asm_operands (PATTERN (insn)))
1804 {
1805 error_for_asm (insn, "impossible constraint in %<asm%>");
1806 /* For asm goto, instead of fixing up all the edges
1807 just clear the template and clear input operands
1808 (asm goto doesn't have any output operands). */
1809 if (JUMP_P (insn))
1810 {
1811 rtx asm_op = extract_asm_operands (PATTERN (insn));
1812 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1813 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1814 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1815 }
1816 else
1817 delete_insn (insn);
1818 }
1819 }
1820 else
1821 {
1822 if (recog_memoized (insn) < 0)
1823 fatal_insn_not_found (insn);
1824 }
1825 }
1826
1827 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1828 do any instantiation required. */
1829
1830 void
1831 instantiate_decl_rtl (rtx x)
1832 {
1833 rtx addr;
1834
1835 if (x == 0)
1836 return;
1837
1838 /* If this is a CONCAT, recurse for the pieces. */
1839 if (GET_CODE (x) == CONCAT)
1840 {
1841 instantiate_decl_rtl (XEXP (x, 0));
1842 instantiate_decl_rtl (XEXP (x, 1));
1843 return;
1844 }
1845
1846 #ifdef NB_FIX_VAX_BACKEND
1847 /* If this is a SUBREG, recurse for the pieces.  */
1848 if (GET_CODE (x) == SUBREG)
1849 {
1850 instantiate_decl_rtl (XEXP (x, 0));
1851 return;
1852 }
1853 #endif
1854
1855 /* If this is not a MEM, no need to do anything. Similarly if the
1856 address is a constant or a register that is not a virtual register. */
1857 if (!MEM_P (x))
1858 return;
1859
1860 addr = XEXP (x, 0);
1861 if (CONSTANT_P (addr)
1862 || (REG_P (addr)
1863 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1864 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1865 return;
1866
1867 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1868 }
1869
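/* Illustrative example (not part of the code above; register modes are
   target-dependent): a local variable whose DECL_RTL is

     (mem:SI (plus:DI (reg:DI virtual-stack-vars) (const_int -4)))

   has that address rewritten in place by instantiate_virtual_regs_in_rtx,
   so debug output and later users of the DECL_RTL see the real frame base
   instead of a virtual register.  */
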
1870 /* Helper for instantiate_decls called via walk_tree: Process all decls
1871 in the given DECL_VALUE_EXPR. */
1872
1873 static tree
1874 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1875 {
1876 tree t = *tp;
1877 if (! EXPR_P (t))
1878 {
1879 *walk_subtrees = 0;
1880 if (DECL_P (t))
1881 {
1882 if (DECL_RTL_SET_P (t))
1883 instantiate_decl_rtl (DECL_RTL (t));
1884 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1885 && DECL_INCOMING_RTL (t))
1886 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1887 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1888 && DECL_HAS_VALUE_EXPR_P (t))
1889 {
1890 tree v = DECL_VALUE_EXPR (t);
1891 walk_tree (&v, instantiate_expr, NULL, NULL);
1892 }
1893 }
1894 }
1895 return NULL;
1896 }
1897
1898 /* Subroutine of instantiate_decls: Process all decls in the given
1899 BLOCK node and all its subblocks. */
1900
1901 static void
1902 instantiate_decls_1 (tree let)
1903 {
1904 tree t;
1905
1906 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1907 {
1908 if (DECL_RTL_SET_P (t))
1909 instantiate_decl_rtl (DECL_RTL (t));
1910 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1911 {
1912 tree v = DECL_VALUE_EXPR (t);
1913 walk_tree (&v, instantiate_expr, NULL, NULL);
1914 }
1915 }
1916
1917 /* Process all subblocks. */
1918 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1919 instantiate_decls_1 (t);
1920 }
1921
1922 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1923 all virtual registers in their DECL_RTL's. */
1924
1925 static void
1926 instantiate_decls (tree fndecl)
1927 {
1928 tree decl;
1929 unsigned ix;
1930
1931 /* Process all parameters of the function. */
1932 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1933 {
1934 instantiate_decl_rtl (DECL_RTL (decl));
1935 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1936 if (DECL_HAS_VALUE_EXPR_P (decl))
1937 {
1938 tree v = DECL_VALUE_EXPR (decl);
1939 walk_tree (&v, instantiate_expr, NULL, NULL);
1940 }
1941 }
1942
1943 if ((decl = DECL_RESULT (fndecl))
1944 && TREE_CODE (decl) == RESULT_DECL)
1945 {
1946 if (DECL_RTL_SET_P (decl))
1947 instantiate_decl_rtl (DECL_RTL (decl));
1948 if (DECL_HAS_VALUE_EXPR_P (decl))
1949 {
1950 tree v = DECL_VALUE_EXPR (decl);
1951 walk_tree (&v, instantiate_expr, NULL, NULL);
1952 }
1953 }
1954
1955 /* Process the saved static chain if it exists. */
1956 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1957 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1958 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1959
1960 /* Now process all variables defined in the function or its subblocks. */
1961 if (DECL_INITIAL (fndecl))
1962 instantiate_decls_1 (DECL_INITIAL (fndecl));
1963
1964 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1965 if (DECL_RTL_SET_P (decl))
1966 instantiate_decl_rtl (DECL_RTL (decl));
1967 vec_free (cfun->local_decls);
1968 }
1969
1970 /* Pass through the INSNS of function FNDECL and convert virtual register
1971 references to hard register references. */
1972
1973 static unsigned int
1974 instantiate_virtual_regs (void)
1975 {
1976 rtx_insn *insn;
1977
1978 /* Compute the offsets to use for this function. */
1979 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1980 var_offset = targetm.starting_frame_offset ();
1981 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1982 out_arg_offset = STACK_POINTER_OFFSET;
1983 #ifdef FRAME_POINTER_CFA_OFFSET
1984 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1985 #else
1986 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1987 #endif
1988
1989 /* Initialize recognition, indicating that volatile is OK. */
1990 init_recog ();
1991
1992 /* Scan through all the insns, instantiating every virtual register still
1993 present. */
1994 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1995 if (INSN_P (insn))
1996 {
1997 /* These patterns in the instruction stream can never be recognized.
1998 Fortunately, they shouldn't contain virtual registers either. */
1999 if (GET_CODE (PATTERN (insn)) == USE
2000 || GET_CODE (PATTERN (insn)) == CLOBBER
2001 || GET_CODE (PATTERN (insn)) == ASM_INPUT
2002 || DEBUG_MARKER_INSN_P (insn))
2003 continue;
2004 else if (DEBUG_BIND_INSN_P (insn))
2005 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
2006 else
2007 instantiate_virtual_regs_in_insn (insn);
2008
2009 if (insn->deleted ())
2010 continue;
2011
2012 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
2013
2014 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
2015 if (CALL_P (insn))
2016 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
2017 }
2018
2019 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
2020 instantiate_decls (current_function_decl);
2021
2022 targetm.instantiate_decls ();
2023
2024 /* Indicate that, from now on, assign_stack_local should use
2025 frame_pointer_rtx. */
2026 virtuals_instantiated = 1;
2027
2028 return 0;
2029 }
2030
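/* A sketch of the overall effect of the "vregs" pass, for illustration only
   (register names, modes and offsets are target-dependent): an insn such as

     (set (reg:SI 101) (mem:SI (plus:DI (reg:DI virtual-stack-vars)
                                        (const_int 8))))

   is rewritten so the virtual base becomes the corresponding hard register
   plus its bias, e.g.

     (set (reg:SI 101) (mem:SI (plus:DI (reg:DI frame-pointer)
                                        (const_int 8 + var_offset))))

   where var_offset is the targetm.starting_frame_offset () value computed
   in instantiate_virtual_regs above.  */
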
2031 namespace {
2032
2033 const pass_data pass_data_instantiate_virtual_regs =
2034 {
2035 RTL_PASS, /* type */
2036 "vregs", /* name */
2037 OPTGROUP_NONE, /* optinfo_flags */
2038 TV_NONE, /* tv_id */
2039 0, /* properties_required */
2040 0, /* properties_provided */
2041 0, /* properties_destroyed */
2042 0, /* todo_flags_start */
2043 0, /* todo_flags_finish */
2044 };
2045
2046 class pass_instantiate_virtual_regs : public rtl_opt_pass
2047 {
2048 public:
2049 pass_instantiate_virtual_regs (gcc::context *ctxt)
2050 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2051 {}
2052
2053 /* opt_pass methods: */
2054 virtual unsigned int execute (function *)
2055 {
2056 return instantiate_virtual_regs ();
2057 }
2058
2059 }; // class pass_instantiate_virtual_regs
2060
2061 } // anon namespace
2062
2063 rtl_opt_pass *
2064 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2065 {
2066 return new pass_instantiate_virtual_regs (ctxt);
2067 }
2068
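/* Note for orientation: the pass object above is created through
   make_pass_instantiate_virtual_regs; in a typical GCC build it is
   scheduled from passes.def early in the RTL pipeline, shortly after
   expansion, which is why later RTL passes may assume that no virtual
   registers remain.  */
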
2069
2070 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2071 This means a type for which function calls must pass an address to the
2072 function or get an address back from the function.
2073 EXP may be a type node or an expression (whose type is tested). */
2074
2075 int
2076 aggregate_value_p (const_tree exp, const_tree fntype)
2077 {
2078 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2079 int i, regno, nregs;
2080 rtx reg;
2081
2082 if (fntype)
2083 switch (TREE_CODE (fntype))
2084 {
2085 case CALL_EXPR:
2086 {
2087 tree fndecl = get_callee_fndecl (fntype);
2088 if (fndecl)
2089 fntype = TREE_TYPE (fndecl);
2090 else if (CALL_EXPR_FN (fntype))
2091 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2092 else
2093 /* For internal functions, assume nothing needs to be
2094 returned in memory. */
2095 return 0;
2096 }
2097 break;
2098 case FUNCTION_DECL:
2099 fntype = TREE_TYPE (fntype);
2100 break;
2101 case FUNCTION_TYPE:
2102 case METHOD_TYPE:
2103 break;
2104 case IDENTIFIER_NODE:
2105 fntype = NULL_TREE;
2106 break;
2107 default:
2108 /* We don't expect other tree types here. */
2109 gcc_unreachable ();
2110 }
2111
2112 if (VOID_TYPE_P (type))
2113 return 0;
2114
2115 /* If a record should be passed the same as its first (and only) member,
2116 don't pass it as an aggregate. */
2117 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2118 return aggregate_value_p (first_field (type), fntype);
2119
2120 /* If the front end has decided that this needs to be passed by
2121 reference, do so. */
2122 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2123 && DECL_BY_REFERENCE (exp))
2124 return 1;
2125
2126 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2127 if (fntype && TREE_ADDRESSABLE (fntype))
2128 return 1;
2129
2130 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2131 and thus can't be returned in registers. */
2132 if (TREE_ADDRESSABLE (type))
2133 return 1;
2134
2135 if (TYPE_EMPTY_P (type))
2136 return 0;
2137
2138 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2139 return 1;
2140
2141 if (targetm.calls.return_in_memory (type, fntype))
2142 return 1;
2143
2144 /* Make sure we have suitable call-clobbered regs to return
2145 the value in; if not, we must return it in memory. */
2146 reg = hard_function_value (type, 0, fntype, 0);
2147
2148 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2149 it is OK. */
2150 if (!REG_P (reg))
2151 return 0;
2152
2153 /* Use the default ABI if the type of the function isn't known.
2154 The scheme for handling interoperability between different ABIs
2155 requires us to be able to tell when we're calling a function with
2156 a nondefault ABI. */
2157 const predefined_function_abi &abi = (fntype
2158 ? fntype_abi (fntype)
2159 : default_function_abi);
2160 regno = REGNO (reg);
2161 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2162 for (i = 0; i < nregs; i++)
2163 if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2164 return 1;
2165
2166 return 0;
2167 }
2168
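/* For illustration only (the outcome is target- and ABI-dependent): with
   the default hooks on most targets,

     int f (void);                       aggregate_value_p -> 0
     struct { char c[64]; } g (void);    aggregate_value_p -> 1

   i.e. small scalars come back in a register while a large aggregate is
   returned through a hidden address, which is what the ".result_ptr"
   machinery in assign_parms_augmented_arg_list below relies on.  */
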
2169 /* Return true if we should assign DECL a pseudo register; false if it
2170 should live on the local stack. */
2171
2172 bool
2173 use_register_for_decl (const_tree decl)
2174 {
2175 if (TREE_CODE (decl) == SSA_NAME)
2176 {
2177 /* We often try to use the SSA_NAME, instead of its underlying
2178 decl, to get type information and guide decisions, to avoid
2179 differences of behavior between anonymous and named
2180 variables, but in this one case we have to go for the actual
2181 variable if there is one. The main reason is that, at least
2182 at -O0, we want to place user variables on the stack, but we
2183 don't mind using pseudos for anonymous or ignored temps.
2184 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2185 should go in pseudos, whereas their corresponding variables
2186 might have to go on the stack. So, disregarding the decl
2187 here would negatively impact debug info at -O0, enable
2188 coalescing between SSA_NAMEs that ought to get different
2189 stack/pseudo assignments, and get the incoming argument
2190 processing thoroughly confused by PARM_DECLs expected to live
2191 in stack slots but assigned to pseudos. */
2192 if (!SSA_NAME_VAR (decl))
2193 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2194 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2195
2196 decl = SSA_NAME_VAR (decl);
2197 }
2198
2199 /* Honor volatile. */
2200 if (TREE_SIDE_EFFECTS (decl))
2201 return false;
2202
2203 /* Honor addressability. */
2204 if (TREE_ADDRESSABLE (decl))
2205 return false;
2206
2207 /* RESULT_DECLs are a bit special in that they're assigned without
2208 regard to use_register_for_decl, but we generally only store in
2209 them. If we coalesce their SSA NAMEs, we'd better return a
2210 result that matches the assignment in expand_function_start. */
2211 if (TREE_CODE (decl) == RESULT_DECL)
2212 {
2213 /* If it's not an aggregate, we're going to use a REG or a
2214 PARALLEL containing a REG. */
2215 if (!aggregate_value_p (decl, current_function_decl))
2216 return true;
2217
2218 /* If expand_function_start determines the return value, we'll
2219 use MEM if it's not by reference. */
2220 if (cfun->returns_pcc_struct
2221 || (targetm.calls.struct_value_rtx
2222 (TREE_TYPE (current_function_decl), 1)))
2223 return DECL_BY_REFERENCE (decl);
2224
2225 /* Otherwise, we're taking an extra all.function_result_decl
2226 argument. It's set up in assign_parms_augmented_arg_list,
2227 under the (negated) conditions above, and then it's used to
2228 set up the RESULT_DECL rtl in assign_parms, after looping
2229 over all parameters. Now, if the RESULT_DECL is not by
2230 reference, we'll use a MEM either way. */
2231 if (!DECL_BY_REFERENCE (decl))
2232 return false;
2233
2234 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2235 the function_result_decl's assignment. Since it's a pointer,
2236 we can short-circuit a number of the tests below, and we must
2237 duplicate them because we don't have the
2238 function_result_decl to test. */
2239 if (!targetm.calls.allocate_stack_slots_for_args ())
2240 return true;
2241 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2242 if (optimize)
2243 return true;
2244 /* We don't set DECL_REGISTER for the function_result_decl. */
2245 return false;
2246 }
2247
2248 /* Only register-like things go in registers. */
2249 if (DECL_MODE (decl) == BLKmode)
2250 return false;
2251
2252 /* If -ffloat-store specified, don't put explicit float variables
2253 into registers. */
2254 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2255 propagates values across these stores, and it probably shouldn't. */
2256 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2257 return false;
2258
2259 if (!targetm.calls.allocate_stack_slots_for_args ())
2260 return true;
2261
2262 /* If we're not interested in tracking debugging information for
2263 this decl, then we can certainly put it in a register. */
2264 if (DECL_IGNORED_P (decl))
2265 return true;
2266
2267 if (optimize)
2268 return true;
2269
2270 if (!DECL_REGISTER (decl))
2271 return false;
2272
2273 /* When not optimizing, disregard register keyword for types that
2274 could have methods, otherwise the methods won't be callable from
2275 the debugger. */
2276 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2277 return false;
2278
2279 return true;
2280 }
2281
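/* Illustrative consequence of the rules above (assuming the default
   allocate_stack_slots_for_args hook): at -O0 a named user variable such
   as "int i;" without the register keyword stays in a stack slot so the
   debugger can always find it, whereas non-addressable compiler
   temporaries with DECL_IGNORED_P set, and most decls when optimizing,
   are given pseudo registers.  */
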
2282 /* Structures to communicate between the subroutines of assign_parms.
2283 The first holds data persistent across all parameters, the second
2284 is cleared out for each parameter. */
2285
2286 struct assign_parm_data_all
2287 {
2288 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2289 should become a job of the target or otherwise encapsulated. */
2290 CUMULATIVE_ARGS args_so_far_v;
2291 cumulative_args_t args_so_far;
2292 struct args_size stack_args_size;
2293 tree function_result_decl;
2294 tree orig_fnargs;
2295 rtx_insn *first_conversion_insn;
2296 rtx_insn *last_conversion_insn;
2297 HOST_WIDE_INT pretend_args_size;
2298 HOST_WIDE_INT extra_pretend_bytes;
2299 int reg_parm_stack_space;
2300 };
2301
2302 struct assign_parm_data_one
2303 {
2304 tree nominal_type;
2305 function_arg_info arg;
2306 rtx entry_parm;
2307 rtx stack_parm;
2308 machine_mode nominal_mode;
2309 machine_mode passed_mode;
2310 struct locate_and_pad_arg_data locate;
2311 int partial;
2312 };
2313
2314 /* A subroutine of assign_parms. Initialize ALL. */
2315
2316 static void
2317 assign_parms_initialize_all (struct assign_parm_data_all *all)
2318 {
2319 tree fntype ATTRIBUTE_UNUSED;
2320
2321 memset (all, 0, sizeof (*all));
2322
2323 fntype = TREE_TYPE (current_function_decl);
2324
2325 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2326 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2327 #else
2328 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2329 current_function_decl, -1);
2330 #endif
2331 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2332
2333 #ifdef INCOMING_REG_PARM_STACK_SPACE
2334 all->reg_parm_stack_space
2335 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2336 #endif
2337 }
2338
2339 /* If ARGS contains entries with complex types, split each such entry into
2340 two entries of the component type. ARGS is updated in place rather
2341 than returned. */
2342
2343 static void
2344 split_complex_args (vec<tree> *args)
2345 {
2346 unsigned i;
2347 tree p;
2348
2349 FOR_EACH_VEC_ELT (*args, i, p)
2350 {
2351 tree type = TREE_TYPE (p);
2352 if (TREE_CODE (type) == COMPLEX_TYPE
2353 && targetm.calls.split_complex_arg (type))
2354 {
2355 tree decl;
2356 tree subtype = TREE_TYPE (type);
2357 bool addressable = TREE_ADDRESSABLE (p);
2358
2359 /* Rewrite the PARM_DECL's type with its component. */
2360 p = copy_node (p);
2361 TREE_TYPE (p) = subtype;
2362 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2363 SET_DECL_MODE (p, VOIDmode);
2364 DECL_SIZE (p) = NULL;
2365 DECL_SIZE_UNIT (p) = NULL;
2366 /* If this arg must go in memory, put it in a pseudo here.
2367 We can't allow it to go in memory as per normal parms,
2368 because the usual place might not have the imag part
2369 adjacent to the real part. */
2370 DECL_ARTIFICIAL (p) = addressable;
2371 DECL_IGNORED_P (p) = addressable;
2372 TREE_ADDRESSABLE (p) = 0;
2373 layout_decl (p, 0);
2374 (*args)[i] = p;
2375
2376 /* Build a second synthetic decl. */
2377 decl = build_decl (EXPR_LOCATION (p),
2378 PARM_DECL, NULL_TREE, subtype);
2379 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2380 DECL_ARTIFICIAL (decl) = addressable;
2381 DECL_IGNORED_P (decl) = addressable;
2382 layout_decl (decl, 0);
2383 args->safe_insert (++i, decl);
2384 }
2385 }
2386 }
2387
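/* Illustrative sketch: on a target whose split_complex_arg hook accepts the
   type, a parameter declared as

     void f (_Complex double z);

   is rewritten above into two consecutive DFmode PARM_DECLs, the first
   (keeping the original name) carrying the real part and a second
   synthetic, nameless decl carrying the imaginary part.  */
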
2388 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2389 the hidden struct return argument, and (abi willing) complex args.
2390 Return the new parameter list. */
2391
2392 static vec<tree>
2393 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2394 {
2395 tree fndecl = current_function_decl;
2396 tree fntype = TREE_TYPE (fndecl);
2397 vec<tree> fnargs = vNULL;
2398 tree arg;
2399
2400 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2401 fnargs.safe_push (arg);
2402
2403 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2404
2405 /* If struct value address is treated as the first argument, make it so. */
2406 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2407 && ! cfun->returns_pcc_struct
2408 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2409 {
2410 tree type = build_pointer_type (TREE_TYPE (fntype));
2411 tree decl;
2412
2413 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2414 PARM_DECL, get_identifier (".result_ptr"), type);
2415 DECL_ARG_TYPE (decl) = type;
2416 DECL_ARTIFICIAL (decl) = 1;
2417 DECL_NAMELESS (decl) = 1;
2418 TREE_CONSTANT (decl) = 1;
2419 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2420 changes, the end of the RESULT_DECL handling block in
2421 use_register_for_decl must be adjusted to match. */
2422
2423 DECL_CHAIN (decl) = all->orig_fnargs;
2424 all->orig_fnargs = decl;
2425 fnargs.safe_insert (0, decl);
2426
2427 all->function_result_decl = decl;
2428 }
2429
2430 /* If the target wants to split complex arguments into scalars, do so. */
2431 if (targetm.calls.split_complex_arg)
2432 split_complex_args (&fnargs);
2433
2434 return fnargs;
2435 }
2436
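/* Illustrative sketch of the result: for

     struct big { char c[128]; };
     struct big f (int x);

   on a target that returns such aggregates in memory and has no dedicated
   struct-value register, the augmented list built above is
   (".result_ptr", x), with ".result_ptr" the hidden PARM_DECL created for
   the return slot.  */
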
2437 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2438 data for the parameter. Incorporate ABI specifics such as pass-by-
2439 reference and type promotion. */
2440
2441 static void
2442 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2443 struct assign_parm_data_one *data)
2444 {
2445 int unsignedp;
2446
2447 #ifndef BROKEN_VALUE_INITIALIZATION
2448 *data = assign_parm_data_one ();
2449 #else
2450 /* Old versions of GCC used to miscompile the above by only initializing
2451 the members with explicit constructors and copying garbage
2452 to the other members. */
2453 assign_parm_data_one zero_data = {};
2454 *data = zero_data;
2455 #endif
2456
2457 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2458 if (!cfun->stdarg)
2459 data->arg.named = 1; /* No variadic parms. */
2460 else if (DECL_CHAIN (parm))
2461 data->arg.named = 1; /* Not the last non-variadic parm. */
2462 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2463 data->arg.named = 1; /* Only variadic ones are unnamed. */
2464 else
2465 data->arg.named = 0; /* Treat as variadic. */
2466
2467 data->nominal_type = TREE_TYPE (parm);
2468 data->arg.type = DECL_ARG_TYPE (parm);
2469
2470 /* Look out for errors propagating this far. Also, if the parameter's
2471 type is void then its value doesn't matter. */
2472 if (TREE_TYPE (parm) == error_mark_node
2473 /* This can happen after weird syntax errors
2474 or if an enum type is defined among the parms. */
2475 || TREE_CODE (parm) != PARM_DECL
2476 || data->arg.type == NULL
2477 || VOID_TYPE_P (data->nominal_type))
2478 {
2479 data->nominal_type = data->arg.type = void_type_node;
2480 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2481 return;
2482 }
2483
2484 /* Find mode of arg as it is passed, and mode of arg as it should be
2485 during execution of this function. */
2486 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2487 data->nominal_mode = TYPE_MODE (data->nominal_type);
2488
2489 /* If the parm is to be passed as a transparent union or record, use the
2490 type of the first field for the tests below. We have already verified
2491 that the modes are the same. */
2492 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2493 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2494 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2495
2496 /* See if this arg was passed by invisible reference. */
2497 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2498 {
2499 data->nominal_type = data->arg.type;
2500 data->passed_mode = data->nominal_mode = data->arg.mode;
2501 }
2502
2503 /* Find mode as it is passed by the ABI. */
2504 unsignedp = TYPE_UNSIGNED (data->arg.type);
2505 data->arg.mode
2506 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2507 TREE_TYPE (current_function_decl), 0);
2508 }
2509
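/* Illustrative notes on the modes computed above: nominal_mode is the mode
   the parameter has inside the function (TYPE_MODE of TREE_TYPE (parm)),
   passed_mode is the mode of DECL_ARG_TYPE as the caller passes it, and
   data->arg.mode additionally reflects promote_function_mode, so a
   sub-word integer argument may end up with a word-sized arg.mode on
   targets that promote.  When the argument is passed by invisible
   reference, all of these are rewritten to the mode of the pointer.  */
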
2510 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2511
2512 static void
2513 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2514 struct assign_parm_data_one *data, bool no_rtl)
2515 {
2516 int varargs_pretend_bytes = 0;
2517
2518 function_arg_info last_named_arg = data->arg;
2519 last_named_arg.named = true;
2520 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2521 &varargs_pretend_bytes, no_rtl);
2522
2523 /* If the back-end has requested extra stack space, record how much is
2524 needed. Do not change pretend_args_size otherwise since it may be
2525 nonzero from an earlier partial argument. */
2526 if (varargs_pretend_bytes > 0)
2527 all->pretend_args_size = varargs_pretend_bytes;
2528 }
2529
2530 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2531 the incoming location of the current parameter. */
2532
2533 static void
2534 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2535 struct assign_parm_data_one *data)
2536 {
2537 HOST_WIDE_INT pretend_bytes = 0;
2538 rtx entry_parm;
2539 bool in_regs;
2540
2541 if (data->arg.mode == VOIDmode)
2542 {
2543 data->entry_parm = data->stack_parm = const0_rtx;
2544 return;
2545 }
2546
2547 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2548 data->arg.type);
2549
2550 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2551 data->arg);
2552 if (entry_parm == 0)
2553 data->arg.mode = data->passed_mode;
2554
2555 /* Determine parm's home in the stack, in case it arrives in the stack
2556 or we should pretend it did. Compute the stack position and rtx where
2557 the argument arrives and its size.
2558
2559 There is one complexity here: If this was a parameter that would
2560 have been passed in registers, but wasn't only because it is
2561 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2562 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2563 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2564 as it was the previous time. */
2565 in_regs = (entry_parm != 0);
2566 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2567 in_regs = true;
2568 #endif
2569 if (!in_regs && !data->arg.named)
2570 {
2571 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2572 {
2573 rtx tem;
2574 function_arg_info named_arg = data->arg;
2575 named_arg.named = true;
2576 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2577 named_arg);
2578 in_regs = tem != NULL;
2579 }
2580 }
2581
2582 /* If this parameter was passed both in registers and in the stack, use
2583 the copy on the stack. */
2584 if (targetm.calls.must_pass_in_stack (data->arg))
2585 entry_parm = 0;
2586
2587 if (entry_parm)
2588 {
2589 int partial;
2590
2591 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2592 data->partial = partial;
2593
2594 /* The caller might already have allocated stack space for the
2595 register parameters. */
2596 if (partial != 0 && all->reg_parm_stack_space == 0)
2597 {
2598 /* Part of this argument is passed in registers and part
2599 is passed on the stack. Ask the prologue code to extend
2600 the stack part so that we can recreate the full value.
2601
2602 PRETEND_BYTES is the size of the registers we need to store.
2603 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2604 stack space that the prologue should allocate.
2605
2606 Internally, gcc assumes that the argument pointer is aligned
2607 to STACK_BOUNDARY bits. This is used both for alignment
2608 optimizations (see init_emit) and to locate arguments that are
2609 aligned to more than PARM_BOUNDARY bits. We must preserve this
2610 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2611 a stack boundary. */
2612
2613 /* We assume at most one partial arg, and it must be the first
2614 argument on the stack. */
2615 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2616
2617 pretend_bytes = partial;
2618 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2619
2620 /* We want to align relative to the actual stack pointer, so
2621 don't include this in the stack size until later. */
2622 all->extra_pretend_bytes = all->pretend_args_size;
2623 }
2624 }
2625
2626 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2627 all->reg_parm_stack_space,
2628 entry_parm ? data->partial : 0, current_function_decl,
2629 &all->stack_args_size, &data->locate);
2630
2631 /* Update parm_stack_boundary if this parameter is passed in the
2632 stack. */
2633 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2634 crtl->parm_stack_boundary = data->locate.boundary;
2635
2636 /* Adjust offsets to include the pretend args. */
2637 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2638 data->locate.slot_offset.constant += pretend_bytes;
2639 data->locate.offset.constant += pretend_bytes;
2640
2641 data->entry_parm = entry_parm;
2642 }
2643
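/* Illustrative example: if arg_partial_bytes reports that the first 8 bytes
   of an argument arrive in registers and the remainder on the stack, and
   there is no reg_parm_stack_space, the code above asks the prologue for
   CEIL_ROUND (8, STACK_BYTES) bytes of "pretend" stack so that the whole
   value can later be reassembled contiguously in memory.  The 8 here is
   only an example; the partial size is entirely target-defined.  */
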
2644 /* A subroutine of assign_parms. If there is actually space on the stack
2645 for this parm, count it in stack_args_size and return true. */
2646
2647 static bool
2648 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2649 struct assign_parm_data_one *data)
2650 {
2651 /* Trivially true if we've no incoming register. */
2652 if (data->entry_parm == NULL)
2653 ;
2654 /* Also true if we're partially in registers and partially not,
2655 since we've arranged to drop the entire argument on the stack. */
2656 else if (data->partial != 0)
2657 ;
2658 /* Also true if the target says that it's passed in both registers
2659 and on the stack. */
2660 else if (GET_CODE (data->entry_parm) == PARALLEL
2661 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2662 ;
2663 /* Also true if the target says that there's stack allocated for
2664 all register parameters. */
2665 else if (all->reg_parm_stack_space > 0)
2666 ;
2667 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2668 else
2669 return false;
2670
2671 all->stack_args_size.constant += data->locate.size.constant;
2672 if (data->locate.size.var)
2673 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2674
2675 return true;
2676 }
2677
2678 /* A subroutine of assign_parms. Given that this parameter is allocated
2679 stack space by the ABI, find it. */
2680
2681 static void
2682 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2683 {
2684 rtx offset_rtx, stack_parm;
2685 unsigned int align, boundary;
2686
2687 /* If we're passing this arg using a reg, make its stack home the
2688 aligned stack slot. */
2689 if (data->entry_parm)
2690 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2691 else
2692 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2693
2694 stack_parm = crtl->args.internal_arg_pointer;
2695 if (offset_rtx != const0_rtx)
2696 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2697 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2698
2699 if (!data->arg.pass_by_reference)
2700 {
2701 set_mem_attributes (stack_parm, parm, 1);
2702 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2703 while promoted mode's size is needed. */
2704 if (data->arg.mode != BLKmode
2705 && data->arg.mode != DECL_MODE (parm))
2706 {
2707 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2708 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2709 {
2710 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2711 data->arg.mode);
2712 if (maybe_ne (offset, 0))
2713 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2714 }
2715 }
2716 }
2717
2718 boundary = data->locate.boundary;
2719 align = BITS_PER_UNIT;
2720
2721 /* If we're padding upward, we know that the alignment of the slot
2722 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2723 intentionally forcing upward padding. Otherwise we have to come
2724 up with a guess at the alignment based on OFFSET_RTX. */
2725 poly_int64 offset;
2726 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2727 align = boundary;
2728 else if (data->locate.where_pad == PAD_UPWARD)
2729 {
2730 align = boundary;
2731 /* If the argument offset is actually more aligned than the nominal
2732 stack slot boundary, take advantage of that excess alignment.
2733 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2734 if (poly_int_rtx_p (offset_rtx, &offset)
2735 && known_eq (STACK_POINTER_OFFSET, 0))
2736 {
2737 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2738 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2739 offset_align = STACK_BOUNDARY;
2740 align = MAX (align, offset_align);
2741 }
2742 }
2743 else if (poly_int_rtx_p (offset_rtx, &offset))
2744 {
2745 align = least_bit_hwi (boundary);
2746 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2747 if (offset_align != 0)
2748 align = MIN (align, offset_align);
2749 }
2750 set_mem_align (stack_parm, align);
2751
2752 if (data->entry_parm)
2753 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2754
2755 data->stack_parm = stack_parm;
2756 }
2757
2758 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2759 always valid and contiguous. */
2760
2761 static void
2762 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2763 {
2764 rtx entry_parm = data->entry_parm;
2765 rtx stack_parm = data->stack_parm;
2766
2767 /* If this parm was passed part in regs and part in memory, pretend it
2768 arrived entirely in memory by pushing the register-part onto the stack.
2769 In the special case of a DImode or DFmode that is split, we could put
2770 it together in a pseudoreg directly, but for now that's not worth
2771 bothering with. */
2772 if (data->partial != 0)
2773 {
2774 /* Handle calls that pass values in multiple non-contiguous
2775 locations. The Irix 6 ABI has examples of this. */
2776 if (GET_CODE (entry_parm) == PARALLEL)
2777 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2778 data->arg.type, int_size_in_bytes (data->arg.type));
2779 else
2780 {
2781 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2782 move_block_from_reg (REGNO (entry_parm),
2783 validize_mem (copy_rtx (stack_parm)),
2784 data->partial / UNITS_PER_WORD);
2785 }
2786
2787 entry_parm = stack_parm;
2788 }
2789
2790 /* If we didn't decide this parm came in a register, by default it came
2791 on the stack. */
2792 else if (entry_parm == NULL)
2793 entry_parm = stack_parm;
2794
2795 /* When an argument is passed in multiple locations, we can't make use
2796 of this information, but we can save some copying if the whole argument
2797 is passed in a single register. */
2798 else if (GET_CODE (entry_parm) == PARALLEL
2799 && data->nominal_mode != BLKmode
2800 && data->passed_mode != BLKmode)
2801 {
2802 size_t i, len = XVECLEN (entry_parm, 0);
2803
2804 for (i = 0; i < len; i++)
2805 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2806 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2807 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2808 == data->passed_mode)
2809 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2810 {
2811 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2812 break;
2813 }
2814 }
2815
2816 data->entry_parm = entry_parm;
2817 }
2818
2819 /* A subroutine of assign_parms. Reconstitute any values which were
2820 passed in multiple registers and would fit in a single register. */
2821
2822 static void
2823 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2824 {
2825 rtx entry_parm = data->entry_parm;
2826
2827 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2828 This can be done with register operations rather than on the
2829 stack, even if we will store the reconstituted parameter on the
2830 stack later. */
2831 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2832 {
2833 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2834 emit_group_store (parmreg, entry_parm, data->arg.type,
2835 GET_MODE_SIZE (GET_MODE (entry_parm)));
2836 entry_parm = parmreg;
2837 }
2838
2839 data->entry_parm = entry_parm;
2840 }
2841
2842 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2843 always valid and properly aligned. */
2844
2845 static void
2846 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2847 {
2848 rtx stack_parm = data->stack_parm;
2849
2850 /* If we can't trust the parm stack slot to be aligned enough for its
2851 ultimate type, don't use that slot after entry. We'll make another
2852 stack slot, if we need one. */
2853 if (stack_parm
2854 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2855 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2856 != CODE_FOR_nothing)
2857 || targetm.slow_unaligned_access (data->nominal_mode,
2858 MEM_ALIGN (stack_parm))))
2859 || (data->nominal_type
2860 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2861 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2862 stack_parm = NULL;
2863
2864 /* If parm was passed in memory, and we need to convert it on entry,
2865 don't store it back in that same slot. */
2866 else if (data->entry_parm == stack_parm
2867 && data->nominal_mode != BLKmode
2868 && data->nominal_mode != data->passed_mode)
2869 stack_parm = NULL;
2870
2871 /* If stack protection is in effect for this function, don't leave any
2872 pointers in their passed stack slots. */
2873 else if (crtl->stack_protect_guard
2874 && (flag_stack_protect == 2
2875 || data->arg.pass_by_reference
2876 || POINTER_TYPE_P (data->nominal_type)))
2877 stack_parm = NULL;
2878
2879 data->stack_parm = stack_parm;
2880 }
2881
2882 /* A subroutine of assign_parms. Return true if the current parameter
2883 should be stored as a BLKmode in the current frame. */
2884
2885 static bool
2886 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2887 {
2888 if (data->nominal_mode == BLKmode)
2889 return true;
2890 if (GET_MODE (data->entry_parm) == BLKmode)
2891 return true;
2892
2893 #ifdef BLOCK_REG_PADDING
2894 /* Only assign_parm_setup_block knows how to deal with register arguments
2895 that are padded at the least significant end. */
2896 if (REG_P (data->entry_parm)
2897 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2898 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2899 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2900 return true;
2901 #endif
2902
2903 return false;
2904 }
2905
2906 /* A subroutine of assign_parms. Arrange for the parameter to be
2907 present and valid in DATA->STACK_RTL. */
2908
2909 static void
2910 assign_parm_setup_block (struct assign_parm_data_all *all,
2911 tree parm, struct assign_parm_data_one *data)
2912 {
2913 rtx entry_parm = data->entry_parm;
2914 rtx stack_parm = data->stack_parm;
2915 rtx target_reg = NULL_RTX;
2916 bool in_conversion_seq = false;
2917 HOST_WIDE_INT size;
2918 HOST_WIDE_INT size_stored;
2919
2920 if (GET_CODE (entry_parm) == PARALLEL)
2921 entry_parm = emit_group_move_into_temps (entry_parm);
2922
2923 /* If we want the parameter in a pseudo, don't use a stack slot. */
2924 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2925 {
2926 tree def = ssa_default_def (cfun, parm);
2927 gcc_assert (def);
2928 machine_mode mode = promote_ssa_mode (def, NULL);
2929 rtx reg = gen_reg_rtx (mode);
2930 if (GET_CODE (reg) != CONCAT)
2931 stack_parm = reg;
2932 else
2933 {
2934 target_reg = reg;
2935 /* Avoid allocating a stack slot, if there isn't one
2936 preallocated by the ABI. It might seem like we should
2937 always prefer a pseudo, but converting between
2938 floating-point and integer modes goes through the stack
2939 on various machines, so it's better to use the reserved
2940 stack slot than to risk wasting it and allocating more
2941 for the conversion. */
2942 if (stack_parm == NULL_RTX)
2943 {
2944 int save = generating_concat_p;
2945 generating_concat_p = 0;
2946 stack_parm = gen_reg_rtx (mode);
2947 generating_concat_p = save;
2948 }
2949 }
2950 data->stack_parm = NULL;
2951 }
2952
2953 size = int_size_in_bytes (data->arg.type);
2954 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2955 if (stack_parm == 0)
2956 {
2957 HOST_WIDE_INT parm_align
2958 = (STRICT_ALIGNMENT
2959 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2960
2961 SET_DECL_ALIGN (parm, parm_align);
2962 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2963 {
2964 rtx allocsize = gen_int_mode (size_stored, Pmode);
2965 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2966 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2967 MAX_SUPPORTED_STACK_ALIGNMENT);
2968 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2969 DECL_ALIGN (parm));
2970 mark_reg_pointer (addr, DECL_ALIGN (parm));
2971 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2972 MEM_NOTRAP_P (stack_parm) = 1;
2973 }
2974 else
2975 stack_parm = assign_stack_local (BLKmode, size_stored,
2976 DECL_ALIGN (parm));
2977 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2978 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2979 set_mem_attributes (stack_parm, parm, 1);
2980 }
2981
2982 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2983 calls that pass values in multiple non-contiguous locations. */
2984 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2985 {
2986 rtx mem;
2987
2988 /* Note that we will be storing an integral number of words.
2989 So we have to be careful to ensure that we allocate an
2990 integral number of words. We do this above when we call
2991 assign_stack_local if space was not allocated in the argument
2992 list. If it was, this will not work if PARM_BOUNDARY is not
2993 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2994 if it becomes a problem. Exception is when BLKmode arrives
2995 with arguments not conforming to word_mode. */
2996
2997 if (data->stack_parm == 0)
2998 ;
2999 else if (GET_CODE (entry_parm) == PARALLEL)
3000 ;
3001 else
3002 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
3003
3004 mem = validize_mem (copy_rtx (stack_parm));
3005
3006 /* Handle values in multiple non-contiguous locations. */
3007 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
3008 emit_group_store (mem, entry_parm, data->arg.type, size);
3009 else if (GET_CODE (entry_parm) == PARALLEL)
3010 {
3011 push_to_sequence2 (all->first_conversion_insn,
3012 all->last_conversion_insn);
3013 emit_group_store (mem, entry_parm, data->arg.type, size);
3014 all->first_conversion_insn = get_insns ();
3015 all->last_conversion_insn = get_last_insn ();
3016 end_sequence ();
3017 in_conversion_seq = true;
3018 }
3019
3020 else if (size == 0)
3021 ;
3022
3023 /* If SIZE is that of a mode no bigger than a word, just use
3024 that mode's store operation. */
3025 else if (size <= UNITS_PER_WORD)
3026 {
3027 unsigned int bits = size * BITS_PER_UNIT;
3028 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3029
3030 if (mode != BLKmode
3031 #ifdef BLOCK_REG_PADDING
3032 && (size == UNITS_PER_WORD
3033 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3034 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3035 #endif
3036 )
3037 {
3038 rtx reg;
3039
3040 /* We are really truncating a word_mode value containing
3041 SIZE bytes into a value of mode MODE. If such an
3042 operation requires no actual instructions, we can refer
3043 to the value directly in mode MODE, otherwise we must
3044 start with the register in word_mode and explicitly
3045 convert it. */
3046 if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3047 BITS_PER_WORD))
3048 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3049 else
3050 {
3051 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3052 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3053 }
3054 emit_move_insn (change_address (mem, mode, 0), reg);
3055 }
3056
3057 #ifdef BLOCK_REG_PADDING
3058 /* Storing the register in memory as a full word, as
3059 move_block_from_reg below would do, and then using the
3060 MEM in a smaller mode, has the effect of shifting right
3061 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3062 shifting must be explicit. */
3063 else if (!MEM_P (mem))
3064 {
3065 rtx x;
3066
3067 /* If the assert below fails, we should have taken the
3068 mode != BLKmode path above, unless we have downward
3069 padding of smaller-than-word arguments on a machine
3070 with little-endian bytes, which would likely require
3071 additional changes to work correctly. */
3072 gcc_checking_assert (BYTES_BIG_ENDIAN
3073 && (BLOCK_REG_PADDING (mode,
3074 data->arg.type, 1)
3075 == PAD_UPWARD));
3076
3077 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3078
3079 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3080 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3081 NULL_RTX, 1);
3082 x = force_reg (word_mode, x);
3083 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3084
3085 emit_move_insn (mem, x);
3086 }
3087 #endif
3088
3089 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3090 machine must be aligned to the left before storing
3091 to memory. Note that the previous test doesn't
3092 handle all cases (e.g. SIZE == 3). */
3093 else if (size != UNITS_PER_WORD
3094 #ifdef BLOCK_REG_PADDING
3095 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3096 == PAD_DOWNWARD)
3097 #else
3098 && BYTES_BIG_ENDIAN
3099 #endif
3100 )
3101 {
3102 rtx tem, x;
3103 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3104 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3105
3106 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3107 tem = change_address (mem, word_mode, 0);
3108 emit_move_insn (tem, x);
3109 }
3110 else
3111 move_block_from_reg (REGNO (entry_parm), mem,
3112 size_stored / UNITS_PER_WORD);
3113 }
3114 else if (!MEM_P (mem))
3115 {
3116 gcc_checking_assert (size > UNITS_PER_WORD);
3117 #ifdef BLOCK_REG_PADDING
3118 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3119 data->arg.type, 0)
3120 == PAD_UPWARD);
3121 #endif
3122 emit_move_insn (mem, entry_parm);
3123 }
3124 else
3125 move_block_from_reg (REGNO (entry_parm), mem,
3126 size_stored / UNITS_PER_WORD);
3127 }
3128 else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3129 {
3130 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3131 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3132 BLOCK_OP_NORMAL);
3133 all->first_conversion_insn = get_insns ();
3134 all->last_conversion_insn = get_last_insn ();
3135 end_sequence ();
3136 in_conversion_seq = true;
3137 }
3138
3139 if (target_reg)
3140 {
3141 if (!in_conversion_seq)
3142 emit_move_insn (target_reg, stack_parm);
3143 else
3144 {
3145 push_to_sequence2 (all->first_conversion_insn,
3146 all->last_conversion_insn);
3147 emit_move_insn (target_reg, stack_parm);
3148 all->first_conversion_insn = get_insns ();
3149 all->last_conversion_insn = get_last_insn ();
3150 end_sequence ();
3151 }
3152 stack_parm = target_reg;
3153 }
3154
3155 data->stack_parm = stack_parm;
3156 set_parm_rtl (parm, stack_parm);
3157 }
3158
3159 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3160 parameter. Get it there. Perform all ABI specified conversions. */
3161
3162 static void
3163 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3164 struct assign_parm_data_one *data)
3165 {
3166 rtx parmreg, validated_mem;
3167 rtx equiv_stack_parm;
3168 machine_mode promoted_nominal_mode;
3169 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3170 bool did_conversion = false;
3171 bool need_conversion, moved;
3172 enum insn_code icode;
3173 rtx rtl;
3174
3175 /* Store the parm in a pseudoregister during the function, but we may
3176 need to do it in a wider mode. Using 2 here makes the result
3177 consistent with promote_decl_mode and thus expand_expr_real_1. */
3178 promoted_nominal_mode
3179 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3180 TREE_TYPE (current_function_decl), 2);
3181
3182 parmreg = gen_reg_rtx (promoted_nominal_mode);
3183 if (!DECL_ARTIFICIAL (parm))
3184 mark_user_reg (parmreg);
3185
3186 /* If this was an item that we received a pointer to,
3187 set rtl appropriately. */
3188 if (data->arg.pass_by_reference)
3189 {
3190 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3191 set_mem_attributes (rtl, parm, 1);
3192 }
3193 else
3194 rtl = parmreg;
3195
3196 assign_parm_remove_parallels (data);
3197
3198 /* Copy the value into the register, thus bridging between
3199 assign_parm_find_data_types and expand_expr_real_1. */
3200
3201 equiv_stack_parm = data->stack_parm;
3202 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3203
3204 need_conversion = (data->nominal_mode != data->passed_mode
3205 || promoted_nominal_mode != data->arg.mode);
3206 moved = false;
3207
3208 if (need_conversion
3209 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3210 && data->nominal_mode == data->passed_mode
3211 && data->nominal_mode == GET_MODE (data->entry_parm))
3212 {
3213 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3214 mode, by the caller. We now have to convert it to
3215 NOMINAL_MODE, if different. However, PARMREG may be in
3216 a different mode than NOMINAL_MODE if it is being stored
3217 promoted.
3218
3219 If ENTRY_PARM is a hard register, it might be in a register
3220 not valid for operating in its mode (e.g., an odd-numbered
3221 register for a DFmode). In that case, moves are the only
3222 thing valid, so we can't do a convert from there. This
3223 occurs when the calling sequence allows such misaligned
3224 usages.
3225
3226 In addition, the conversion may involve a call, which could
3227 clobber parameters which haven't been copied to pseudo
3228 registers yet.
3229
3230 First, we try to emit an insn which performs the necessary
3231 conversion. We verify that this insn does not clobber any
3232 hard registers. */
3233
3234 rtx op0, op1;
3235
3236 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3237 unsignedp);
3238
3239 op0 = parmreg;
3240 op1 = validated_mem;
3241 if (icode != CODE_FOR_nothing
3242 && insn_operand_matches (icode, 0, op0)
3243 && insn_operand_matches (icode, 1, op1))
3244 {
3245 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3246 rtx_insn *insn, *insns;
3247 rtx t = op1;
3248 HARD_REG_SET hardregs;
3249
3250 start_sequence ();
3251 /* If op1 is a hard register that is likely spilled, first
3252 force it into a pseudo, otherwise combiner might extend
3253 its lifetime too much. */
3254 if (GET_CODE (t) == SUBREG)
3255 t = SUBREG_REG (t);
3256 if (REG_P (t)
3257 && HARD_REGISTER_P (t)
3258 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3259 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3260 {
3261 t = gen_reg_rtx (GET_MODE (op1));
3262 emit_move_insn (t, op1);
3263 }
3264 else
3265 t = op1;
3266 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3267 data->passed_mode, unsignedp);
3268 emit_insn (pat);
3269 insns = get_insns ();
3270
3271 moved = true;
3272 CLEAR_HARD_REG_SET (hardregs);
3273 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3274 {
3275 if (INSN_P (insn))
3276 note_stores (insn, record_hard_reg_sets, &hardregs);
3277 if (!hard_reg_set_empty_p (hardregs))
3278 moved = false;
3279 }
3280
3281 end_sequence ();
3282
3283 if (moved)
3284 {
3285 emit_insn (insns);
3286 if (equiv_stack_parm != NULL_RTX)
3287 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3288 equiv_stack_parm);
3289 }
3290 }
3291 }
3292
3293 if (moved)
3294 /* Nothing to do. */
3295 ;
3296 else if (need_conversion)
3297 {
3298 /* We did not have an insn to convert directly, or the sequence
3299 generated appeared unsafe. We must first copy the parm to a
3300 pseudo reg, and save the conversion until after all
3301 parameters have been moved. */
3302
3303 int save_tree_used;
3304 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3305
3306 emit_move_insn (tempreg, validated_mem);
3307
3308 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3309 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3310
3311 if (partial_subreg_p (tempreg)
3312 && GET_MODE (tempreg) == data->nominal_mode
3313 && REG_P (SUBREG_REG (tempreg))
3314 && data->nominal_mode == data->passed_mode
3315 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3316 {
3317 /* The argument is already sign/zero extended, so note it
3318 into the subreg. */
3319 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3320 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3321 }
3322
3323 /* TREE_USED gets set erroneously during expand_assignment. */
3324 save_tree_used = TREE_USED (parm);
3325 SET_DECL_RTL (parm, rtl);
3326 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3327 SET_DECL_RTL (parm, NULL_RTX);
3328 TREE_USED (parm) = save_tree_used;
3329 all->first_conversion_insn = get_insns ();
3330 all->last_conversion_insn = get_last_insn ();
3331 end_sequence ();
3332
3333 did_conversion = true;
3334 }
3335 else if (MEM_P (data->entry_parm)
3336 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3337 > MEM_ALIGN (data->entry_parm)
3338 && (((icode = optab_handler (movmisalign_optab,
3339 promoted_nominal_mode))
3340 != CODE_FOR_nothing)
3341 || targetm.slow_unaligned_access (promoted_nominal_mode,
3342 MEM_ALIGN (data->entry_parm))))
3343 {
3344 if (icode != CODE_FOR_nothing)
3345 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3346 else
3347 rtl = parmreg = extract_bit_field (validated_mem,
3348 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3349 unsignedp, parmreg,
3350 promoted_nominal_mode, VOIDmode, false, NULL);
3351 }
3352 else
3353 emit_move_insn (parmreg, validated_mem);
3354
3355 /* If we were passed a pointer but the actual value can safely live
3356 in a register, retrieve it and use it directly. */
3357 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3358 {
3359 /* We can't use nominal_mode, because it will have been set to
3360 Pmode above. We must use the actual mode of the parm. */
3361 if (use_register_for_decl (parm))
3362 {
3363 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3364 mark_user_reg (parmreg);
3365 }
3366 else
3367 {
3368 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3369 TYPE_MODE (TREE_TYPE (parm)),
3370 TYPE_ALIGN (TREE_TYPE (parm)));
3371 parmreg
3372 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3373 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3374 align);
3375 set_mem_attributes (parmreg, parm, 1);
3376 }
3377
3378 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3379 the debug info in case it is not legitimate. */
3380 if (GET_MODE (parmreg) != GET_MODE (rtl))
3381 {
3382 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3383 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3384
3385 push_to_sequence2 (all->first_conversion_insn,
3386 all->last_conversion_insn);
3387 emit_move_insn (tempreg, rtl);
3388 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3389 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3390 tempreg);
3391 all->first_conversion_insn = get_insns ();
3392 all->last_conversion_insn = get_last_insn ();
3393 end_sequence ();
3394
3395 did_conversion = true;
3396 }
3397 else
3398 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3399
3400 rtl = parmreg;
3401
3402 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3403 now the parm. */
3404 data->stack_parm = NULL;
3405 }
3406
3407 set_parm_rtl (parm, rtl);
3408
3409 /* Mark the register as eliminable if we did no conversion and it was
3410 copied from memory at a fixed offset, and the arg pointer was not
3411 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3412 offset formed an invalid address, such memory-equivalences as we
3413 make here would screw up life analysis for it. */
3414 if (data->nominal_mode == data->passed_mode
3415 && !did_conversion
3416 && data->stack_parm != 0
3417 && MEM_P (data->stack_parm)
3418 && data->locate.offset.var == 0
3419 && reg_mentioned_p (virtual_incoming_args_rtx,
3420 XEXP (data->stack_parm, 0)))
3421 {
3422 rtx_insn *linsn = get_last_insn ();
3423 rtx_insn *sinsn;
3424 rtx set;
3425
3426 /* Mark complex types separately. */
3427 if (GET_CODE (parmreg) == CONCAT)
3428 {
3429 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3430 int regnor = REGNO (XEXP (parmreg, 0));
3431 int regnoi = REGNO (XEXP (parmreg, 1));
3432 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3433 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3434 GET_MODE_SIZE (submode));
3435
3436 /* Scan backwards for the set of the real and
3437 imaginary parts. */
3438 for (sinsn = linsn; sinsn != 0;
3439 sinsn = prev_nonnote_insn (sinsn))
3440 {
3441 set = single_set (sinsn);
3442 if (set == 0)
3443 continue;
3444
3445 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3446 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3447 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3448 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3449 }
3450 }
3451 else
3452 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3453 }
3454
3455 /* For pointer data type, suggest pointer register. */
3456 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3457 mark_reg_pointer (parmreg,
3458 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3459 }
3460
3461 /* A subroutine of assign_parms. Allocate stack space to hold the current
3462 parameter. Get it there. Perform all ABI specified conversions. */
3463
3464 static void
3465 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3466 struct assign_parm_data_one *data)
3467 {
3468 /* Value must be stored in the stack slot STACK_PARM during function
3469 execution. */
3470 bool to_conversion = false;
3471
3472 assign_parm_remove_parallels (data);
3473
3474 if (data->arg.mode != data->nominal_mode)
3475 {
3476 /* Conversion is required. */
3477 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3478
3479 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3480
3481 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3482 to_conversion = true;
3483
3484 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3485 TYPE_UNSIGNED (TREE_TYPE (parm)));
3486
3487 if (data->stack_parm)
3488 {
3489 poly_int64 offset
3490 = subreg_lowpart_offset (data->nominal_mode,
3491 GET_MODE (data->stack_parm));
3492 /* ??? This may need a big-endian conversion on sparc64. */
3493 data->stack_parm
3494 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3495 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3496 set_mem_offset (data->stack_parm,
3497 MEM_OFFSET (data->stack_parm) + offset);
3498 }
3499 }
3500
3501 if (data->entry_parm != data->stack_parm)
3502 {
3503 rtx src, dest;
3504
3505 if (data->stack_parm == 0)
3506 {
3507 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3508 GET_MODE (data->entry_parm),
3509 TYPE_ALIGN (data->arg.type));
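/* If the slot would be under-aligned for the mode, but the target
   either has a misaligned-move pattern or is slow at unaligned
   accesses, allocate the slot with the full mode alignment. */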
3510 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3511 && ((optab_handler (movmisalign_optab,
3512 GET_MODE (data->entry_parm))
3513 != CODE_FOR_nothing)
3514 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3515 align)))
3516 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3517 data->stack_parm
3518 = assign_stack_local (GET_MODE (data->entry_parm),
3519 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3520 align);
3521 align = MEM_ALIGN (data->stack_parm);
3522 set_mem_attributes (data->stack_parm, parm, 1);
3523 set_mem_align (data->stack_parm, align);
3524 }
3525
3526 dest = validize_mem (copy_rtx (data->stack_parm));
3527 src = validize_mem (copy_rtx (data->entry_parm));
3528
3529 if (TYPE_EMPTY_P (data->arg.type))
3530 /* Empty types don't really need to be copied. */;
3531 else if (MEM_P (src))
3532 {
3533 /* Use a block move to handle potentially misaligned entry_parm. */
3534 if (!to_conversion)
3535 push_to_sequence2 (all->first_conversion_insn,
3536 all->last_conversion_insn);
3537 to_conversion = true;
3538
3539 emit_block_move (dest, src,
3540 GEN_INT (int_size_in_bytes (data->arg.type)),
3541 BLOCK_OP_NORMAL);
3542 }
3543 else
3544 {
3545 if (!REG_P (src))
3546 src = force_reg (GET_MODE (src), src);
3547 emit_move_insn (dest, src);
3548 }
3549 }
3550
3551 if (to_conversion)
3552 {
3553 all->first_conversion_insn = get_insns ();
3554 all->last_conversion_insn = get_last_insn ();
3555 end_sequence ();
3556 }
3557
3558 set_parm_rtl (parm, data->stack_parm);
3559 }
3560
3561 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3562 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3563
3564 static void
3565 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3566 vec<tree> fnargs)
3567 {
3568 tree parm;
3569 tree orig_fnargs = all->orig_fnargs;
3570 unsigned i = 0;
3571
3572 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3573 {
3574 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3575 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3576 {
3577 rtx tmp, real, imag;
3578 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3579
3580 real = DECL_RTL (fnargs[i]);
3581 imag = DECL_RTL (fnargs[i + 1]);
3582 if (inner != GET_MODE (real))
3583 {
3584 real = gen_lowpart_SUBREG (inner, real);
3585 imag = gen_lowpart_SUBREG (inner, imag);
3586 }
3587
3588 if (TREE_ADDRESSABLE (parm))
3589 {
3590 rtx rmem, imem;
3591 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3592 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3593 DECL_MODE (parm),
3594 TYPE_ALIGN (TREE_TYPE (parm)));
3595
3596 /* split_complex_arg put the real and imag parts in
3597 pseudos. Move them to memory. */
3598 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3599 set_mem_attributes (tmp, parm, 1);
3600 rmem = adjust_address_nv (tmp, inner, 0);
3601 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3602 push_to_sequence2 (all->first_conversion_insn,
3603 all->last_conversion_insn);
3604 emit_move_insn (rmem, real);
3605 emit_move_insn (imem, imag);
3606 all->first_conversion_insn = get_insns ();
3607 all->last_conversion_insn = get_last_insn ();
3608 end_sequence ();
3609 }
3610 else
3611 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3612 set_parm_rtl (parm, tmp);
3613
3614 real = DECL_INCOMING_RTL (fnargs[i]);
3615 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3616 if (inner != GET_MODE (real))
3617 {
3618 real = gen_lowpart_SUBREG (inner, real);
3619 imag = gen_lowpart_SUBREG (inner, imag);
3620 }
3621 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3622 set_decl_incoming_rtl (parm, tmp, false);
3623 i++;
3624 }
3625 }
3626 }
3627
3628 /* Assign RTL expressions to the function's parameters. This may involve
3629 copying them into registers and using those registers as the DECL_RTL. */
3630
3631 static void
3632 assign_parms (tree fndecl)
3633 {
3634 struct assign_parm_data_all all;
3635 tree parm;
3636 vec<tree> fnargs;
3637 unsigned i;
3638
3639 crtl->args.internal_arg_pointer
3640 = targetm.calls.internal_arg_pointer ();
3641
3642 assign_parms_initialize_all (&all);
3643 fnargs = assign_parms_augmented_arg_list (&all);
3644
3645 FOR_EACH_VEC_ELT (fnargs, i, parm)
3646 {
3647 struct assign_parm_data_one data;
3648
3649 /* Extract the type of PARM; adjust it according to ABI. */
3650 assign_parm_find_data_types (&all, parm, &data);
3651
3652 /* Early out for errors and void parameters. */
3653 if (data.passed_mode == VOIDmode)
3654 {
3655 SET_DECL_RTL (parm, const0_rtx);
3656 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3657 continue;
3658 }
3659
3660 /* Estimate stack alignment from parameter alignment. */
3661 if (SUPPORTS_STACK_ALIGNMENT)
3662 {
3663 unsigned int align
3664 = targetm.calls.function_arg_boundary (data.arg.mode,
3665 data.arg.type);
3666 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3667 if (TYPE_ALIGN (data.nominal_type) > align)
3668 align = MINIMUM_ALIGNMENT (data.nominal_type,
3669 TYPE_MODE (data.nominal_type),
3670 TYPE_ALIGN (data.nominal_type));
3671 if (crtl->stack_alignment_estimated < align)
3672 {
3673 gcc_assert (!crtl->stack_realign_processed);
3674 crtl->stack_alignment_estimated = align;
3675 }
3676 }
3677
3678 /* Find out where the parameter arrives in this function. */
3679 assign_parm_find_entry_rtl (&all, &data);
3680
3681 /* Find out where stack space for this parameter might be. */
3682 if (assign_parm_is_stack_parm (&all, &data))
3683 {
3684 assign_parm_find_stack_rtl (parm, &data);
3685 assign_parm_adjust_entry_rtl (&data);
3686 /* For arguments that occupy no space in the parameter
3687 passing area, have non-zero size, and have their address taken,
3688 force creation of a stack slot so that they have a distinct
3689 address from other parameters. */
3690 if (TYPE_EMPTY_P (data.arg.type)
3691 && TREE_ADDRESSABLE (parm)
3692 && data.entry_parm == data.stack_parm
3693 && MEM_P (data.entry_parm)
3694 && int_size_in_bytes (data.arg.type))
3695 data.stack_parm = NULL_RTX;
3696 }
3697 /* Record permanently how this parm was passed. */
3698 if (data.arg.pass_by_reference)
3699 {
3700 rtx incoming_rtl
3701 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3702 data.entry_parm);
3703 set_decl_incoming_rtl (parm, incoming_rtl, true);
3704 }
3705 else
3706 set_decl_incoming_rtl (parm, data.entry_parm, false);
3707
3708 assign_parm_adjust_stack_rtl (&data);
3709
3710 if (assign_parm_setup_block_p (&data))
3711 assign_parm_setup_block (&all, parm, &data);
3712 else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3713 assign_parm_setup_reg (&all, parm, &data);
3714 else
3715 assign_parm_setup_stack (&all, parm, &data);
3716
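/* For a stdarg function, let the target set up its incoming varargs
   handling once the last named parameter has been processed. */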
3717 if (cfun->stdarg && !DECL_CHAIN (parm))
3718 assign_parms_setup_varargs (&all, &data, false);
3719
3720 /* Update info on where next arg arrives in registers. */
3721 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3722 }
3723
3724 if (targetm.calls.split_complex_arg)
3725 assign_parms_unsplit_complex (&all, fnargs);
3726
3727 fnargs.release ();
3728
3729 /* Output all parameter conversion instructions (possibly including calls)
3730 now that all parameters have been copied out of hard registers. */
3731 emit_insn (all.first_conversion_insn);
3732
3733 /* Estimate reload stack alignment from scalar return mode. */
3734 if (SUPPORTS_STACK_ALIGNMENT)
3735 {
3736 if (DECL_RESULT (fndecl))
3737 {
3738 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3739 machine_mode mode = TYPE_MODE (type);
3740
3741 if (mode != BLKmode
3742 && mode != VOIDmode
3743 && !AGGREGATE_TYPE_P (type))
3744 {
3745 unsigned int align = GET_MODE_ALIGNMENT (mode);
3746 if (crtl->stack_alignment_estimated < align)
3747 {
3748 gcc_assert (!crtl->stack_realign_processed);
3749 crtl->stack_alignment_estimated = align;
3750 }
3751 }
3752 }
3753 }
3754
3755 /* If we are receiving a struct value address as the first argument, set up
3756 the RTL for the function result. As this might require code to convert
3757 the transmitted address to Pmode, we do this here to ensure that possible
3758 preliminary conversions of the address have been emitted already. */
3759 if (all.function_result_decl)
3760 {
3761 tree result = DECL_RESULT (current_function_decl);
3762 rtx addr = DECL_RTL (all.function_result_decl);
3763 rtx x;
3764
3765 if (DECL_BY_REFERENCE (result))
3766 {
3767 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3768 x = addr;
3769 }
3770 else
3771 {
3772 SET_DECL_VALUE_EXPR (result,
3773 build1 (INDIRECT_REF, TREE_TYPE (result),
3774 all.function_result_decl));
3775 addr = convert_memory_address (Pmode, addr);
3776 x = gen_rtx_MEM (DECL_MODE (result), addr);
3777 set_mem_attributes (x, result, 1);
3778 }
3779
3780 DECL_HAS_VALUE_EXPR_P (result) = 1;
3781
3782 set_parm_rtl (result, x);
3783 }
3784
3785 /* We have aligned all the args, so add space for the pretend args. */
3786 crtl->args.pretend_args_size = all.pretend_args_size;
3787 all.stack_args_size.constant += all.extra_pretend_bytes;
3788 crtl->args.size = all.stack_args_size.constant;
3789
3790 /* Adjust function incoming argument size for alignment and
3791 minimum length. */
3792
3793 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3794 crtl->args.size = aligned_upper_bound (crtl->args.size,
3795 PARM_BOUNDARY / BITS_PER_UNIT);
3796
3797 if (ARGS_GROW_DOWNWARD)
3798 {
3799 crtl->args.arg_offset_rtx
3800 = (all.stack_args_size.var == 0
3801 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3802 : expand_expr (size_diffop (all.stack_args_size.var,
3803 size_int (-all.stack_args_size.constant)),
3804 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3805 }
3806 else
3807 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3808
3809 /* See how many bytes, if any, of its args a function should try to pop
3810 on return. */
3811
3812 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3813 TREE_TYPE (fndecl),
3814 crtl->args.size);
3815
3816 /* For a stdarg.h function, save info about
3817 regs and stack space used by the named args. */
3818
3819 crtl->args.info = all.args_so_far_v;
3820
3821 /* Set the rtx used for the function return value. Put this in its
3822 own variable so any optimizers that need this information don't have
3823 to include tree.h. Do this here so it gets done when an inlined
3824 function gets output. */
3825
3826 crtl->return_rtx
3827 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3828 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3829
3830 /* If scalar return value was computed in a pseudo-reg, or was a named
3831 return value that got dumped to the stack, copy that to the hard
3832 return register. */
3833 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3834 {
3835 tree decl_result = DECL_RESULT (fndecl);
3836 rtx decl_rtl = DECL_RTL (decl_result);
3837
3838 if (REG_P (decl_rtl)
3839 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3840 : DECL_REGISTER (decl_result))
3841 {
3842 rtx real_decl_rtl;
3843
3844 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3845 fndecl, true);
3846 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3847 /* The delay slot scheduler assumes that crtl->return_rtx
3848 holds the hard register containing the return value, not a
3849 temporary pseudo. */
3850 crtl->return_rtx = real_decl_rtl;
3851 }
3852 }
3853 }
3854
3855 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3856 For all seen types, gimplify their sizes. */
3857
3858 static tree
3859 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3860 {
3861 tree t = *tp;
3862
3863 *walk_subtrees = 0;
3864 if (TYPE_P (t))
3865 {
3866 if (POINTER_TYPE_P (t))
3867 *walk_subtrees = 1;
3868 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3869 && !TYPE_SIZES_GIMPLIFIED (t))
3870 {
3871 gimplify_type_sizes (t, (gimple_seq *) data);
3872 *walk_subtrees = 1;
3873 }
3874 }
3875
3876 return NULL;
3877 }
3878
3879 /* Gimplify the parameter list for current_function_decl. This involves
3880 evaluating SAVE_EXPRs of variable sized parameters and generating code
3881 to implement callee-copies reference parameters. Returns a sequence of
3882 statements to add to the beginning of the function. */
3883
3884 gimple_seq
3885 gimplify_parameters (gimple_seq *cleanup)
3886 {
3887 struct assign_parm_data_all all;
3888 tree parm;
3889 gimple_seq stmts = NULL;
3890 vec<tree> fnargs;
3891 unsigned i;
3892
3893 assign_parms_initialize_all (&all);
3894 fnargs = assign_parms_augmented_arg_list (&all);
3895
3896 FOR_EACH_VEC_ELT (fnargs, i, parm)
3897 {
3898 struct assign_parm_data_one data;
3899
3900 /* Extract the type of PARM; adjust it according to ABI. */
3901 assign_parm_find_data_types (&all, parm, &data);
3902
3903 /* Early out for errors and void parameters. */
3904 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3905 continue;
3906
3907 /* Update info on where next arg arrives in registers. */
3908 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3909
3910 /* ??? Once upon a time variable_size stuffed parameter list
3911 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3912 turned out to be less than manageable in the gimple world.
3913 Now we have to hunt them down ourselves. */
3914 walk_tree_without_duplicates (&data.arg.type,
3915 gimplify_parm_type, &stmts);
3916
3917 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3918 {
3919 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3920 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3921 }
3922
3923 if (data.arg.pass_by_reference)
3924 {
3925 tree type = TREE_TYPE (data.arg.type);
3926 function_arg_info orig_arg (type, data.arg.named);
3927 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3928 {
3929 tree local, t;
3930
3931 /* For constant-sized objects, this is trivial; for
3932 variable-sized objects, we have to play games. */
3933 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3934 && !(flag_stack_check == GENERIC_STACK_CHECK
3935 && compare_tree_int (DECL_SIZE_UNIT (parm),
3936 STACK_CHECK_MAX_VAR_SIZE) > 0))
3937 {
3938 local = create_tmp_var (type, get_name (parm));
3939 DECL_IGNORED_P (local) = 0;
3940 /* If PARM was addressable, move that flag over
3941 to the local copy, as its address will be taken,
3942 not the PARM's. Keep the PARM marked as address-taken,
3943 as we'll query that flag during gimplification. */
3944 if (TREE_ADDRESSABLE (parm))
3945 TREE_ADDRESSABLE (local) = 1;
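/* A copy of a non-addressable complex or vector parameter can be
   treated as a gimple register. */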
3946 else if (TREE_CODE (type) == COMPLEX_TYPE
3947 || TREE_CODE (type) == VECTOR_TYPE)
3948 DECL_GIMPLE_REG_P (local) = 1;
3949
3950 if (!is_gimple_reg (local)
3951 && flag_stack_reuse != SR_NONE)
3952 {
3953 tree clobber = build_clobber (type);
3954 gimple *clobber_stmt;
3955 clobber_stmt = gimple_build_assign (local, clobber);
3956 gimple_seq_add_stmt (cleanup, clobber_stmt);
3957 }
3958 }
3959 else
3960 {
3961 tree ptr_type, addr;
3962
3963 ptr_type = build_pointer_type (type);
3964 addr = create_tmp_reg (ptr_type, get_name (parm));
3965 DECL_IGNORED_P (addr) = 0;
3966 local = build_fold_indirect_ref (addr);
3967
3968 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3969 DECL_ALIGN (parm),
3970 max_int_size_in_bytes (type));
3971 /* The call has been built for a variable-sized object. */
3972 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3973 t = fold_convert (ptr_type, t);
3974 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3975 gimplify_and_add (t, &stmts);
3976 }
3977
3978 gimplify_assign (local, parm, &stmts);
3979
3980 SET_DECL_VALUE_EXPR (parm, local);
3981 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3982 }
3983 }
3984 }
3985
3986 fnargs.release ();
3987
3988 return stmts;
3989 }
3990
3991 /* Compute the size and offset from the start of the stacked arguments for a
3992 parm passed in mode PASSED_MODE and with type TYPE.
3993
3994 INITIAL_OFFSET_PTR points to the current offset into the stacked
3995 arguments.
3996
3997 The starting offset and size for this parm are returned in
3998 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3999 nonzero, the offset is that of the stack slot, which is returned in
4000 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4001 padding required from the initial offset ptr to the stack slot.
4002
4003 IN_REGS is nonzero if the argument will be passed in registers. It will
4004 never be set if REG_PARM_STACK_SPACE is not defined.
4005
4006 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4007 for arguments which are passed in registers.
4008
4009 FNDECL is the function in which the argument was defined.
4010
4011 There are two types of rounding that are done. The first, controlled by
4012 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4013 argument list to be aligned to the specific boundary (in bits). This
4014 rounding affects the initial and starting offsets, but not the argument
4015 size.
4016
4017 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4018 optionally rounds the size of the parm to PARM_BOUNDARY. The
4019 initial offset is not affected by this rounding, while the size always
4020 is and the starting offset may be. */
4021
4022 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4023 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4024 callers pass in the total size of args so far as
4025 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4026
4027 void
4028 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4029 int reg_parm_stack_space, int partial,
4030 tree fndecl ATTRIBUTE_UNUSED,
4031 struct args_size *initial_offset_ptr,
4032 struct locate_and_pad_arg_data *locate)
4033 {
4034 tree sizetree;
4035 pad_direction where_pad;
4036 unsigned int boundary, round_boundary;
4037 int part_size_in_regs;
4038
4039 /* If we have found a stack parm before we reach the end of the
4040 area reserved for registers, skip that area. */
4041 if (! in_regs)
4042 {
4043 if (reg_parm_stack_space > 0)
4044 {
4045 if (initial_offset_ptr->var
4046 || !ordered_p (initial_offset_ptr->constant,
4047 reg_parm_stack_space))
4048 {
4049 initial_offset_ptr->var
4050 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4051 ssize_int (reg_parm_stack_space));
4052 initial_offset_ptr->constant = 0;
4053 }
4054 else
4055 initial_offset_ptr->constant
4056 = ordered_max (initial_offset_ptr->constant,
4057 reg_parm_stack_space);
4058 }
4059 }
4060
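/* Roughly: when no stack space is reserved for register-passed
   arguments, the first PARTIAL bytes of this argument live only in
   registers, so the stack slot computed below is shrunk (and, for
   downward-growing args, shifted) by that amount. */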
4061 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4062
4063 sizetree = (type
4064 ? arg_size_in_bytes (type)
4065 : size_int (GET_MODE_SIZE (passed_mode)));
4066 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4067 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4068 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4069 type);
4070 locate->where_pad = where_pad;
4071
4072 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4073 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4074 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4075
4076 locate->boundary = boundary;
4077
4078 if (SUPPORTS_STACK_ALIGNMENT)
4079 {
4080 /* stack_alignment_estimated can't change after stack has been
4081 realigned. */
4082 if (crtl->stack_alignment_estimated < boundary)
4083 {
4084 if (!crtl->stack_realign_processed)
4085 crtl->stack_alignment_estimated = boundary;
4086 else
4087 {
4088 /* If stack is realigned and stack alignment value
4089 hasn't been finalized, it is OK not to increase
4090 stack_alignment_estimated. The bigger alignment
4091 requirement is recorded in stack_alignment_needed
4092 below. */
4093 gcc_assert (!crtl->stack_realign_finalized
4094 && crtl->stack_realign_needed);
4095 }
4096 }
4097 }
4098
4099 if (ARGS_GROW_DOWNWARD)
4100 {
4101 locate->slot_offset.constant = -initial_offset_ptr->constant;
4102 if (initial_offset_ptr->var)
4103 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4104 initial_offset_ptr->var);
4105
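/* When arguments grow downward, the slot lies below the current
   offset, so move the slot offset down by the parameter's size,
   rounded to round_boundary when padding applies, mirroring the
   rounding done for the upward-growing case below. */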
4106 {
4107 tree s2 = sizetree;
4108 if (where_pad != PAD_NONE
4109 && (!tree_fits_uhwi_p (sizetree)
4110 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4111 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4112 SUB_PARM_SIZE (locate->slot_offset, s2);
4113 }
4114
4115 locate->slot_offset.constant += part_size_in_regs;
4116
4117 if (!in_regs || reg_parm_stack_space > 0)
4118 pad_to_arg_alignment (&locate->slot_offset, boundary,
4119 &locate->alignment_pad);
4120
4121 locate->size.constant = (-initial_offset_ptr->constant
4122 - locate->slot_offset.constant);
4123 if (initial_offset_ptr->var)
4124 locate->size.var = size_binop (MINUS_EXPR,
4125 size_binop (MINUS_EXPR,
4126 ssize_int (0),
4127 initial_offset_ptr->var),
4128 locate->slot_offset.var);
4129
4130 /* Pad_below needs the pre-rounded size to know how much to pad
4131 below. */
4132 locate->offset = locate->slot_offset;
4133 if (where_pad == PAD_DOWNWARD)
4134 pad_below (&locate->offset, passed_mode, sizetree);
4135
4136 }
4137 else
4138 {
4139 if (!in_regs || reg_parm_stack_space > 0)
4140 pad_to_arg_alignment (initial_offset_ptr, boundary,
4141 &locate->alignment_pad);
4142 locate->slot_offset = *initial_offset_ptr;
4143
4144 #ifdef PUSH_ROUNDING
4145 if (passed_mode != BLKmode)
4146 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4147 #endif
4148
4149 /* Pad_below needs the pre-rounded size to know how much to pad below
4150 so this must be done before rounding up. */
4151 locate->offset = locate->slot_offset;
4152 if (where_pad == PAD_DOWNWARD)
4153 pad_below (&locate->offset, passed_mode, sizetree);
4154
4155 if (where_pad != PAD_NONE
4156 && (!tree_fits_uhwi_p (sizetree)
4157 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4158 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4159
4160 ADD_PARM_SIZE (locate->size, sizetree);
4161
4162 locate->size.constant -= part_size_in_regs;
4163 }
4164
4165 locate->offset.constant
4166 += targetm.calls.function_arg_offset (passed_mode, type);
4167 }
4168
4169 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4170 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4171
4172 static void
4173 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4174 struct args_size *alignment_pad)
4175 {
4176 tree save_var = NULL_TREE;
4177 poly_int64 save_constant = 0;
4178 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4179 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4180
4181 #ifdef SPARC_STACK_BOUNDARY_HACK
4182 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4183 the real alignment of %sp. However, when it does this, the
4184 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4185 if (SPARC_STACK_BOUNDARY_HACK)
4186 sp_offset = 0;
4187 #endif
4188
4189 if (boundary > PARM_BOUNDARY)
4190 {
4191 save_var = offset_ptr->var;
4192 save_constant = offset_ptr->constant;
4193 }
4194
4195 alignment_pad->var = NULL_TREE;
4196 alignment_pad->constant = 0;
4197
4198 if (boundary > BITS_PER_UNIT)
4199 {
4200 int misalign;
4201 if (offset_ptr->var
4202 || !known_misalignment (offset_ptr->constant + sp_offset,
4203 boundary_in_bytes, &misalign))
4204 {
4205 tree sp_offset_tree = ssize_int (sp_offset);
4206 tree offset = size_binop (PLUS_EXPR,
4207 ARGS_SIZE_TREE (*offset_ptr),
4208 sp_offset_tree);
4209 tree rounded;
4210 if (ARGS_GROW_DOWNWARD)
4211 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4212 else
4213 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4214
4215 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4216 /* ARGS_SIZE_TREE includes constant term. */
4217 offset_ptr->constant = 0;
4218 if (boundary > PARM_BOUNDARY)
4219 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4220 save_var);
4221 }
4222 else
4223 {
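/* The offset is a compile-time constant with known misalignment,
   so pad it numerically. E.g. with boundary_in_bytes == 8 and
   misalign == 3, the upward-growing case below adds -3 & 7 == 5
   bytes so that the padded offset is again a multiple of 8. */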
4224 if (ARGS_GROW_DOWNWARD)
4225 offset_ptr->constant -= misalign;
4226 else
4227 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4228
4229 if (boundary > PARM_BOUNDARY)
4230 alignment_pad->constant = offset_ptr->constant - save_constant;
4231 }
4232 }
4233 }
4234
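/* Advance *OFFSET_PTR past the padding placed below an argument of
   mode PASSED_MODE and size SIZETREE, i.e. by however many bytes are
   needed to round the argument's size up to a multiple of
   PARM_BOUNDARY. */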
4235 static void
4236 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4237 {
4238 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4239 int misalign;
4240 if (passed_mode != BLKmode
4241 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4242 offset_ptr->constant += -misalign & (align - 1);
4243 else
4244 {
4245 if (TREE_CODE (sizetree) != INTEGER_CST
4246 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4247 {
4248 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4249 tree s2 = round_up (sizetree, align);
4250 /* Add it in. */
4251 ADD_PARM_SIZE (*offset_ptr, s2);
4252 SUB_PARM_SIZE (*offset_ptr, sizetree);
4253 }
4254 }
4255 }
4256
4257
4258 /* True if register REGNO was alive at a place where `setjmp' was
4259 called and was set more than once or is an argument. Such regs may
4260 be clobbered by `longjmp'. */
4261
4262 static bool
4263 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4264 {
4265 /* There appear to be cases where some local vars never reach the
4266 backend but have bogus regnos. */
4267 if (regno >= max_reg_num ())
4268 return false;
4269
4270 return ((REG_N_SETS (regno) > 1
4271 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4272 regno))
4273 && REGNO_REG_SET_P (setjmp_crosses, regno));
4274 }
4275
4276 /* Walk the tree of blocks describing the binding levels within a
4277 function and warn about variables that might be clobbered by setjmp
4278 or vfork. This is done after flow analysis and before register
4279 allocation, since register allocation will replace the pseudo-regs
4280 with hard regs. */
4281
4282 static void
4283 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4284 {
4285 tree decl, sub;
4286
4287 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4288 {
4289 if (VAR_P (decl)
4290 && DECL_RTL_SET_P (decl)
4291 && REG_P (DECL_RTL (decl))
4292 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4293 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4294 " %<longjmp%> or %<vfork%>", decl);
4295 }
4296
4297 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4298 setjmp_vars_warning (setjmp_crosses, sub);
4299 }
4300
4301 /* Do the appropriate part of setjmp_vars_warning
4302 but for arguments instead of local variables. */
4303
4304 static void
4305 setjmp_args_warning (bitmap setjmp_crosses)
4306 {
4307 tree decl;
4308 for (decl = DECL_ARGUMENTS (current_function_decl);
4309 decl; decl = DECL_CHAIN (decl))
4310 if (DECL_RTL (decl) != 0
4311 && REG_P (DECL_RTL (decl))
4312 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4313 warning (OPT_Wclobbered,
4314 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4315 decl);
4316 }
4317
4318 /* Generate warning messages for variables live across setjmp. */
4319
4320 void
4321 generate_setjmp_warnings (void)
4322 {
4323 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4324
4325 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4326 || bitmap_empty_p (setjmp_crosses))
4327 return;
4328
4329 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4330 setjmp_args_warning (setjmp_crosses);
4331 }
4332
4333
4334 /* Reverse the order of elements in the fragment chain T of blocks,
4335 and return the new head of the chain (old last element).
4336 In addition to that clear BLOCK_SAME_RANGE flags when needed
4337 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4338 its super fragment origin. */
4339
4340 static tree
4341 block_fragments_nreverse (tree t)
4342 {
4343 tree prev = 0, block, next, prev_super = 0;
4344 tree super = BLOCK_SUPERCONTEXT (t);
4345 if (BLOCK_FRAGMENT_ORIGIN (super))
4346 super = BLOCK_FRAGMENT_ORIGIN (super);
4347 for (block = t; block; block = next)
4348 {
4349 next = BLOCK_FRAGMENT_CHAIN (block);
4350 BLOCK_FRAGMENT_CHAIN (block) = prev;
4351 if ((prev && !BLOCK_SAME_RANGE (prev))
4352 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4353 != prev_super))
4354 BLOCK_SAME_RANGE (block) = 0;
4355 prev_super = BLOCK_SUPERCONTEXT (block);
4356 BLOCK_SUPERCONTEXT (block) = super;
4357 prev = block;
4358 }
4359 t = BLOCK_FRAGMENT_ORIGIN (t);
4360 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4361 != prev_super)
4362 BLOCK_SAME_RANGE (t) = 0;
4363 BLOCK_SUPERCONTEXT (t) = super;
4364 return prev;
4365 }
4366
4367 /* Reverse the order of elements in the chain T of blocks,
4368 and return the new head of the chain (old last element).
4369 Also do the same on subblocks and reverse the order of elements
4370 in BLOCK_FRAGMENT_CHAIN as well. */
4371
4372 static tree
4373 blocks_nreverse_all (tree t)
4374 {
4375 tree prev = 0, block, next;
4376 for (block = t; block; block = next)
4377 {
4378 next = BLOCK_CHAIN (block);
4379 BLOCK_CHAIN (block) = prev;
4380 if (BLOCK_FRAGMENT_CHAIN (block)
4381 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4382 {
4383 BLOCK_FRAGMENT_CHAIN (block)
4384 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4385 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4386 BLOCK_SAME_RANGE (block) = 0;
4387 }
4388 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4389 prev = block;
4390 }
4391 return prev;
4392 }
4393
4394
4395 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4396 and create duplicate blocks. */
4397 /* ??? Need an option to either create block fragments or to create
4398 abstract origin duplicates of a source block. It really depends
4399 on what optimization has been performed. */
4400
4401 void
4402 reorder_blocks (void)
4403 {
4404 tree block = DECL_INITIAL (current_function_decl);
4405
4406 if (block == NULL_TREE)
4407 return;
4408
4409 auto_vec<tree, 10> block_stack;
4410
4411 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4412 clear_block_marks (block);
4413
4414 /* Prune the old trees away, so that they don't get in the way. */
4415 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4416 BLOCK_CHAIN (block) = NULL_TREE;
4417
4418 /* Recreate the block tree from the note nesting. */
4419 reorder_blocks_1 (get_insns (), block, &block_stack);
4420 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4421 }
4422
4423 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4424
4425 void
4426 clear_block_marks (tree block)
4427 {
4428 while (block)
4429 {
4430 TREE_ASM_WRITTEN (block) = 0;
4431 clear_block_marks (BLOCK_SUBBLOCKS (block));
4432 block = BLOCK_CHAIN (block);
4433 }
4434 }
4435
4436 static void
4437 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4438 vec<tree> *p_block_stack)
4439 {
4440 rtx_insn *insn;
4441 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4442
4443 for (insn = insns; insn; insn = NEXT_INSN (insn))
4444 {
4445 if (NOTE_P (insn))
4446 {
4447 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4448 {
4449 tree block = NOTE_BLOCK (insn);
4450 tree origin;
4451
4452 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4453 origin = block;
4454
4455 if (prev_end)
4456 BLOCK_SAME_RANGE (prev_end) = 0;
4457 prev_end = NULL_TREE;
4458
4459 /* If we have seen this block before, that means it now
4460 spans multiple address regions. Create a new fragment. */
4461 if (TREE_ASM_WRITTEN (block))
4462 {
4463 tree new_block = copy_node (block);
4464
4465 BLOCK_SAME_RANGE (new_block) = 0;
4466 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4467 BLOCK_FRAGMENT_CHAIN (new_block)
4468 = BLOCK_FRAGMENT_CHAIN (origin);
4469 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4470
4471 NOTE_BLOCK (insn) = new_block;
4472 block = new_block;
4473 }
4474
4475 if (prev_beg == current_block && prev_beg)
4476 BLOCK_SAME_RANGE (block) = 1;
4477
4478 prev_beg = origin;
4479
4480 BLOCK_SUBBLOCKS (block) = 0;
4481 TREE_ASM_WRITTEN (block) = 1;
4482 /* When there's only one block for the entire function,
4483 current_block == block and we mustn't do this, it
4484 will cause infinite recursion. */
4485 if (block != current_block)
4486 {
4487 tree super;
4488 if (block != origin)
4489 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4490 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4491 (origin))
4492 == current_block);
4493 if (p_block_stack->is_empty ())
4494 super = current_block;
4495 else
4496 {
4497 super = p_block_stack->last ();
4498 gcc_assert (super == current_block
4499 || BLOCK_FRAGMENT_ORIGIN (super)
4500 == current_block);
4501 }
4502 BLOCK_SUPERCONTEXT (block) = super;
4503 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4504 BLOCK_SUBBLOCKS (current_block) = block;
4505 current_block = origin;
4506 }
4507 p_block_stack->safe_push (block);
4508 }
4509 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4510 {
4511 NOTE_BLOCK (insn) = p_block_stack->pop ();
4512 current_block = BLOCK_SUPERCONTEXT (current_block);
4513 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4514 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4515 prev_beg = NULL_TREE;
4516 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4517 ? NOTE_BLOCK (insn) : NULL_TREE;
4518 }
4519 }
4520 else
4521 {
4522 prev_beg = NULL_TREE;
4523 if (prev_end)
4524 BLOCK_SAME_RANGE (prev_end) = 0;
4525 prev_end = NULL_TREE;
4526 }
4527 }
4528 }
4529
4530 /* Reverse the order of elements in the chain T of blocks,
4531 and return the new head of the chain (old last element). */
4532
4533 tree
4534 blocks_nreverse (tree t)
4535 {
4536 tree prev = 0, block, next;
4537 for (block = t; block; block = next)
4538 {
4539 next = BLOCK_CHAIN (block);
4540 BLOCK_CHAIN (block) = prev;
4541 prev = block;
4542 }
4543 return prev;
4544 }
4545
4546 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4547 by modifying the last node in chain 1 to point to chain 2. */
4548
4549 tree
4550 block_chainon (tree op1, tree op2)
4551 {
4552 tree t1;
4553
4554 if (!op1)
4555 return op2;
4556 if (!op2)
4557 return op1;
4558
4559 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4560 continue;
4561 BLOCK_CHAIN (t1) = op2;
4562
4563 #ifdef ENABLE_TREE_CHECKING
4564 {
4565 tree t2;
4566 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4567 gcc_assert (t2 != t1);
4568 }
4569 #endif
4570
4571 return op1;
4572 }
4573
4574 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4575 non-NULL, list them all into VECTOR, in a depth-first preorder
4576 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4577 blocks. */
4578
4579 static int
4580 all_blocks (tree block, tree *vector)
4581 {
4582 int n_blocks = 0;
4583
4584 while (block)
4585 {
4586 TREE_ASM_WRITTEN (block) = 0;
4587
4588 /* Record this block. */
4589 if (vector)
4590 vector[n_blocks] = block;
4591
4592 ++n_blocks;
4593
4594 /* Record the subblocks, and their subblocks... */
4595 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4596 vector ? vector + n_blocks : 0);
4597 block = BLOCK_CHAIN (block);
4598 }
4599
4600 return n_blocks;
4601 }
4602
4603 /* Return a vector containing all the blocks rooted at BLOCK. The
4604 number of elements in the vector is stored in N_BLOCKS_P. The
4605 vector is dynamically allocated; it is the caller's responsibility
4606 to call `free' on the pointer returned. */
4607
4608 static tree *
4609 get_block_vector (tree block, int *n_blocks_p)
4610 {
4611 tree *block_vector;
4612
4613 *n_blocks_p = all_blocks (block, NULL);
4614 block_vector = XNEWVEC (tree, *n_blocks_p);
4615 all_blocks (block, block_vector);
4616
4617 return block_vector;
4618 }
4619
4620 static GTY(()) int next_block_index = 2;
4621
4622 /* Set BLOCK_NUMBER for all the blocks in FN. */
4623
4624 void
4625 number_blocks (tree fn)
4626 {
4627 int i;
4628 int n_blocks;
4629 tree *block_vector;
4630
4631 /* For XCOFF debugging output, we start numbering the blocks
4632 from 1 within each function, rather than keeping a running
4633 count. */
4634 #if defined (XCOFF_DEBUGGING_INFO)
4635 if (write_symbols == XCOFF_DEBUG)
4636 next_block_index = 1;
4637 #endif
4638
4639 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4640
4641 /* The top-level BLOCK isn't numbered at all. */
4642 for (i = 1; i < n_blocks; ++i)
4643 /* We number the blocks from two. */
4644 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4645
4646 free (block_vector);
4647
4648 return;
4649 }
4650
4651 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4652
4653 DEBUG_FUNCTION tree
4654 debug_find_var_in_block_tree (tree var, tree block)
4655 {
4656 tree t;
4657
4658 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4659 if (t == var)
4660 return block;
4661
4662 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4663 {
4664 tree ret = debug_find_var_in_block_tree (var, t);
4665 if (ret)
4666 return ret;
4667 }
4668
4669 return NULL_TREE;
4670 }
4671
4672 /* Keep track of whether we're in a dummy function context. If we are,
4673 we don't want to invoke the set_current_function hook, because we'll
4674 get into trouble if the hook calls target_reinit () recursively or
4675 when the initial initialization is not yet complete. */
4676
4677 static bool in_dummy_function;
4678
4679 /* Invoke the target hook when setting cfun. Update the optimization options
4680 if the function uses different options than the default. */
4681
4682 static void
4683 invoke_set_current_function_hook (tree fndecl)
4684 {
4685 if (!in_dummy_function)
4686 {
4687 tree opts = ((fndecl)
4688 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4689 : optimization_default_node);
4690
4691 if (!opts)
4692 opts = optimization_default_node;
4693
4694 /* Change optimization options if needed. */
4695 if (optimization_current_node != opts)
4696 {
4697 optimization_current_node = opts;
4698 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4699 }
4700
4701 targetm.set_current_function (fndecl);
4702 this_fn_optabs = this_target_optabs;
4703
4704 /* Initialize global alignment variables now that the options are in place. */
4705 parse_alignment_opts ();
4706
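/* If the function was compiled with non-default optimization
   options, use optabs initialized for those options instead of the
   target's default optabs. */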
4707 if (opts != optimization_default_node)
4708 {
4709 init_tree_optimization_optabs (opts);
4710 if (TREE_OPTIMIZATION_OPTABS (opts))
4711 this_fn_optabs = (struct target_optabs *)
4712 TREE_OPTIMIZATION_OPTABS (opts);
4713 }
4714 }
4715 }
4716
4717 /* cfun should never be set directly; use this function. */
4718
4719 void
4720 set_cfun (struct function *new_cfun, bool force)
4721 {
4722 if (cfun != new_cfun || force)
4723 {
4724 cfun = new_cfun;
4725 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4726 redirect_edge_var_map_empty ();
4727 }
4728 }
4729
4730 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4731
4732 static vec<function *> cfun_stack;
4733
4734 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4735 current_function_decl accordingly. */
4736
4737 void
4738 push_cfun (struct function *new_cfun)
4739 {
4740 gcc_assert ((!cfun && !current_function_decl)
4741 || (cfun && current_function_decl == cfun->decl));
4742 cfun_stack.safe_push (cfun);
4743 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4744 set_cfun (new_cfun);
4745 }
4746
4747 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4748
4749 void
4750 pop_cfun (void)
4751 {
4752 struct function *new_cfun = cfun_stack.pop ();
4753 /* When in_dummy_function, we do have a cfun but current_function_decl is
4754 NULL. We also allow pushing NULL cfun and subsequently changing
4755 current_function_decl to something else and have both restored by
4756 pop_cfun. */
4757 gcc_checking_assert (in_dummy_function
4758 || !cfun
4759 || current_function_decl == cfun->decl);
4760 set_cfun (new_cfun);
4761 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4762 }
4763
4764 /* Return the current value of funcdef_no and increment it. */
4765 int
4766 get_next_funcdef_no (void)
4767 {
4768 return funcdef_no++;
4769 }
4770
4771 /* Return the current value of funcdef_no. */
4772 int
4773 get_last_funcdef_no (void)
4774 {
4775 return funcdef_no;
4776 }
4777
4778 /* Allocate and initialize the stack usage info data structure for the
4779 current function. */
4780 static void
4781 allocate_stack_usage_info (void)
4782 {
4783 gcc_assert (!cfun->su);
4784 cfun->su = ggc_cleared_alloc<stack_usage> ();
4785 cfun->su->static_stack_size = -1;
4786 }
4787
4788 /* Allocate a function structure for FNDECL and set its contents
4789 to the defaults. Set cfun to the newly-allocated object.
4790 Some of the helper functions invoked during initialization assume
4791 that cfun has already been set. Therefore, assign the new object
4792 directly into cfun and invoke the back end hook explicitly at the
4793 very end, rather than initializing a temporary and calling set_cfun
4794 on it.
4795
4796 ABSTRACT_P is true if this is a function that will never be seen by
4797 the middle-end. Such functions are front-end concepts (like C++
4798 function templates) that do not correspond directly to functions
4799 placed in object files. */
4800
4801 void
4802 allocate_struct_function (tree fndecl, bool abstract_p)
4803 {
4804 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4805
4806 cfun = ggc_cleared_alloc<function> ();
4807
4808 init_eh_for_function ();
4809
4810 if (init_machine_status)
4811 cfun->machine = (*init_machine_status) ();
4812
4813 #ifdef OVERRIDE_ABI_FORMAT
4814 OVERRIDE_ABI_FORMAT (fndecl);
4815 #endif
4816
4817 if (fndecl != NULL_TREE)
4818 {
4819 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4820 cfun->decl = fndecl;
4821 current_function_funcdef_no = get_next_funcdef_no ();
4822 }
4823
4824 invoke_set_current_function_hook (fndecl);
4825
4826 if (fndecl != NULL_TREE)
4827 {
4828 tree result = DECL_RESULT (fndecl);
4829
4830 if (!abstract_p)
4831 {
4832 /* Now that we have activated any function-specific attributes
4833 that might affect layout, particularly vector modes, relayout
4834 each of the parameters and the result. */
4835 relayout_decl (result);
4836 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4837 parm = DECL_CHAIN (parm))
4838 relayout_decl (parm);
4839
4840 /* Similarly relayout the function decl. */
4841 targetm.target_option.relayout_function (fndecl);
4842 }
4843
4844 if (!abstract_p && aggregate_value_p (result, fndecl))
4845 {
4846 #ifdef PCC_STATIC_STRUCT_RETURN
4847 cfun->returns_pcc_struct = 1;
4848 #endif
4849 cfun->returns_struct = 1;
4850 }
4851
4852 cfun->stdarg = stdarg_p (fntype);
4853
4854 /* Assume all registers in stdarg functions need to be saved. */
4855 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4856 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4857
4858 /* ??? This could be set on a per-function basis by the front-end
4859 but is this worth the hassle? */
4860 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4861 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4862
4863 if (!profile_flag && !flag_instrument_function_entry_exit)
4864 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4865
4866 if (flag_callgraph_info)
4867 allocate_stack_usage_info ();
4868 }
4869
4870 /* Don't enable begin stmt markers if var-tracking at assignments is
4871 disabled. The markers make little sense without the variable
4872 binding annotations among them. */
4873 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4874 && MAY_HAVE_DEBUG_MARKER_STMTS;
4875 }
4876
4877 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4878 instead of just setting it. */
4879
4880 void
4881 push_struct_function (tree fndecl, bool abstract_p)
4882 {
4883 /* When in_dummy_function we might be in the middle of a pop_cfun and
4884 current_function_decl and cfun may not match. */
4885 gcc_assert (in_dummy_function
4886 || (!cfun && !current_function_decl)
4887 || (cfun && current_function_decl == cfun->decl));
4888 cfun_stack.safe_push (cfun);
4889 current_function_decl = fndecl;
4890 allocate_struct_function (fndecl, abstract_p);
4891 }
4892
4893 /* Reset crtl and other non-struct-function variables to defaults as
4894 appropriate for emitting rtl at the start of a function. */
4895
4896 static void
4897 prepare_function_start (void)
4898 {
4899 gcc_assert (!get_last_insn ());
4900
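/* A dummy function has no decl to derive an ABI from, so use the
   target's default ABI; otherwise take the ABI from the function's
   decl. */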
4901 if (in_dummy_function)
4902 crtl->abi = &default_function_abi;
4903 else
4904 crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4905
4906 init_temp_slots ();
4907 init_emit ();
4908 init_varasm_status ();
4909 init_expr ();
4910 default_rtl_profile ();
4911
4912 if (flag_stack_usage_info && !flag_callgraph_info)
4913 allocate_stack_usage_info ();
4914
4915 cse_not_expected = ! optimize;
4916
4917 /* Caller save not needed yet. */
4918 caller_save_needed = 0;
4919
4920 /* We haven't done register allocation yet. */
4921 reg_renumber = 0;
4922
4923 /* Indicate that we have not instantiated virtual registers yet. */
4924 virtuals_instantiated = 0;
4925
4926 /* Indicate that we want CONCATs now. */
4927 generating_concat_p = 1;
4928
4929 /* Indicate we have no need of a frame pointer yet. */
4930 frame_pointer_needed = 0;
4931 }
4932
4933 void
4934 push_dummy_function (bool with_decl)
4935 {
4936 tree fn_decl, fn_type, fn_result_decl;
4937
4938 gcc_assert (!in_dummy_function);
4939 in_dummy_function = true;
4940
4941 if (with_decl)
4942 {
4943 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4944 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4945 fn_type);
4946 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4947 NULL_TREE, void_type_node);
4948 DECL_RESULT (fn_decl) = fn_result_decl;
4949 DECL_ARTIFICIAL (fn_decl) = 1;
4950 tree fn_name = get_identifier (" ");
4951 SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
4952 }
4953 else
4954 fn_decl = NULL_TREE;
4955
4956 push_struct_function (fn_decl);
4957 }
4958
4959 /* Initialize the rtl expansion mechanism so that we can do simple things
4960 like generate sequences. This is used to provide a context during global
4961 initialization of some passes. You must call expand_dummy_function_end
4962 to exit this context. */
4963
4964 void
4965 init_dummy_function_start (void)
4966 {
4967 push_dummy_function (false);
4968 prepare_function_start ();
4969 }
4970
4971 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4972 and initialize static variables for generating RTL for the statements
4973 of the function. */
4974
4975 void
4976 init_function_start (tree subr)
4977 {
4978 /* Initialize backend, if needed. */
4979 initialize_rtl ();
4980
4981 prepare_function_start ();
4982 decide_function_section (subr);
4983
4984 /* Warn if this value is an aggregate type,
4985 regardless of which calling convention we are using for it. */
4986 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4987 warning (OPT_Waggregate_return, "function returns an aggregate");
4988 }
4989
4990 /* Expand code to verify the stack_protect_guard. This is invoked at
4991 the end of a function to be protected. */
4992
4993 void
4994 stack_protect_epilogue (void)
4995 {
4996 tree guard_decl = crtl->stack_protect_guard_decl;
4997 rtx_code_label *label = gen_label_rtx ();
4998 rtx x, y;
4999 rtx_insn *seq = NULL;
5000
5001 x = expand_normal (crtl->stack_protect_guard);
5002
5003 if (targetm.have_stack_protect_combined_test () && guard_decl)
5004 {
5005 gcc_assert (DECL_P (guard_decl));
5006 y = DECL_RTL (guard_decl);
5007 /* Allow the target to compute address of Y and compare it with X without
5008 leaking Y into a register. This combined address + compare pattern
5009 allows the target to prevent spilling of any intermediate results by
5010 splitting it after register allocation. */
5011 seq = targetm.gen_stack_protect_combined_test (x, y, label);
5012 }
5013 else
5014 {
5015 if (guard_decl)
5016 y = expand_normal (guard_decl);
5017 else
5018 y = const0_rtx;
5019
5020 /* Allow the target to compare Y with X without leaking either into
5021 a register. */
5022 if (targetm.have_stack_protect_test ())
5023 seq = targetm.gen_stack_protect_test (x, y, label);
5024 }
5025
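/* Emit the target's dedicated test pattern if one was generated
   above; otherwise fall back to an explicit compare-and-branch that
   skips the failure call when the guard value is intact. */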
5026 if (seq)
5027 emit_insn (seq);
5028 else
5029 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5030
5031 /* The noreturn predictor has been moved to the tree level. The rtl-level
5032 predictors estimate this branch about 20%, which isn't enough to get
5033 things moved out of line. Since this is the only extant case of adding
5034 a noreturn function at the rtl level, it doesn't seem worth doing anything
5035 except adding the prediction by hand. */
5036 rtx_insn *tmp = get_last_insn ();
5037 if (JUMP_P (tmp))
5038 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5039
5040 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5041 free_temp_slots ();
5042 emit_label (label);
5043 }
5044
5045 /* Start the RTL for a new function, and set variables used for
5046 emitting RTL.
5047 SUBR is the FUNCTION_DECL node.
5048 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5049 the function's parameters, which must be run at any return statement. */
5050
5051 bool currently_expanding_function_start;
5052 void
5053 expand_function_start (tree subr)
5054 {
5055 currently_expanding_function_start = true;
5056
5057 /* Make sure volatile mem refs aren't considered
5058 valid operands of arithmetic insns. */
5059 init_recog_no_volatile ();
5060
5061 crtl->profile
5062 = (profile_flag
5063 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5064
5065 crtl->limit_stack
5066 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5067
5068 /* Make the label for return statements to jump to. Do not special
5069 case machines with special return instructions -- they will be
5070 handled later during jump, ifcvt, or epilogue creation. */
5071 return_label = gen_label_rtx ();
5072
5073 /* Initialize rtx used to return the value. */
5074 /* Do this before assign_parms so that we copy the struct value address
5075 before any library calls that assign parms might generate. */
5076
5077 /* Decide whether to return the value in memory or in a register. */
5078 tree res = DECL_RESULT (subr);
5079 if (aggregate_value_p (res, subr))
5080 {
5081 /* Returning something that won't go in a register. */
5082 rtx value_address = 0;
5083
5084 #ifdef PCC_STATIC_STRUCT_RETURN
5085 if (cfun->returns_pcc_struct)
5086 {
5087 int size = int_size_in_bytes (TREE_TYPE (res));
5088 value_address = assemble_static_space (size);
5089 }
5090 else
5091 #endif
5092 {
5093 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5094 /* Expect to be passed the address of a place to store the value.
5095 If it is passed as an argument, assign_parms will take care of
5096 it. */
5097 if (sv)
5098 {
5099 value_address = gen_reg_rtx (Pmode);
5100 emit_move_insn (value_address, sv);
5101 }
5102 }
5103 if (value_address)
5104 {
5105 rtx x = value_address;
5106 if (!DECL_BY_REFERENCE (res))
5107 {
5108 x = gen_rtx_MEM (DECL_MODE (res), x);
5109 set_mem_attributes (x, res, 1);
5110 }
5111 set_parm_rtl (res, x);
5112 }
5113 }
5114 else if (DECL_MODE (res) == VOIDmode)
5115 /* If return mode is void, this decl rtl should not be used. */
5116 set_parm_rtl (res, NULL_RTX);
5117 else
5118 {
5119 /* Compute the return values into a pseudo reg, which we will copy
5120 into the true return register after the cleanups are done. */
5121 tree return_type = TREE_TYPE (res);
5122
5123 /* If we may coalesce this result, make sure it has the expected mode
5124 in case it was promoted. But we need not bother about BLKmode. */
5125 machine_mode promoted_mode
5126 = flag_tree_coalesce_vars && is_gimple_reg (res)
5127 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5128 : BLKmode;
5129
5130 if (promoted_mode != BLKmode)
5131 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5132 else if (TYPE_MODE (return_type) != BLKmode
5133 && targetm.calls.return_in_msb (return_type))
5134 /* expand_function_end will insert the appropriate padding in
5135 this case. Use the return value's natural (unpadded) mode
5136 within the function proper. */
5137 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5138 else
5139 {
5140 /* In order to figure out what mode to use for the pseudo, we
5141 figure out what the mode of the eventual return register will
5142 actually be, and use that. */
5143 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5144
5145 /* Structures that are returned in registers are not
5146 aggregate_value_p, so we may see a PARALLEL or a REG. */
5147 if (REG_P (hard_reg))
5148 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5149 else
5150 {
5151 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5152 set_parm_rtl (res, gen_group_rtx (hard_reg));
5153 }
5154 }
5155
5156 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5157 result to the real return register(s). */
5158 DECL_REGISTER (res) = 1;
5159 }
5160
5161 /* Initialize rtx for parameters and local variables.
5162 In some cases this requires emitting insns. */
5163 assign_parms (subr);
5164
5165 /* If function gets a static chain arg, store it. */
5166 if (cfun->static_chain_decl)
5167 {
5168 tree parm = cfun->static_chain_decl;
5169 rtx local, chain;
5170 rtx_insn *insn;
5171 int unsignedp;
5172
5173 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5174 chain = targetm.calls.static_chain (current_function_decl, true);
5175
5176 set_decl_incoming_rtl (parm, chain, false);
5177 set_parm_rtl (parm, local);
5178 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5179
5180 if (GET_MODE (local) != GET_MODE (chain))
5181 {
5182 convert_move (local, chain, unsignedp);
5183 insn = get_last_insn ();
5184 }
5185 else
5186 insn = emit_move_insn (local, chain);
5187
5188 /* Mark the register as eliminable, similar to parameters. */
5189 if (MEM_P (chain)
5190 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5191 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5192
5193 /* If we aren't optimizing, save the static chain onto the stack. */
5194 if (!optimize)
5195 {
5196 tree saved_static_chain_decl
5197 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5198 DECL_NAME (parm), TREE_TYPE (parm));
5199 rtx saved_static_chain_rtx
5200 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5201 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5202 emit_move_insn (saved_static_chain_rtx, chain);
5203 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5204 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5205 }
5206 }
5207
5208 /* The following was moved from init_function_start.
5209 The move was supposed to make sdb output more accurate. */
5210 /* Indicate the beginning of the function body,
5211 as opposed to parm setup. */
5212 emit_note (NOTE_INSN_FUNCTION_BEG);
5213
5214 gcc_assert (NOTE_P (get_last_insn ()));
5215
5216 parm_birth_insn = get_last_insn ();
5217
5218 /* If the function receives a non-local goto, then store the
5219 bits we need to restore the frame pointer. */
5220 if (cfun->nonlocal_goto_save_area)
5221 {
5222 tree t_save;
5223 rtx r_save;
5224
5225 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5226 gcc_assert (DECL_RTL_SET_P (var));
5227
5228 t_save = build4 (ARRAY_REF,
5229 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5230 cfun->nonlocal_goto_save_area,
5231 integer_zero_node, NULL_TREE, NULL_TREE);
5232 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5233 gcc_assert (GET_MODE (r_save) == Pmode);
5234
5235 emit_move_insn (r_save, hard_frame_pointer_rtx);
5236 update_nonlocal_goto_save_area ();
5237 }
5238
5239 if (crtl->profile)
5240 {
5241 #ifdef PROFILE_HOOK
5242 PROFILE_HOOK (current_function_funcdef_no);
5243 #endif
5244 }
5245
5246 /* If we are doing generic stack checking, the probe should go here. */
5247 if (flag_stack_check == GENERIC_STACK_CHECK)
5248 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5249
5250 currently_expanding_function_start = false;
5251 }
5252
5253 void
5254 pop_dummy_function (void)
5255 {
5256 pop_cfun ();
5257 in_dummy_function = false;
5258 }
5259
5260 /* Undo the effects of init_dummy_function_start. */
5261 void
5262 expand_dummy_function_end (void)
5263 {
5264 gcc_assert (in_dummy_function);
5265
5266 /* End any sequences that failed to be closed due to syntax errors. */
5267 while (in_sequence_p ())
5268 end_sequence ();
5269
5270 /* Outside function body, can't compute type's actual size
5271 until next function's body starts. */
5272
5273 free_after_parsing (cfun);
5274 free_after_compilation (cfun);
5275 pop_dummy_function ();
5276 }
5277
5278 /* Helper for diddle_return_value. */
5279
5280 void
5281 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5282 {
5283 if (! outgoing)
5284 return;
5285
5286 if (REG_P (outgoing))
5287 (*doit) (outgoing, arg);
5288 else if (GET_CODE (outgoing) == PARALLEL)
5289 {
5290 int i;
5291
5292 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5293 {
5294 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5295
5296 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5297 (*doit) (x, arg);
5298 }
5299 }
5300 }
5301
5302 /* Call DOIT for each hard register used as a return value from
5303 the current function. */
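/* For example, when the return value lives in a single hard register,
   DOIT is invoked once on that REG; when crtl->return_rtx is a PARALLEL
   (a value split across several registers), DOIT is invoked on each
   constituent hard register, as diddle_return_value_1 above shows.  */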
5304
5305 void
5306 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5307 {
5308 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5309 }
5310
5311 static void
5312 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5313 {
5314 emit_clobber (reg);
5315 }
5316
5317 void
5318 clobber_return_register (void)
5319 {
5320 diddle_return_value (do_clobber_return_reg, NULL);
5321
5322   /* In case we use a pseudo to return the value, clobber it too. */
5323 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5324 {
5325 tree decl_result = DECL_RESULT (current_function_decl);
5326 rtx decl_rtl = DECL_RTL (decl_result);
5327 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5328 {
5329 do_clobber_return_reg (decl_rtl, NULL);
5330 }
5331 }
5332 }
5333
5334 static void
5335 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5336 {
5337 emit_use (reg);
5338 }
5339
5340 static void
5341 use_return_register (void)
5342 {
5343 diddle_return_value (do_use_return_reg, NULL);
5344 }
5345
5346 /* Generate RTL for the end of the current function. */
5347
5348 void
5349 expand_function_end (void)
5350 {
5351 /* If arg_pointer_save_area was referenced only from a nested
5352 function, we will not have initialized it yet. Do that now. */
5353 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5354 get_arg_pointer_save_area ();
5355
5356 /* If we are doing generic stack checking and this function makes calls,
5357 do a stack probe at the start of the function to ensure we have enough
5358 space for another stack frame. */
5359 if (flag_stack_check == GENERIC_STACK_CHECK)
5360 {
5361 rtx_insn *insn, *seq;
5362
5363 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5364 if (CALL_P (insn))
5365 {
5366 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5367 start_sequence ();
5368 if (STACK_CHECK_MOVING_SP)
5369 anti_adjust_stack_and_probe (max_frame_size, true);
5370 else
5371 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5372 seq = get_insns ();
5373 end_sequence ();
5374 set_insn_locations (seq, prologue_location);
5375 emit_insn_before (seq, stack_check_probe_note);
5376 break;
5377 }
5378 }
5379
5380 /* End any sequences that failed to be closed due to syntax errors. */
5381 while (in_sequence_p ())
5382 end_sequence ();
5383
5384 clear_pending_stack_adjust ();
5385 do_pending_stack_adjust ();
5386
5387 /* Output a linenumber for the end of the function.
5388 SDB depended on this. */
5389 set_curr_insn_location (input_location);
5390
5391 /* Before the return label (if any), clobber the return
5392 registers so that they are not propagated live to the rest of
5393 the function. This can only happen with functions that drop
5394 through; if there had been a return statement, there would
5395 have either been a return rtx, or a jump to the return label.
5396
5397 We delay actual code generation after the current_function_value_rtx
5398 is computed. */
5399 rtx_insn *clobber_after = get_last_insn ();
5400
5401 /* Output the label for the actual return from the function. */
5402 emit_label (return_label);
5403
5404 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5405 {
5406 /* Let except.c know where it should emit the call to unregister
5407 the function context for sjlj exceptions. */
5408 if (flag_exceptions)
5409 sjlj_emit_function_exit_after (get_last_insn ());
5410 }
5411
5412 /* If this is an implementation of throw, do what's necessary to
5413 communicate between __builtin_eh_return and the epilogue. */
5414 expand_eh_return ();
5415
5416 /* If stack protection is enabled for this function, check the guard. */
5417 if (crtl->stack_protect_guard
5418 && targetm.stack_protect_runtime_enabled_p ()
5419 && naked_return_label == NULL_RTX)
5420 stack_protect_epilogue ();
5421
5422 /* If scalar return value was computed in a pseudo-reg, or was a named
5423 return value that got dumped to the stack, copy that to the hard
5424 return register. */
5425 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5426 {
5427 tree decl_result = DECL_RESULT (current_function_decl);
5428 rtx decl_rtl = DECL_RTL (decl_result);
5429
5430 if (REG_P (decl_rtl)
5431 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5432 : DECL_REGISTER (decl_result))
5433 {
5434 rtx real_decl_rtl = crtl->return_rtx;
5435 complex_mode cmode;
5436
5437 /* This should be set in assign_parms. */
5438 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5439
5440 /* If this is a BLKmode structure being returned in registers,
5441 then use the mode computed in expand_return. Note that if
5442 decl_rtl is memory, then its mode may have been changed,
5443 	     but that of crtl->return_rtx has not. */
5444 if (GET_MODE (real_decl_rtl) == BLKmode)
5445 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5446
5447 /* If a non-BLKmode return value should be padded at the least
5448 significant end of the register, shift it left by the appropriate
5449 amount. BLKmode results are handled using the group load/store
5450 machinery. */
5451 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5452 && REG_P (real_decl_rtl)
5453 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5454 {
5455 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5456 REGNO (real_decl_rtl)),
5457 decl_rtl);
5458 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5459 }
5460 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5461 {
5462 /* If expand_function_start has created a PARALLEL for decl_rtl,
5463 move the result to the real return registers. Otherwise, do
5464 a group load from decl_rtl for a named return. */
5465 if (GET_CODE (decl_rtl) == PARALLEL)
5466 emit_group_move (real_decl_rtl, decl_rtl);
5467 else
5468 emit_group_load (real_decl_rtl, decl_rtl,
5469 TREE_TYPE (decl_result),
5470 int_size_in_bytes (TREE_TYPE (decl_result)));
5471 }
5472 /* In the case of complex integer modes smaller than a word, we'll
5473 need to generate some non-trivial bitfield insertions. Do that
5474 on a pseudo and not the hard register. */
5475 else if (GET_CODE (decl_rtl) == CONCAT
5476 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5477 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5478 {
5479 int old_generating_concat_p;
5480 rtx tmp;
5481
5482 old_generating_concat_p = generating_concat_p;
5483 generating_concat_p = 0;
5484 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5485 generating_concat_p = old_generating_concat_p;
5486
5487 emit_move_insn (tmp, decl_rtl);
5488 emit_move_insn (real_decl_rtl, tmp);
5489 }
5490 /* If a named return value dumped decl_return to memory, then
5491 we may need to re-do the PROMOTE_MODE signed/unsigned
5492 extension. */
5493 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5494 {
5495 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5496 promote_function_mode (TREE_TYPE (decl_result),
5497 GET_MODE (decl_rtl), &unsignedp,
5498 TREE_TYPE (current_function_decl), 1);
5499
5500 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5501 }
5502 else
5503 emit_move_insn (real_decl_rtl, decl_rtl);
5504 }
5505 }
5506
5507 /* If returning a structure, arrange to return the address of the value
5508 in a place where debuggers expect to find it.
5509
5510 If returning a structure PCC style,
5511 the caller also depends on this value.
5512 And cfun->returns_pcc_struct is not necessarily set. */
5513 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5514 && !targetm.calls.omit_struct_return_reg)
5515 {
5516 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5517 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5518 rtx outgoing;
5519
5520 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5521 type = TREE_TYPE (type);
5522 else
5523 value_address = XEXP (value_address, 0);
5524
5525 outgoing = targetm.calls.function_value (build_pointer_type (type),
5526 current_function_decl, true);
5527
5528 /* Mark this as a function return value so integrate will delete the
5529 assignment and USE below when inlining this function. */
5530 REG_FUNCTION_VALUE_P (outgoing) = 1;
5531
5532 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5533 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5534 value_address = convert_memory_address (mode, value_address);
5535
5536 emit_move_insn (outgoing, value_address);
5537
5538 /* Show return register used to hold result (in this case the address
5539 	 of the result). */
5540 crtl->return_rtx = outgoing;
5541 }
5542
5543   /* Emit the actual code to clobber the return register. Don't emit
5544      it if clobber_after is a barrier; in that case the previous basic
5545      block certainly doesn't fall thru into the exit block. */
5546 if (!BARRIER_P (clobber_after))
5547 {
5548 start_sequence ();
5549 clobber_return_register ();
5550 rtx_insn *seq = get_insns ();
5551 end_sequence ();
5552
5553 emit_insn_after (seq, clobber_after);
5554 }
5555
5556 /* Output the label for the naked return from the function. */
5557 if (naked_return_label)
5558 emit_label (naked_return_label);
5559
5560 /* @@@ This is a kludge. We want to ensure that instructions that
5561 may trap are not moved into the epilogue by scheduling, because
5562 we don't always emit unwind information for the epilogue. */
5563 if (cfun->can_throw_non_call_exceptions
5564 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5565 emit_insn (gen_blockage ());
5566
5567 /* If stack protection is enabled for this function, check the guard. */
5568 if (crtl->stack_protect_guard
5569 && targetm.stack_protect_runtime_enabled_p ()
5570 && naked_return_label)
5571 stack_protect_epilogue ();
5572
5573 /* If we had calls to alloca, and this machine needs
5574 an accurate stack pointer to exit the function,
5575 insert some code to save and restore the stack pointer. */
5576 if (! EXIT_IGNORE_STACK
5577 && cfun->calls_alloca)
5578 {
5579 rtx tem = 0;
5580
5581 start_sequence ();
5582 emit_stack_save (SAVE_FUNCTION, &tem);
5583 rtx_insn *seq = get_insns ();
5584 end_sequence ();
5585 emit_insn_before (seq, parm_birth_insn);
5586
5587 emit_stack_restore (SAVE_FUNCTION, tem);
5588 }
5589
5590 /* ??? This should no longer be necessary since stupid is no longer with
5591 us, but there are some parts of the compiler (eg reload_combine, and
5592 sh mach_dep_reorg) that still try and compute their own lifetime info
5593 instead of using the general framework. */
5594 use_return_register ();
5595 }
5596
5597 rtx
5598 get_arg_pointer_save_area (void)
5599 {
5600 rtx ret = arg_pointer_save_area;
5601
5602 if (! ret)
5603 {
5604 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5605 arg_pointer_save_area = ret;
5606 }
5607
5608 if (! crtl->arg_pointer_save_area_init)
5609 {
5610 /* Save the arg pointer at the beginning of the function. The
5611 generated stack slot may not be a valid memory address, so we
5612 have to check it and fix it if necessary. */
5613 start_sequence ();
5614 emit_move_insn (validize_mem (copy_rtx (ret)),
5615 crtl->args.internal_arg_pointer);
5616 rtx_insn *seq = get_insns ();
5617 end_sequence ();
5618
5619 push_topmost_sequence ();
5620 emit_insn_after (seq, entry_of_function ());
5621 pop_topmost_sequence ();
5622
5623 crtl->arg_pointer_save_area_init = true;
5624 }
5625
5626 return ret;
5627 }
5628
5629
5630 /* If debugging dumps are requested, dump information about how the
5631 target handled -fstack-check=clash for the prologue.
5632
5633 PROBES describes what if any probes were emitted.
5634
5635 RESIDUALS indicates if the prologue had any residual allocation
5636 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5637
5638 void
5639 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5640 {
5641 if (!dump_file)
5642 return;
5643
5644 switch (probes)
5645 {
5646 case NO_PROBE_NO_FRAME:
5647 fprintf (dump_file,
5648 "Stack clash no probe no stack adjustment in prologue.\n");
5649 break;
5650 case NO_PROBE_SMALL_FRAME:
5651 fprintf (dump_file,
5652 "Stack clash no probe small stack adjustment in prologue.\n");
5653 break;
5654 case PROBE_INLINE:
5655 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5656 break;
5657 case PROBE_LOOP:
5658 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5659 break;
5660 }
5661
5662 if (residuals)
5663 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5664 else
5665 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5666
5667 if (frame_pointer_needed)
5668 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5669 else
5670 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5671
5672 if (TREE_THIS_VOLATILE (cfun->decl))
5673 fprintf (dump_file,
5674 "Stack clash noreturn prologue, assuming no implicit"
5675 " probes in caller.\n");
5676 else
5677 fprintf (dump_file,
5678 "Stack clash not noreturn prologue.\n");
5679 }
5680
5681 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5682 for the first time. */
5683
5684 static void
5685 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5686 {
5687 rtx_insn *tmp;
5688 hash_table<insn_cache_hasher> *hash = *hashp;
5689
5690 if (hash == NULL)
5691 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5692
5693 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5694 {
5695 rtx *slot = hash->find_slot (tmp, INSERT);
5696 gcc_assert (*slot == NULL);
5697 *slot = tmp;
5698 }
5699 }
5700
5701 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5702 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5703 insn, then record COPY as well. */
5704
5705 void
5706 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5707 {
5708 hash_table<insn_cache_hasher> *hash;
5709 rtx *slot;
5710
5711 hash = epilogue_insn_hash;
5712 if (!hash || !hash->find (insn))
5713 {
5714 hash = prologue_insn_hash;
5715 if (!hash || !hash->find (insn))
5716 return;
5717 }
5718
5719 slot = hash->find_slot (copy, INSERT);
5720 gcc_assert (*slot == NULL);
5721 *slot = copy;
5722 }
5723
5724 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5725 we can be running after reorg, SEQUENCE rtl is possible. */
5726
5727 static bool
5728 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5729 {
5730 if (hash == NULL)
5731 return false;
5732
5733 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5734 {
5735 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5736 int i;
5737 for (i = seq->len () - 1; i >= 0; i--)
5738 if (hash->find (seq->element (i)))
5739 return true;
5740 return false;
5741 }
5742
5743 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5744 }
5745
5746 int
5747 prologue_contains (const rtx_insn *insn)
5748 {
5749 return contains (insn, prologue_insn_hash);
5750 }
5751
5752 int
5753 epilogue_contains (const rtx_insn *insn)
5754 {
5755 return contains (insn, epilogue_insn_hash);
5756 }
5757
5758 int
5759 prologue_epilogue_contains (const rtx_insn *insn)
5760 {
5761 if (contains (insn, prologue_insn_hash))
5762 return 1;
5763 if (contains (insn, epilogue_insn_hash))
5764 return 1;
5765 return 0;
5766 }
5767
5768 void
5769 record_prologue_seq (rtx_insn *seq)
5770 {
5771 record_insns (seq, NULL, &prologue_insn_hash);
5772 }
5773
5774 void
5775 record_epilogue_seq (rtx_insn *seq)
5776 {
5777 record_insns (seq, NULL, &epilogue_insn_hash);
5778 }
5779
5780 /* Set JUMP_LABEL for a return insn. */
5781
5782 void
5783 set_return_jump_label (rtx_insn *returnjump)
5784 {
5785 rtx pat = PATTERN (returnjump);
5786 if (GET_CODE (pat) == PARALLEL)
5787 pat = XVECEXP (pat, 0, 0);
5788 if (ANY_RETURN_P (pat))
5789 JUMP_LABEL (returnjump) = pat;
5790 else
5791 JUMP_LABEL (returnjump) = ret_rtx;
5792 }
5793
5794 /* Return a sequence to be used as the split prologue for the current
5795 function, or NULL. */
5796
5797 static rtx_insn *
5798 make_split_prologue_seq (void)
5799 {
5800 if (!flag_split_stack
5801 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5802 return NULL;
5803
5804 start_sequence ();
5805 emit_insn (targetm.gen_split_stack_prologue ());
5806 rtx_insn *seq = get_insns ();
5807 end_sequence ();
5808
5809 record_insns (seq, NULL, &prologue_insn_hash);
5810 set_insn_locations (seq, prologue_location);
5811
5812 return seq;
5813 }
5814
5815 /* Return a sequence to be used as the prologue for the current function,
5816 or NULL. */
5817
5818 static rtx_insn *
5819 make_prologue_seq (void)
5820 {
5821 if (!targetm.have_prologue ())
5822 return NULL;
5823
5824 start_sequence ();
5825 rtx_insn *seq = targetm.gen_prologue ();
5826 emit_insn (seq);
5827
5828 /* Insert an explicit USE for the frame pointer
5829 if the profiling is on and the frame pointer is required. */
5830 if (crtl->profile && frame_pointer_needed)
5831 emit_use (hard_frame_pointer_rtx);
5832
5833 /* Retain a map of the prologue insns. */
5834 record_insns (seq, NULL, &prologue_insn_hash);
5835 emit_note (NOTE_INSN_PROLOGUE_END);
5836
5837 /* Ensure that instructions are not moved into the prologue when
5838 profiling is on. The call to the profiling routine can be
5839 emitted within the live range of a call-clobbered register. */
5840 if (!targetm.profile_before_prologue () && crtl->profile)
5841 emit_insn (gen_blockage ());
5842
5843 seq = get_insns ();
5844 end_sequence ();
5845 set_insn_locations (seq, prologue_location);
5846
5847 return seq;
5848 }
5849
5850 /* Return a sequence to be used as the epilogue for the current function,
5851 or NULL. */
5852
5853 static rtx_insn *
5854 make_epilogue_seq (void)
5855 {
5856 if (!targetm.have_epilogue ())
5857 return NULL;
5858
5859 start_sequence ();
5860 emit_note (NOTE_INSN_EPILOGUE_BEG);
5861 rtx_insn *seq = targetm.gen_epilogue ();
5862 if (seq)
5863 emit_jump_insn (seq);
5864
5865 /* Retain a map of the epilogue insns. */
5866 record_insns (seq, NULL, &epilogue_insn_hash);
5867 set_insn_locations (seq, epilogue_location);
5868
5869 seq = get_insns ();
5870 rtx_insn *returnjump = get_last_insn ();
5871 end_sequence ();
5872
5873 if (JUMP_P (returnjump))
5874 set_return_jump_label (returnjump);
5875
5876 return seq;
5877 }
5878
5879
5880 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5881 this into place with notes indicating where the prologue ends and where
5882 the epilogue begins. Update the basic block information when possible.
5883
5884 Notes on epilogue placement:
5885 There are several kinds of edges to the exit block:
5886 * a single fallthru edge from LAST_BB
5887 * possibly, edges from blocks containing sibcalls
5888 * possibly, fake edges from infinite loops
5889
5890 The epilogue is always emitted on the fallthru edge from the last basic
5891 block in the function, LAST_BB, into the exit block.
5892
5893 If LAST_BB is empty except for a label, it is the target of every
5894 other basic block in the function that ends in a return. If a
5895 target has a return or simple_return pattern (possibly with
5896 conditional variants), these basic blocks can be changed so that a
5897 return insn is emitted into them, and their target is adjusted to
5898 the real exit block.
5899
5900 Notes on shrink wrapping: We implement a fairly conservative
5901 version of shrink-wrapping rather than the textbook one. We only
5902 generate a single prologue and a single epilogue. This is
5903 sufficient to catch a number of interesting cases involving early
5904 exits.
5905
5906 First, we identify the blocks that require the prologue to occur before
5907 them. These are the ones that modify a call-saved register, or reference
5908 any of the stack or frame pointer registers. To simplify things, we then
5909 mark everything reachable from these blocks as also requiring a prologue.
5910 This takes care of loops automatically, and avoids the need to examine
5911 whether MEMs reference the frame, since it is sufficient to check for
5912 occurrences of the stack or frame pointer.
5913
5914 We then compute the set of blocks for which the need for a prologue
5915 is anticipatable (borrowing terminology from the shrink-wrapping
5916 description in Muchnick's book). These are the blocks which either
5917 require a prologue themselves, or those that have only successors
5918 where the prologue is anticipatable. The prologue needs to be
5919 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5920 is not. For the moment, we ensure that only one such edge exists.
5921
5922 The epilogue is placed as described above, but we make a
5923 distinction between inserting return and simple_return patterns
5924 when modifying other blocks that end in a return. Blocks that end
5925 in a sibcall omit the sibcall_epilogue if the block is not in
5926 ANTIC. */
5927
5928 void
5929 thread_prologue_and_epilogue_insns (void)
5930 {
5931 df_analyze ();
5932
5933 /* Can't deal with multiple successors of the entry block at the
5934 moment. Function should always have at least one entry
5935 point. */
5936 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5937
5938 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5939 edge orig_entry_edge = entry_edge;
5940
5941 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5942 rtx_insn *prologue_seq = make_prologue_seq ();
5943 rtx_insn *epilogue_seq = make_epilogue_seq ();
5944
5945 /* Try to perform a kind of shrink-wrapping, making sure the
5946 prologue/epilogue is emitted only around those parts of the
5947 function that require it. */
5948 try_shrink_wrapping (&entry_edge, prologue_seq);
5949
5950 /* If the target can handle splitting the prologue/epilogue into separate
5951 components, try to shrink-wrap these components separately. */
5952 try_shrink_wrapping_separate (entry_edge->dest);
5953
5954   /* If that did anything for any component we now need to generate the
5955 "main" prologue again. Because some targets require some of these
5956 to be called in a specific order (i386 requires the split prologue
5957 to be first, for example), we create all three sequences again here.
5958 If this does not work for some target, that target should not enable
5959 separate shrink-wrapping. */
5960 if (crtl->shrink_wrapped_separate)
5961 {
5962 split_prologue_seq = make_split_prologue_seq ();
5963 prologue_seq = make_prologue_seq ();
5964 epilogue_seq = make_epilogue_seq ();
5965 }
5966
5967 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5968
5969 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5970 this marker for the splits of EH_RETURN patterns, and nothing else
5971 uses the flag in the meantime. */
5972 epilogue_completed = 1;
5973
5974 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5975 some targets, these get split to a special version of the epilogue
5976 code. In order to be able to properly annotate these with unwind
5977 info, try to split them now. If we get a valid split, drop an
5978 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5979 edge e;
5980 edge_iterator ei;
5981 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5982 {
5983 rtx_insn *prev, *last, *trial;
5984
5985 if (e->flags & EDGE_FALLTHRU)
5986 continue;
5987 last = BB_END (e->src);
5988 if (!eh_returnjump_p (last))
5989 continue;
5990
5991 prev = PREV_INSN (last);
5992 trial = try_split (PATTERN (last), last, 1);
5993 if (trial == last)
5994 continue;
5995
5996 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5997 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5998 }
5999
6000 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6001
6002 if (exit_fallthru_edge)
6003 {
6004 if (epilogue_seq)
6005 {
6006 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6007 commit_edge_insertions ();
6008
6009 /* The epilogue insns we inserted may cause the exit edge to no longer
6010 be fallthru. */
6011 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6012 {
6013 if (((e->flags & EDGE_FALLTHRU) != 0)
6014 && returnjump_p (BB_END (e->src)))
6015 e->flags &= ~EDGE_FALLTHRU;
6016 }
6017 }
6018 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6019 {
6020 /* We have a fall-through edge to the exit block, the source is not
6021 at the end of the function, and there will be an assembler epilogue
6022 at the end of the function.
6023 We can't use force_nonfallthru here, because that would try to
6024 use return. Inserting a jump 'by hand' is extremely messy, so
6025 we take advantage of cfg_layout_finalize using
6026 fixup_fallthru_exit_predecessor. */
6027 cfg_layout_initialize (0);
6028 basic_block cur_bb;
6029 FOR_EACH_BB_FN (cur_bb, cfun)
6030 if (cur_bb->index >= NUM_FIXED_BLOCKS
6031 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6032 cur_bb->aux = cur_bb->next_bb;
6033 cfg_layout_finalize ();
6034 }
6035 }
6036
6037 /* Insert the prologue. */
6038
6039 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6040
6041 if (split_prologue_seq || prologue_seq)
6042 {
6043 rtx_insn *split_prologue_insn = split_prologue_seq;
6044 if (split_prologue_seq)
6045 {
6046 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6047 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6048 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6049 }
6050
6051 rtx_insn *prologue_insn = prologue_seq;
6052 if (prologue_seq)
6053 {
6054 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6055 prologue_insn = NEXT_INSN (prologue_insn);
6056 insert_insn_on_edge (prologue_seq, entry_edge);
6057 }
6058
6059 commit_edge_insertions ();
6060
6061 /* Look for basic blocks within the prologue insns. */
6062 if (split_prologue_insn
6063 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6064 split_prologue_insn = NULL;
6065 if (prologue_insn
6066 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6067 prologue_insn = NULL;
6068 if (split_prologue_insn || prologue_insn)
6069 {
6070 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6071 bitmap_clear (blocks);
6072 if (split_prologue_insn)
6073 bitmap_set_bit (blocks,
6074 BLOCK_FOR_INSN (split_prologue_insn)->index);
6075 if (prologue_insn)
6076 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6077 find_many_sub_basic_blocks (blocks);
6078 }
6079 }
6080
6081 default_rtl_profile ();
6082
6083 /* Emit sibling epilogues before any sibling call sites. */
6084 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6085 (e = ei_safe_edge (ei));
6086 ei_next (&ei))
6087 {
6088 /* Skip those already handled, the ones that run without prologue. */
6089 if (e->flags & EDGE_IGNORE)
6090 {
6091 e->flags &= ~EDGE_IGNORE;
6092 continue;
6093 }
6094
6095 rtx_insn *insn = BB_END (e->src);
6096
6097 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6098 continue;
6099
6100 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6101 {
6102 start_sequence ();
6103 emit_note (NOTE_INSN_EPILOGUE_BEG);
6104 emit_insn (ep_seq);
6105 rtx_insn *seq = get_insns ();
6106 end_sequence ();
6107
6108 /* Retain a map of the epilogue insns. Used in life analysis to
6109 avoid getting rid of sibcall epilogue insns. Do this before we
6110 actually emit the sequence. */
6111 record_insns (seq, NULL, &epilogue_insn_hash);
6112 set_insn_locations (seq, epilogue_location);
6113
6114 emit_insn_before (seq, insn);
6115 }
6116 }
6117
6118 if (epilogue_seq)
6119 {
6120 rtx_insn *insn, *next;
6121
6122 /* Similarly, move any line notes that appear after the epilogue.
6123 There is no need, however, to be quite so anal about the existence
6124 of such a note. Also possibly move
6125 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6126 info generation. */
6127 for (insn = epilogue_seq; insn; insn = next)
6128 {
6129 next = NEXT_INSN (insn);
6130 if (NOTE_P (insn)
6131 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6132 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6133 }
6134 }
6135
6136 /* Threading the prologue and epilogue changes the artificial refs
6137 in the entry and exit blocks. */
6138 epilogue_completed = 1;
6139 df_update_entry_exit_and_calls ();
6140 }
6141
6142 /* Reposition the prologue-end and epilogue-begin notes after
6143 instruction scheduling. */
6144
6145 void
6146 reposition_prologue_and_epilogue_notes (void)
6147 {
6148 if (!targetm.have_prologue ()
6149 && !targetm.have_epilogue ()
6150 && !targetm.have_sibcall_epilogue ())
6151 return;
6152
6153 /* Since the hash table is created on demand, the fact that it is
6154 non-null is a signal that it is non-empty. */
6155 if (prologue_insn_hash != NULL)
6156 {
6157 size_t len = prologue_insn_hash->elements ();
6158 rtx_insn *insn, *last = NULL, *note = NULL;
6159
6160 /* Scan from the beginning until we reach the last prologue insn. */
6161 /* ??? While we do have the CFG intact, there are two problems:
6162 (1) The prologue can contain loops (typically probing the stack),
6163 which means that the end of the prologue isn't in the first bb.
6164 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6165 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6166 {
6167 if (NOTE_P (insn))
6168 {
6169 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6170 note = insn;
6171 }
6172 else if (contains (insn, prologue_insn_hash))
6173 {
6174 last = insn;
6175 if (--len == 0)
6176 break;
6177 }
6178 }
6179
6180 if (last)
6181 {
6182 if (note == NULL)
6183 {
6184 /* Scan forward looking for the PROLOGUE_END note. It should
6185 be right at the beginning of the block, possibly with other
6186 insn notes that got moved there. */
6187 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6188 {
6189 if (NOTE_P (note)
6190 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6191 break;
6192 }
6193 }
6194
6195 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6196 if (LABEL_P (last))
6197 last = NEXT_INSN (last);
6198 reorder_insns (note, note, last);
6199 }
6200 }
6201
6202 if (epilogue_insn_hash != NULL)
6203 {
6204 edge_iterator ei;
6205 edge e;
6206
6207 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6208 {
6209 rtx_insn *insn, *first = NULL, *note = NULL;
6210 basic_block bb = e->src;
6211
6212 /* Scan from the beginning until we reach the first epilogue insn. */
6213 FOR_BB_INSNS (bb, insn)
6214 {
6215 if (NOTE_P (insn))
6216 {
6217 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6218 {
6219 note = insn;
6220 if (first != NULL)
6221 break;
6222 }
6223 }
6224 else if (first == NULL && contains (insn, epilogue_insn_hash))
6225 {
6226 first = insn;
6227 if (note != NULL)
6228 break;
6229 }
6230 }
6231
6232 if (note)
6233 {
6234 /* If the function has a single basic block, and no real
6235 epilogue insns (e.g. sibcall with no cleanup), the
6236 epilogue note can get scheduled before the prologue
6237 note. If we have frame related prologue insns, having
6238 them scanned during the epilogue will result in a crash.
6239 In this case re-order the epilogue note to just before
6240 the last insn in the block. */
6241 if (first == NULL)
6242 first = BB_END (bb);
6243
6244 if (PREV_INSN (first) != note)
6245 reorder_insns (note, note, PREV_INSN (first));
6246 }
6247 }
6248 }
6249 }
6250
6251 /* Returns the name of function declared by FNDECL. */
6252 const char *
6253 fndecl_name (tree fndecl)
6254 {
6255 if (fndecl == NULL)
6256 return "(nofn)";
6257 return lang_hooks.decl_printable_name (fndecl, 1);
6258 }
6259
6260 /* Returns the name of function FN. */
6261 const char *
6262 function_name (struct function *fn)
6263 {
6264 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6265 return fndecl_name (fndecl);
6266 }
6267
6268 /* Returns the name of the current function. */
6269 const char *
6270 current_function_name (void)
6271 {
6272 return function_name (cfun);
6273 }
6274
6275
6276 static unsigned int
6277 rest_of_handle_check_leaf_regs (void)
6278 {
6279 #ifdef LEAF_REGISTERS
6280 crtl->uses_only_leaf_regs
6281 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6282 #endif
6283 return 0;
6284 }
6285
6286 /* Insert a TYPE into the used types hash table of CFUN. */
6287
6288 static void
6289 used_types_insert_helper (tree type, struct function *func)
6290 {
6291 if (type != NULL && func != NULL)
6292 {
6293 if (func->used_types_hash == NULL)
6294 func->used_types_hash = hash_set<tree>::create_ggc (37);
6295
6296 func->used_types_hash->add (type);
6297 }
6298 }
6299
6300 /* Given a type, insert it into the used hash table in cfun. */
6301 void
6302 used_types_insert (tree t)
6303 {
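  /* First strip off unnamed pointer and array derivations so that, e.g.,
     for a variable of type "struct S **" we record "struct S" itself;
     a derived type that carries its own TYPE_NAME (say a typedef'd
     pointer type) stops the walk below and is recorded instead.  */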
6304 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6305 if (TYPE_NAME (t))
6306 break;
6307 else
6308 t = TREE_TYPE (t);
6309 if (TREE_CODE (t) == ERROR_MARK)
6310 return;
6311 if (TYPE_NAME (t) == NULL_TREE
6312 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6313 t = TYPE_MAIN_VARIANT (t);
6314 if (debug_info_level > DINFO_LEVEL_NONE)
6315 {
6316 if (cfun)
6317 used_types_insert_helper (t, cfun);
6318 else
6319 {
6320 /* So this might be a type referenced by a global variable.
6321 Record that type so that we can later decide to emit its
6322 debug information. */
6323 vec_safe_push (types_used_by_cur_var_decl, t);
6324 }
6325 }
6326 }
6327
6328 /* Helper to Hash a struct types_used_by_vars_entry. */
6329
6330 static hashval_t
6331 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6332 {
6333 gcc_assert (entry && entry->var_decl && entry->type);
6334
6335 return iterative_hash_object (entry->type,
6336 iterative_hash_object (entry->var_decl, 0));
6337 }
6338
6339 /* Hash function of the types_used_by_vars_entry hash table. */
6340
6341 hashval_t
6342 used_type_hasher::hash (types_used_by_vars_entry *entry)
6343 {
6344 return hash_types_used_by_vars_entry (entry);
6345 }
6346
6347 /* Equality function of the types_used_by_vars_entry hash table. */
6348
6349 bool
6350 used_type_hasher::equal (types_used_by_vars_entry *e1,
6351 types_used_by_vars_entry *e2)
6352 {
6353 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6354 }
6355
6356 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6357
6358 void
6359 types_used_by_var_decl_insert (tree type, tree var_decl)
6360 {
6361 if (type != NULL && var_decl != NULL)
6362 {
6363 types_used_by_vars_entry **slot;
6364 struct types_used_by_vars_entry e;
6365 e.var_decl = var_decl;
6366 e.type = type;
6367 if (types_used_by_vars_hash == NULL)
6368 types_used_by_vars_hash
6369 = hash_table<used_type_hasher>::create_ggc (37);
6370
6371 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6372 if (*slot == NULL)
6373 {
6374 struct types_used_by_vars_entry *entry;
6375 entry = ggc_alloc<types_used_by_vars_entry> ();
6376 entry->type = type;
6377 entry->var_decl = var_decl;
6378 *slot = entry;
6379 }
6380 }
6381 }
6382
6383 namespace {
6384
6385 const pass_data pass_data_leaf_regs =
6386 {
6387 RTL_PASS, /* type */
6388 "*leaf_regs", /* name */
6389 OPTGROUP_NONE, /* optinfo_flags */
6390 TV_NONE, /* tv_id */
6391 0, /* properties_required */
6392 0, /* properties_provided */
6393 0, /* properties_destroyed */
6394 0, /* todo_flags_start */
6395 0, /* todo_flags_finish */
6396 };
6397
6398 class pass_leaf_regs : public rtl_opt_pass
6399 {
6400 public:
6401 pass_leaf_regs (gcc::context *ctxt)
6402 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6403 {}
6404
6405 /* opt_pass methods: */
6406 virtual unsigned int execute (function *)
6407 {
6408 return rest_of_handle_check_leaf_regs ();
6409 }
6410
6411 }; // class pass_leaf_regs
6412
6413 } // anon namespace
6414
6415 rtl_opt_pass *
6416 make_pass_leaf_regs (gcc::context *ctxt)
6417 {
6418 return new pass_leaf_regs (ctxt);
6419 }
6420
6421 static unsigned int
6422 rest_of_handle_thread_prologue_and_epilogue (void)
6423 {
6424 /* prepare_shrink_wrap is sensitive to the block structure of the control
6425 flow graph, so clean it up first. */
6426 if (optimize)
6427 cleanup_cfg (0);
6428
6429 /* On some machines, the prologue and epilogue code, or parts thereof,
6430 can be represented as RTL. Doing so lets us schedule insns between
6431 it and the rest of the code and also allows delayed branch
6432 scheduling to operate in the epilogue. */
6433 thread_prologue_and_epilogue_insns ();
6434
6435 /* Some non-cold blocks may now be only reachable from cold blocks.
6436 Fix that up. */
6437 fixup_partitions ();
6438
6439 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6440 see PR57320. */
6441 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6442
6443 /* The stack usage info is finalized during prologue expansion. */
6444 if (flag_stack_usage_info || flag_callgraph_info)
6445 output_stack_usage ();
6446
6447 return 0;
6448 }
6449
6450 /* Record a final call to CALLEE at LOCATION. */
6451
6452 void
6453 record_final_call (tree callee, location_t location)
6454 {
6455 struct callinfo_callee datum = { location, callee };
6456 vec_safe_push (cfun->su->callees, datum);
6457 }
6458
6459 /* Record a dynamic allocation made for DECL_OR_EXP. */
6460
6461 void
6462 record_dynamic_alloc (tree decl_or_exp)
6463 {
6464 struct callinfo_dalloc datum;
6465
6466 if (DECL_P (decl_or_exp))
6467 {
6468 datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6469 const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6470 const char *dot = strrchr (name, '.');
6471 if (dot)
6472 name = dot + 1;
6473 datum.name = ggc_strdup (name);
6474 }
6475 else
6476 {
6477 datum.location = EXPR_LOCATION (decl_or_exp);
6478 datum.name = NULL;
6479 }
6480
6481 vec_safe_push (cfun->su->dallocs, datum);
6482 }
6483
6484 namespace {
6485
6486 const pass_data pass_data_thread_prologue_and_epilogue =
6487 {
6488 RTL_PASS, /* type */
6489 "pro_and_epilogue", /* name */
6490 OPTGROUP_NONE, /* optinfo_flags */
6491 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6492 0, /* properties_required */
6493 0, /* properties_provided */
6494 0, /* properties_destroyed */
6495 0, /* todo_flags_start */
6496 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6497 };
6498
6499 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6500 {
6501 public:
6502 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6503 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6504 {}
6505
6506 /* opt_pass methods: */
6507 virtual unsigned int execute (function *)
6508 {
6509 return rest_of_handle_thread_prologue_and_epilogue ();
6510 }
6511
6512 }; // class pass_thread_prologue_and_epilogue
6513
6514 } // anon namespace
6515
6516 rtl_opt_pass *
6517 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6518 {
6519 return new pass_thread_prologue_and_epilogue (ctxt);
6520 }
6521
6522
6523 /* If CONSTRAINT is a matching constraint, then return its number.
6524 Otherwise, return -1. */
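/* For instance, given

     asm ("..." : "=r" (x) : "0" (y), "r" (z));

   the input constraint "0" matches output operand 0, so this returns 0
   for it and -1 for the plain "r" constraint.  */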
6525
6526 static int
6527 matching_constraint_num (const char *constraint)
6528 {
6529 if (*constraint == '%')
6530 constraint++;
6531
6532 if (IN_RANGE (*constraint, '0', '9'))
6533 return strtoul (constraint, NULL, 10);
6534
6535 return -1;
6536 }
6537
6538 /* This mini-pass fixes fall-out from SSA in asm statements that have
6539 in-out constraints. Say you start with
6540
6541 orig = inout;
6542 asm ("": "+mr" (inout));
6543 use (orig);
6544
6545 which is transformed very early to use explicit output and match operands:
6546
6547 orig = inout;
6548 asm ("": "=mr" (inout) : "0" (inout));
6549 use (orig);
6550
6551 Or, after SSA and copyprop,
6552
6553 asm ("": "=mr" (inout_2) : "0" (inout_1));
6554 use (inout_1);
6555
6556 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6557 they represent two separate values, so they will get different pseudo
6558 registers during expansion. Then, since the two operands need to match
6559 per the constraints, but use different pseudo registers, reload can
6560 only register a reload for these operands. But reloads can only be
6561 satisfied by hardregs, not by memory, so we need a register for this
6562 reload, just because we are presented with non-matching operands.
6563 So, even though we allow memory for this operand, no memory can be
6564 used for it, just because the two operands don't match. This can
6565 cause reload failures on register-starved targets.
6566
6567 So it's a symptom of reload not being able to use memory for reloads
6568 or, alternatively it's also a symptom of both operands not coming into
6569 reload as matching (in which case the pseudo could go to memory just
6570 fine, as the alternative allows it, and no reload would be necessary).
6571 We fix the latter problem here, by transforming
6572
6573 asm ("": "=mr" (inout_2) : "0" (inout_1));
6574
6575 back to
6576
6577 inout_2 = inout_1;
6578 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6579
6580 static void
6581 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6582 {
6583 int i;
6584 bool changed = false;
6585 rtx op = SET_SRC (p_sets[0]);
6586 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6587 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6588 bool *output_matched = XALLOCAVEC (bool, noutputs);
6589
6590 memset (output_matched, 0, noutputs * sizeof (bool));
6591 for (i = 0; i < ninputs; i++)
6592 {
6593 rtx input, output;
6594 rtx_insn *insns;
6595 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6596 int match, j;
6597
6598 match = matching_constraint_num (constraint);
6599 if (match < 0)
6600 continue;
6601
6602 gcc_assert (match < noutputs);
6603 output = SET_DEST (p_sets[match]);
6604 input = RTVEC_ELT (inputs, i);
6605 /* Only do the transformation for pseudos. */
6606 if (! REG_P (output)
6607 || rtx_equal_p (output, input)
6608 || !(REG_P (input) || SUBREG_P (input)
6609 || MEM_P (input) || CONSTANT_P (input))
6610 || !general_operand (input, GET_MODE (output)))
6611 continue;
6612
6613 /* We can't do anything if the output is also used as input,
6614 as we're going to overwrite it. */
6615 for (j = 0; j < ninputs; j++)
6616 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6617 break;
6618 if (j != ninputs)
6619 continue;
6620
6621 /* Avoid changing the same input several times. For
6622 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6623 only change it once (to out1), rather than changing it
6624 first to out1 and afterwards to out2. */
6625 if (i > 0)
6626 {
6627 for (j = 0; j < noutputs; j++)
6628 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6629 break;
6630 if (j != noutputs)
6631 continue;
6632 }
6633 output_matched[match] = true;
6634
6635 start_sequence ();
6636 emit_move_insn (output, copy_rtx (input));
6637 insns = get_insns ();
6638 end_sequence ();
6639 emit_insn_before (insns, insn);
6640
6641 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT(SET_SRC(p_sets[match]));
6642 bool early_clobber_p = strchr (constraint, '&') != NULL;
6643
6644 /* Now replace all mentions of the input with output. We can't
6645 just replace the occurrence in inputs[i], as the register might
6646 also be used in some other input (or even in an address of an
6647 output), which would mean possibly increasing the number of
6648 inputs by one (namely 'output' in addition), which might pose
6649 a too complicated problem for reload to solve. E.g. this situation:
6650
6651 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6652
6653 Here 'input' is used in two occurrences as input (once for the
6654 input operand, once for the address in the second output operand).
6655 If we would replace only the occurrence of the input operand (to
6656 make the matching) we would be left with this:
6657
6658 output = input
6659 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6660
6661 Now we suddenly have two different input values (containing the same
6662 value, but different pseudos) where we formerly had only one.
6663 With more complicated asms this might lead to reload failures
6664 	 which wouldn't have happened without this pass. So, iterate over
6665 all operands and replace all occurrences of the register used.
6666
6667 However, if one or more of the 'input' uses have a non-matching
6668 constraint and the matched output operand is an early clobber
6669 operand, then do not replace the input operand, since by definition
6670 it conflicts with the output operand and cannot share the same
6671 register. See PR89313 for details. */
6672
6673 for (j = 0; j < noutputs; j++)
6674 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6675 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6676 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6677 input, output);
6678 for (j = 0; j < ninputs; j++)
6679 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6680 {
6681 if (!early_clobber_p
6682 || match == matching_constraint_num
6683 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6684 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6685 input, output);
6686 }
6687
6688 changed = true;
6689 }
6690
6691 if (changed)
6692 df_insn_rescan (insn);
6693 }
6694
6695 /* Add the decl D to the local_decls list of FUN. */
6696
6697 void
6698 add_local_decl (struct function *fun, tree d)
6699 {
6700 gcc_assert (VAR_P (d));
6701 vec_safe_push (fun->local_decls, d);
6702 }
6703
6704 namespace {
6705
6706 const pass_data pass_data_match_asm_constraints =
6707 {
6708 RTL_PASS, /* type */
6709 "asmcons", /* name */
6710 OPTGROUP_NONE, /* optinfo_flags */
6711 TV_NONE, /* tv_id */
6712 0, /* properties_required */
6713 0, /* properties_provided */
6714 0, /* properties_destroyed */
6715 0, /* todo_flags_start */
6716 0, /* todo_flags_finish */
6717 };
6718
6719 class pass_match_asm_constraints : public rtl_opt_pass
6720 {
6721 public:
6722 pass_match_asm_constraints (gcc::context *ctxt)
6723 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6724 {}
6725
6726 /* opt_pass methods: */
6727 virtual unsigned int execute (function *);
6728
6729 }; // class pass_match_asm_constraints
6730
6731 unsigned
6732 pass_match_asm_constraints::execute (function *fun)
6733 {
6734 basic_block bb;
6735 rtx_insn *insn;
6736 rtx pat, *p_sets;
6737 int noutputs;
6738
6739 if (!crtl->has_asm_statement)
6740 return 0;
6741
6742 df_set_flags (DF_DEFER_INSN_RESCAN);
6743 FOR_EACH_BB_FN (bb, fun)
6744 {
6745 FOR_BB_INSNS (bb, insn)
6746 {
6747 if (!INSN_P (insn))
6748 continue;
6749
6750 pat = PATTERN (insn);
6751 if (GET_CODE (pat) == PARALLEL)
6752 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6753 else if (GET_CODE (pat) == SET)
6754 p_sets = &PATTERN (insn), noutputs = 1;
6755 else
6756 continue;
6757
6758 if (GET_CODE (*p_sets) == SET
6759 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6760 match_asm_constraints_1 (insn, p_sets, noutputs);
6761 }
6762 }
6763
6764 return TODO_df_finish;
6765 }
6766
6767 } // anon namespace
6768
6769 rtl_opt_pass *
6770 make_pass_match_asm_constraints (gcc::context *ctxt)
6771 {
6772 return new pass_match_asm_constraints (ctxt);
6773 }
6774
6775
6776 #include "gt-function.h"
6777