1 /* RTL buffer overflow protection function for GNU C compiler
2    Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING.  If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "machmode.h"
24 
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "tm_p.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "hard-reg-set.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
40 #include "conditions.h"
41 #include "insn-attr.h"
42 #include "c-tree.h"
43 #include "optabs.h"
44 #include "reload.h"
45 #include "protector.h"
46 
47 
48 /* Warn when not issuing stack smashing protection for some reason */
49 int warn_stack_protector;
50 
51 /* Round a value to the lowest integer less than it that is a multiple of
52    the required alignment.  Avoid using division in case the value is
53    negative.  Assume the alignment is a power of two.  */
54 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
55 
56 /* Similar, but round to the next highest integer that meets the
57    alignment.  */
58 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
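
/* Worked examples of the rounding macros above (illustrative only, assuming
   ALIGN is a power of two as required):

     FLOOR_ROUND (13, 8) == (13 & ~7)       == 8
     CEIL_ROUND  (13, 8) == ((13 + 7) & ~7) == 16
     CEIL_ROUND  (16, 8) == 16  */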
59 
60 
61 /* Nonzero means use propolice as a stack protection method */
62 extern int flag_propolice_protection;
63 
64 /* This file contains several memory arrangement functions to protect
65    the return address and the frame pointer of the stack
66    from a stack-smashing attack. It also
67    provides the function that protects pointer variables. */
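
/* Illustrative sketch (for a downward-growing stack; not authoritative):
   the pass rearranges the frame so the guard sits next to the saved
   registers and character buffers sit next to the guard, e.g.

       higher addresses
	 arguments
	 return address
	 previous frame pointer
	 guard value              <- checked in the epilogue
	 character buffers        <- swept up next to the guard
	 other locals, copies of pointer arguments
       lower addresses

   An overflow of a buffer must then overwrite the guard before it can reach
   the return address, and cannot reach pointers stored below the buffers.  */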
68 
69 /* Nonzero if the function being compiled defines string buffers that may be
70    damaged by a stack-smashing attack */
71 static int current_function_defines_vulnerable_string;
72 static int current_function_defines_short_string;
73 static int current_function_has_variable_string;
74 static int current_function_defines_vsized_array;
75 static int current_function_is_inlinable;
76 static int is_array;
77 
78 static rtx guard_area, _guard;
79 static rtx function_first_insn, prologue_insert_point;
80 
81 /* State used while sweeping string variables and pushing the frame.  */
82 static HOST_WIDE_INT sweep_frame_offset;
83 static HOST_WIDE_INT push_allocated_offset = 0;
84 static HOST_WIDE_INT push_frame_offset = 0;
85 static int saved_cse_not_expected = 0;
86 
87 static int search_string_from_argsandvars PARAMS ((int caller));
88 static int search_string_from_local_vars PARAMS ((tree block));
89 static int search_pointer_def PARAMS ((tree names));
90 static int search_func_pointer PARAMS ((tree type));
91 static int check_used_flag PARAMS ((rtx x));
92 static void reset_used_flags_for_insns PARAMS ((rtx insn));
93 static void reset_used_flags_for_decls PARAMS ((tree block));
94 static void reset_used_flags_of_plus PARAMS ((rtx x));
95 static void rtl_prologue PARAMS ((rtx insn));
96 static void rtl_epilogue PARAMS ((rtx fnlastinsn));
97 static void arrange_var_order PARAMS ((tree blocks));
98 static void copy_args_for_protection PARAMS ((void));
99 static void sweep_string_variable
100 	PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
101 static void sweep_string_in_decls
102 	PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
103 static void sweep_string_in_args
104 	PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
105 static void sweep_string_use_of_insns
106 	PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
107 static void sweep_string_in_operand
108 	PARAMS ((rtx insn, rtx *loc,
109 		 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
110 static void move_arg_location
111 	PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
112 static void change_arg_use_of_insns
113 	PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
114 static void change_arg_use_in_operand
115 	PARAMS ((rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size));
116 static void validate_insns_of_varrefs PARAMS ((rtx insn));
117 static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
118 
119 #define SUSPICIOUS_BUF_SIZE 8
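/* Character buffers smaller than SUSPICIOUS_BUF_SIZE bytes are not by
   themselves treated as vulnerable; see search_string_def below.  */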
120 
121 #define AUTO_BASEPTR(X) \
122   (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
123 #define AUTO_OFFSET(X) \
124   (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
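/* For example (illustrative): for an address of the form
   (plus:SI (reg:SI virtual-stack-vars) (const_int 8)),
   AUTO_BASEPTR yields the register and AUTO_OFFSET yields 8; for a bare
   register the base is the register itself and the offset is 0.  */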
125 #undef PARM_PASSED_IN_MEMORY
126 #define PARM_PASSED_IN_MEMORY(PARM) \
127  (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
128 #define VIRTUAL_STACK_VARS_P(X) \
129  ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
130 #define TREE_VISITED(NODE) ((NODE)->common.unused_0)
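/* TREE_VISITED reuses an otherwise unused bit of the tree node; the search
   functions below set it while walking aggregate types so that recursive
   types are not revisited endlessly.  */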
131 
132 
133 
134 void
135 prepare_stack_protection (inlinable)
136      int inlinable;
137 {
138   tree blocks = DECL_INITIAL (current_function_decl);
139   current_function_is_inlinable = inlinable && !flag_no_inline;
140   push_frame_offset = push_allocated_offset = 0;
141   saved_cse_not_expected = 0;
142 
143   /*
144     skip the protection if the function has no blocks
145     or it is an inlinable function
146   */
147   if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
148   if (! blocks || current_function_is_inlinable) return;
149 
150   current_function_defines_vulnerable_string
151     = search_string_from_argsandvars (0);
152 
153   if (current_function_defines_vulnerable_string
154       || flag_stack_protection)
155     {
156       HOST_WIDE_INT offset;
157       function_first_insn = get_insns ();
158 
159       if (current_function_contains_functions) {
160 	  if (warn_stack_protector)
161              warning ("not protecting function: it contains functions");
162 	  return;
163       }
164 
165       /* Initialize recognition, indicating that volatile is OK.  */
166       init_recog ();
167 
168       sweep_frame_offset = 0;
169 
170 #ifdef STACK_GROWS_DOWNWARD
171       /*
172 	frame_offset: offset to end of allocated area of stack frame.
173 	 It is defined in function.c.
174       */
175 
176       /* the location must be before buffers */
177       guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
178       PUT_MODE (guard_area, GUARD_m);
179       MEM_VOLATILE_P (guard_area) = 1;
180 
181 #ifndef FRAME_GROWS_DOWNWARD
182       sweep_frame_offset = frame_offset;
183 #endif
184 
185       /* To make room for the guard value, scan all insns and fix the offset
186 	 addresses of the variables that are based on the frame pointer.
187 	 Also scan all declarations of variables and fix the offset addresses
188 	 of the variables that are based on the frame pointer.  */
189       sweep_string_variable (guard_area, UNITS_PER_GUARD);
190 
191 
192       /* the location of guard area moves to the beginning of stack frame */
193       if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
194 	XEXP (XEXP (guard_area, 0), 1)
195 	  = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
196 
197 
198       /* Insert prologue rtl instructions */
199       rtl_prologue (function_first_insn);
200 
201       if (! current_function_has_variable_string)
202 	{
203 	  /* Generate argument saving instruction */
204 	  copy_args_for_protection ();
205 
206 #ifndef FRAME_GROWS_DOWNWARD
207 	  /* If the frame grows upward, a character string copied from an arg
208 	     stays above the guard variable,
209 	     so sweep the guard variable again.  */
210 	  sweep_frame_offset = CEIL_ROUND (frame_offset,
211 					   BIGGEST_ALIGNMENT / BITS_PER_UNIT);
212 	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
213 #endif
214 	}
215       else if (warn_stack_protector)
216 	warning ("not protecting variables: it has a variable length buffer");
217 #endif
218 #ifndef FRAME_GROWS_DOWNWARD
219       if (STARTING_FRAME_OFFSET == 0)
220 	{
221 	  /* this may be only for alpha */
222 	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
223 	  assign_stack_local (BLKmode, push_allocated_offset, -1);
224 	  sweep_frame_offset = frame_offset;
225 	  sweep_string_variable (const0_rtx, -push_allocated_offset);
226 	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
227 	}
228 #endif
229 
230       /* Arrange the order of local variables */
231       arrange_var_order (blocks);
232 
233 #ifdef STACK_GROWS_DOWNWARD
234       /* Insert epilogue rtl instructions */
235       rtl_epilogue (get_last_insn ());
236 #endif
237       init_recog_no_volatile ();
238     }
239   else if (current_function_defines_short_string
240 	   && warn_stack_protector)
241     warning ("not protecting function: buffer is less than %d bytes long",
242 	     SUSPICIOUS_BUF_SIZE);
243 }
244 
245 /*
246   Search for string variables among the arguments and local variables.
247   caller: 0 means called from prepare_stack_protection
248           1 means called from push_frame
249 */
250 static int
251 search_string_from_argsandvars (caller)
252      int caller;
253 {
254   tree blocks, parms;
255   int string_p;
256 
257   /* Cache the latest search result.  */
258   static tree __latest_search_decl = 0;
259   static int  __latest_search_result = FALSE;
260 
261   if (__latest_search_decl == current_function_decl)
262     return __latest_search_result;
263   else if (caller) return FALSE;
264   __latest_search_decl = current_function_decl;
265   __latest_search_result = TRUE;
266 
267   current_function_defines_short_string = FALSE;
268   current_function_has_variable_string = FALSE;
269   current_function_defines_vsized_array = FALSE;
270 
271   /*
272     search a string variable from local variables
273   */
274   blocks = DECL_INITIAL (current_function_decl);
275   string_p = search_string_from_local_vars (blocks);
276 
277   if (!current_function_defines_vsized_array && current_function_calls_alloca)
278     {
279       current_function_has_variable_string = TRUE;
280       return TRUE;
281     }
282 
283   if (string_p) return TRUE;
284 
285 #ifdef STACK_GROWS_DOWNWARD
286   /*
287     search a string variable from arguments
288   */
289   parms = DECL_ARGUMENTS (current_function_decl);
290 
291   for (; parms; parms = TREE_CHAIN (parms))
292     if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
293       {
294 	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
295 	  {
296 	    string_p = search_string_def (TREE_TYPE (parms));
297 	    if (string_p) return TRUE;
298 	  }
299       }
300 #endif
301 
302   __latest_search_result = FALSE;
303   return FALSE;
304 }
305 
306 
307 static int
308 search_string_from_local_vars (block)
309      tree block;
310 {
311   tree types;
312   int found = FALSE;
313 
314   while (block && TREE_CODE(block)==BLOCK)
315     {
316       types = BLOCK_VARS(block);
317 
318       while (types)
319 	{
320 	  /* skip the declaration that refers to an external variable */
321 	  /* name: types.decl.name.identifier.id                   */
322 	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
323 	      && TREE_CODE (types) == VAR_DECL
324 	      && ! DECL_ARTIFICIAL (types)
325 	      && DECL_RTL_SET_P (types)
326 	      && GET_CODE (DECL_RTL (types)) == MEM
327 
328 	      && search_string_def (TREE_TYPE (types)))
329 	    {
330 	      rtx home = DECL_RTL (types);
331 
332 	      if (GET_CODE (home) == MEM
333 		  && (GET_CODE (XEXP (home, 0)) == MEM
334 		      ||
335 		      (GET_CODE (XEXP (home, 0)) == REG
336 		       && XEXP (home, 0) != virtual_stack_vars_rtx
337 		       && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
338 		       && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
339 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
340 		       && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
341 #endif
342 		       )))
343 		/* If the value is addressed indirectly through memory or through
344 		   a register that isn't the frame pointer, the object is
345 		   variable-sized and is addressed through
346 		   that register or stack slot.
347 		   The protection has no way to hide pointer variables
348 		   behind such an array, so all we can do is keep
349 		   the order of variables and arguments. */
350 		{
351 		  current_function_has_variable_string = TRUE;
352 		}
353 
354 	      /* found character array */
355 	      found = TRUE;
356 	    }
357 
358 	  types = TREE_CHAIN(types);
359 	}
360 
361       if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
362 	{
363 	  found = TRUE;
364 	}
365 
366       block = BLOCK_CHAIN (block);
367     }
368 
369   return found;
370 }
371 
372 
373 /*
374  * search a character array from the specified type tree
375  */
376 int
377 search_string_def (type)
378      tree type;
379 {
380   tree tem;
381 
382   if (! type)
383     return FALSE;
384 
385   if (flag_strong_protection
386       && TREE_CODE (type) == ARRAY_TYPE)
387     return TRUE;
388 
389   switch (TREE_CODE (type))
390     {
391     case ARRAY_TYPE:
392       /* Check if the array is a variable-sized array */
393       if (TYPE_DOMAIN (type) == 0
394 	  || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
395 	      && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
396 	current_function_defines_vsized_array = TRUE;
397 
398       /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */
399       if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
400 	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
401 	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
402 	{
403 	  /* Check if the string is a variable string */
404 	  if (TYPE_DOMAIN (type) == 0
405 	      ||
406 	      (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
407 	       && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
408 	    return TRUE;
409 
410 	  /* Check if the string size is at least SUSPICIOUS_BUF_SIZE bytes */
411 #if SUSPICIOUS_BUF_SIZE > 0
412 	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
413 	      &&
414 	      TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
415 	      >= SUSPICIOUS_BUF_SIZE)
416 	    return TRUE;
417 
418 	  current_function_defines_short_string = TRUE;
419 #else
420 	  return TRUE;
421 #endif
422 	}
423 
424       /* to protect every function, sweep any arrays to the top of the frame */
425       is_array = TRUE;
426 
427       return search_string_def(TREE_TYPE (type));
428 
429     case UNION_TYPE:
430     case QUAL_UNION_TYPE:
431     case RECORD_TYPE:
432       if (! TREE_VISITED (type))
433 	{
434 	  /* mark the type as having been visited already */
435 	  TREE_VISITED (type) = 1;
436 
437 	  /* Walk each field of the aggregate, looking for a string
438 	     buffer.  */
439 	  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
440 	    {
441 	      /* Omit here local type decls until we know how to support
442 		 them. */
443 	      if ((TREE_CODE (tem) == TYPE_DECL)
444 		  || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
445 	        continue;
446 
447 	      if (search_string_def(TREE_TYPE (tem)))
448 		{
449 		  TREE_VISITED (type) = 0;
450 		  return TRUE;
451 		}
452 	    }
453 
454 	  TREE_VISITED (type) = 0;
455 	}
456       break;
457 
458     case POINTER_TYPE:
459     case REFERENCE_TYPE:
460       /* Pointers, references and OFFSET_TYPE cannot themselves be string
461 	 buffers, so fall through and return FALSE.  */
462     case OFFSET_TYPE:
463     default:
464       break;
465     }
466 
467   return FALSE;
468 }
469 
470 /*
471  * examine whether the input contains frame pointer addressing
472  */
473 int
474 contains_fp (op)
475      rtx op;
476 {
477   register enum rtx_code code;
478   rtx x;
479   int i, j;
480   const char *fmt;
481 
482   x = op;
483   if (x == 0)
484     return FALSE;
485 
486   code = GET_CODE (x);
487 
488   switch (code)
489     {
490     case CONST_INT:
491     case CONST_DOUBLE:
492     case CONST:
493     case SYMBOL_REF:
494     case CODE_LABEL:
495     case REG:
496     case ADDRESSOF:
497       return FALSE;
498 
499     case PLUS:
500       if (XEXP (x, 0) == virtual_stack_vars_rtx
501 	  && CONSTANT_P (XEXP (x, 1)))
502 	return TRUE;
503 
504     default:
505       break;
506     }
507 
508   /* Scan all subexpressions.  */
509   fmt = GET_RTX_FORMAT (code);
510   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
511     if (*fmt == 'e')
512       {
513 	if (contains_fp (XEXP (x, i))) return TRUE;
514       }
515     else if (*fmt == 'E')
516       for (j = 0; j < XVECLEN (x, i); j++)
517 	if (contains_fp (XVECEXP (x, i, j))) return TRUE;
518 
519   return FALSE;
520 }
521 
522 
523 static int
524 search_pointer_def (type)
525      tree type;
526 {
527   tree tem;
528 
529   if (! type)
530     return FALSE;
531 
532   switch (TREE_CODE (type))
533     {
534     case UNION_TYPE:
535     case QUAL_UNION_TYPE:
536     case RECORD_TYPE:
537       if (! TREE_VISITED (type))
538 	{
539 	  /* mark the type as having been visited already */
540 	  TREE_VISITED (type) = 1;
541 
542 	  /* Walk each field of the aggregate, looking for a pointer
543 	     member.  */
544 	  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
545 	    {
546 	      /* Omit here local type decls until we know how to support
547 		 them. */
548 	      if ((TREE_CODE (tem) == TYPE_DECL)
549 		  || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
550 	        continue;
551 
552 	      if (search_pointer_def(TREE_TYPE(tem)))
553 		{
554 		  TREE_VISITED (type) = 0;
555 		  return TRUE;
556 		}
557 	    }
558 
559 	  TREE_VISITED (type) = 0;
560 	}
561       break;
562 
563     case ARRAY_TYPE:
564       return search_pointer_def (TREE_TYPE(type));
565 
566     case POINTER_TYPE:
567     case REFERENCE_TYPE:
568     case OFFSET_TYPE:
569       if (TYPE_READONLY (TREE_TYPE (type)))
570 	{
571 	  /* unless this pointer contains function pointer,
572 	     it should be protected */
573 	  return search_func_pointer (TREE_TYPE (type));
574 	}
575       return TRUE;
576 
577     default:
578       break;
579     }
580 
581   return FALSE;
582 }
583 
584 
585 static int
586 search_func_pointer (type)
587      tree type;
588 {
589   tree tem;
590 
591   if (! type)
592     return FALSE;
593 
594   switch (TREE_CODE (type))
595     {
596     case UNION_TYPE:
597     case QUAL_UNION_TYPE:
598     case RECORD_TYPE:
599 	if (! TREE_VISITED (type))
600 	  {
601 	    /* mark the type as having been visited already */
602 	    TREE_VISITED (type) = 1;
603 
604 	    /* Walk each field of the aggregate, looking for a function
605 	       pointer member.  */
606 	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
607 	      {
608 		if (TREE_CODE (tem) == FIELD_DECL
609 		    && search_func_pointer (TREE_TYPE(tem))) {
610 		  TREE_VISITED (type) = 0;
611 		  return TRUE;
612 		}
613 	      }
614 
615 	    TREE_VISITED (type) = 0;
616 	  }
617 	break;
618 
619     case ARRAY_TYPE:
620       return search_func_pointer (TREE_TYPE(type));
621 
622     case POINTER_TYPE:
623     case REFERENCE_TYPE:
624       /* I'm not sure whether OFFSET_TYPE needs this treatment,
625 	 so I'll play safe and return 1.  */
626     case OFFSET_TYPE:
627       if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
628 	return TRUE;
629       return search_func_pointer (TREE_TYPE(type));
630 
631     default:
632       break;
633     }
634 
635   return FALSE;
636 }
637 
638 
639 /*
640  * check whether the specified rtx contains PLUS rtx with used flag.
641  */
642 static int
643 check_used_flag (x)
644      rtx x;
645 {
646   register int i, j;
647   register enum rtx_code code;
648   register const char *format_ptr;
649 
650   if (x == 0)
651     return FALSE;
652 
653   code = GET_CODE (x);
654 
655   switch (code)
656     {
657     case REG:
658     case QUEUED:
659     case CONST_INT:
660     case CONST_DOUBLE:
661     case SYMBOL_REF:
662     case CODE_LABEL:
663     case PC:
664     case CC0:
665       return FALSE;
666 
667     case PLUS:
668       if (x->used)
669 	return TRUE;
670 
671     default:
672       break;
673     }
674 
675   format_ptr = GET_RTX_FORMAT (code);
676   for (i = 0; i < GET_RTX_LENGTH (code); i++)
677     {
678       switch (*format_ptr++)
679 	{
680 	case 'e':
681 	  if (check_used_flag (XEXP (x, i)))
682 	    return TRUE;
683 	  break;
684 
685 	case 'E':
686 	  for (j = 0; j < XVECLEN (x, i); j++)
687 	    if (check_used_flag (XVECEXP (x, i, j)))
688 	      return TRUE;
689 	  break;
690 	}
691     }
692 
693   return FALSE;
694 }
695 
696 
697 static void
698 reset_used_flags_for_insns (insn)
699      rtx insn;
700 {
701   register int i, j;
702   register enum rtx_code code;
703   register const char *format_ptr;
704 
705   for (; insn; insn = NEXT_INSN (insn))
706     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
707 	|| GET_CODE (insn) == CALL_INSN)
708       {
709 	code = GET_CODE (insn);
710 	insn->used = 0;
711 	format_ptr = GET_RTX_FORMAT (code);
712 
713 	for (i = 0; i < GET_RTX_LENGTH (code); i++)
714 	  {
715 	    switch (*format_ptr++) {
716 	    case 'e':
717 	      reset_used_flags_of_plus (XEXP (insn, i));
718 	      break;
719 
720 	    case 'E':
721 	      for (j = 0; j < XVECLEN (insn, i); j++)
722 		reset_used_flags_of_plus (XVECEXP (insn, i, j));
723 	      break;
724 	    }
725 	  }
726       }
727 }
728 
729 static void
730 reset_used_flags_for_decls (block)
731      tree block;
732 {
733   tree types;
734   rtx home;
735 
736   while (block && TREE_CODE(block)==BLOCK)
737     {
738       types = BLOCK_VARS(block);
739 
740       while (types)
741 	{
742 	  /* skip the declaration that refers to an external variable and
743 	     also skip a global variable */
744 	  if (! DECL_EXTERNAL (types))
745 	    {
746 	      if (!DECL_RTL_SET_P (types)) goto next;
747 	      home = DECL_RTL (types);
748 
749 	      if (GET_CODE (home) == MEM
750 		  && GET_CODE (XEXP (home, 0)) == PLUS
751 		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
752 		{
753 		  XEXP (home, 0)->used = 0;
754 		}
755 	    }
756 	next:
757 	  types = TREE_CHAIN(types);
758 	}
759 
760       reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
761 
762       block = BLOCK_CHAIN (block);
763     }
764 }
765 
766 /* Clear the USED bits only of type PLUS in X */
767 
768 static void
769 reset_used_flags_of_plus (x)
770      rtx x;
771 {
772   register int i, j;
773   register enum rtx_code code;
774   register const char *format_ptr;
775 
776   if (x == 0)
777     return;
778 
779   code = GET_CODE (x);
780 
781   /* These types may be freely shared so we needn't do any resetting
782      for them.  */
783 
784   switch (code)
785     {
786     case REG:
787     case QUEUED:
788     case CONST_INT:
789     case CONST_DOUBLE:
790     case SYMBOL_REF:
791     case CODE_LABEL:
792     case PC:
793     case CC0:
794       return;
795 
796     case INSN:
797     case JUMP_INSN:
798     case CALL_INSN:
799     case NOTE:
800     case LABEL_REF:
801     case BARRIER:
802       /* The chain of insns is not being copied.  */
803       return;
804 
805     case PLUS:
806       x->used = 0;
807       break;
808 
809     case CALL_PLACEHOLDER:
810       reset_used_flags_for_insns (XEXP (x, 0));
811       reset_used_flags_for_insns (XEXP (x, 1));
812       reset_used_flags_for_insns (XEXP (x, 2));
813       break;
814 
815     default:
816       break;
817     }
818 
819   format_ptr = GET_RTX_FORMAT (code);
820   for (i = 0; i < GET_RTX_LENGTH (code); i++)
821     {
822       switch (*format_ptr++)
823 	{
824 	case 'e':
825 	  reset_used_flags_of_plus (XEXP (x, i));
826 	  break;
827 
828 	case 'E':
829 	  for (j = 0; j < XVECLEN (x, i); j++)
830 	    reset_used_flags_of_plus (XVECEXP (x, i, j));
831 	  break;
832 	}
833     }
834 }
835 
836 
837 static void
838 rtl_prologue (insn)
839      rtx insn;
840 {
841 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
842 #undef HAS_INIT_SECTION
843 #define HAS_INIT_SECTION
844 #endif
845 
846   rtx _val;
847 
848   for (; insn; insn = NEXT_INSN (insn))
849     if (GET_CODE (insn) == NOTE
850 	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
851       break;
852 
853 #if !defined (HAS_INIT_SECTION)
854   /* If this function is `main', skip past the call to `__main'
855      so that the guard is set up after global initializers, etc.  */
856   if (DECL_NAME (current_function_decl)
857       && MAIN_NAME_P (DECL_NAME (current_function_decl))
858       && DECL_CONTEXT (current_function_decl) == NULL_TREE)
859     {
860       rtx fbinsn = insn;
861       for (; insn; insn = NEXT_INSN (insn))
862 	if (GET_CODE (insn) == NOTE
863 	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
864 	  break;
865       if (insn == 0) insn = fbinsn;
866     }
867 #endif
868 
869   /* mark the next insn of FUNCTION_BEG insn */
870   prologue_insert_point = NEXT_INSN (insn);
871 
872   start_sequence ();
873 
874   _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard_local"));
875   emit_move_insn ( guard_area, _guard);
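  /* The sequence emitted here corresponds roughly to (illustrative):
       guard_area = __guard_local;  */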
876 
877   _val = get_insns ();
878   end_sequence ();
879 
880   emit_insn_before (_val, prologue_insert_point);
881 }
882 
883 static void
884 rtl_epilogue (insn)
885      rtx insn;
886 {
887   rtx if_false_label;
888   rtx _val;
889   rtx funcname;
890   tree funcstr;
891   int  flag_have_return = FALSE;
892 
893   start_sequence ();
894 
895 #ifdef HAVE_return
896   if (HAVE_return)
897     {
898       rtx insn;
899       return_label = gen_label_rtx ();
900 
901       for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
902 	if (GET_CODE (insn) == JUMP_INSN
903 	    && GET_CODE (PATTERN (insn)) == RETURN
904 	    && GET_MODE (PATTERN (insn)) == VOIDmode)
905 	  {
906 	    rtx pat = gen_rtx_SET (VOIDmode,
907 				   pc_rtx,
908 				   gen_rtx_LABEL_REF (VOIDmode,
909 						      return_label));
910 	    PATTERN (insn) = pat;
911 	    flag_have_return = TRUE;
912 	  }
913 
914 
915       emit_label (return_label);
916     }
917 #endif
918 
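  /* The sequence emitted below corresponds roughly to (illustrative):
       if (guard_area != __guard_local)
         __stack_smash_handler (<current function name>, guard_area);  */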
919   /*                                          if (guard_area != _guard) */
920   compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
921 
922   if_false_label = gen_label_rtx ();		/* { */
923   emit_jump_insn ( gen_beq(if_false_label));
924 
925   /* generate string for the current function name */
926   funcstr = build_string (strlen(current_function_name)+1,
927 			  current_function_name);
928   TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
929   funcname = output_constant_def (funcstr, 1);
930 
931   emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
932 		     0, VOIDmode, 2,
933                      XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
934 
935   /* the stack smash handler is not expected to return */
936 
937   emit_barrier ();				/* } */
938   emit_label (if_false_label);
939 
940   /* generate RTL to return from the current function */
941   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
942     use_return_register ();
943 
944 #ifdef HAVE_return
945   if (HAVE_return && flag_have_return)
946     {
947       emit_jump_insn (gen_return ());
948       emit_barrier ();
949     }
950 #endif
951 
952   _val = get_insns ();
953   end_sequence ();
954 
955   emit_insn_after (_val, insn);
956 }
957 
958 
959 static void
960 arrange_var_order (block)
961      tree block;
962 {
963   tree types;
964   HOST_WIDE_INT offset;
965 
966   while (block && TREE_CODE(block)==BLOCK)
967     {
968       /* arrange the location of character arrays in depth first.  */
969       arrange_var_order (BLOCK_SUBBLOCKS (block));
970 
971       types = BLOCK_VARS (block);
972 
973       while (types)
974 	{
975 	  /* skip the declaration that refers to an external variable */
976 	  /* name: types.decl.assembler_name.id			   */
977 	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
978 	      && TREE_CODE (types) == VAR_DECL
979 	      && ! DECL_ARTIFICIAL (types)
980 	      && ! DECL_VAR_INLINE (types)	/* don't sweep inlined string */
981 	      && DECL_RTL_SET_P (types)
982 	      && GET_CODE (DECL_RTL (types)) == MEM
983 	      && GET_MODE (DECL_RTL (types)) == BLKmode
984 
985 	      && (is_array=0, search_string_def (TREE_TYPE (types))
986 		  || (! current_function_defines_vulnerable_string
987 		      && is_array)))
988 	    {
989 	      rtx home = DECL_RTL (types);
990 
991 	      if (!(GET_CODE (home) == MEM
992 		    && (GET_CODE (XEXP (home, 0)) == MEM
993 			||
994 			(GET_CODE (XEXP (home, 0)) == REG
995 			 && XEXP (home, 0) != virtual_stack_vars_rtx
996 			 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
997 			 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
998 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
999 			 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1000 #endif
1001 			 ))))
1002 		{
1003 		  /* found a string variable */
1004 		  HOST_WIDE_INT var_size =
1005 		    ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1006 		     / BITS_PER_UNIT);
1007 
1008 		  /* confirmed it is BLKmode.  */
1009 		  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1010 		  var_size = CEIL_ROUND (var_size, alignment);
1011 
1012 		  /* skip the variable if it is at the top of the region
1013 		     specified by sweep_frame_offset */
1014 		  offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1015 		  if (offset == sweep_frame_offset - var_size)
1016 		    sweep_frame_offset -= var_size;
1017 
1018 		  else if (offset < sweep_frame_offset - var_size)
1019 		    sweep_string_variable (DECL_RTL (types), var_size);
1020 		}
1021 	    }
1022 
1023 	  types = TREE_CHAIN(types);
1024 	}
1025 
1026       block = BLOCK_CHAIN (block);
1027     }
1028 }
1029 
1030 
1031 static void
1032 copy_args_for_protection ()
1033 {
1034   tree parms = DECL_ARGUMENTS (current_function_decl);
1035   rtx temp_rtx;
1036 
1037   parms = DECL_ARGUMENTS (current_function_decl);
1038   for (; parms; parms = TREE_CHAIN (parms))
1039     if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1040       {
1041 	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1042 	  {
1043 	    int string_p;
1044 	    rtx seq;
1045 
1046 	    /*
1047 	      skip argument protection if the last argument is used
1048 	      for the variable argument
1049 	    */
1050 	    /*
1051 	      tree fntype;
1052 	      if (TREE_CHAIN (parms) == 0)
1053 	      {
1054 	        fntype = TREE_TYPE (current_function_decl);
1055 
1056 	        if ((TYPE_ARG_TYPES (fntype) != 0
1057 		     && TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1058 		          != void_type_node)
1059 	             || current_function_varargs)
1060 	          continue;
1061 	      }
1062 	    */
1063 
1064 	    string_p = search_string_def (TREE_TYPE (parms));
1065 
1066 	    /* check if it is a candidate to move */
1067 	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
1068 	      {
1069 		int arg_size
1070 		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1071 		     / BITS_PER_UNIT);
1072 		tree passed_type = DECL_ARG_TYPE (parms);
1073 		tree nominal_type = TREE_TYPE (parms);
1074 
1075 		start_sequence ();
1076 
1077 		if (GET_CODE (DECL_RTL (parms)) == REG)
1078 		  {
1079 		    rtx safe = 0;
1080 
1081 		    change_arg_use_of_insns (prologue_insert_point,
1082 					     DECL_RTL (parms), &safe, 0);
1083 		    if (safe)
1084 		      {
1085 			/* generate codes for copying the content */
1086 			rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1087 
1088 			/* avoid register elimination in gcse.c (COPY-PROP)*/
1089 			PATTERN (movinsn)->volatil = 1;
1090 
1091 			/* save debugger info */
1092 			DECL_INCOMING_RTL (parms) = safe;
1093 		      }
1094 		  }
1095 		else if (GET_CODE (DECL_RTL (parms)) == MEM
1096 			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1097 		  {
1098 		    rtx movinsn;
1099 		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1100 
1101 		    /* generate codes for copying the content */
1102 		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1103 		    /* avoid register elimination in gcse.c (COPY-PROP)*/
1104 		    PATTERN (movinsn)->volatil = 1;
1105 
1106 		    /* change the addressof information to the newly
1107 		       allocated pseudo register */
1108 		    emit_move_insn (DECL_RTL (parms), safe);
1109 
1110 		    /* save debugger info */
1111 		    DECL_INCOMING_RTL (parms) = safe;
1112 		  }
1113 
1114 		/* See if the frontend wants to pass this by invisible
1115 		   reference.  */
1116 		else if (passed_type != nominal_type
1117 			 && POINTER_TYPE_P (passed_type)
1118 			 && TREE_TYPE (passed_type) == nominal_type)
1119 		  {
1120 		    rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1121 
1122 		    change_arg_use_of_insns (prologue_insert_point,
1123 					     orig, &safe, 0);
1124 		    if (safe)
1125 		      {
1126 			/* generate codes for copying the content */
1127 			rtx movinsn = emit_move_insn (safe, orig);
1128 
1129 			/* avoid register elimination in gcse.c (COPY-PROP)*/
1130 			PATTERN (movinsn)->volatil = 1;
1131 
1132 			/* save debugger info */
1133 			DECL_INCOMING_RTL (parms) = safe;
1134 		      }
1135 		  }
1136 
1137 		else
1138 		  {
1139 		    /* declare temporary local variable DECL_NAME (parms) */
1140 		    temp_rtx
1141 		      = assign_stack_local (DECL_MODE (parms), arg_size,
1142 					    DECL_MODE (parms) == BLKmode ?
1143 					    -1 : 0);
1144 
1145 		    MEM_IN_STRUCT_P (temp_rtx)
1146 		      = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1147 		    set_mem_alias_set (temp_rtx, get_alias_set (parms));
1148 
1149 		    /* move_arg_location may change the contents of
1150 		       DECL_RTL (parms).  To avoid this, copy the contents. */
1151 		    SET_DECL_RTL (parms, copy_rtx (DECL_RTL (parms)));
1152 
1153 		    /* generate codes for copying the content */
1154 		    store_expr (parms, temp_rtx, 0);
1155 
1156 		    /* change the reference in each instruction */
1157 		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
1158 				       temp_rtx, arg_size);
1159 
1160 		    /* change the location of parms variable */
1161 		    SET_DECL_RTL (parms, temp_rtx);
1162 
1163 		    /* change debugger info */
1164 		    DECL_INCOMING_RTL (parms) = temp_rtx;
1165 		  }
1166 
1167 		seq = get_insns ();
1168 		end_sequence ();
1169 		emit_insn_before (seq, prologue_insert_point);
1170 
1171 #ifdef FRAME_GROWS_DOWNWARD
1172 		/* process the string argument */
1173 		if (string_p && DECL_MODE (parms) == BLKmode)
1174 		  {
1175 		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1176 		    arg_size = CEIL_ROUND (arg_size, alignment);
1177 
1178 		    /* change the reference in each instruction */
1179 		    sweep_string_variable (DECL_RTL (parms), arg_size);
1180 		  }
1181 #endif
1182 	      }
1183 	  }
1184       }
1185 }
1186 
1187 
1188 /*
1189   Sweep a string variable to the local variable addressed
1190   by sweep_frame_offset, which is the last position of the string variables.
1191 */
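
/* Illustrative example (numbers chosen only for exposition): with
   sweep_frame_offset == 64, an 8-byte string variable at frame offset 16 is
   moved to offset 56 (== 64 - 8), the variables that lived between offsets
   16 and 64 are shifted down by 8 bytes, and sweep_frame_offset becomes 56.  */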
1192 static void
1193 sweep_string_variable (sweep_var, var_size)
1194      rtx sweep_var;
1195      HOST_WIDE_INT var_size;
1196 {
1197   HOST_WIDE_INT sweep_offset;
1198 
1199   switch (GET_CODE (sweep_var))
1200     {
1201     case MEM:
1202       if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1203 	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1204 	return;
1205       sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1206       break;
1207     case CONST_INT:
1208       sweep_offset = INTVAL (sweep_var);
1209       break;
1210     default:
1211       abort ();
1212     }
1213 
1214   /* scan all declarations of variables and fix the offset address of
1215      the variable based on the frame pointer */
1216   sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1217 			 sweep_offset, var_size);
1218 
1219   /* scan all argument variables and fix the offset address based on
1220      the frame pointer */
1221   sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1222 			sweep_offset, var_size);
1223 
1224   /* To make room for the sweep variable, scan all insns and
1225      fix the offset addresses of the variables that are based on the frame pointer */
1226   sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1227 
1228 
1229   /* Clear all the USED bits in operands of all insns and declarations of
1230      local vars */
1231   reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1232   reset_used_flags_for_insns (function_first_insn);
1233 
1234   sweep_frame_offset -= var_size;
1235 }
1236 
1237 
1238 
1239 /*
1240   move an argument to the local variable addressed by frame_offset
1241 */
1242 static void
1243 move_arg_location (insn, orig, new, var_size)
1244      rtx  insn, orig, new;
1245      HOST_WIDE_INT var_size;
1246 {
1247   /* To make room for the sweep variable, scan all insns and
1248      fix the offset addresses of the variables that are based on the frame pointer */
1249   change_arg_use_of_insns (insn, orig, &new, var_size);
1250 
1251 
1252   /* Clear all the USED bits in operands of all insns and declarations
1253      of local vars */
1254   reset_used_flags_for_insns (insn);
1255 }
1256 
1257 
1258 static void
1259 sweep_string_in_decls (block, sweep_offset, sweep_size)
1260      tree block;
1261      HOST_WIDE_INT sweep_offset, sweep_size;
1262 {
1263   tree types;
1264   HOST_WIDE_INT offset;
1265   rtx home;
1266 
1267   while (block && TREE_CODE(block)==BLOCK)
1268     {
1269       types = BLOCK_VARS(block);
1270 
1271       while (types)
1272 	{
1273 	  /* skip the declaration that refers to an external variable and
1274 	     also skip a global variable */
1275 	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1276 
1277 	    if (!DECL_RTL_SET_P (types)) goto next;
1278 	    home = DECL_RTL (types);
1279 
1280 	    /* process for static local variable */
1281 	    if (GET_CODE (home) == MEM
1282 		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1283 	      goto next;
1284 
1285 	    if (GET_CODE (home) == MEM
1286 		&& XEXP (home, 0) == virtual_stack_vars_rtx)
1287 	      {
1288 		offset = 0;
1289 
1290 		/* the operand related to the sweep variable */
1291 		if (sweep_offset <= offset
1292 		    && offset < sweep_offset + sweep_size)
1293 		  {
1294 		    offset = sweep_frame_offset - sweep_size - sweep_offset;
1295 
1296 		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1297 						    offset);
1298 		    XEXP (home, 0)->used = 1;
1299 		  }
1300 		else if (sweep_offset <= offset
1301 			 && offset < sweep_frame_offset)
1302 		  {
1303 		    /* the rest of variables under sweep_frame_offset,
1304 		       shift the location */
1305 		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1306 						    -sweep_size);
1307 		    XEXP (home, 0)->used = 1;
1308 		  }
1309 	      }
1310 
1311 	    if (GET_CODE (home) == MEM
1312 		&& GET_CODE (XEXP (home, 0)) == MEM)
1313 	      {
1314 		/* process a dynamically allocated array */
1315 		home = XEXP (home, 0);
1316 	      }
1317 
1318 	    if (GET_CODE (home) == MEM
1319 		&& GET_CODE (XEXP (home, 0)) == PLUS
1320 		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1321 		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1322 	      {
1323 		if (! XEXP (home, 0)->used)
1324 		  {
1325 		    offset = AUTO_OFFSET(XEXP (home, 0));
1326 
1327 		    /* the operand related to the sweep variable */
1328 		    if (sweep_offset <= offset
1329 			&& offset < sweep_offset + sweep_size)
1330 		      {
1331 
1332 			offset
1333 			  += sweep_frame_offset - sweep_size - sweep_offset;
1334 			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1335 								      offset);
1336 
1337 			/* mark */
1338 			XEXP (home, 0)->used = 1;
1339 		      }
1340 		    else if (sweep_offset <= offset
1341 			     && offset < sweep_frame_offset)
1342 		      {	/* the rest of variables under sweep_frame_offset,
1343 			   so shift the location */
1344 
1345 			XEXP (XEXP (home, 0), 1)
1346 			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1347 
1348 			/* mark */
1349 			XEXP (home, 0)->used = 1;
1350 		      }
1351 		  }
1352 	      }
1353 
1354 	  }
1355 	next:
1356 	  types = TREE_CHAIN(types);
1357 	}
1358 
1359       sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
1360 			     sweep_offset, sweep_size);
1361       block = BLOCK_CHAIN (block);
1362     }
1363 }
1364 
1365 
1366 static void
1367 sweep_string_in_args (parms, sweep_offset, sweep_size)
1368      tree parms;
1369      HOST_WIDE_INT sweep_offset, sweep_size;
1370 {
1371   rtx home;
1372   HOST_WIDE_INT offset;
1373 
1374   for (; parms; parms = TREE_CHAIN (parms))
1375     if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1376       {
1377 	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1378 	  {
1379 	    home = DECL_INCOMING_RTL (parms);
1380 
1381 	    if (XEXP (home, 0)->used) continue;
1382 
1383 	    offset = AUTO_OFFSET(XEXP (home, 0));
1384 
1385 	    /* the operand related to the sweep variable */
1386 	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1387 	      {
1388 		if (sweep_offset <= offset
1389 		    && offset < sweep_offset + sweep_size)
1390 		  {
1391 		    offset += sweep_frame_offset - sweep_size - sweep_offset;
1392 		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1393 								  offset);
1394 
1395 		    /* mark */
1396 		    XEXP (home, 0)->used = 1;
1397 		  }
1398 		else if (sweep_offset <= offset
1399 			 && offset < sweep_frame_offset)
1400 		  {
1401 		    /* the rest of variables under sweep_frame_offset,
1402 		       shift the location */
1403 		    XEXP (XEXP (home, 0), 1)
1404 		      = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1405 
1406 		    /* mark */
1407 		    XEXP (home, 0)->used = 1;
1408 		  }
1409 	      }
1410 	  }
1411       }
1412 }
1413 
1414 
1415 static int has_virtual_reg;
1416 
1417 static void
1418 sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
1419      rtx insn;
1420      HOST_WIDE_INT sweep_offset, sweep_size;
1421 {
1422   for (; insn; insn = NEXT_INSN (insn))
1423     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1424 	|| GET_CODE (insn) == CALL_INSN)
1425       {
1426 	has_virtual_reg = FALSE;
1427 	sweep_string_in_operand (insn, &PATTERN (insn),
1428 				 sweep_offset, sweep_size);
1429 	sweep_string_in_operand (insn, &REG_NOTES (insn),
1430 				 sweep_offset, sweep_size);
1431       }
1432 }
1433 
1434 
1435 static void
1436 sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
1437      rtx insn, *loc;
1438      HOST_WIDE_INT sweep_offset, sweep_size;
1439 {
1440   register rtx x = *loc;
1441   register enum rtx_code code;
1442   int i, j, k = 0;
1443   HOST_WIDE_INT offset;
1444   const char *fmt;
1445 
1446   if (x == 0)
1447     return;
1448 
1449   code = GET_CODE (x);
1450 
1451   switch (code)
1452     {
1453     case CONST_INT:
1454     case CONST_DOUBLE:
1455     case CONST:
1456     case SYMBOL_REF:
1457     case CODE_LABEL:
1458     case PC:
1459     case CC0:
1460     case ASM_INPUT:
1461     case ADDR_VEC:
1462     case ADDR_DIFF_VEC:
1463     case RETURN:
1464     case ADDRESSOF:
1465       return;
1466 
1467     case REG:
1468       if (x == virtual_incoming_args_rtx
1469 	  || x == virtual_stack_vars_rtx
1470 	  || x == virtual_stack_dynamic_rtx
1471 	  || x == virtual_outgoing_args_rtx
1472 	  || x == virtual_cfa_rtx)
1473 	has_virtual_reg = TRUE;
1474       return;
1475 
1476     case SET:
1477       /*
1478 	skip setjmp setup insn and setjmp restore insn
1479 	Example:
1480 	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1481 	(set (virtual_stack_vars_rtx) (REG))
1482       */
1483       if (GET_CODE (XEXP (x, 0)) == MEM
1484 	  && XEXP (x, 1) == virtual_stack_vars_rtx)
1485 	return;
1486       if (XEXP (x, 0) == virtual_stack_vars_rtx
1487 	  && GET_CODE (XEXP (x, 1)) == REG)
1488 	return;
1489       break;
1490 
1491     case PLUS:
1492       /* Handle typical case of frame register plus constant.  */
1493       if (XEXP (x, 0) == virtual_stack_vars_rtx
1494 	  && CONSTANT_P (XEXP (x, 1)))
1495 	{
1496 	  if (x->used) goto single_use_of_virtual_reg;
1497 
1498 	  offset = AUTO_OFFSET(x);
1499 	  if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
1500 
1501 	  /* the operand related to the sweep variable */
1502 	  if (sweep_offset <= offset + k
1503 	      && offset + k < sweep_offset + sweep_size)
1504 	    {
1505 	      offset += sweep_frame_offset - sweep_size - sweep_offset;
1506 
1507 	      XEXP (x, 0) = virtual_stack_vars_rtx;
1508 	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1509 	      x->used = 1;
1510 	    }
1511 	  else if (sweep_offset <= offset + k
1512 		   && offset + k < sweep_frame_offset)
1513 	    {
1514 	      /* the rest of variables under sweep_frame_offset,
1515 		 shift the location */
1516 	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1517 	      x->used = 1;
1518 	    }
1519 
1520 	single_use_of_virtual_reg:
1521 	  if (has_virtual_reg) {
1522 	    /* excerpt from insn_invalid_p in recog.c */
1523 	    int icode = recog_memoized (insn);
1524 
1525 	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1526 	      {
1527 		rtx temp, seq;
1528 
1529 		start_sequence ();
1530 		temp = force_operand (x, NULL_RTX);
1531 		seq = get_insns ();
1532 		end_sequence ();
1533 
1534 		emit_insn_before (seq, insn);
1535 		if (! validate_change (insn, loc, temp, 0)
1536 		    && ! validate_replace_rtx (x, temp, insn))
1537 		  fatal_insn ("sweep_string_in_operand", insn);
1538 	      }
1539 	  }
1540 
1541 	  has_virtual_reg = TRUE;
1542 	  return;
1543 	}
1544 
1545 #ifdef FRAME_GROWS_DOWNWARD
1546       /*
1547 	Complain about the case of the frame register plus a constant held in a register.
1548 	*/
1549       else if (XEXP (x, 0) == virtual_stack_vars_rtx
1550 	       && GET_CODE (XEXP (x, 1)) == REG)
1551 	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1552 #endif
1553 
1554       /*
1555 	process further subtree:
1556 	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1557 	(const_int 5))
1558       */
1559       break;
1560 
1561     case CALL_PLACEHOLDER:
1562       for (i = 0; i < 3; i++)
1563 	{
1564 	  rtx seq = XEXP (x, i);
1565 	  if (seq)
1566 	    {
1567 	      push_to_sequence (seq);
1568 	      sweep_string_use_of_insns (XEXP (x, i),
1569 					 sweep_offset, sweep_size);
1570 	      XEXP (x, i) = get_insns ();
1571 	      end_sequence ();
1572 	    }
1573 	}
1574       break;
1575 
1576     default:
1577       break;
1578     }
1579 
1580   /* Scan all subexpressions.  */
1581   fmt = GET_RTX_FORMAT (code);
1582   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1583     if (*fmt == 'e')
1584       {
1585 	/*
1586 	  virtual_stack_vars_rtx without offset
1587 	  Example:
1588 	    (set (reg:SI xx) (reg:SI 78))
1589 	    (set (reg:SI xx) (MEM (reg:SI 78)))
1590 	*/
1591 	if (XEXP (x, i) == virtual_stack_vars_rtx)
1592 	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
1593 	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
1594       }
1595     else if (*fmt == 'E')
1596       for (j = 0; j < XVECLEN (x, i); j++)
1597 	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
1598 }
1599 
1600 
1601 /*
1602   Change an argument variable to the local variable addressed
1603   by the "new" variable.
1604 */
1605 static void
1606 change_arg_use_of_insns (insn, orig, new, size)
1607      rtx insn, orig, *new;
1608      HOST_WIDE_INT size;
1609 {
1610   for (; insn; insn = NEXT_INSN (insn))
1611     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1612 	|| GET_CODE (insn) == CALL_INSN)
1613       {
1614 	rtx seq;
1615 
1616 	start_sequence ();
1617 	change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
1618 
1619 	seq = get_insns ();
1620 	end_sequence ();
1621 	emit_insn_before (seq, insn);
1622 
1623 	/* A load_multiple insn from virtual_incoming_args_rtx has several
1624 	   load insns.  If every insn changes the load address of an arg
1625 	   to the frame region, those insns are moved before the PARALLEL insn
1626 	   and the PARALLEL insn is removed.  */
1627 	if (GET_CODE (PATTERN (insn)) == PARALLEL
1628 	    && XVECLEN (PATTERN (insn), 0) == 0)
1629 	  delete_insn (insn);
1630       }
1631 }
1632 
1633 
1634 
1635 static void
1636 change_arg_use_in_operand (insn, x, orig, new, size)
1637      rtx insn, x, orig, *new;
1638      HOST_WIDE_INT size;
1639 {
1640   register enum rtx_code code;
1641   int i, j;
1642   HOST_WIDE_INT offset;
1643   const char *fmt;
1644 
1645   if (x == 0)
1646     return;
1647 
1648   code = GET_CODE (x);
1649 
1650   switch (code)
1651     {
1652     case CONST_INT:
1653     case CONST_DOUBLE:
1654     case CONST:
1655     case SYMBOL_REF:
1656     case CODE_LABEL:
1657     case PC:
1658     case CC0:
1659     case ASM_INPUT:
1660     case ADDR_VEC:
1661     case ADDR_DIFF_VEC:
1662     case RETURN:
1663     case REG:
1664     case ADDRESSOF:
1665       return;
1666 
1667     case MEM:
1668       /* Handle special case of MEM (incoming_args)  */
1669       if (GET_CODE (orig) == MEM
1670 	  && XEXP (x, 0) == virtual_incoming_args_rtx)
1671 	{
1672 	  offset = 0;
1673 
1674 	  /* the operand related to the sweep variable */
1675 	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1676 	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1677 
1678 	    offset = AUTO_OFFSET(XEXP (*new, 0))
1679 	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1680 
1681 	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1682 	    XEXP (x, 0)->used = 1;
1683 
1684 	    return;
1685 	  }
1686 	}
1687       break;
1688 
1689     case PLUS:
1690       /* Handle special case of frame register plus constant.  */
1691       if (GET_CODE (orig) == MEM
1692 	  && XEXP (x, 0) == virtual_incoming_args_rtx
1693 	  && CONSTANT_P (XEXP (x, 1))
1694 	  && ! x->used)
1695 	{
1696 	  offset = AUTO_OFFSET(x);
1697 
1698 	  /* the operand related to the sweep variable */
1699 	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1700 	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1701 
1702 	    offset = AUTO_OFFSET(XEXP (*new, 0))
1703 	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1704 
1705 	    XEXP (x, 0) = virtual_stack_vars_rtx;
1706 	    XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1707 	    x->used = 1;
1708 
1709 	    return;
1710 	  }
1711 
1712 	  /*
1713 	    process further subtree:
1714 	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1715 	    (const_int 5))
1716 	  */
1717 	}
1718       break;
1719 
1720     case SET:
1721       /* Handle special case of "set (REG or MEM) (incoming_args)".
1722 	 It means that the address of the 1st argument is stored. */
1723       if (GET_CODE (orig) == MEM
1724 	  && XEXP (x, 1) == virtual_incoming_args_rtx)
1725 	{
1726 	  offset = 0;
1727 
1728 	  /* the operand related to the sweep variable */
1729 	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1730 	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1731 
1732 	    offset = AUTO_OFFSET(XEXP (*new, 0))
1733 	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1734 
1735 	    XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
1736 							offset), NULL_RTX);
1737 	    XEXP (x, 1)->used = 1;
1738 
1739 	    return;
1740 	  }
1741 	}
1742       break;
1743 
1744     case CALL_PLACEHOLDER:
1745       for (i = 0; i < 3; i++)
1746 	{
1747 	  rtx seq = XEXP (x, i);
1748 	  if (seq)
1749 	    {
1750 	      push_to_sequence (seq);
1751 	      change_arg_use_of_insns (XEXP (x, i), orig, new, size);
1752 	      XEXP (x, i) = get_insns ();
1753 	      end_sequence ();
1754 	    }
1755 	}
1756       break;
1757 
1758     case PARALLEL:
1759       for (j = 0; j < XVECLEN (x, 0); j++)
1760   	{
1761 	  change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
1762 	}
1763       if (recog_memoized (insn) < 0)
1764 	{
1765 	  for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
1766 	    {
1767 	      /* if the parallel insn contains an insn that uses
1768 		 virtual_incoming_args_rtx, that insn is removed from this PARALLEL insn.  */
1769 	      if (check_used_flag (XVECEXP (x, 0, j)))
1770 		{
1771 		  emit_insn (XVECEXP (x, 0, j));
1772 		  XVECEXP (x, 0, j) = NULL;
1773 		}
1774 	      else
1775 		XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
1776 	    }
1777 	  PUT_NUM_ELEM (XVEC (x, 0), i);
1778 	}
1779       return;
1780 
1781     default:
1782       break;
1783     }
1784 
1785   /* Scan all subexpressions.  */
1786   fmt = GET_RTX_FORMAT (code);
1787   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1788     if (*fmt == 'e')
1789       {
1790 	if (XEXP (x, i) == orig)
1791 	  {
1792 	    if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
1793 	    XEXP (x, i) = *new;
1794 	    continue;
1795 	  }
1796 	change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
1797       }
1798     else if (*fmt == 'E')
1799       for (j = 0; j < XVECLEN (x, i); j++)
1800 	{
1801 
1802 	  if (XVECEXP (x, i, j) == orig)
1803 	    {
1804 	      if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
1805 	      XVECEXP (x, i, j) = *new;
1806 	      continue;
1807 	    }
1808 	  change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
1809 	}
1810 }
1811 
1812 
1813 static void
1814 validate_insns_of_varrefs (insn)
1815      rtx insn;
1816 {
1817   rtx next;
1818 
1819   /* Initialize recognition, indicating that volatile is OK.  */
1820   init_recog ();
1821 
1822   for (; insn; insn = next)
1823     {
1824       next = NEXT_INSN (insn);
1825       if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1826 	  || GET_CODE (insn) == CALL_INSN)
1827 	{
1828 	  /* excerpt from insn_invalid_p in recog.c */
1829 	  int icode = recog_memoized (insn);
1830 
1831 	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1832 	    validate_operand_of_varrefs (insn, &PATTERN (insn));
1833 	}
1834     }
1835 
1836   init_recog_no_volatile ();
1837 }
1838 
1839 
1840 static void
1841 validate_operand_of_varrefs (insn, loc)
1842      rtx insn, *loc;
1843 {
1844   register enum rtx_code code;
1845   rtx x, temp, seq;
1846   int i, j;
1847   const char *fmt;
1848 
1849   x = *loc;
1850   if (x == 0)
1851     return;
1852 
1853   code = GET_CODE (x);
1854 
1855   switch (code)
1856     {
1857     case USE:
1858     case CONST_INT:
1859     case CONST_DOUBLE:
1860     case CONST:
1861     case SYMBOL_REF:
1862     case CODE_LABEL:
1863     case PC:
1864     case CC0:
1865     case ASM_INPUT:
1866     case ADDR_VEC:
1867     case ADDR_DIFF_VEC:
1868     case RETURN:
1869     case REG:
1870     case ADDRESSOF:
1871       return;
1872 
1873     case PLUS:
1874       /* validate insn of frame register plus constant.  */
1875       if (GET_CODE (x) == PLUS
1876 	  && XEXP (x, 0) == virtual_stack_vars_rtx
1877 	  && CONSTANT_P (XEXP (x, 1)))
1878 	{
1879 	  start_sequence ();
1880 
1881 	  { /* excerpt from expand_binop in optabs.c */
1882 	    optab binoptab = add_optab;
1883 	    enum machine_mode mode = GET_MODE (x);
1884 	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
1885 	    enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1886 	    rtx pat;
1887 	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
1888 	    temp = gen_reg_rtx (mode);
1889 
1890 	    /* Now, if the insn's predicate doesn't allow the offset operand,
1891 	       put it into a pseudo reg.  */
1892 
1893 	    if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
1894 		&& mode1 != VOIDmode)
1895 	      xop1 = copy_to_mode_reg (mode1, xop1);
1896 
1897 	    pat = GEN_FCN (icode) (temp, xop0, xop1);
1898 	    if (pat)
1899 	      emit_insn (pat);
1900 	    else
1901 	      abort (); /* there must be an add_optab handler.  */
1902 	  }
1903 	  seq = get_insns ();
1904 	  end_sequence ();
1905 
1906 	  emit_insn_before (seq, insn);
1907 	  if (! validate_change (insn, loc, temp, 0))
1908 	    abort ();
1909 	  return;
1910 	}
1911 	break;
1912 
1913 
1914     case CALL_PLACEHOLDER:
1915       for (i = 0; i < 3; i++)
1916 	{
1917 	  rtx seq = XEXP (x, i);
1918 	  if (seq)
1919 	    {
1920 	      push_to_sequence (seq);
1921 	      validate_insns_of_varrefs (XEXP (x, i));
1922 	      XEXP (x, i) = get_insns ();
1923 	      end_sequence ();
1924 	    }
1925 	}
1926       break;
1927 
1928     default:
1929       break;
1930     }
1931 
1932   /* Scan all subexpressions.  */
1933   fmt = GET_RTX_FORMAT (code);
1934   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1935     if (*fmt == 'e')
1936       validate_operand_of_varrefs (insn, &XEXP (x, i));
1937     else if (*fmt == 'E')
1938       for (j = 0; j < XVECLEN (x, i); j++)
1939 	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
1940 }
1941 
1942 
1943 
1944 /* Return the size that is not yet allocated for the stack frame.  It will be
1945    allocated when the homes of pseudo registers are modified from global_alloc.  */
1946 
1947 HOST_WIDE_INT
1948 get_frame_free_size ()
1949 {
1950   if (! flag_propolice_protection)
1951     return 0;
1952 
1953   return push_allocated_offset - push_frame_offset;
1954 }
1955 
1956 
1957 /*
1958   The following code is invoked after the instantiation of pseudo registers.
1959 
1960   Reorder local variables to place pseudo registers after buffers, to avoid
1961   the corruption of local variables that could be used to further corrupt
1962   arbitrary memory locations.
1963 */
1964 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1965 static void push_frame
1966 	PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
1967 static void push_frame_in_decls
1968 	PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1969 static void push_frame_in_args
1970 	PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1971 static void push_frame_of_insns
1972 	PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1973 static void push_frame_in_operand
1974 	PARAMS ((rtx insn, rtx orig,
1975 		 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1976 static void push_frame_of_reg_equiv_memory_loc
1977 	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1978 static void push_frame_of_reg_equiv_constant
1979 	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1980 static void reset_used_flags_for_push_frame PARAMS ((void));
1981 static int check_out_of_frame_access
1982 	PARAMS ((rtx insn, HOST_WIDE_INT boundary));
1983 static int check_out_of_frame_access_in_operand
1984 	PARAMS ((rtx, HOST_WIDE_INT boundary));
1985 #endif
1986 
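/* Allocate a stack slot of SIZE bytes in MODE for a pseudo register being
   spilled by purge_addressof or by global_alloc (reload).  When the stack
   protector is active and the frame grows upward, the slot is allocated at
   push_frame_offset (temporarily substituted for frame_offset), and
   push_frame makes more room whenever the reserved area is exhausted;
   otherwise this simply falls back to assign_stack_local.  */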
1987 rtx
1988 assign_stack_local_for_pseudo_reg (mode, size, align)
1989      enum machine_mode mode;
1990      HOST_WIDE_INT size;
1991      int align;
1992 {
1993 #if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
1994   return assign_stack_local (mode, size, align);
1995 #else
1996   tree blocks = DECL_INITIAL (current_function_decl);
1997   rtx new;
1998   HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
1999   int first_call_from_purge_addressof, first_call_from_global_alloc;
2000 
2001   if (! flag_propolice_protection
2002       || size == 0
2003       || ! blocks
2004       || current_function_is_inlinable
2005       || ! search_string_from_argsandvars (1)
2006       || current_function_contains_functions)
2007     return assign_stack_local (mode, size, align);
2008 
2009   first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2010   first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2011   saved_cse_not_expected = cse_not_expected;
2012 
2013   starting_frame = (STARTING_FRAME_OFFSET)?
2014     STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2015   units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2016 		       GET_MODE_SIZE (mode));
2017 
2018   if (first_call_from_purge_addressof)
2019     {
2020       push_frame_offset = push_allocated_offset;
2021       if (check_out_of_frame_access (get_insns (), starting_frame))
2022 	{
2023 	  /* If there is an access beyond the frame, push a dummy region to
2024 	     separate the addresses of instantiated variables.  */
2025 	  push_frame (GET_MODE_SIZE (DImode), 0);
2026 	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2027 	}
2028     }
2029 
2030   if (first_call_from_global_alloc)
2031     {
2032       push_frame_offset = push_allocated_offset = 0;
2033       if (check_out_of_frame_access (get_insns (), starting_frame))
2034 	{
2035 	  if (STARTING_FRAME_OFFSET)
2036 	    {
2037 	      /* If there is an access beyond the frame, push a dummy region
2038 		 to separate the addresses of instantiated variables.  */
2039 	      push_frame (GET_MODE_SIZE (DImode), 0);
2040 	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2041 	    }
2042 	  else
2043 	    push_allocated_offset = starting_frame;
2044 	}
2045     }
2046 
2047   saved_frame_offset = frame_offset;
2048   frame_offset = push_frame_offset;
2049 
2050   new = assign_stack_local (mode, size, align);
2051 
2052   push_frame_offset = frame_offset;
2053   frame_offset = saved_frame_offset;
2054 
2055   if (push_frame_offset > push_allocated_offset)
2056     {
2057       push_frame (units_per_push,
2058 		  push_allocated_offset + STARTING_FRAME_OFFSET);
2059 
2060       assign_stack_local (BLKmode, units_per_push, -1);
2061       push_allocated_offset += units_per_push;
2062     }
2063 
2064   /* At the second call from global_alloc, alpha pushes the frame and assigns
2065      a local variable to the top of the stack.  */
2066   if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2067     push_frame_offset = push_allocated_offset = 0;
2068 
2069   return new;
2070 #endif
2071 }
2072 
2073 
2074 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2075 /*
2076   Push frame information for instantiating a pseudo register at the top of
2077   the stack.  This is used only when the frame grows upward, that is, when
2078   FRAME_GROWS_DOWNWARD is not defined.
2079 
2080   It is called from the purge_addressof function and from global_alloc
2081   (or reload).
2082 */
2083 static void
2084 push_frame (var_size, boundary)
2085      HOST_WIDE_INT var_size, boundary;
2086 {
2087   reset_used_flags_for_push_frame();
2088 
2089   /* scan all variable declarations and fix each variable's offset address
2090      relative to the frame pointer */
2091   push_frame_in_decls (DECL_INITIAL (current_function_decl),
2092 		       var_size, boundary);
2093 
2094   /* scan all argument variables and fix their offset addresses relative to
2095      the frame pointer */
2096   push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2097 		      var_size, boundary);
2098 
2099   /* scan all operands of all insns and fix their offset addresses
2100      relative to the frame pointer */
2101   push_frame_of_insns (get_insns (), var_size, boundary);
2102 
2103   /* scan all reg_equiv_memory_loc and reg_equiv_constant */
2104   push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2105   push_frame_of_reg_equiv_constant (var_size, boundary);
2106 
2107   reset_used_flags_for_push_frame();
2108 }
2109 
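/* Clear the `used' flags that push_frame uses as marks: in the declarations
   of local variables, in all insns, and in the reg_equiv_memory_loc and
   reg_equiv_constant entries that are based on the frame pointer.  */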
2110 static void
2111 reset_used_flags_for_push_frame ()
2112 {
2113   int i;
2114   extern rtx *reg_equiv_memory_loc;
2115   extern rtx *reg_equiv_constant;
2116 
2117   /* Clear all the USED bits in operands of all insns and declarations of
2118      local vars */
2119   reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2120   reset_used_flags_for_insns (get_insns ());
2121 
2122 
2123   /* The following code is executed only if push_frame is called from the
2124      global_alloc (or reload) function.  */
2125   if (reg_equiv_memory_loc == 0) return;
2126 
2127   for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2128     if (reg_equiv_memory_loc[i])
2129       {
2130 	rtx x = reg_equiv_memory_loc[i];
2131 
2132 	if (GET_CODE (x) == MEM
2133 	    && GET_CODE (XEXP (x, 0)) == PLUS
2134 	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2135 	  {
2136 	    /* reset */
2137 	    XEXP (x, 0)->used = 0;
2138 	  }
2139       }
2140 
2141 
2142   if (reg_equiv_constant == 0) return;
2143 
2144   for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2145     if (reg_equiv_constant[i])
2146       {
2147 	rtx x = reg_equiv_constant[i];
2148 
2149 	if (GET_CODE (x) == PLUS
2150 	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
2151 	  {
2152 	    /* reset */
2153 	    x->used = 0;
2154 	  }
2155       }
2156 }
2157 
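/* Adjust the home address of every local variable declared in BLOCK (and its
   subblocks) that lives in the frame: frame pointer based addresses whose
   offset is at least BOUNDARY are moved up by PUSH_SIZE and marked as
   processed.  */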
2158 static void
2159 push_frame_in_decls (block, push_size, boundary)
2160      tree block;
2161      HOST_WIDE_INT push_size, boundary;
2162 {
2163   tree types;
2164   HOST_WIDE_INT offset;
2165   rtx home;
2166 
2167   while (block && TREE_CODE(block)==BLOCK)
2168     {
2169       types = BLOCK_VARS(block);
2170 
2171       while (types)
2172 	{
2173 	  /* skip the declaration that refers to an external variable and
2174 	     also skip a global variable */
2175 	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2176 	    {
2177 
2178 	      if (!DECL_RTL_SET_P (types)) goto next;
2179 	      home = DECL_RTL (types);
2180 
2181 	      /* skip a static local variable */
2182 	      if (GET_CODE (home) == MEM
2183 		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2184 		goto next;
2185 
2186 	      if (GET_CODE (home) == MEM
2187 		  && GET_CODE (XEXP (home, 0)) == REG)
2188 		{
2189 		  if (XEXP (home, 0) != frame_pointer_rtx
2190 		      || boundary != 0)
2191 		    goto next;
2192 
2193 		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2194 						  push_size);
2195 
2196 		  /* mark */
2197 		  XEXP (home, 0)->used = 1;
2198 		}
2199 
2200 	      if (GET_CODE (home) == MEM
2201 		  && GET_CODE (XEXP (home, 0)) == MEM)
2202 		{
2203 
2204 		  /* process a dynamically allocated array */
2205 		  home = XEXP (home, 0);
2206 		}
2207 
2208 	      if (GET_CODE (home) == MEM
2209 		  && GET_CODE (XEXP (home, 0)) == PLUS
2210 		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2211 		{
2212 		  offset = AUTO_OFFSET(XEXP (home, 0));
2213 
2214 		  if (! XEXP (home, 0)->used
2215 		      && offset >= boundary)
2216 		    {
2217 		      offset += push_size;
2218 		      XEXP (XEXP (home, 0), 1)
2219 			= gen_rtx_CONST_INT (VOIDmode, offset);
2220 
2221 		      /* mark */
2222 		      XEXP (home, 0)->used = 1;
2223 		    }
2224 		}
2225 
2226 	    }
2227 	next:
2228 	  types = TREE_CHAIN(types);
2229 	}
2230 
2231       push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2232       block = BLOCK_CHAIN (block);
2233     }
2234 }
2235 
2236 
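/* Adjust the incoming RTL of every parameter in PARMS that was passed in
   memory at a frame pointer based address: offsets of at least BOUNDARY are
   moved up by PUSH_SIZE and the address is marked as processed.  */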
2237 static void
2238 push_frame_in_args (parms, push_size, boundary)
2239      tree parms;
2240      HOST_WIDE_INT push_size, boundary;
2241 {
2242   rtx home;
2243   HOST_WIDE_INT offset;
2244 
2245   for (; parms; parms = TREE_CHAIN (parms))
2246     if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2247       {
2248 	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2249 	  {
2250 	    home = DECL_INCOMING_RTL (parms);
2251 	    offset = AUTO_OFFSET(XEXP (home, 0));
2252 
2253 	    if (XEXP (home, 0)->used || offset < boundary) continue;
2254 
2255 	    /* the operand related to the sweep variable */
2256 	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2257 	      {
2258 		if (XEXP (home, 0) == frame_pointer_rtx)
2259 		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2260 						  push_size);
2261 		else {
2262 		  offset += push_size;
2263 		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2264 								offset);
2265 		}
2266 
2267 		/* mark */
2268 		XEXP (home, 0)->used = 1;
2269 	      }
2270 	  }
2271       }
2272 }
2273 
2274 
2275 static int insn_pushed;
2276 static int *fp_equiv = 0;
2277 
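/* Walk the insn chain starting at INSN and let push_frame_in_operand adjust
   every frame pointer relative address at or beyond BOUNDARY by PUSH_SIZE.
   An insn made invalid by the adjustment is split with try_split; the
   contents of the first split insn are then copied back into the original
   insn so that the pointers recorded for global_alloc (insn_chain,
   reg_equiv_init) stay valid.  */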
2278 static void
2279 push_frame_of_insns (insn, push_size, boundary)
2280      rtx insn;
2281      HOST_WIDE_INT push_size, boundary;
2282 {
2283   /* init fp_equiv */
2284   fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2285 
2286   for (; insn; insn = NEXT_INSN (insn))
2287     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2288 	|| GET_CODE (insn) == CALL_INSN)
2289       {
2290 	rtx last;
2291 
2292 	insn_pushed = FALSE;
2293 
2294 	/* push frame in INSN operation */
2295 	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2296 
2297 	/* push frame in NOTE */
2298 	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2299 
2300 	/* push frame in CALL EXPR_LIST */
2301 	if (GET_CODE (insn) == CALL_INSN)
2302 	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
2303 				 push_size, boundary);
2304 
2305 	if (insn_pushed
2306 	    && (last = try_split (PATTERN (insn), insn, 1)) != insn)
2307 	  {
2308 	    rtx first = NEXT_INSN (insn);
2309 	    rtx trial = NEXT_INSN (first);
2310 	    rtx pattern = PATTERN (trial);
2311 	    rtx set;
2312 
2313 	    /* update the REG_EQUIV note on the first of the split insns */
2314 	    if ((set = single_set (insn))
2315 		&& find_reg_note (insn, REG_EQUIV, SET_SRC (set))
2316 		&& GET_CODE (PATTERN (first)) == SET)
2317 	      {
2318 		REG_NOTES (first)
2319 		  = gen_rtx_EXPR_LIST (REG_EQUIV,
2320 				       SET_SRC (PATTERN (first)),
2321 				       REG_NOTES (first));
2322 	      }
2323 
2324 	    /* copy the first of the split insns into the original insn and
2325 	       delete that first insn, because the original insn is pointed
2326 	       to by records (insn_chain, reg_equiv_init) that are used for
2327 	       global_alloc.  */
2328 	    if (cse_not_expected)
2329 	      {
2330 		add_insn_before (insn, first);
2331 
2332 		/* Copy the various flags, and other information.  */
2333 		memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
2334 		PATTERN (insn) = PATTERN (first);
2335 		INSN_CODE (insn) = INSN_CODE (first);
2336 		LOG_LINKS (insn) = LOG_LINKS (first);
2337 		REG_NOTES (insn) = REG_NOTES (first);
2338 
2339 		/* then remove the first of the split insns.  */
2340 		remove_insn (first);
2341 		INSN_DELETED_P (first) = 1;
2342 	      }
2343 
2344 	    if (GET_CODE (pattern) == SET
2345 		&& GET_CODE (XEXP (pattern, 0)) == REG
2346 		&& GET_CODE (XEXP (pattern, 1)) == PLUS
2347 		&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2348 		&& CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
2349 	      {
2350 		rtx offset = XEXP (XEXP (pattern, 1), 1);
2351 		fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2352 
2353 		delete_insn (trial);
2354 	      }
2355 
2356 	    insn = last;
2357 	  }
2358       }
2359 
2360   /* Clean up.  */
2361   free (fp_equiv);
2362 }
2363 
2364 
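/* Adjust the operand ORIG of INSN for a frame pushed by PUSH_SIZE bytes:
   frame pointer relative addresses whose offset is at least BOUNDARY are
   moved up by PUSH_SIZE, setjmp set-up and restore insns are left alone,
   and registers known to hold frame pointer plus constant (fp_equiv) are
   taken into account.  Each adjusted rtx is marked as used and insn_pushed
   is set.  */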
2365 static void
2366 push_frame_in_operand (insn, orig, push_size, boundary)
2367      rtx insn, orig;
2368      HOST_WIDE_INT push_size, boundary;
2369 {
2370   register rtx x = orig, prev_insn;
2371   register enum rtx_code code;
2372   int i, j;
2373   HOST_WIDE_INT offset;
2374   const char *fmt;
2375 
2376   if (x == 0)
2377     return;
2378 
2379   code = GET_CODE (x);
2380 
2381   switch (code)
2382     {
2383     case CONST_INT:
2384     case CONST_DOUBLE:
2385     case CONST:
2386     case SYMBOL_REF:
2387     case CODE_LABEL:
2388     case PC:
2389     case CC0:
2390     case ASM_INPUT:
2391     case ADDR_VEC:
2392     case ADDR_DIFF_VEC:
2393     case RETURN:
2394     case REG:
2395     case ADDRESSOF:
2396     case USE:
2397       return;
2398 
2399     case SET:
2400       /*
2401 	Skip the setjmp setup insn and the setjmp restore insn.
2402 	Alpha case:
2403 	(set (MEM (reg:SI xx)) (frame_pointer_rtx))
2404 	(set (frame_pointer_rtx) (REG))
2405       */
2406       if (GET_CODE (XEXP (x, 0)) == MEM
2407 	  && XEXP (x, 1) == frame_pointer_rtx)
2408 	return;
2409       if (XEXP (x, 0) == frame_pointer_rtx
2410 	  && GET_CODE (XEXP (x, 1)) == REG)
2411 	return;
2412 
2413       /*
2414 	PowerPC case: restores the setjmp address
2415 	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2416 	or
2417 	(set (reg) (plus frame_pointer_rtx const_int -n))
2418 	(set (frame_pointer_rtx) (reg))
2419       */
2420       if (GET_CODE (XEXP (x, 0)) == REG
2421 	  && GET_CODE (XEXP (x, 1)) == PLUS
2422 	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2423 	  && CONSTANT_P (XEXP (XEXP (x, 1), 1))
2424 	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2425 	{
2426 	  x = XEXP (x, 1);
2427 	  offset = AUTO_OFFSET(x);
2428 	  if (x->used || -offset < boundary)
2429 	    return;
2430 
2431 	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2432 	  x->used = 1; insn_pushed = TRUE;
2433 	  return;
2434 	}
2435 
2436       /* reset fp_equiv register */
2437       else if (GET_CODE (XEXP (x, 0)) == REG
2438 	  && fp_equiv[REGNO (XEXP (x, 0))])
2439 	fp_equiv[REGNO (XEXP (x, 0))] = 0;
2440 
2441       /* propagate fp_equiv register */
2442       else if (GET_CODE (XEXP (x, 0)) == REG
2443 	       && GET_CODE (XEXP (x, 1)) == REG
2444 	       && fp_equiv[REGNO (XEXP (x, 1))])
2445 	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2446 	    || (reg_renumber != 0 && reg_renumber[REGNO (XEXP (x, 0))] >= 0))
2447 	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2448       break;
2449 
2450     case MEM:
2451       if (XEXP (x, 0) == frame_pointer_rtx
2452 	  && boundary == 0)
2453 	{
2454 	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2455 	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2456 	  return;
2457 	}
2458       break;
2459 
2460     case PLUS:
2461       offset = AUTO_OFFSET(x);
2462       prev_insn = prev_nonnote_insn (insn);
2463 
2464       /* Handle special case of frame register plus constant.  */
2465       if (CONSTANT_P (XEXP (x, 1))
2466 	  && XEXP (x, 0) == frame_pointer_rtx)
2467 	{
2468 	  if (x->used || offset < boundary)
2469 	    return;
2470 
2471 	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2472 	  x->used = 1; insn_pushed = TRUE;
2473 
2474 	  return;
2475 	}
2476       /*
2477 	Handle alpha case:
2478 	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2479       */
2480       if (CONSTANT_P (XEXP (x, 1))
2481 	  && GET_CODE (XEXP (x, 0)) == SUBREG
2482 	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2483 	{
2484 	  if (x->used || offset < boundary)
2485 	    return;
2486 
2487 	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2488 	  x->used = 1; insn_pushed = TRUE;
2489 
2490 	  return;
2491 	}
2492       /*
2493 	Handle powerpc case:
2494 	 (set (reg x) (plus fp const))
2495 	 (set (.....) (... (plus (reg x) (const B))))
2496       */
2497       else if (CONSTANT_P (XEXP (x, 1))
2498 	       && GET_CODE (XEXP (x, 0)) == REG
2499 	       && fp_equiv[REGNO (XEXP (x, 0))])
2500 	{
2501 	  if (x->used) return;
2502 
2503 	  offset += fp_equiv[REGNO (XEXP (x, 0))];
2504 
2505 	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2506 	  x->used = 1; insn_pushed = TRUE;
2507 
2508 	  return;
2509 	}
2510       /*
2511 	Handle special case of frame register plus reg (constant).
2512 	 (set (reg x) (const B))
2513 	 (set (....) (...(plus fp (reg x))))
2514       */
2515       else if (XEXP (x, 0) == frame_pointer_rtx
2516 	       && GET_CODE (XEXP (x, 1)) == REG
2517 	       && prev_insn
2518 	       && PATTERN (prev_insn)
2519 	       && SET_DEST (PATTERN (prev_insn)) == XEXP (x, 1)
2520 	       && CONSTANT_P (SET_SRC (PATTERN (prev_insn))))
2521 	{
2522 	  HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (prev_insn)));
2523 
2524 	  if (x->used || offset < boundary)
2525 	    return;
2526 
2527 	  SET_SRC (PATTERN (prev_insn))
2528 	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2529 	  x->used = 1;
2530 	  XEXP (x, 1)->used = 1;
2531 
2532 	  return;
2533 	}
2534       /* Handle special case of frame register plus reg (used).  */
2535       else if (XEXP (x, 0) == frame_pointer_rtx
2536 	       && XEXP (x, 1)->used)
2537 	{
2538 	  x->used = 1;
2539 	  return;
2540 	}
2541       /*
2542 	process further subtree:
2543 	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2544 	(const_int 5))
2545       */
2546       break;
2547 
2548     case CALL_PLACEHOLDER:
2549       push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2550       push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2551       push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2552       break;
2553 
2554     default:
2555       break;
2556     }
2557 
2558   /* Scan all subexpressions.  */
2559   fmt = GET_RTX_FORMAT (code);
2560   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2561     if (*fmt == 'e')
2562       {
2563 	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2564 	  fatal_insn ("push_frame_in_operand", insn);
2565 	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2566       }
2567     else if (*fmt == 'E')
2568       for (j = 0; j < XVECLEN (x, i); j++)
2569 	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2570 }
2571 
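/* Adjust the frame pointer based addresses recorded in reg_equiv_memory_loc
   (set up by global_alloc or reload): offsets of at least BOUNDARY are moved
   up by PUSH_SIZE.  */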
2572 static void
2573 push_frame_of_reg_equiv_memory_loc (push_size, boundary)
2574      HOST_WIDE_INT push_size, boundary;
2575 {
2576   int i;
2577   extern rtx *reg_equiv_memory_loc;
2578 
2579   /* This function has an effect only when push_frame is called from the
2580      global_alloc (or reload) function.  */
2581   if (reg_equiv_memory_loc == 0) return;
2582 
2583   for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2584     if (reg_equiv_memory_loc[i])
2585       {
2586 	rtx x = reg_equiv_memory_loc[i];
2587 	int offset;
2588 
2589 	if (GET_CODE (x) == MEM
2590 	    && GET_CODE (XEXP (x, 0)) == PLUS
2591 	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2592 	  {
2593 	    offset = AUTO_OFFSET(XEXP (x, 0));
2594 
2595 	    if (! XEXP (x, 0)->used
2596 		&& offset >= boundary)
2597 	      {
2598 		offset += push_size;
2599 		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2600 
2601 		/* mark */
2602 		XEXP (x, 0)->used = 1;
2603 	      }
2604 	  }
2605 	else if (GET_CODE (x) == MEM
2606 		 && XEXP (x, 0) == frame_pointer_rtx
2607 		 && boundary == 0)
2608 	  {
2609 	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2610 	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2611 	  }
2612       }
2613 }
2614 
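/* Adjust the frame pointer based expressions recorded in reg_equiv_constant
   (set up by global_alloc or reload): offsets of at least BOUNDARY are moved
   up by PUSH_SIZE.  */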
2615 static void
2616 push_frame_of_reg_equiv_constant (push_size, boundary)
2617      HOST_WIDE_INT push_size, boundary;
2618 {
2619   int i;
2620   extern rtx *reg_equiv_constant;
2621 
2622   /* This function has an effect only when push_frame is called from the
2623      global_alloc (or reload) function.  */
2624   if (reg_equiv_constant == 0) return;
2625 
2626   for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2627     if (reg_equiv_constant[i])
2628       {
2629 	rtx x = reg_equiv_constant[i];
2630 	int offset;
2631 
2632 	if (GET_CODE (x) == PLUS
2633 	    && XEXP (x, 0) == frame_pointer_rtx)
2634 	  {
2635 	    offset = AUTO_OFFSET(x);
2636 
2637 	    if (! x->used
2638 		&& offset >= boundary)
2639 	      {
2640 		offset += push_size;
2641 		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2642 
2643 		/* mark */
2644 		x->used = 1;
2645 	      }
2646 	  }
2647 	else if (x == frame_pointer_rtx
2648 		 && boundary == 0)
2649 	  {
2650 	    reg_equiv_constant[i]
2651 	      = plus_constant (frame_pointer_rtx, push_size);
2652 	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
2653 	  }
2654       }
2655 }
2656 
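/* Return TRUE if any insn in the chain starting at INSN references the
   frame at an offset below BOUNDARY.  */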
2657 static int
2658 check_out_of_frame_access (insn, boundary)
2659      rtx insn;
2660      HOST_WIDE_INT boundary;
2661 {
2662   for (; insn; insn = NEXT_INSN (insn))
2663     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2664 	|| GET_CODE (insn) == CALL_INSN)
2665       {
2666 	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2667 	  return TRUE;
2668       }
2669   return FALSE;
2670 }
2671 
2672 
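/* Recursive worker for check_out_of_frame_access: return TRUE if the rtx
   ORIG contains a frame pointer reference whose offset is below BOUNDARY.  */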
2673 static int
2674 check_out_of_frame_access_in_operand (orig, boundary)
2675      rtx orig;
2676      HOST_WIDE_INT boundary;
2677 {
2678   register rtx x = orig;
2679   register enum rtx_code code;
2680   int i, j;
2681   const char *fmt;
2682 
2683   if (x == 0)
2684     return FALSE;
2685 
2686   code = GET_CODE (x);
2687 
2688   switch (code)
2689     {
2690     case CONST_INT:
2691     case CONST_DOUBLE:
2692     case CONST:
2693     case SYMBOL_REF:
2694     case CODE_LABEL:
2695     case PC:
2696     case CC0:
2697     case ASM_INPUT:
2698     case ADDR_VEC:
2699     case ADDR_DIFF_VEC:
2700     case RETURN:
2701     case REG:
2702     case ADDRESSOF:
2703       return FALSE;
2704 
2705     case MEM:
2706       if (XEXP (x, 0) == frame_pointer_rtx)
2707 	if (0 < boundary) return TRUE;
2708       break;
2709 
2710     case PLUS:
2711       /* Handle special case of frame register plus constant.  */
2712       if (CONSTANT_P (XEXP (x, 1))
2713 	  && XEXP (x, 0) == frame_pointer_rtx)
2714 	{
2715 	  if (0 <= AUTO_OFFSET(x)
2716 	      && AUTO_OFFSET(x) < boundary) return TRUE;
2717 	  return FALSE;
2718 	}
2719       /*
2720 	process further subtree:
2721 	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2722 	(const_int 5))
2723       */
2724       break;
2725 
2726     case CALL_PLACEHOLDER:
2727       if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
2728       if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
2729       if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
2730       break;
2731 
2732     default:
2733       break;
2734     }
2735 
2736   /* Scan all subexpressions.  */
2737   fmt = GET_RTX_FORMAT (code);
2738   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2739     if (*fmt == 'e')
2740       {
2741 	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2742 	  return TRUE;
2743       }
2744     else if (*fmt == 'E')
2745       for (j = 0; j < XVECLEN (x, i); j++)
2746 	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
2747 	  return TRUE;
2748 
2749   return FALSE;
2750 }
2751 #endif
2752