Change the compilers to alias -pthread to -lpthread.
[dragonfly.git] / gnu / usr.bin / cc34 / cc_prep / protector.c
blob6494ddca24f2c6caf022eee5387bbd39942dd66b
1 /* $DragonFly: src/gnu/usr.bin/cc34/cc_prep/protector.c,v 1.3 2007/01/20 03:16:33 corecode Exp $ */
2 /* RTL buffer overflow protection function for GNU C compiler
3 Copyright (C) 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains several memory arrangement functions to protect
23 the return address and the frame pointer of the stack
24 from a stack-smashing attack. It also
25 provides the function that protects pointer variables. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "machmode.h"
32 #include "real.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "regs.h"
36 #include "flags.h"
37 #include "insn-config.h"
38 #include "insn-flags.h"
39 #include "expr.h"
40 #include "output.h"
41 #include "recog.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "toplev.h"
46 #include "tm_p.h"
47 #include "conditions.h"
48 #include "insn-attr.h"
49 #include "optabs.h"
50 #include "reload.h"
51 #include "protector.h"
54 /* Round a value to the lowest integer less than it that is a multiple of
55 the required alignment. Avoid using division in case the value is
56 negative. Assume the alignment is a power of two. */
57 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
59 /* Similar, but round to the next highest integer that meets the
60 alignment. */
61 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
64 /* Nonzero if function being compiled can define string buffers that may be
65 damaged by the stack-smash attack. */
66 static int current_function_defines_vulnerable_string;
67 static int current_function_defines_short_string;
68 static int current_function_has_variable_string;
69 static int current_function_defines_vsized_array;
70 static int current_function_is_inlinable;
72 /* Nonzero if search_string_def finds the variable which contains an array. */
73 static int is_array;
75 /* Nonzero if search_string_def finds a byte-pointer variable,
76 which may be assigned to alloca output. */
77 static int may_have_alloca_pointer;
79 static rtx guard_area, _guard;
80 static rtx function_first_insn, prologue_insert_point;
82 /* Offset to end of sweeped area for gathering character arrays. */
83 static HOST_WIDE_INT sweep_frame_offset;
85 /* Offset to end of allocated area for instantiating pseudo registers. */
86 static HOST_WIDE_INT push_allocated_offset = 0;
88 /* Offset to end of assigned area for instantiating pseudo registers. */
89 static HOST_WIDE_INT push_frame_offset = 0;
91 /* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
92 which stage assign_stack_local_for_pseudo_reg is called from. */
93 static int saved_cse_not_expected = 0;
95 static int search_string_from_argsandvars (int);
96 static int search_string_from_local_vars (tree);
97 static int search_pointer_def (tree);
98 static int search_func_pointer (tree);
99 static int check_used_flag (rtx);
100 static void reset_used_flags_for_insns (rtx);
101 static void reset_used_flags_for_decls (tree);
102 static void reset_used_flags_of_plus (rtx);
103 static void rtl_prologue (rtx);
104 static void rtl_epilogue (rtx);
105 static void arrange_var_order (tree);
106 static void copy_args_for_protection (void);
107 static void sweep_string_variable (rtx, HOST_WIDE_INT);
108 static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
109 static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
110 static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
111 static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
112 static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
113 static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
114 static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
115 static void validate_insns_of_varrefs (rtx);
116 static void validate_operand_of_varrefs (rtx, rtx *);
118 /* Specify which size of buffers should be protected from a stack smashing
119 attack. Because small buffers are not used in situations which may
120 overflow buffer, the default size sets to the size of 64 bit register. */
121 #ifndef SUSPICIOUS_BUF_SIZE
122 #define SUSPICIOUS_BUF_SIZE 8
123 #endif
125 #define AUTO_BASEPTR(X) \
126 (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
127 #define AUTO_OFFSET(X) \
128 (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
129 #undef PARM_PASSED_IN_MEMORY
130 #define PARM_PASSED_IN_MEMORY(PARM) \
131 (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
132 #define TREE_VISITED(NODE) ((NODE)->common.unused_0)
134 /* Argument values for calling search_string_from_argsandvars. */
135 #define CALL_FROM_PREPARE_STACK_PROTECTION 0
136 #define CALL_FROM_PUSH_FRAME 1
139 /* Prepare several stack protection instruments for the current function
140 if the function has an array as a local variable, which may be vulnerable
141 from a stack smashing attack, and it is not inlinable.
143 The overall steps are as follows;
144 (1)search an array,
145 (2)insert guard_area on the stack,
146 (3)duplicate pointer arguments into local variables, and
147 (4)arrange the location of local variables. */
148 void
149 prepare_stack_protection (int inlinable)
151 tree blocks = DECL_INITIAL (current_function_decl);
152 current_function_is_inlinable = inlinable && !flag_no_inline;
153 push_frame_offset = push_allocated_offset = 0;
154 saved_cse_not_expected = 0;
156 /* Skip the protection if the function has no block
157 or it is an inline function. */
158 if (current_function_is_inlinable)
159 validate_insns_of_varrefs (get_insns ());
160 if (! blocks || current_function_is_inlinable)
161 return;
163 current_function_defines_vulnerable_string
164 = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
166 if (current_function_defines_vulnerable_string
167 || flag_stack_protection)
169 function_first_insn = get_insns ();
171 if (current_function_contains_functions)
173 if (warn_stack_protector)
174 warning ("not protecting function: it contains functions");
175 return;
178 /* Initialize recognition, indicating that volatile is OK. */
179 init_recog ();
181 sweep_frame_offset = 0;
183 #ifdef STACK_GROWS_DOWNWARD
184 /* frame_offset: offset to end of allocated area of stack frame.
185 It is defined in the function.c. */
187 /* the location must be before buffers. */
188 guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
189 PUT_MODE (guard_area, GUARD_m);
190 MEM_VOLATILE_P (guard_area) = 1;
192 #ifndef FRAME_GROWS_DOWNWARD
193 sweep_frame_offset = frame_offset;
194 #endif
196 /* For making room for guard value, scan all insns and fix the offset
197 address of the variable that is based on frame pointer.
198 Scan all declarations of variables and fix the offset address
199 of the variable that is based on the frame pointer. */
200 sweep_string_variable (guard_area, UNITS_PER_GUARD);
203 /* the location of guard area moves to the beginning of stack frame. */
204 if (AUTO_OFFSET(XEXP (guard_area, 0)))
205 XEXP (XEXP (guard_area, 0), 1)
206 = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
209 /* Insert prologue rtl instructions. */
210 rtl_prologue (function_first_insn);
212 if (! current_function_has_variable_string)
214 /* Generate argument saving instruction. */
215 copy_args_for_protection ();
217 #ifndef FRAME_GROWS_DOWNWARD
218 /* If frame grows upward, character arrays for protecting args
219 may copy to the top of the guard variable.
220 So sweep the guard variable again. */
221 sweep_frame_offset = CEIL_ROUND (frame_offset,
222 BIGGEST_ALIGNMENT / BITS_PER_UNIT);
223 sweep_string_variable (guard_area, UNITS_PER_GUARD);
224 #endif
226 /* Variable can't be protected from the overflow of variable length
227 buffer. But variable reordering is still effective against
228 the overflow of fixed size character arrays. */
229 else if (warn_stack_protector)
230 warning ("not protecting variables: it has a variable length buffer");
231 #endif
232 #ifndef FRAME_GROWS_DOWNWARD
233 if (STARTING_FRAME_OFFSET == 0)
235 /* This part may be only for alpha. */
236 push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
237 assign_stack_local (BLKmode, push_allocated_offset, -1);
238 sweep_frame_offset = frame_offset;
239 sweep_string_variable (const0_rtx, -push_allocated_offset);
240 sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
242 #endif
244 /* Arrange the order of local variables. */
245 arrange_var_order (blocks);
247 #ifdef STACK_GROWS_DOWNWARD
248 /* Insert epilogue rtl instructions. */
249 rtl_epilogue (get_last_insn ());
250 #endif
251 init_recog_no_volatile ();
253 else if (current_function_defines_short_string
254 && warn_stack_protector)
255 warning ("not protecting function: buffer is less than %d bytes long",
256 SUSPICIOUS_BUF_SIZE);
260 Search string from arguments and local variables.
261 caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
262 CALL_FROM_PUSH_FRAME (1)
264 static int
265 search_string_from_argsandvars (int caller)
267 tree blocks, parms;
268 int string_p;
270 /* Saves a latest search result as a cached infomation. */
271 static tree __latest_search_decl = 0;
272 static int __latest_search_result = FALSE;
274 if (__latest_search_decl == current_function_decl)
275 return __latest_search_result;
276 else
277 if (caller == CALL_FROM_PUSH_FRAME)
278 return FALSE;
280 __latest_search_decl = current_function_decl;
281 __latest_search_result = TRUE;
283 current_function_defines_short_string = FALSE;
284 current_function_has_variable_string = FALSE;
285 current_function_defines_vsized_array = FALSE;
286 may_have_alloca_pointer = FALSE;
288 /* Search a string variable from local variables. */
289 blocks = DECL_INITIAL (current_function_decl);
290 string_p = search_string_from_local_vars (blocks);
292 if (! current_function_defines_vsized_array
293 && may_have_alloca_pointer
294 && current_function_calls_alloca)
296 current_function_has_variable_string = TRUE;
297 return TRUE;
300 if (string_p)
301 return TRUE;
303 #ifdef STACK_GROWS_DOWNWARD
304 /* Search a string variable from arguments. */
305 parms = DECL_ARGUMENTS (current_function_decl);
307 for (; parms; parms = TREE_CHAIN (parms))
308 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
310 if (PARM_PASSED_IN_MEMORY (parms))
312 string_p = search_string_def (TREE_TYPE(parms));
313 if (string_p)
314 return TRUE;
317 #endif
319 __latest_search_result = FALSE;
320 return FALSE;
324 /* Search string from local variables in the specified scope. */
325 static int
326 search_string_from_local_vars (tree block)
328 tree types;
329 int found = FALSE;
331 while (block && TREE_CODE(block)==BLOCK)
333 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
335 /* Skip the declaration that refers an external variable. */
336 /* name: types.decl.name.identifier.id */
337 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
338 && TREE_CODE (types) == VAR_DECL
339 && ! DECL_ARTIFICIAL (types)
340 && DECL_RTL_SET_P (types)
341 && GET_CODE (DECL_RTL (types)) == MEM
343 && search_string_def (TREE_TYPE (types)))
345 rtx home = DECL_RTL (types);
347 if (GET_CODE (home) == MEM
348 && (GET_CODE (XEXP (home, 0)) == MEM
349 || (GET_CODE (XEXP (home, 0)) == REG
350 && XEXP (home, 0) != virtual_stack_vars_rtx
351 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
352 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
353 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
354 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
355 #endif
357 /* If the value is indirect by memory or by a register
358 that isn't the frame pointer then it means the object is
359 variable-sized and address through
360 that register or stack slot.
361 The protection has no way to hide pointer variables
362 behind the array, so all we can do is staying
363 the order of variables and arguments. */
365 current_function_has_variable_string = TRUE;
368 /* Found character array. */
369 found = TRUE;
373 if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
375 found = TRUE;
378 block = BLOCK_CHAIN (block);
381 return found;
385 /* Search a character array from the specified type tree. */
387 search_string_def (tree type)
389 tree tem;
391 if (! type)
392 return FALSE;
394 switch (TREE_CODE (type))
396 case ARRAY_TYPE:
397 /* Check if the array is a variable-sized array. */
398 if (TYPE_DOMAIN (type) == 0
399 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
400 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
401 current_function_defines_vsized_array = TRUE;
403 /* Check if the array is related to char array. */
404 if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
405 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
406 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
408 /* Check if the string is a variable string. */
409 if (TYPE_DOMAIN (type) == 0
410 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
411 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
412 return TRUE;
414 /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
415 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
416 && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
417 >= SUSPICIOUS_BUF_SIZE))
418 return TRUE;
420 current_function_defines_short_string = TRUE;
423 /* to protect every functions, sweep any arrays to the frame top. */
424 is_array = TRUE;
426 return search_string_def(TREE_TYPE(type));
428 case UNION_TYPE:
429 case QUAL_UNION_TYPE:
430 case RECORD_TYPE:
431 /* Check if each field has character arrays. */
432 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
434 /* Omit here local type decls until we know how to support them. */
435 if ((TREE_CODE (tem) == TYPE_DECL)
436 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
437 continue;
439 if (search_string_def(TREE_TYPE(tem)))
440 return TRUE;
442 break;
444 case POINTER_TYPE:
445 /* Check if pointer variables, which may be a pointer assigned
446 by alloca function call, are declared. */
447 if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
448 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
449 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
450 may_have_alloca_pointer = TRUE;
451 break;
453 case REFERENCE_TYPE:
454 case OFFSET_TYPE:
455 default:
456 break;
459 return FALSE;
463 /* Examine whether the input contains frame pointer addressing. */
465 contains_fp (rtx op)
467 enum rtx_code code;
468 rtx x;
469 int i, j;
470 const char *fmt;
472 x = op;
473 if (x == 0)
474 return FALSE;
476 code = GET_CODE (x);
478 switch (code)
480 case CONST_INT:
481 case CONST_DOUBLE:
482 case CONST:
483 case SYMBOL_REF:
484 case CODE_LABEL:
485 case REG:
486 case ADDRESSOF:
487 return FALSE;
489 case MEM:
490 /* This case is not generated at the stack protection.
491 see plus_constant_wide and simplify_plus_minus function. */
492 if (XEXP (x, 0) == virtual_stack_vars_rtx)
493 abort ();
495 case PLUS:
496 if (XEXP (x, 0) == virtual_stack_vars_rtx
497 && GET_CODE (XEXP (x, 1)) == CONST_INT)
498 return TRUE;
500 default:
501 break;
504 /* Scan all subexpressions. */
505 fmt = GET_RTX_FORMAT (code);
506 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
507 if (*fmt == 'e')
509 if (contains_fp (XEXP (x, i)))
510 return TRUE;
512 else if (*fmt == 'E')
513 for (j = 0; j < XVECLEN (x, i); j++)
514 if (contains_fp (XVECEXP (x, i, j)))
515 return TRUE;
517 return FALSE;
521 /* Examine whether the input contains any pointer. */
522 static int
523 search_pointer_def (tree type)
525 tree tem;
527 if (! type)
528 return FALSE;
530 switch (TREE_CODE (type))
532 case UNION_TYPE:
533 case QUAL_UNION_TYPE:
534 case RECORD_TYPE:
535 /* Check if each field has a pointer. */
536 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
538 if ((TREE_CODE (tem) == TYPE_DECL)
539 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
540 continue;
542 if (search_pointer_def (TREE_TYPE(tem)))
543 return TRUE;
545 break;
547 case ARRAY_TYPE:
548 return search_pointer_def (TREE_TYPE(type));
550 case POINTER_TYPE:
551 case REFERENCE_TYPE:
552 case OFFSET_TYPE:
553 if (TYPE_READONLY (TREE_TYPE (type)))
555 /* If this pointer contains function pointer,
556 it should be protected. */
557 return search_func_pointer (TREE_TYPE (type));
559 return TRUE;
561 default:
562 break;
565 return FALSE;
569 /* Examine whether the input contains function pointer. */
570 static int
571 search_func_pointer (tree type)
573 tree tem;
575 if (! type)
576 return FALSE;
578 switch (TREE_CODE (type))
580 case UNION_TYPE:
581 case QUAL_UNION_TYPE:
582 case RECORD_TYPE:
583 if (! TREE_VISITED (type))
585 /* Mark the type as having been visited already. */
586 TREE_VISITED (type) = 1;
588 /* Check if each field has a function pointer. */
589 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
591 if (TREE_CODE (tem) == FIELD_DECL
592 && search_func_pointer (TREE_TYPE(tem)))
594 TREE_VISITED (type) = 0;
595 return TRUE;
599 TREE_VISITED (type) = 0;
601 break;
603 case ARRAY_TYPE:
604 return search_func_pointer (TREE_TYPE(type));
606 case POINTER_TYPE:
607 case REFERENCE_TYPE:
608 case OFFSET_TYPE:
609 if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
610 return TRUE;
611 return search_func_pointer (TREE_TYPE(type));
613 default:
614 break;
617 return FALSE;
621 /* Check whether the specified rtx contains PLUS rtx with used flag. */
622 static int
623 check_used_flag (rtx x)
625 register int i, j;
626 register enum rtx_code code;
627 register const char *format_ptr;
629 if (x == 0)
630 return FALSE;
632 code = GET_CODE (x);
634 switch (code)
636 case REG:
637 case QUEUED:
638 case CONST_INT:
639 case CONST_DOUBLE:
640 case SYMBOL_REF:
641 case CODE_LABEL:
642 case PC:
643 case CC0:
644 return FALSE;
646 case PLUS:
647 if (x->used)
648 return TRUE;
650 default:
651 break;
654 format_ptr = GET_RTX_FORMAT (code);
655 for (i = 0; i < GET_RTX_LENGTH (code); i++)
657 switch (*format_ptr++)
659 case 'e':
660 if (check_used_flag (XEXP (x, i)))
661 return TRUE;
662 break;
664 case 'E':
665 for (j = 0; j < XVECLEN (x, i); j++)
666 if (check_used_flag (XVECEXP (x, i, j)))
667 return TRUE;
668 break;
672 return FALSE;
676 /* Reset used flag of every insns after the spcecified insn. */
677 static void
678 reset_used_flags_for_insns (rtx insn)
680 int i, j;
681 enum rtx_code code;
682 const char *format_ptr;
684 for (; insn; insn = NEXT_INSN (insn))
685 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
686 || GET_CODE (insn) == CALL_INSN)
688 code = GET_CODE (insn);
689 insn->used = 0;
690 format_ptr = GET_RTX_FORMAT (code);
692 for (i = 0; i < GET_RTX_LENGTH (code); i++)
694 switch (*format_ptr++)
696 case 'e':
697 reset_used_flags_of_plus (XEXP (insn, i));
698 break;
700 case 'E':
701 for (j = 0; j < XVECLEN (insn, i); j++)
702 reset_used_flags_of_plus (XVECEXP (insn, i, j));
703 break;
710 /* Reset used flag of every variables in the specified block. */
711 static void
712 reset_used_flags_for_decls (tree block)
714 tree types;
715 rtx home;
717 while (block && TREE_CODE(block)==BLOCK)
719 types = BLOCK_VARS(block);
721 for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
723 /* Skip the declaration that refers an external variable and
724 also skip an global variable. */
725 if (! DECL_EXTERNAL (types))
727 if (! DECL_RTL_SET_P (types))
728 continue;
729 home = DECL_RTL (types);
731 if (GET_CODE (home) == MEM
732 && GET_CODE (XEXP (home, 0)) == PLUS
733 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
735 XEXP (home, 0)->used = 0;
740 reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
742 block = BLOCK_CHAIN (block);
747 /* Reset the used flag of every PLUS rtx derived from the specified rtx. */
748 static void
749 reset_used_flags_of_plus (rtx x)
751 int i, j;
752 enum rtx_code code;
753 const char *format_ptr;
755 if (x == 0)
756 return;
758 code = GET_CODE (x);
760 switch (code)
762 /* These types may be freely shared so we needn't do any resetting
763 for them. */
764 case REG:
765 case QUEUED:
766 case CONST_INT:
767 case CONST_DOUBLE:
768 case SYMBOL_REF:
769 case CODE_LABEL:
770 case PC:
771 case CC0:
772 return;
774 case INSN:
775 case JUMP_INSN:
776 case CALL_INSN:
777 case NOTE:
778 case LABEL_REF:
779 case BARRIER:
780 /* The chain of insns is not being copied. */
781 return;
783 case PLUS:
784 x->used = 0;
785 break;
787 case CALL_PLACEHOLDER:
788 reset_used_flags_for_insns (XEXP (x, 0));
789 reset_used_flags_for_insns (XEXP (x, 1));
790 reset_used_flags_for_insns (XEXP (x, 2));
791 break;
793 default:
794 break;
797 format_ptr = GET_RTX_FORMAT (code);
798 for (i = 0; i < GET_RTX_LENGTH (code); i++)
800 switch (*format_ptr++)
802 case 'e':
803 reset_used_flags_of_plus (XEXP (x, i));
804 break;
806 case 'E':
807 for (j = 0; j < XVECLEN (x, i); j++)
808 reset_used_flags_of_plus (XVECEXP (x, i, j));
809 break;
815 /* Generate the prologue insns of the protector into the specified insn. */
816 static void
817 rtl_prologue (rtx insn)
819 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
820 #undef HAS_INIT_SECTION
821 #define HAS_INIT_SECTION
822 #endif
824 rtx _val;
826 for (; insn; insn = NEXT_INSN (insn))
827 if (GET_CODE (insn) == NOTE
828 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
829 break;
831 #if !defined (HAS_INIT_SECTION)
832 /* If this function is `main', skip a call to `__main'
833 to run guard instruments after global initializers, etc. */
834 if (DECL_NAME (current_function_decl)
835 && MAIN_NAME_P (DECL_NAME (current_function_decl))
836 && DECL_CONTEXT (current_function_decl) == NULL_TREE)
838 rtx fbinsn = insn;
839 for (; insn; insn = NEXT_INSN (insn))
840 if (GET_CODE (insn) == NOTE
841 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
842 break;
843 if (insn == 0)
844 insn = fbinsn;
846 #endif
848 /* Mark the next insn of FUNCTION_BEG insn. */
849 prologue_insert_point = NEXT_INSN (insn);
851 start_sequence ();
853 _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
854 emit_move_insn ( guard_area, _guard);
856 _val = get_insns ();
857 end_sequence ();
859 emit_insn_before (_val, prologue_insert_point);
863 /* Generate the epilogue insns of the protector into the specified insn. */
864 static void
865 rtl_epilogue (rtx insn)
867 rtx if_false_label;
868 rtx _val;
869 rtx funcname;
870 tree funcstr;
871 int flag_have_return = FALSE;
873 start_sequence ();
875 #ifdef HAVE_return
876 if (HAVE_return)
878 rtx insn;
879 return_label = gen_label_rtx ();
881 for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
882 if (GET_CODE (insn) == JUMP_INSN
883 && GET_CODE (PATTERN (insn)) == RETURN
884 && GET_MODE (PATTERN (insn)) == VOIDmode)
886 rtx pat = gen_rtx_SET (VOIDmode,
887 pc_rtx,
888 gen_rtx_LABEL_REF (VOIDmode,
889 return_label));
890 PATTERN (insn) = pat;
891 flag_have_return = TRUE;
895 emit_label (return_label);
897 #endif
899 /* if (guard_area != _guard) */
900 compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
902 if_false_label = gen_label_rtx (); /* { */
903 emit_jump_insn ( gen_beq(if_false_label));
905 /* generate string for the current function name */
906 funcstr = build_string (strlen(current_function_name ())+1,
907 current_function_name ());
908 TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
909 funcname = output_constant_def (funcstr, 1);
911 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
912 0, VOIDmode, 2,
913 XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
915 /* generate RTL to return from the current function */
917 emit_barrier (); /* } */
918 emit_label (if_false_label);
920 /* generate RTL to return from the current function */
921 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
922 use_return_register ();
924 #ifdef HAVE_return
925 if (HAVE_return && flag_have_return)
927 emit_jump_insn (gen_return ());
928 emit_barrier ();
930 #endif
932 _val = get_insns ();
933 end_sequence ();
935 emit_insn_after (_val, insn);
939 /* For every variable which type is character array, moves its location
940 in the stack frame to the sweep_frame_offset position. */
941 static void
942 arrange_var_order (tree block)
944 tree types;
945 HOST_WIDE_INT offset;
947 while (block && TREE_CODE(block)==BLOCK)
949 /* arrange the location of character arrays in depth first. */
950 arrange_var_order (BLOCK_SUBBLOCKS (block));
952 for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
954 /* Skip the declaration that refers an external variable. */
955 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
956 && TREE_CODE (types) == VAR_DECL
957 && ! DECL_ARTIFICIAL (types)
958 && DECL_RTL_SET_P (types)
959 && GET_CODE (DECL_RTL (types)) == MEM
960 && GET_MODE (DECL_RTL (types)) == BLKmode
962 && (is_array=0,
963 search_string_def (TREE_TYPE (types))
964 || (! current_function_defines_vulnerable_string && is_array)))
966 rtx home = DECL_RTL (types);
968 if (!(GET_CODE (home) == MEM
969 && (GET_CODE (XEXP (home, 0)) == MEM
970 || (GET_CODE (XEXP (home, 0)) == REG
971 && XEXP (home, 0) != virtual_stack_vars_rtx
972 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
973 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
974 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
975 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
976 #endif
977 ))))
979 /* Found a string variable. */
980 HOST_WIDE_INT var_size =
981 ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
982 / BITS_PER_UNIT);
984 /* Confirmed it is BLKmode. */
985 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
986 var_size = CEIL_ROUND (var_size, alignment);
988 /* Skip the variable if it is top of the region
989 specified by sweep_frame_offset. */
990 offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
991 if (offset == sweep_frame_offset - var_size)
992 sweep_frame_offset -= var_size;
994 else if (offset < sweep_frame_offset - var_size)
995 sweep_string_variable (DECL_RTL (types), var_size);
1000 block = BLOCK_CHAIN (block);
1005 /* To protect every pointer argument and move character arrays in the argument,
1006 Copy those variables to the top of the stack frame and move the location of
1007 character arrays to the posion of sweep_frame_offset. */
1008 static void
1009 copy_args_for_protection (void)
1011 tree parms = DECL_ARGUMENTS (current_function_decl);
1012 rtx temp_rtx;
1014 parms = DECL_ARGUMENTS (current_function_decl);
1015 for (; parms; parms = TREE_CHAIN (parms))
1016 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1018 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1020 int string_p;
1021 rtx seq;
1023 string_p = search_string_def (TREE_TYPE(parms));
1025 /* Check if it is a candidate to move. */
1026 if (string_p || search_pointer_def (TREE_TYPE (parms)))
1028 int arg_size
1029 = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1030 / BITS_PER_UNIT);
1031 tree passed_type = DECL_ARG_TYPE (parms);
1032 tree nominal_type = TREE_TYPE (parms);
1034 start_sequence ();
1036 if (GET_CODE (DECL_RTL (parms)) == REG)
1038 rtx safe = 0;
1040 change_arg_use_of_insns (prologue_insert_point,
1041 DECL_RTL (parms), &safe, 0);
1042 if (safe)
1044 /* Generate codes for copying the content. */
1045 rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1047 /* Avoid register elimination in gcse.c. */
1048 PATTERN (movinsn)->volatil = 1;
1050 /* Save debugger info. */
1051 SET_DECL_RTL (parms, safe);
1054 else if (GET_CODE (DECL_RTL (parms)) == MEM
1055 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1057 rtx movinsn;
1058 rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1060 /* Generate codes for copying the content. */
1061 movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1062 /* Avoid register elimination in gcse.c. */
1063 PATTERN (movinsn)->volatil = 1;
1065 /* Change the addressof information to the newly
1066 allocated pseudo register. */
1067 emit_move_insn (DECL_RTL (parms), safe);
1069 /* Save debugger info. */
1070 SET_DECL_RTL (parms, safe);
1073 /* See if the frontend wants to pass this by invisible
1074 reference. */
1075 else if (passed_type != nominal_type
1076 && POINTER_TYPE_P (passed_type)
1077 && TREE_TYPE (passed_type) == nominal_type)
1079 rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1081 change_arg_use_of_insns (prologue_insert_point,
1082 orig, &safe, 0);
1083 if (safe)
1085 /* Generate codes for copying the content. */
1086 rtx movinsn = emit_move_insn (safe, orig);
1088 /* Avoid register elimination in gcse.c */
1089 PATTERN (movinsn)->volatil = 1;
1091 /* Save debugger info. */
1092 SET_DECL_RTL (parms, safe);
1096 else
1098 /* Declare temporary local variable for parms. */
1099 temp_rtx
1100 = assign_stack_local (DECL_MODE (parms), arg_size,
1101 DECL_MODE (parms) == BLKmode ?
1102 -1 : 0);
1104 MEM_IN_STRUCT_P (temp_rtx)
1105 = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1106 set_mem_alias_set (temp_rtx, get_alias_set (parms));
1108 /* Generate codes for copying the content. */
1109 store_expr (parms, temp_rtx, 0);
1111 /* Change the reference for each instructions. */
1112 move_arg_location (prologue_insert_point, DECL_RTL (parms),
1113 temp_rtx, arg_size);
1115 /* Change the location of parms variable. */
1116 SET_DECL_RTL (parms, temp_rtx);
1119 seq = get_insns ();
1120 end_sequence ();
1121 emit_insn_before (seq, prologue_insert_point);
1123 #ifdef FRAME_GROWS_DOWNWARD
1124 /* Process the string argument. */
1125 if (string_p && DECL_MODE (parms) == BLKmode)
1127 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1128 arg_size = CEIL_ROUND (arg_size, alignment);
1130 /* Change the reference for each instructions. */
1131 sweep_string_variable (DECL_RTL (parms), arg_size);
1133 #endif
1140 /* Sweep a string variable to the positon of sweep_frame_offset in the
1141 stack frame, that is a last position of string variables. */
1142 static void
1143 sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
1145 HOST_WIDE_INT sweep_offset;
1147 switch (GET_CODE (sweep_var))
1149 case REG:
1150 /* Kevin F. Quinn May 2006
1151 * arrange_var_order can clearly call this function with
1152 * the code REG in sweep_var, so we need to handle the case
1153 * at least. This does nothing, as it appears there's nothing
1154 * to sweep.
1155 * Reached if a class variable is passed by value.
1157 if (warn_stack_protector)
1159 warning ("sweep string type REG (%d) ignored - rtl:\n",
1160 GET_CODE(sweep_var));
1161 print_rtl(stderr,sweep_var);
1162 fputs("\n",stderr);
1164 return;
1165 break;
1166 case MEM:
1167 if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1168 && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1169 return;
1170 sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1171 break;
1172 case CONST_INT:
1173 sweep_offset = INTVAL (sweep_var);
1174 break;
1175 default:
1176 if (warn_stack_protector)
1178 warning ("sweep string type %d unexpected - rtl:\n",
1179 GET_CODE(sweep_var));
1180 print_rtl(stderr,sweep_var);
1181 fputs("\n",stderr);
1183 abort ();
1186 /* Scan all declarations of variables and fix the offset address of
1187 the variable based on the frame pointer. */
1188 sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1189 sweep_offset, var_size);
1191 /* Scan all argument variable and fix the offset address based on
1192 the frame pointer. */
1193 sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1194 sweep_offset, var_size);
1196 /* For making room for sweep variable, scan all insns and
1197 fix the offset address of the variable that is based on frame pointer. */
1198 sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1201 /* Clear all the USED bits in operands of all insns and declarations of
1202 local variables. */
1203 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1204 reset_used_flags_for_insns (function_first_insn);
1206 sweep_frame_offset -= var_size;
1211 /* Move an argument to the local variable addressed by frame_offset. */
1212 static void
1213 move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
1215 /* For making room for sweep variable, scan all insns and
1216 fix the offset address of the variable that is based on frame pointer. */
1217 change_arg_use_of_insns (insn, orig, &new, var_size);
1220 /* Clear all the USED bits in operands of all insns and declarations
1221 of local variables. */
1222 reset_used_flags_for_insns (insn);
1226 /* Sweep character arrays declared as local variable. */
1227 static void
1228 sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
1229 HOST_WIDE_INT sweep_size)
1231 tree types;
1232 HOST_WIDE_INT offset;
1233 rtx home;
1235 while (block && TREE_CODE(block)==BLOCK)
1237 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1239 /* Skip the declaration that refers an external variable and
1240 also skip an global variable. */
1241 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1243 if (! DECL_RTL_SET_P (types))
1244 continue;
1246 home = DECL_RTL (types);
1248 /* Process for static local variable. */
1249 if (GET_CODE (home) == MEM
1250 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1251 continue;
1253 if (GET_CODE (home) == MEM
1254 && XEXP (home, 0) == virtual_stack_vars_rtx)
1256 offset = 0;
1258 /* the operand related to the sweep variable. */
1259 if (sweep_offset <= offset
1260 && offset < sweep_offset + sweep_size)
1262 offset = sweep_frame_offset - sweep_size - sweep_offset;
1264 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1265 offset);
1266 XEXP (home, 0)->used = 1;
1268 else if (sweep_offset <= offset
1269 && offset < sweep_frame_offset)
1271 /* the rest of variables under sweep_frame_offset,
1272 shift the location. */
1273 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1274 -sweep_size);
1275 XEXP (home, 0)->used = 1;
1279 if (GET_CODE (home) == MEM
1280 && GET_CODE (XEXP (home, 0)) == MEM)
1282 /* Process for dynamically allocated array. */
1283 home = XEXP (home, 0);
1286 if (GET_CODE (home) == MEM
1287 && GET_CODE (XEXP (home, 0)) == PLUS
1288 && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1289 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1291 if (! XEXP (home, 0)->used)
1293 offset = AUTO_OFFSET(XEXP (home, 0));
1295 /* the operand related to the sweep variable. */
1296 if (sweep_offset <= offset
1297 && offset < sweep_offset + sweep_size)
1300 offset
1301 += sweep_frame_offset - sweep_size - sweep_offset;
1302 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1303 offset);
1305 /* mark */
1306 XEXP (home, 0)->used = 1;
1308 else if (sweep_offset <= offset
1309 && offset < sweep_frame_offset)
1311 /* the rest of variables under sweep_frame_offset,
1312 so shift the location. */
1314 XEXP (XEXP (home, 0), 1)
1315 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1317 /* mark */
1318 XEXP (home, 0)->used = 1;
1325 sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
1326 sweep_offset, sweep_size);
1328 block = BLOCK_CHAIN (block);
1333 /* Sweep character arrays declared as argument. */
1334 static void
1335 sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
1336 HOST_WIDE_INT sweep_size)
1338 rtx home;
1339 HOST_WIDE_INT offset;
1341 for (; parms; parms = TREE_CHAIN (parms))
1342 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1344 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1346 home = DECL_INCOMING_RTL (parms);
1348 if (XEXP (home, 0)->used)
1349 continue;
1351 offset = AUTO_OFFSET(XEXP (home, 0));
1353 /* the operand related to the sweep variable. */
1354 if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1356 if (sweep_offset <= offset
1357 && offset < sweep_offset + sweep_size)
1359 offset += sweep_frame_offset - sweep_size - sweep_offset;
1360 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1361 offset);
1363 /* mark */
1364 XEXP (home, 0)->used = 1;
1366 else if (sweep_offset <= offset
1367 && offset < sweep_frame_offset)
1369 /* the rest of variables under sweep_frame_offset,
1370 shift the location. */
1371 XEXP (XEXP (home, 0), 1)
1372 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1374 /* mark */
1375 XEXP (home, 0)->used = 1;
1383 /* Set to 1 when the instruction contains virtual registers. */
1384 static int has_virtual_reg;
1386 /* Sweep the specified character array for every insns. The array starts from
1387 the sweep_offset and its size is sweep_size. */
1388 static void
1389 sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
1390 HOST_WIDE_INT sweep_size)
1392 for (; insn; insn = NEXT_INSN (insn))
1393 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1394 || GET_CODE (insn) == CALL_INSN)
1396 has_virtual_reg = FALSE;
1397 sweep_string_in_operand (insn, &PATTERN (insn),
1398 sweep_offset, sweep_size);
1399 sweep_string_in_operand (insn, &REG_NOTES (insn),
1400 sweep_offset, sweep_size);
1405 /* Sweep the specified character array, which starts from the sweep_offset and
1406 its size is sweep_size.
1408 When a pointer is given,
1409 if it points the address higher than the array, it stays.
1410 if it points the address inside the array, it changes to point inside
1411 the sweeped array.
1412 if it points the address lower than the array, it shifts higher address by
1413 the sweep_size. */
1414 static void
1415 sweep_string_in_operand (rtx insn, rtx *loc,
1416 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
1418 rtx x = *loc;
1419 enum rtx_code code;
1420 int i, j, k = 0;
1421 HOST_WIDE_INT offset;
1422 const char *fmt;
1424 if (x == 0)
1425 return;
1427 code = GET_CODE (x);
1429 switch (code)
1431 case CONST_INT:
1432 case CONST_DOUBLE:
1433 case CONST:
1434 case SYMBOL_REF:
1435 case CODE_LABEL:
1436 case PC:
1437 case CC0:
1438 case ASM_INPUT:
1439 case ADDR_VEC:
1440 case ADDR_DIFF_VEC:
1441 case RETURN:
1442 case ADDRESSOF:
1443 return;
1445 case REG:
1446 if (x == virtual_incoming_args_rtx
1447 || x == virtual_stack_vars_rtx
1448 || x == virtual_stack_dynamic_rtx
1449 || x == virtual_outgoing_args_rtx
1450 || x == virtual_cfa_rtx)
1451 has_virtual_reg = TRUE;
1452 return;
1454 case SET:
1456 skip setjmp setup insn and setjmp restore insn
1457 Example:
1458 (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1459 (set (virtual_stack_vars_rtx) (REG))
1461 if (GET_CODE (XEXP (x, 0)) == MEM
1462 && XEXP (x, 1) == virtual_stack_vars_rtx)
1463 return;
1464 if (XEXP (x, 0) == virtual_stack_vars_rtx
1465 && GET_CODE (XEXP (x, 1)) == REG)
1466 return;
1467 break;
1469 case PLUS:
1470 /* Handle typical case of frame register plus constant. */
1471 if (XEXP (x, 0) == virtual_stack_vars_rtx
1472 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1474 if (x->used)
1475 goto single_use_of_virtual_reg;
1477 offset = AUTO_OFFSET(x);
1479 /* When arguments grow downward, the virtual incoming
1480 args pointer points to the top of the argument block,
1481 so block is identified by the pointer - 1.
1482 The flag is set at the copy_rtx_and_substitute in integrate.c */
1483 if (RTX_INTEGRATED_P (x))
1484 k = -1;
1486 /* the operand related to the sweep variable. */
1487 if (sweep_offset <= offset + k
1488 && offset + k < sweep_offset + sweep_size)
1490 offset += sweep_frame_offset - sweep_size - sweep_offset;
1492 XEXP (x, 0) = virtual_stack_vars_rtx;
1493 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1494 x->used = 1;
1496 else if (sweep_offset <= offset + k
1497 && offset + k < sweep_frame_offset)
1499 /* the rest of variables under sweep_frame_offset,
1500 shift the location. */
1501 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1502 x->used = 1;
1505 single_use_of_virtual_reg:
1506 if (has_virtual_reg) {
1507 /* excerpt from insn_invalid_p in recog.c */
1508 int icode = recog_memoized (insn);
1510 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1512 rtx temp, seq;
1514 start_sequence ();
1515 temp = force_operand (x, NULL_RTX);
1516 seq = get_insns ();
1517 end_sequence ();
1519 emit_insn_before (seq, insn);
1520 if (! validate_change (insn, loc, temp, 0)
1521 && !validate_replace_rtx (x, temp, insn))
1522 fatal_insn ("sweep_string_in_operand", insn);
1526 has_virtual_reg = TRUE;
1527 return;
1530 #ifdef FRAME_GROWS_DOWNWARD
1531 /* Alert the case of frame register plus constant given by reg. */
1532 else if (XEXP (x, 0) == virtual_stack_vars_rtx
1533 && GET_CODE (XEXP (x, 1)) == REG)
1534 fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1535 #endif
1538 process further subtree:
1539 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1540 (const_int 5))
1542 break;
1544 case CALL_PLACEHOLDER:
1545 for (i = 0; i < 3; i++)
1547 rtx seq = XEXP (x, i);
1548 if (seq)
1550 push_to_sequence (seq);
1551 sweep_string_use_of_insns (XEXP (x, i),
1552 sweep_offset, sweep_size);
1553 XEXP (x, i) = get_insns ();
1554 end_sequence ();
1557 break;
1559 default:
1560 break;
1563 /* Scan all subexpressions. */
1564 fmt = GET_RTX_FORMAT (code);
1565 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1566 if (*fmt == 'e')
1569 virtual_stack_vars_rtx without offset
1570 Example:
1571 (set (reg:SI xx) (reg:SI 78))
1572 (set (reg:SI xx) (MEM (reg:SI 78)))
1574 if (XEXP (x, i) == virtual_stack_vars_rtx)
1575 fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
1576 sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
1578 else if (*fmt == 'E')
1579 for (j = 0; j < XVECLEN (x, i); j++)
1580 sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
1584 /* Change the use of an argument to the use of the duplicated variable for
1585 every insns, The variable is addressed by new rtx. */
1586 static void
1587 change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
1589 for (; insn; insn = NEXT_INSN (insn))
1590 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1591 || GET_CODE (insn) == CALL_INSN)
1593 rtx seq;
1595 start_sequence ();
1596 change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
1598 seq = get_insns ();
1599 end_sequence ();
1600 emit_insn_before (seq, insn);
1602 /* load_multiple insn from virtual_incoming_args_rtx have several
1603 load insns. If every insn change the load address of arg
1604 to frame region, those insns are moved before the PARALLEL insn
1605 and remove the PARALLEL insn. */
1606 if (GET_CODE (PATTERN (insn)) == PARALLEL
1607 && XVECLEN (PATTERN (insn), 0) == 0)
1608 delete_insn (insn);
1613 /* Change the use of an argument to the use of the duplicated variable for
1614 every rtx derived from the x. */
1615 static void
1616 change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
1618 enum rtx_code code;
1619 int i, j;
1620 HOST_WIDE_INT offset;
1621 const char *fmt;
1623 if (x == 0)
1624 return;
1626 code = GET_CODE (x);
1628 switch (code)
1630 case CONST_INT:
1631 case CONST_DOUBLE:
1632 case CONST:
1633 case SYMBOL_REF:
1634 case CODE_LABEL:
1635 case PC:
1636 case CC0:
1637 case ASM_INPUT:
1638 case ADDR_VEC:
1639 case ADDR_DIFF_VEC:
1640 case RETURN:
1641 case REG:
1642 case ADDRESSOF:
1643 return;
1645 case MEM:
1646 /* Handle special case of MEM (incoming_args). */
1647 if (GET_CODE (orig) == MEM
1648 && XEXP (x, 0) == virtual_incoming_args_rtx)
1650 offset = 0;
1652 /* the operand related to the sweep variable. */
1653 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1654 offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1656 offset = AUTO_OFFSET(XEXP (*new, 0))
1657 + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1659 XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1660 XEXP (x, 0)->used = 1;
1662 return;
1665 break;
1667 case PLUS:
1668 /* Handle special case of frame register plus constant. */
1669 if (GET_CODE (orig) == MEM
1670 && XEXP (x, 0) == virtual_incoming_args_rtx
1671 && GET_CODE (XEXP (x, 1)) == CONST_INT
1672 && ! x->used)
1674 offset = AUTO_OFFSET(x);
1676 /* the operand related to the sweep variable. */
1677 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1678 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1681 offset = (AUTO_OFFSET(XEXP (*new, 0))
1682 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1684 XEXP (x, 0) = virtual_stack_vars_rtx;
1685 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1686 x->used = 1;
1688 return;
1692 process further subtree:
1693 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1694 (const_int 5))
1697 break;
1699 case SET:
1700 /* Handle special case of "set (REG or MEM) (incoming_args)".
1701 It means that the the address of the 1st argument is stored. */
1702 if (GET_CODE (orig) == MEM
1703 && XEXP (x, 1) == virtual_incoming_args_rtx)
1705 offset = 0;
1707 /* the operand related to the sweep variable. */
1708 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1709 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1711 offset = (AUTO_OFFSET(XEXP (*new, 0))
1712 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1714 XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
1715 offset), NULL_RTX);
1716 XEXP (x, 1)->used = 1;
1718 return;
1721 break;
1723 case CALL_PLACEHOLDER:
1724 for (i = 0; i < 3; i++)
1726 rtx seq = XEXP (x, i);
1727 if (seq)
1729 push_to_sequence (seq);
1730 change_arg_use_of_insns (XEXP (x, i), orig, new, size);
1731 XEXP (x, i) = get_insns ();
1732 end_sequence ();
1735 break;
1737 case PARALLEL:
1738 for (j = 0; j < XVECLEN (x, 0); j++)
1740 change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
1742 if (recog_memoized (insn) < 0)
1744 for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
1746 /* if parallel insn has a insn used virtual_incoming_args_rtx,
1747 the insn is removed from this PARALLEL insn. */
1748 if (check_used_flag (XVECEXP (x, 0, j)))
1750 emit_insn (XVECEXP (x, 0, j));
1751 XVECEXP (x, 0, j) = NULL;
1753 else
1754 XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
1756 PUT_NUM_ELEM (XVEC (x, 0), i);
1758 return;
1760 default:
1761 break;
1764 /* Scan all subexpressions. */
1765 fmt = GET_RTX_FORMAT (code);
1766 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1767 if (*fmt == 'e')
1769 if (XEXP (x, i) == orig)
1771 if (*new == 0)
1772 *new = gen_reg_rtx (GET_MODE (orig));
1773 XEXP (x, i) = *new;
1774 continue;
1776 change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
1778 else if (*fmt == 'E')
1779 for (j = 0; j < XVECLEN (x, i); j++)
1781 if (XVECEXP (x, i, j) == orig)
1783 if (*new == 0)
1784 *new = gen_reg_rtx (GET_MODE (orig));
1785 XVECEXP (x, i, j) = *new;
1786 continue;
1788 change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
1793 /* Validate every instructions from the specified instruction.
1795 The stack protector prohibits to generate machine specific frame addressing
1796 for the first rtl generation. The prepare_stack_protection must convert
1797 machine independent frame addressing to machine specific frame addressing,
1798 so instructions for inline functions, which skip the conversion of
1799 the stack protection, validate every instructions. */
1800 static void
1801 validate_insns_of_varrefs (rtx insn)
1803 rtx next;
1805 /* Initialize recognition, indicating that volatile is OK. */
1806 init_recog ();
1808 for (; insn; insn = next)
1810 next = NEXT_INSN (insn);
1811 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1812 || GET_CODE (insn) == CALL_INSN)
1814 /* excerpt from insn_invalid_p in recog.c */
1815 int icode = recog_memoized (insn);
1817 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1818 validate_operand_of_varrefs (insn, &PATTERN (insn));
1822 init_recog_no_volatile ();
1826 /* Validate frame addressing of the rtx and covert it to machine specific one. */
1827 static void
1828 validate_operand_of_varrefs (rtx insn, rtx *loc)
1830 enum rtx_code code;
1831 rtx x, temp, seq;
1832 int i, j;
1833 const char *fmt;
1835 x = *loc;
1836 if (x == 0)
1837 return;
1839 code = GET_CODE (x);
1841 switch (code)
1843 case USE:
1844 case CONST_INT:
1845 case CONST_DOUBLE:
1846 case CONST:
1847 case SYMBOL_REF:
1848 case CODE_LABEL:
1849 case PC:
1850 case CC0:
1851 case ASM_INPUT:
1852 case ADDR_VEC:
1853 case ADDR_DIFF_VEC:
1854 case RETURN:
1855 case REG:
1856 case ADDRESSOF:
1857 return;
1859 case PLUS:
1860 /* validate insn of frame register plus constant. */
1861 if (GET_CODE (x) == PLUS
1862 && XEXP (x, 0) == virtual_stack_vars_rtx
1863 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1865 start_sequence ();
1867 { /* excerpt from expand_binop in optabs.c */
1868 optab binoptab = add_optab;
1869 enum machine_mode mode = GET_MODE (x);
1870 int icode = (int) binoptab->handlers[(int) mode].insn_code;
1871 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1872 rtx pat;
1873 rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
1874 temp = gen_reg_rtx (mode);
1876 /* Now, if insn's predicates don't allow offset operands,
1877 put them into pseudo regs. */
1879 if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
1880 && mode1 != VOIDmode)
1881 xop1 = copy_to_mode_reg (mode1, xop1);
1883 pat = GEN_FCN (icode) (temp, xop0, xop1);
1884 if (pat)
1885 emit_insn (pat);
1886 else
1887 abort (); /* there must be add_optab handler. */
1889 seq = get_insns ();
1890 end_sequence ();
1892 emit_insn_before (seq, insn);
1893 if (! validate_change (insn, loc, temp, 0))
1894 abort ();
1895 return;
1897 break;
1900 case CALL_PLACEHOLDER:
1901 for (i = 0; i < 3; i++)
1903 rtx seq = XEXP (x, i);
1904 if (seq)
1906 push_to_sequence (seq);
1907 validate_insns_of_varrefs (XEXP (x, i));
1908 XEXP (x, i) = get_insns ();
1909 end_sequence ();
1912 break;
1914 default:
1915 break;
1918 /* Scan all subexpressions. */
1919 fmt = GET_RTX_FORMAT (code);
1920 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1921 if (*fmt == 'e')
1922 validate_operand_of_varrefs (insn, &XEXP (x, i));
1923 else if (*fmt == 'E')
1924 for (j = 0; j < XVECLEN (x, i); j++)
1925 validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
1930 /* Return size that is not allocated for stack frame. It will be allocated
1931 to modify the home of pseudo registers called from global_alloc. */
1932 HOST_WIDE_INT
1933 get_frame_free_size (void)
1935 if (! flag_propolice_protection)
1936 return 0;
1938 return push_allocated_offset - push_frame_offset;
1942 /* The following codes are invoked after the instantiation of pseudo registers.
1944 Reorder local variables to place a peudo register after buffers to avoid
1945 the corruption of local variables that could be used to further corrupt
1946 arbitrary memory locations. */
1947 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1948 static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
1949 static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1950 static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1951 static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1952 static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1953 static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
1954 static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
1955 static void reset_used_flags_for_push_frame (void);
1956 static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
1957 static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
1958 #endif
1961 /* Assign stack local at the stage of register allocater. if a pseudo reg is
1962 spilled out from such an allocation, it is allocated on the stack.
1963 The protector keep the location be lower stack region than the location of
1964 sweeped arrays. */
1966 assign_stack_local_for_pseudo_reg (enum machine_mode mode,
1967 HOST_WIDE_INT size, int align)
1969 #if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
1970 return assign_stack_local (mode, size, align);
1971 #else
1972 tree blocks = DECL_INITIAL (current_function_decl);
1973 rtx new;
1974 HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
1975 int first_call_from_purge_addressof, first_call_from_global_alloc;
1977 if (! flag_propolice_protection
1978 || size == 0
1979 || ! blocks
1980 || current_function_is_inlinable
1981 || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
1982 || current_function_contains_functions)
1983 return assign_stack_local (mode, size, align);
1985 first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
1986 first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
1987 saved_cse_not_expected = cse_not_expected;
1989 starting_frame = ((STARTING_FRAME_OFFSET)
1990 ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1991 units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1992 GET_MODE_SIZE (mode));
1994 if (first_call_from_purge_addressof)
1996 push_frame_offset = push_allocated_offset;
1997 if (check_out_of_frame_access (get_insns (), starting_frame))
1999 /* After the purge_addressof stage, there may be an instruction which
2000 have the pointer less than the starting_frame.
2001 if there is an access below frame, push dummy region to seperate
2002 the address of instantiated variables. */
2003 push_frame (GET_MODE_SIZE (DImode), 0);
2004 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2008 if (first_call_from_global_alloc)
2010 push_frame_offset = push_allocated_offset = 0;
2011 if (check_out_of_frame_access (get_insns (), starting_frame))
2013 if (STARTING_FRAME_OFFSET)
2015 /* if there is an access below frame, push dummy region
2016 to seperate the address of instantiated variables. */
2017 push_frame (GET_MODE_SIZE (DImode), 0);
2018 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2020 else
2021 push_allocated_offset = starting_frame;
2025 saved_frame_offset = frame_offset;
2026 frame_offset = push_frame_offset;
2028 new = assign_stack_local (mode, size, align);
2030 push_frame_offset = frame_offset;
2031 frame_offset = saved_frame_offset;
2033 if (push_frame_offset > push_allocated_offset)
2035 push_frame (units_per_push,
2036 push_allocated_offset + STARTING_FRAME_OFFSET);
2038 assign_stack_local (BLKmode, units_per_push, -1);
2039 push_allocated_offset += units_per_push;
2042 /* At the second call from global alloc, alpha push frame and assign
2043 a local variable to the top of the stack. */
2044 if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2045 push_frame_offset = push_allocated_offset = 0;
2047 return new;
2048 #endif
2052 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2054 /* push frame infomation for instantiating pseudo register at the top of stack.
2055 This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2056 not defined.
2058 It is called by purge_addressof function and global_alloc (or reload)
2059 function. */
2060 static void
2061 push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
2063 reset_used_flags_for_push_frame();
2065 /* Scan all declarations of variables and fix the offset address of
2066 the variable based on the frame pointer. */
2067 push_frame_in_decls (DECL_INITIAL (current_function_decl),
2068 var_size, boundary);
2070 /* Scan all argument variable and fix the offset address based on
2071 the frame pointer. */
2072 push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2073 var_size, boundary);
2075 /* Scan all operands of all insns and fix the offset address
2076 based on the frame pointer. */
2077 push_frame_of_insns (get_insns (), var_size, boundary);
2079 /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2080 push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2081 push_frame_of_reg_equiv_constant (var_size, boundary);
2083 reset_used_flags_for_push_frame();
2087 /* Reset used flag of every insns, reg_equiv_memory_loc,
2088 and reg_equiv_constant. */
2089 static void
2090 reset_used_flags_for_push_frame(void)
2092 int i;
2093 extern rtx *reg_equiv_memory_loc;
2094 extern rtx *reg_equiv_constant;
2096 /* Clear all the USED bits in operands of all insns and declarations of
2097 local vars. */
2098 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2099 reset_used_flags_for_insns (get_insns ());
2102 /* The following codes are processed if the push_frame is called from
2103 global_alloc (or reload) function. */
2104 if (reg_equiv_memory_loc == 0)
2105 return;
2107 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2108 if (reg_equiv_memory_loc[i])
2110 rtx x = reg_equiv_memory_loc[i];
2112 if (GET_CODE (x) == MEM
2113 && GET_CODE (XEXP (x, 0)) == PLUS
2114 && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2116 /* reset */
2117 XEXP (x, 0)->used = 0;
2122 if (reg_equiv_constant == 0)
2123 return;
2125 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2126 if (reg_equiv_constant[i])
2128 rtx x = reg_equiv_constant[i];
2130 if (GET_CODE (x) == PLUS
2131 && AUTO_BASEPTR (x) == frame_pointer_rtx)
2133 /* reset */
2134 x->used = 0;
2140 /* Push every variables declared as a local variable and make a room for
2141 instantiated register. */
2142 static void
2143 push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
2144 HOST_WIDE_INT boundary)
2146 tree types;
2147 HOST_WIDE_INT offset;
2148 rtx home;
2150 while (block && TREE_CODE(block)==BLOCK)
2152 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2154 /* Skip the declaration that refers an external variable and
2155 also skip an global variable. */
2156 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2158 if (! DECL_RTL_SET_P (types))
2159 continue;
2161 home = DECL_RTL (types);
2163 /* Process for static local variable. */
2164 if (GET_CODE (home) == MEM
2165 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2166 continue;
2168 if (GET_CODE (home) == MEM
2169 && GET_CODE (XEXP (home, 0)) == REG)
2171 if (XEXP (home, 0) != frame_pointer_rtx
2172 || boundary != 0)
2173 continue;
2175 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2176 push_size);
2178 /* mark */
2179 XEXP (home, 0)->used = 1;
2182 if (GET_CODE (home) == MEM
2183 && GET_CODE (XEXP (home, 0)) == MEM)
2185 /* Process for dynamically allocated array. */
2186 home = XEXP (home, 0);
2189 if (GET_CODE (home) == MEM
2190 && GET_CODE (XEXP (home, 0)) == PLUS
2191 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2193 offset = AUTO_OFFSET(XEXP (home, 0));
2195 if (! XEXP (home, 0)->used
2196 && offset >= boundary)
2198 offset += push_size;
2199 XEXP (XEXP (home, 0), 1)
2200 = gen_rtx_CONST_INT (VOIDmode, offset);
2202 /* mark */
2203 XEXP (home, 0)->used = 1;
2209 push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2210 block = BLOCK_CHAIN (block);
2215 /* Push every variables declared as an argument and make a room for
2216 instantiated register. */
2217 static void
2218 push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
2219 HOST_WIDE_INT boundary)
2221 rtx home;
2222 HOST_WIDE_INT offset;
2224 for (; parms; parms = TREE_CHAIN (parms))
2225 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2227 if (PARM_PASSED_IN_MEMORY (parms))
2229 home = DECL_INCOMING_RTL (parms);
2230 offset = AUTO_OFFSET(XEXP (home, 0));
2232 if (XEXP (home, 0)->used || offset < boundary)
2233 continue;
2235 /* the operand related to the sweep variable. */
2236 if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2238 if (XEXP (home, 0) == frame_pointer_rtx)
2239 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2240 push_size);
2241 else {
2242 offset += push_size;
2243 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2244 offset);
2247 /* mark */
2248 XEXP (home, 0)->used = 1;
2255 /* Set to 1 when the instruction has the reference to be pushed. */
2256 static int insn_pushed;
2258 /* Tables of equivalent registers with frame pointer. */
2259 static int *fp_equiv = 0;
2262 /* Push the frame region to make a room for allocated local variable. */
2263 static void
2264 push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2266 /* init fp_equiv */
2267 fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2269 for (; insn; insn = NEXT_INSN (insn))
2270 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2271 || GET_CODE (insn) == CALL_INSN)
2273 rtx last;
2275 insn_pushed = FALSE;
2277 /* Push frame in INSN operation. */
2278 push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2280 /* Push frame in NOTE. */
2281 push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2283 /* Push frame in CALL EXPR_LIST. */
2284 if (GET_CODE (insn) == CALL_INSN)
2285 push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
2286 push_size, boundary);
2288 /* Pushed frame addressing style may not be machine specific one.
2289 so the instruction should be converted to use the machine specific
2290 frame addressing. */
2291 if (insn_pushed
2292 && (last = try_split (PATTERN (insn), insn, 1)) != insn)
2294 rtx first = NEXT_INSN (insn);
2295 rtx trial = NEXT_INSN (first);
2296 rtx pattern = PATTERN (trial);
2297 rtx set;
2299 /* Update REG_EQUIV info to the first splitted insn. */
2300 if ((set = single_set (insn))
2301 && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
2302 && GET_CODE (PATTERN (first)) == SET)
2304 REG_NOTES (first)
2305 = gen_rtx_EXPR_LIST (REG_EQUIV,
2306 SET_SRC (PATTERN (first)),
2307 REG_NOTES (first));
2310 /* copy the first insn of splitted insns to the original insn and
2311 delete the first insn,
2312 because the original insn is pointed from records:
2313 insn_chain, reg_equiv_init, used for global_alloc. */
2314 if (cse_not_expected)
2316 add_insn_before (insn, first);
2318 /* Copy the various flags, and other information. */
2319 memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
2320 PATTERN (insn) = PATTERN (first);
2321 INSN_CODE (insn) = INSN_CODE (first);
2322 LOG_LINKS (insn) = LOG_LINKS (first);
2323 REG_NOTES (insn) = REG_NOTES (first);
2325 /* then remove the first insn of splitted insns. */
2326 remove_insn (first);
2327 INSN_DELETED_P (first) = 1;
2330 if (GET_CODE (pattern) == SET
2331 && GET_CODE (XEXP (pattern, 0)) == REG
2332 && GET_CODE (XEXP (pattern, 1)) == PLUS
2333 && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2334 && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
2336 rtx offset = XEXP (XEXP (pattern, 1), 1);
2337 fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2339 delete_insn (trial);
2342 insn = last;
2346 /* Clean up. */
2347 free (fp_equiv);
2351 /* Push the frame region by changing the operand that points the frame. */
/* Walk the rtx expression ORIG (contained in INSN) and rewrite every
   frame-pointer-relative address whose offset is at or beyond BOUNDARY,
   growing the offset by PUSH_SIZE.  Each rewritten subexpression is
   marked via its `used' flag so it is not pushed twice, and the
   file-scope flag insn_pushed is set whenever a change is made.
   NOTE(review): relies on file-scope state declared earlier in this
   file (fp_equiv, insn_pushed) and on setjmp-related target patterns
   listed in the per-case comments below.  */
2352 static void
2353 push_frame_in_operand (rtx insn, rtx orig,
2354 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2356 rtx x = orig;
2357 enum rtx_code code;
2358 int i, j;
2359 HOST_WIDE_INT offset;
2360 const char *fmt;
2362 if (x == 0)
2363 return;
2365 code = GET_CODE (x);
2367 switch (code)
/* Leaf and constant rtx codes cannot contain a frame reference:
   nothing to rewrite, and no subexpressions to scan.  */
2369 case CONST_INT:
2370 case CONST_DOUBLE:
2371 case CONST:
2372 case SYMBOL_REF:
2373 case CODE_LABEL:
2374 case PC:
2375 case CC0:
2376 case ASM_INPUT:
2377 case ADDR_VEC:
2378 case ADDR_DIFF_VEC:
2379 case RETURN:
2380 case REG:
2381 case ADDRESSOF:
2382 case USE:
2383 return;
2385 case SET:
2387 Skip setjmp setup insn and setjmp restore insn
2388 alpha case:
2389 (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2390 (set (frame_pointer_rtx) (REG))
2392 if (GET_CODE (XEXP (x, 0)) == MEM
2393 && XEXP (x, 1) == frame_pointer_rtx)
2394 return;
2395 if (XEXP (x, 0) == frame_pointer_rtx
2396 && GET_CODE (XEXP (x, 1)) == REG)
2397 return;
2400 powerpc case: restores setjmp address
2401 (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2403 (set (reg) (plus frame_pointer_rtx const_int -n))
2404 (set (frame_pointer_rtx) (reg))
/* Negative offsets from the frame pointer: move the offset further
   down by push_size (note the sign: offset - push_size) unless this
   rtx was already pushed or lies inside the protected boundary.  */
2406 if (GET_CODE (XEXP (x, 0)) == REG
2407 && GET_CODE (XEXP (x, 1)) == PLUS
2408 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2409 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2410 && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2412 x = XEXP (x, 1);
2413 offset = AUTO_OFFSET(x);
2414 if (x->used || -offset < boundary)
2415 return;
2417 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2418 x->used = 1; insn_pushed = TRUE;
2419 return;
2422 /* Reset fp_equiv register. */
/* The destination register is overwritten, so any recorded
   frame-pointer equivalence for it is now stale.  */
2423 else if (GET_CODE (XEXP (x, 0)) == REG
2424 && fp_equiv[REGNO (XEXP (x, 0))])
2425 fp_equiv[REGNO (XEXP (x, 0))] = 0;
2427 /* Propagate fp_equiv register: a plain reg-to-reg copy carries the
2428 source register's frame-pointer equivalence to the destination. */
2428 else if (GET_CODE (XEXP (x, 0)) == REG
2429 && GET_CODE (XEXP (x, 1)) == REG
2430 && fp_equiv[REGNO (XEXP (x, 1))])
2431 if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2432 || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2433 fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2434 break;
2436 case MEM:
/* A bare (mem (fp)) reference: only rewritten when the whole frame
   is being pushed (boundary == 0).  */
2437 if (XEXP (x, 0) == frame_pointer_rtx
2438 && boundary == 0)
2440 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2441 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2442 return;
2444 break;
2446 case PLUS:
2447 /* Handle special case of frame register plus constant. */
2448 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2449 && XEXP (x, 0) == frame_pointer_rtx)
2451 offset = AUTO_OFFSET(x);
2453 if (x->used || offset < boundary)
2454 return;
2456 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2457 x->used = 1; insn_pushed = TRUE;
2459 return;
2462 Handle alpha case:
2463 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2465 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2466 && GET_CODE (XEXP (x, 0)) == SUBREG
2467 && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2469 offset = AUTO_OFFSET(x);
2471 if (x->used || offset < boundary)
2472 return;
2474 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2475 x->used = 1; insn_pushed = TRUE;
2477 return;
2480 Handle powerpc case:
2481 (set (reg x) (plus fp const))
2482 (set (.....) (... (plus (reg x) (const B))))
/* Register x is known (via fp_equiv) to hold fp+const; fold that
   equivalence into the constant here.  No boundary check: the
   adjustment was already applied when fp_equiv was recorded.  */
2484 else if (GET_CODE (XEXP (x, 1)) == CONST_INT
2485 && GET_CODE (XEXP (x, 0)) == REG
2486 && fp_equiv[REGNO (XEXP (x, 0))])
2488 offset = AUTO_OFFSET(x);
2490 if (x->used)
2491 return;
2493 offset += fp_equiv[REGNO (XEXP (x, 0))];
2495 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2496 x->used = 1; insn_pushed = TRUE;
2498 return;
2501 Handle special case of frame register plus reg (constant).
2502 (set (reg x) (const B))
2503 (set (....) (...(plus fp (reg x))))
/* The index register is loaded from a constant in the PREVIOUS insn;
   push the constant there instead of here, and mark both rtxs.  */
2505 else if (XEXP (x, 0) == frame_pointer_rtx
2506 && GET_CODE (XEXP (x, 1)) == REG
2507 && PREV_INSN (insn)
2508 && PATTERN (PREV_INSN (insn))
2509 && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2510 && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
2512 offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2514 if (x->used || offset < boundary)
2515 return;
2517 SET_SRC (PATTERN (PREV_INSN (insn)))
2518 = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2519 x->used = 1;
2520 XEXP (x, 1)->used = 1;
2522 return;
2525 Handle special case of frame register plus reg (used).
2526 The register already has a pushed offset, just mark this frame
2527 addressing.
2529 else if (XEXP (x, 0) == frame_pointer_rtx
2530 && XEXP (x, 1)->used)
2532 x->used = 1;
2533 return;
2536 Process further subtree:
2537 Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
2538 (const_int 5))
2540 break;
2542 case CALL_PLACEHOLDER:
/* A CALL_PLACEHOLDER carries whole insn chains in its operands;
   recurse into each chain with the full insn-level walker.  */
2543 push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2544 push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2545 push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2546 break;
2548 default:
2549 break;
2552 /* Scan all subexpressions. */
/* Recurse by rtx format string: 'e' operands are single expressions,
   'E' operands are vectors.  A bare frame pointer found here with
   boundary == 0 should have been handled above, hence fatal_insn.  */
2553 fmt = GET_RTX_FORMAT (code);
2554 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2555 if (*fmt == 'e')
2557 if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2558 fatal_insn ("push_frame_in_operand", insn);
2559 push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2561 else if (*fmt == 'E')
2562 for (j = 0; j < XVECLEN (x, i); j++)
2563 push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary)
2567 /* Change the location pointed in reg_equiv_memory_loc. */
2568 static void
2569 push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
2570 HOST_WIDE_INT boundary)
2572 int i;
2573 extern rtx *reg_equiv_memory_loc;
2575 /* This function is processed if the push_frame is called from
2576 global_alloc (or reload) function. */
2577 if (reg_equiv_memory_loc == 0)
2578 return;
2580 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2581 if (reg_equiv_memory_loc[i])
2583 rtx x = reg_equiv_memory_loc[i];
2584 int offset;
2586 if (GET_CODE (x) == MEM
2587 && GET_CODE (XEXP (x, 0)) == PLUS
2588 && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2590 offset = AUTO_OFFSET(XEXP (x, 0));
2592 if (! XEXP (x, 0)->used
2593 && offset >= boundary)
2595 offset += push_size;
2596 XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2598 /* mark */
2599 XEXP (x, 0)->used = 1;
2602 else if (GET_CODE (x) == MEM
2603 && XEXP (x, 0) == frame_pointer_rtx
2604 && boundary == 0)
2606 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2607 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2613 /* Change the location pointed in reg_equiv_constant. */
2614 static void
2615 push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
2616 HOST_WIDE_INT boundary)
2618 int i;
2619 extern rtx *reg_equiv_constant;
2621 /* This function is processed if the push_frame is called from
2622 global_alloc (or reload) function. */
2623 if (reg_equiv_constant == 0)
2624 return;
2626 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
2627 if (reg_equiv_constant[i])
2629 rtx x = reg_equiv_constant[i];
2630 int offset;
2632 if (GET_CODE (x) == PLUS
2633 && XEXP (x, 0) == frame_pointer_rtx)
2635 offset = AUTO_OFFSET(x);
2637 if (! x->used
2638 && offset >= boundary)
2640 offset += push_size;
2641 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2643 /* mark */
2644 x->used = 1;
2647 else if (x == frame_pointer_rtx
2648 && boundary == 0)
2650 reg_equiv_constant[i]
2651 = plus_constant (frame_pointer_rtx, push_size);
2652 reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
2658 /* Check every instructions if insn's memory reference is out of frame. */
2659 static int
2660 check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
2662 for (; insn; insn = NEXT_INSN (insn))
2663 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2664 || GET_CODE (insn) == CALL_INSN)
2666 if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2667 return TRUE;
2669 return FALSE;
2673 /* Check every operands if the reference is out of frame. */
2674 static int
2675 check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
2677 rtx x = orig;
2678 enum rtx_code code;
2679 int i, j;
2680 const char *fmt;
2682 if (x == 0)
2683 return FALSE;
2685 code = GET_CODE (x);
2687 switch (code)
2689 case CONST_INT:
2690 case CONST_DOUBLE:
2691 case CONST:
2692 case SYMBOL_REF:
2693 case CODE_LABEL:
2694 case PC:
2695 case CC0:
2696 case ASM_INPUT:
2697 case ADDR_VEC:
2698 case ADDR_DIFF_VEC:
2699 case RETURN:
2700 case REG:
2701 case ADDRESSOF:
2702 return FALSE;
2704 case MEM:
2705 if (XEXP (x, 0) == frame_pointer_rtx)
2706 if (0 < boundary)
2707 return TRUE;
2708 break;
2710 case PLUS:
2711 /* Handle special case of frame register plus constant. */
2712 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2713 && XEXP (x, 0) == frame_pointer_rtx)
2715 if (0 <= AUTO_OFFSET(x)
2716 && AUTO_OFFSET(x) < boundary)
2717 return TRUE;
2718 return FALSE;
2721 Process further subtree:
2722 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2723 (const_int 5))
2725 break;
2727 case CALL_PLACEHOLDER:
2728 if (check_out_of_frame_access (XEXP (x, 0), boundary))
2729 return TRUE;
2730 if (check_out_of_frame_access (XEXP (x, 1), boundary))
2731 return TRUE;
2732 if (check_out_of_frame_access (XEXP (x, 2), boundary))
2733 return TRUE;
2734 break;
2736 default:
2737 break;
2740 /* Scan all subexpressions. */
2741 fmt = GET_RTX_FORMAT (code);
2742 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2743 if (*fmt == 'e')
2745 if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2746 return TRUE;
2748 else if (*fmt == 'E')
2749 for (j = 0; j < XVECLEN (x, i); j++)
2750 if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
2751 return TRUE;
2753 return FALSE;
2755 #endif